ospd-21.4.4/.coveragerc
# Configuration for coverage.py
[run]
omit =
tests/*
*/__init__.py
source =
ospd
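# For illustration (hedged, not from the original file): with this configuration,
# coverage for the ospd package could be collected and reported with commands
# like `coverage run -m unittest` followed by `coverage report`.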
ospd-21.4.4/.github/CODEOWNERS
# default reviewers
* @greenbone/ospd-maintainers
ospd-21.4.4/.github/PULL_REQUEST_TEMPLATE.md
**What**:
**Why**:
**How**:
**Checklist**:
- [ ] Tests
- [ ] [CHANGELOG](https://github.com/greenbone/ospd/blob/master/CHANGELOG.md) Entry
ospd-21.4.4/.github/dependabot.yml
version: 2
updates:
- package-ecosystem: pip
directory: "/"
schedule:
interval: weekly
time: "04:00"
open-pull-requests-limit: 10
ospd-21.4.4/.github/workflows/ci-python.yml
name: Build and test Python package
on:
push:
branches: [ master, ospd-21.04, ospd-20.08 ]
pull_request:
branches: [ master, ospd-21.04, ospd-20.08 ]
jobs:
linting:
name: Linting
runs-on: 'ubuntu-latest'
strategy:
matrix:
python-version:
- 3.7
- 3.8
- 3.9
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install poetry and dependencies
uses: greenbone/actions/poetry@v1
- name: Check with black, pylint and pontos.version
uses: greenbone/actions/lint-python@v1
with:
packages: ospd tests
test:
name: Run all tests
runs-on: 'ubuntu-latest'
strategy:
matrix:
python-version:
- 3.7
- 3.8
- 3.9
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install poetry and dependencies
uses: greenbone/actions/poetry@v1
- name: Run unit tests
run: poetry run python -m unittest
codecov:
name: Upload coverage to codecov.io
needs: test
runs-on: 'ubuntu-latest'
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install poetry and dependencies
uses: greenbone/actions/poetry@v1
- name: Calculate and upload coverage to codecov.io
uses: greenbone/actions/coverage-python@v1
with:
test-command: -m unittest
ospd-21.4.4/.github/workflows/codeql-analysis-python.yml
name: "CodeQL"
on:
push:
branches: [ master, ospd-21.04, ospd-20.08 ]
pull_request:
branches: [ master, ospd-21.04, ospd-20.08 ]
schedule:
- cron: '30 5 * * 0' # 5:30h on Sundays
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
ospd-21.4.4/.github/workflows/deploy-pypi.yml
name: Deploy on PyPI
on:
release:
types: [created]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade poetry
python -m pip install --upgrade twine
- name: Build and publish
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
run: |
poetry build
twine upload dist/*
ospd-21.4.4/.github/workflows/release-pontos-manually.yml
name: Manually release ospd with pontos
on:
workflow_dispatch:
jobs:
release-patch:
env:
GITHUB_USER: ${{ secrets.GREENBONE_BOT }}
GITHUB_MAIL: ${{ secrets.GREENBONE_BOT_MAIL }}
GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }}
GPG_KEY: ${{ secrets.GPG_KEY }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
name: Release patch with pontos
runs-on: 'ubuntu-latest'
steps:
- uses: actions/checkout@v2
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install poetry and dependencies
uses: greenbone/actions/poetry@v1
- name: Tell git who I am
run: |
git config --global user.name "${{ env.GITHUB_USER }}"
git config --global user.email "${{ env.GITHUB_MAIL }}"
git remote set-url origin https://${{ env.GITHUB_TOKEN }}@github.com/${{ github.repository }}
- run: echo "Current Branch is $GITHUB_BASE_REF"
- name: Prepare patch release with pontos
run: |
poetry run pontos-release prepare --patch
echo "VERSION=$(poetry run pontos-version show)" >> $GITHUB_ENV
- name: Release with pontos
run: |
poetry run pontos-release release
- name: Import key from secrets
run: |
echo -e "${{ env.GPG_KEY }}" >> tmp.file
gpg \
--pinentry-mode loopback \
--passphrase ${{ env.GPG_PASSPHRASE }} \
--import tmp.file
rm tmp.file
- name: Sign with pontos-release sign
run: |
echo "Signing assets for ${{env.VERSION}}"
poetry run pontos-release sign \
--signing-key ${{ env.GPG_FINGERPRINT }} \
--passphrase ${{ env.GPG_PASSPHRASE }} \
--release-version ${{ env.VERSION }}
ospd-21.4.4/.github/workflows/release-pontos-patch.yml
name: Release Python package patch version with pontos
on:
pull_request:
types: [closed]
branches: [ 'ospd-21.04', 'ospd-20.08' ]
jobs:
release-patch:
env:
GITHUB_USER: ${{ secrets.GREENBONE_BOT }}
GITHUB_MAIL: ${{ secrets.GREENBONE_BOT_MAIL }}
GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }}
GPG_KEY: ${{ secrets.GPG_KEY }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
name: Release patch with pontos
# Run only if the 'make patch release' label is set and the PR was closed by a merge
if: contains( github.event.pull_request.labels.*.name, 'make patch release') && github.event.pull_request.merged == true
runs-on: 'ubuntu-latest'
steps:
- uses: actions/checkout@v2
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install poetry and dependencies
uses: greenbone/actions/poetry@v1
- name: Tell git who I am
run: |
git config --global user.name "${{ env.GITHUB_USER }}"
git config --global user.email "${{ env.GITHUB_MAIL }}"
git remote set-url origin https://${{ env.GITHUB_TOKEN }}@github.com/${{ github.repository }}
- run: echo "Current Branch is $GITHUB_BASE_REF"
- name: Prepare patch release with pontos
run: |
poetry run pontos-release prepare --patch
echo "VERSION=$(poetry run pontos-version show)" >> $GITHUB_ENV
- name: Release with pontos
run: |
poetry run pontos-release release
- name: Import key from secrets
run: |
echo -e "${{ env.GPG_KEY }}" >> tmp.file
gpg \
--pinentry-mode loopback \
--passphrase ${{ env.GPG_PASSPHRASE }} \
--import tmp.file
rm tmp.file
- name: Sign with pontos-release sign
run: |
echo "Signing assets for ${{env.VERSION}}"
poetry run pontos-release sign \
--signing-key ${{ env.GPG_FINGERPRINT }} \
--passphrase ${{ env.GPG_PASSPHRASE }} \
--release-version ${{ env.VERSION }}
ospd-21.4.4/.gitignore
__pycache__
*.pyc
*.log
.egg
*.egg-info
dist
build
_build
.idea
.vscode
.coverage
.venv
ospd-21.4.4/.pylintrc
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=lxml
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=docs
# Pickle collected data for later comparisons.
persistent=yes
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
# bad-continuation is disabled because of a bug in pylint.
# See https://github.com/ambv/black/issues/48 and https://github.com/PyCQA/pylint/issues/289
disable=len-as-condition,
attribute-defined-outside-init,
missing-docstring,
bad-continuation,
R
#disable=print-statement,
# parameter-unpacking,
# unpacking-in-except,
# old-raise-syntax,
# backtick,
# long-suffix,
# old-ne-operator,
# old-octal-literal,
# import-star-module-level,
# non-ascii-bytes-literal,
# raw-checker-failed,
# bad-inline-option,
# locally-disabled,
# locally-enabled,
# file-ignored,
# suppressed-message,
# useless-suppression,
# deprecated-pragma,
# apply-builtin,
# basestring-builtin,
# buffer-builtin,
# cmp-builtin,
# coerce-builtin,
# execfile-builtin,
# file-builtin,
# long-builtin,
# raw_input-builtin,
# reduce-builtin,
# standarderror-builtin,
# unicode-builtin,
# xrange-builtin,
# coerce-method,
# delslice-method,
# getslice-method,
# setslice-method,
# no-absolute-import,
# old-division,
# dict-iter-method,
# dict-view-method,
# next-method-called,
# metaclass-assignment,
# indexing-exception,
# raising-string,
# reload-builtin,
# oct-method,
# hex-method,
# nonzero-method,
# cmp-method,
# input-builtin,
# round-builtin,
# intern-builtin,
# unichr-builtin,
# map-builtin-not-iterating,
# zip-builtin-not-iterating,
# range-builtin-not-iterating,
# filter-builtin-not-iterating,
# using-cmp-argument,
# eq-without-hash,
# div-method,
# idiv-method,
# rdiv-method,
# exception-message-attribute,
# invalid-str-codec,
# sys-max-int,
# bad-python3-import,
# deprecated-string-function,
# deprecated-str-translate-call,
# deprecated-itertools-function,
# deprecated-types-field,
# next-method-defined,
# dict-items-not-iterating,
# dict-keys-not-iterating,
# dict-values-not-iterating
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10 (10 is
# the highest score). You have access to the variables 'error', 'warning',
# 'refactor', 'convention' and 'statement', which respectively contain the
# number of error / warning / refactor / convention messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
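# For illustration (hypothetical numbers): 1 error and 2 warnings in 100
# analysed statements would yield 10.0 - ((5 * 1 + 2 + 0 + 0) / 100) * 10 = 9.3.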
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=no
# Deactivate the evaluation score.
score=no
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=optparse.Values,sys.exit
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
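# For illustration (hypothetical names): "_", "_unused", "dummy",
# "ignored_result" and "unused_arg" would all match this expression.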
# Argument names that match this expression will be ignored. Defaults to names
# with a leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[BASIC]
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{1,40}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{1,40}$
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Regular expression matching correct class attribute names.
class-attribute-rgx=([a-z_][a-z0-9_]{1,40})|([A-Z_][A-Z0-9_]{1,30})$
# Naming style matching correct class names
class-naming-style=PascalCase
# Naming style matching correct constant names
const-naming-style=UPPER_CASE
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=3
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]+$
# Good variable names which should always be accepted, separated by a comma
good-names=e,
f,
i,
j,
k,
ex,
Run,
logger,
_
# Include a hint for the correct naming format with invalid-name
include-naming-hint=yes
# Regular expression matching correct inline iteration names.
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]+$
# Regular expression which should only match correct module names
module-rgx=([a-z]+)|(test_*)$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]+$
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=LF
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '
# Maximum number of characters on a single line.
max-line-length=80
# Maximum number of lines in a module
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[DESIGN]
# Maximum number of arguments for function / method
max-args=15
# Maximum number of attributes for a class (see R0902).
max-attributes=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=30
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=0
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
ospd-21.4.4/CHANGELOG.md
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [21.4.4] - 2021-10-12
### Changed
- Use better defaults for file paths and permissions [#429](https://github.com/greenbone/ospd/pull/429)
- Downgrade required version for psutil to 5.5.1 [#453](https://github.com/greenbone/ospd/pull/453)
### Fixed
- Fix resume scan. [#464](https://github.com/greenbone/ospd/pull/464)
[21.4.4]: https://github.com/greenbone/ospd/compare/v21.4.3...v21.4.4
## [21.4.3] - 2021-08-04
### Changed
- Stopping scan. [#437](https://github.com/greenbone/ospd/pull/437)
[21.4.3]: https://github.com/greenbone/ospd/compare/v21.4.1...v21.4.3
## [21.4.1] - 2021-06-23
### Fixed
[21.4.1]: https://github.com/greenbone/ospd/compare/v21.4.0...ospd-21.04
## [21.4.0] - 2021-04-16
### Added
- Add target option for supplying dedicated port list for alive detection (Boreas only) via OSP. [#323](https://github.com/greenbone/ospd/pull/323)
- Add target option for supplying alive test methods via separate elements. [#329](https://github.com/greenbone/ospd/pull/329)
### Removed
- Remove python3.5 support and deprecated methods. [#316](https://github.com/greenbone/ospd/pull/316)
[21.4.0]: https://github.com/greenbone/ospd/compare/ospd-20.08...v21.4.0
## [20.8.4] (Unreleased)
### Added
### Changed
### Deprecated
### Removed
### Fixed
[20.8.4]: https://github.com/greenbone/ospd/compare/v20.8.3...v21.4.0
## [20.8.3] - 2021-08-04
### Fixed
- Do not start all queued scans simultaneously once enough memory is available. [#401](https://github.com/greenbone/ospd/pull/401)
- Remove the pid file if there is no process for the pid or the process name does not match. [#405](https://github.com/greenbone/ospd/pull/405)
- Fix regex of gvmcg titles for get_performance command. [#413](https://github.com/greenbone/ospd/pull/413)
[20.8.3]: https://github.com/greenbone/ospd/compare/v20.8.2...v20.8.3
## [20.8.2] - 2021-02-01
### Added
- Allow the scanner to update the total count of hosts. [#332](https://github.com/greenbone/ospd/pull/332)
- Add more debug logging. [#352](https://github.com/greenbone/ospd/pull/352)
- Set end_time for interrupted scans. [#353](https://github.com/greenbone/ospd/pull/353)
- Add method to get a single host scan progress. [#363](https://github.com/greenbone/ospd/pull/363)
### Fixed
- Fix OSP version. [#326](https://github.com/greenbone/ospd/pull/326)
- Use empty string instead of None for credential. [#335](https://github.com/greenbone/ospd/pull/335)
- Fix target_to_ipv4_short(). [#338](https://github.com/greenbone/ospd/pull/338)
- Fix malformed target. [#341](https://github.com/greenbone/ospd/pull/341)
- Initialize end_time with create_scan. [#354](https://github.com/greenbone/ospd/pull/354)
- Fix get_count_total(). Accept -1 value set by the server. [#355](https://github.com/greenbone/ospd/pull/355)
- Fix get_count_total(). Consider 0 value set by the server. [#366](https://github.com/greenbone/ospd/pull/366)
- Remove excluded hosts which do not belong to the target from the scan progress calculation.
[#377](https://github.com/greenbone/ospd/pull/377)
[#380](https://github.com/greenbone/ospd/pull/380)
[20.8.2]: https://github.com/greenbone/ospd/compare/v20.8.1...ospd-20.08
## [20.8.1] - 2020-08-12
### Fixed
- Fix deploy and upload to pypi. [#312](https://github.com/greenbone/ospd/pull/312)
- Fix metadata for Python wheel distributable [#313](https://github.com/greenbone/ospd/pull/313)
[20.8.1]: https://github.com/greenbone/ospd/compare/v20.8.0...v20.8.1
## [20.8.0] - 2020-08-11
### Added
- Add solution method to solution of vt object. [#166](https://github.com/greenbone/ospd/pull/166)
- Add wait_for_children(). [#167](https://github.com/greenbone/ospd/pull/167)
- Extend osp to accept target options. [#194](https://github.com/greenbone/ospd/pull/194)
- Accept reverse_lookup_only and reverse_lookup_unify target's options. [#195](https://github.com/greenbone/ospd/pull/195)
- Add 'total' and 'sent' attributes to element for cmd response. [#206](https://github.com/greenbone/ospd/pull/206)
- Add new get_memory_usage command. [#207](https://github.com/greenbone/ospd/pull/207)
- Add lock-file-dir configuration option. [#218](https://github.com/greenbone/ospd/pull/218)
- Add details attribute to get_vts command. [#222](https://github.com/greenbone/ospd/pull/222)
- Add [pontos](https://github.com/greenbone/pontos) as dev dependency for
managing the version information in ospd [#254](https://github.com/greenbone/ospd/pull/254)
- Add more info about scan progress with progress attribute in get_scans cmd. [#266](https://github.com/greenbone/ospd/pull/266)
- Add support for scan queuing
[#278](https://github.com/greenbone/ospd/pull/278)
[#279](https://github.com/greenbone/ospd/pull/279)
[#281](https://github.com/greenbone/ospd/pull/281)
- Extend results with optional argument URI [#282](https://github.com/greenbone/ospd/pull/282)
- Add new scan status INTERRUPTED.
[#288](https://github.com/greenbone/ospd/pull/288)
[#289](https://github.com/greenbone/ospd/pull/289)
- Extend get_vts with attribute version_only and return the version [#291](https://github.com/greenbone/ospd/pull/291)
- Allow setting all openvas parameters which are not strictly openvas-only parameters via OSP. [#301](https://github.com/greenbone/ospd/pull/301)
### Changed
- Modify __init__() method and use new syntax for super(). [#186](https://github.com/greenbone/ospd/pull/186)
- Create data manager and spawn new process to keep the vts dictionary. [#191](https://github.com/greenbone/ospd/pull/191)
- Update daemon start sequence. Run daemon.check before daemon.init now. [#197](https://github.com/greenbone/ospd/pull/197)
- Improve the get_vts cmd response, sending the vts piece by piece. [#201](https://github.com/greenbone/ospd/pull/201)
- Start the server before initialization in order to respond to the client. [#209](https://github.com/greenbone/ospd/pull/209)
- Use an iterator to get the vts when get_vts cmd is called. [#216](https://github.com/greenbone/ospd/pull/216)
- Update license to AGPL-3.0+ [#241](https://github.com/greenbone/ospd/pull/241)
- Replaced pipenv with poetry for dependency management. `poetry install` works
a bit differently than `pipenv install`. It installs dev packages by default and
also installs ospd in editable mode. This means that after running `poetry install`, ospd will
be directly importable in the virtual Python environment. [#252](https://github.com/greenbone/ospd/pull/252)
- Progress bar calculation does not take dead hosts into account. [#266](https://github.com/greenbone/ospd/pull/266)
- Show progress as integer for get_scans. [#269](https://github.com/greenbone/ospd/pull/269)
- Make scan_id attribute mandatory for get_scans. [#270](https://github.com/greenbone/ospd/pull/270)
- Ignore subsequent SIGINT once inside exit_cleanup(). [#273](https://github.com/greenbone/ospd/pull/273)
- Simplify start_scan() [#275](https://github.com/greenbone/ospd/pull/275)
- Make ospd-openvas shut down gracefully
[#302](https://github.com/greenbone/ospd/pull/302)
[#307](https://github.com/greenbone/ospd/pull/307)
- Do not add all params which are in the OSPD_PARAMS dict to the params which are set as scan preferences. [#305](https://github.com/greenbone/ospd/pull/305)
### Fixed
- Fix stop scan. Wait for the scan process to be stopped before deleting it from the process table. [#204](https://github.com/greenbone/ospd/pull/204)
- Fix get_scanner_details(). [#210](https://github.com/greenbone/ospd/pull/210)
- Fix thread lib leak using daemon mode for python 3.7. [#272](https://github.com/greenbone/ospd/pull/272)
- Fix scan progress in which all hosts are dead or excluded. [#295](https://github.com/greenbone/ospd/pull/295)
- Stop all running scans before exiting [#303](https://github.com/greenbone/ospd/pull/303)
- Fix start of parallel queued task. [#304](https://github.com/greenbone/ospd/pull/304)
- Strip trailing commas from the target list. [#306](https://github.com/greenbone/ospd/pull/306)
### Removed
- Remove support for resume task. [#266](https://github.com/greenbone/ospd/pull/266)
[20.8.0]: https://github.com/greenbone/ospd/compare/ospd-2.0...ospd-20.08
## [2.0.1]
### Added
- Add clean_forgotten_scans(). [#171](https://github.com/greenbone/ospd/pull/171)
- Extend OSP with finished_hosts to improve resume task. [#177](https://github.com/greenbone/ospd/pull/177)
### Changed
- Set log level to debug for some messages. [#159](https://github.com/greenbone/ospd/pull/159)
- Improve error handling when stopping a scan. [#163](https://github.com/greenbone/ospd/pull/163)
- Check the existence and status of a scan_id. [#179](https://github.com/greenbone/ospd/pull/179)
### Fixed
- Fix set permission in unix socket. [#157](https://github.com/greenbone/ospd/pull/157)
- Fix VT filter. [#165](https://github.com/greenbone/ospd/pull/165)
- Also remove hosts passed as finished from the exclude_host list. [#183](https://github.com/greenbone/ospd/pull/183)
[2.0.1]: https://github.com/greenbone/ospd/compare/v2.0.0...ospd-2.0
## [2.0.0] - 2019-10-11
### Added
- Add OSP command get_vts and the vts dictionary. [#12](https://github.com/greenbone/ospd/pull/12) [#60](https://github.com/greenbone/ospd/pull/60) [#72](https://github.com/greenbone/ospd/pull/72) [#73](https://github.com/greenbone/ospd/pull/73) [#93](https://github.com/greenbone/ospd/pull/93)
- Add optional custom elements for VT information. [#15](https://github.com/greenbone/ospd/pull/15)
- Allow clients to choose TLS versions > 1.0. [#18](https://github.com/greenbone/ospd/pull/18)
- Add element "vts" to parameters for starting scans. [#19](https://github.com/greenbone/ospd/pull/19) [#26](https://github.com/greenbone/ospd/pull/26)
- Add dummy stop_scan method to be implemented in the wrapper. [#24](https://github.com/greenbone/ospd/pull/24) [#53](https://github.com/greenbone/ospd/pull/53) [#129](https://github.com/greenbone/ospd/pull/129)
- Extend OSP command get_vts with vt_params. [#28](https://github.com/greenbone/ospd/pull/28)
- Add vt_selection to start_scan command. [#31](https://github.com/greenbone/ospd/pull/31) [#58](https://github.com/greenbone/ospd/pull/58) [#105](https://github.com/greenbone/ospd/pull/105)
- Add support for multi-target task adding targets with their own port list, credentials and host list to start_scan command. [#34](https://github.com/greenbone/ospd/pull/34) [#38](https://github.com/greenbone/ospd/pull/38) [#39](https://github.com/greenbone/ospd/pull/39) [#41](https://github.com/greenbone/ospd/pull/41)) [#127](https://github.com/greenbone/ospd/pull/127) [#134](https://github.com/greenbone/ospd/pull/134)
- Add support for parallel scans. [#42](https://github.com/greenbone/ospd/pull/42) [#142](https://github.com/greenbone/ospd/pull/142)
- Add functions for port manipulation. [#44](https://github.com/greenbone/ospd/pull/44)
- Add as subelement of in . [#45](https://github.com/greenbone/ospd/pull/45)
- Add pop_results attribute to . [#46](https://github.com/greenbone/ospd/pull/46)
- Add methods to set and get the vts feed version. [#79](https://github.com/greenbone/ospd/pull/79)
- Add cvss module. [#88](https://github.com/greenbone/ospd/pull/88)
- Add filter option to OSP get_vts command. [#94](https://github.com/greenbone/ospd/pull/94)
- Allow setting the logging domain from the wrapper. [#97](https://github.com/greenbone/ospd/pull/97)
- Add option for logging into a specified log file. [#98](https://github.com/greenbone/ospd/pull/98)
- Add scan statuses to improve the progress reporting and add support for resuming tasks. [#100](https://github.com/greenbone/ospd/pull/100) [#101](https://github.com/greenbone/ospd/pull/101) [#102](https://github.com/greenbone/ospd/pull/102) [#103](https://github.com/greenbone/ospd/pull/103)
- Add support for exclude hosts. [#107](https://github.com/greenbone/ospd/pull/107)
- Add hostname attribute to results. [#108](https://github.com/greenbone/ospd/pull/108)
- Add the --niceness option. [#109](https://github.com/greenbone/ospd/pull/109)
- Add support for configuration file. [#122](https://github.com/greenbone/ospd/pull/122)
- Add option to set unix socket mode permission. [#123](https://github.com/greenbone/ospd/pull/123)
- Add pid file creation to avoid having two daemons. [#126](https://github.com/greenbone/ospd/pull/126) [#128](https://github.com/greenbone/ospd/pull/128)
- Add OSP command. [#131](https://github.com/greenbone/ospd/pull/131) [#137](https://github.com/greenbone/ospd/pull/137)
- Add method to check if a target finished cleanly or crashed. [#133](https://github.com/greenbone/ospd/pull/133)
- Add the --stream-timeout option to configure the socket timeout. [#136](https://github.com/greenbone/ospd/pull/136)
- Add support to handle multiple requests simultaneously.
[#136](https://github.com/greenbone/ospd/pull/136), [#139](https://github.com/greenbone/ospd/pull/139)
### Changed
- Improve documentation.
- Improve Unittest.
- Send the response data in blocks of a given length instead of sending everything at once. [#35](https://github.com/greenbone/ospd/pull/35)
- Makes the socket a non-blocking socket. [#78](https://github.com/greenbone/ospd/pull/78)
- Refactor misc. [#111](https://github.com/greenbone/ospd/pull/111)
- Refactor error module. [#95](https://github.com/greenbone/ospd/pull/95) [#112](https://github.com/greenbone/ospd/pull/112)
- Refactor ospd connection handling. [#114](https://github.com/greenbone/ospd/pull/114)
- Use ordered dictionary to maintain the results order. [#119](https://github.com/greenbone/ospd/pull/119)
- Refactor ospd. [#120](https://github.com/greenbone/ospd/pull/120)
- Set default unix socket path to /var/run/ospd/ospd.sock and default pid file path to /var/run/ospd.pid. [#140](https://github.com/greenbone/ospd/pull/140)
- Do not add a host detail result with the host status. [#145](https://github.com/greenbone/ospd/pull/145)
- Do not log the received command. [#151](https://github.com/greenbone/ospd/pull/151)
### Fixed
- Fix scan progress. [#47](https://github.com/greenbone/ospd/pull/47)
- Documentation has been improved.
- Improve connection handling. [#80](https://github.com/greenbone/ospd/pull/80)
- Fix target_to_ipv4_short(). [#99](https://github.com/greenbone/ospd/pull/99)
- Handle write error if the client disconnects abruptly. [#135](https://github.com/greenbone/ospd/pull/135)
- Improve error handling when sending data. [#147](https://github.com/greenbone/ospd/pull/147)
- Fix classifier in setup.py. [#154](https://github.com/greenbone/ospd/pull/154)
[2.0.0]: https://github.com/greenbone/ospd/compare/ospd-1.3...master
## [1.3] - 2018-06-05
### Added
- Support for unix sockets has been added.
### Removed
- OSP has been renamed to Open Scanner Protocol.
### Changed
- Support Python 3 only.
- Documentation has been updated.
ospd-21.4.4/COPYING
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
Copyright (C)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see .
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
.
ospd-21.4.4/MANIFEST.in 0000664 0000000 0000000 00000000170 14131311270 0014274 0 ustar 00root root 0000000 0000000 include CHANGELOG.md COPYING poetry.toml poetry.lock pyproject.toml README.md setup.py
recursive-include tests *.py
ospd-21.4.4/README.md 0000664 0000000 0000000 00000010755 14131311270 0014027 0 ustar 00root root 0000000 0000000 
# ospd
[](https://github.com/greenbone/ospd/releases)
[](https://pypi.org/project/ospd/)
[](https://scrutinizer-ci.com/g/greenbone/ospd/?branch=ospd-21.04)
[](https://codecov.io/gh/greenbone/ospd)
[](https://circleci.com/gh/greenbone/ospd/tree/ospd-21.04)
ospd is a base class for scanner wrappers which share the same communication
protocol: OSP (Open Scanner Protocol). OSP creates a unified interface for
different security scanners and makes their control flow and scan results
consistently available under the central Greenbone Vulnerability Manager service.
OSP is similar in many ways to GMP (Greenbone Management Protocol): it is
XML-based, stateless and does not require a permanent connection.
The design supports wrapping arbitrary scanners with the same OSP protocol,
sharing the core daemon options while adding scanner-specific parameters and
options.
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Releases](#releases)
- [Installation](#installation)
- [Requirements](#requirements)
- [Install using pip](#install-using-pip)
- [How to write your own OSP Scanner Wrapper](#how-to-write-your-own-osp-scanner-wrapper)
- [Support](#support)
- [Maintainer](#maintainer)
- [Contributing](#contributing)
- [License](#license)
## Releases

All [release files](https://github.com/greenbone/ospd/releases) are signed with
the [Greenbone Community Feed integrity key](https://community.greenbone.net/t/gcf-managing-the-digital-signatures/101).
This gpg key can be downloaded at https://www.greenbone.net/GBCommunitySigningKey.asc
and the fingerprint is `8AE4 BE42 9B60 A59B 311C 2E73 9823 FAA6 0ED1 E580`.
## Installation
### Requirements
ospd requires Python >= 3.7 along with the following libraries:
- python3-paramiko
- python3-lxml
- python3-defusedxml
### Install using pip
You can install ospd from the Python Package Index using [pip](https://pip.pypa.io/):
python3 -m pip install ospd
Alternatively download or clone this repository and install the latest development version:
python3 -m pip install .
## How to write your own OSP Scanner Wrapper
At its core, you need to derive from the class OSPDaemon from ospd.py.
See the documentation there for the individual steps needed to establish the
full wrapper.
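For orientation, the following is a minimal, hypothetical sketch of such a wrapper. It only illustrates the general shape (a subclass that describes the scanner, checks its availability and executes scans); the exact methods to override and their signatures are documented in ospd.py and may differ between ospd versions, so treat every name below as an assumption to verify against your installed version.
```python
# Hypothetical minimal wrapper sketch - verify the API against ospd.py
# of your installed ospd version before relying on it.
from ospd.ospd import OSPDaemon
from ospd.main import main


class OSPDExample(OSPDaemon):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Describe the wrapped scanner.
        self.scanner_info['name'] = 'example-scanner'
        self.scanner_info['version'] = '0.1'
        self.scanner_info['description'] = 'Minimal example scanner wrapper.'

    def check(self) -> bool:
        # Verify that the wrapped scanner tool is installed and usable.
        return True

    def exec_scan(self, scan_id: str):
        # Invoke the wrapped scanner here and convert its findings into
        # OSP results, e.g. via add_scan_log()/add_scan_alarm().
        self.add_scan_log(scan_id, name='example', value='scan finished')


if __name__ == '__main__':
    # ospd.main.main wires up argument parsing, logging and the OSP socket server.
    main('OSPD - example', OSPDExample)
```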
See the file [doc/INSTALL-ospd-scanner.md](doc/INSTALL-ospd-scanner.md) for how to register an OSP scanner at
the Greenbone Vulnerability Manager, which will automatically establish a full
GUI integration for the Greenbone Security Assistant (GSA).
There are some online resources about this topic:
## Support
For any question on the usage of OSPD please use the [Greenbone Community Portal](https://community.greenbone.net/c/osp). If you found a problem with the software, please [create an issue](https://github.com/greenbone/ospd/issues) on GitHub.
## Maintainer
This project is maintained by [Greenbone Networks GmbH](https://www.greenbone.net/).
## Contributing
Your contributions are highly appreciated. Please [create a pull request](https://github.com/greenbone/ospd/pulls) on GitHub. For bigger changes, please discuss it first in the [issues](https://github.com/greenbone/ospd/issues).
For development you should use [poetry](https://python-poetry.org)
to keep your Python packages separated in different environments. First install
poetry via pip:
python3 -m pip install --user poetry
Afterwards run
poetry install
in the checkout directory of ospd (the directory containing the
`pyproject.toml` file) to install all dependencies including the packages only
required for development.
The ospd repository uses [autohooks](https://github.com/greenbone/autohooks)
to apply linting and auto formatting via git hooks. Please ensure the git hooks
are active.
poetry install
poetry run autohooks activate --force
## License
Copyright (C) 2009-2020 [Greenbone Networks GmbH](https://www.greenbone.net/)
Licensed under the [GNU Affero General Public License v3.0 or later](COPYING).
ospd-21.4.4/RELEASE.md 0000664 0000000 0000000 00000010304 14131311270 0014140 0 ustar 00root root 0000000 0000000 # Release Instructions
Before creating a new release carefully consider the version number of the new
release. We are following [Semantic Versioning](https://semver.org/) and
[PEP440](https://www.python.org/dev/peps/pep-0440/).
## Preparing the Required Python Packages
* Install twine for pypi package uploads
```sh
python3 -m pip install --user --upgrade twine
```
## Configuring the Access to the Python Package Index (PyPI)
*Note:* This is only necessary for users performing the release process for the
first time.
* Create an account at [Test PyPI](https://packaging.python.org/guides/using-testpypi/).
* Create an account at [PyPI](https://pypi.org/).
* Create a pypi configuration file `~/.pypirc` with the following content (Note:
`` must be replaced):
```ini
[distutils]
index-servers =
pypi
testpypi
[pypi]
username =
[testpypi]
repository = https://test.pypi.org/legacy/
username =
```
## Prepare testing the Release
* Fetch upstream changes and create a branch:
```sh
git fetch upstream
git checkout -b create-new-release upstream/master
```
* Get the current version number
```sh
poetry run python -m pontos.version show
```
* Update the version number to some alpha version e.g.
```sh
poetry run python -m pontos.version update 1.2.3a1
```
## Uploading to the PyPI Test Instance
* Create a source and wheel distribution:
```sh
rm -rf dist build ospd.egg-info
poetry build
```
* Upload the archives in `dist` to [Test PyPI](https://test.pypi.org/):
```sh
twine upload -r testpypi dist/*
```
* Check if the package is available at .
## Testing the Uploaded Package
* Create a test directory:
```sh
mkdir ospd-install-test
cd ospd-install-test
python3 -m venv test-env
source test-env/bin/activate
pip install -U pip # ensure the environment uses a recent version of pip
pip install --pre -I --extra-index-url https://test.pypi.org/simple/ ospd
```
* Check install version with a Python script:
```sh
python3 -c "from ospd import __version__; print(__version__)"
```
* Remove test environment:
```sh
deactivate
cd ..
rm -rf ospd-install-test
```
## Prepare the Release
* Determine new release version number
If the output is something like `1.2.3.dev1` or `1.2.3a1`, the new version
should be `1.2.3`.
* Update to new version number (`` must be replaced by the version
from the last step)
```sh
cd path/to/git/clone/of/ospd
poetry run python -m pontos.version update
```
* Update the `CHANGELOG.md` file:
* Change `[unreleased]` to new release version.
* Add a release date.
* Update reference to Github diff.
* Remove empty sub sections like *Deprecated*.
* Create a git commit:
```sh
git add .
git commit -m "Prepare release "
```
## Performing the Release on GitHub
* Create a pull request (PR) for the earlier commit:
```sh
git push origin
```
Open GitHub and create a PR against .
* Ask another developer/maintainer to review and merge the PR.
* Once the PR is merged, update the local `master` branch:
```sh
git fetch upstream
git rebase upstream/master master
```
* Create a git tag:
```sh
git tag v
```
Or even a tag signed with a personal GPG key:
```sh
git tag --sign --message "Tagging the release" v
```
* Push changes and tag to Github:
```sh
git push --tags upstream
```
## Uploading to the 'real' PyPI
* Uploading to PyPI is done automatically by pushing a git tag via CircleCI
* Check if new version is available at .
## Bumping `master` Branch to the Next Version
* Update to a Development Version
The next version should contain an incremented minor version and a dev suffix
e.g. 2.3.0.dev1
```sh
poetry run python -m pontos.version update
```
* Create a commit for the version bump:
```sh
git add .
git commit -m "Update version after release"
git push upstream
```
## Announcing the Release
* Create a Github release:
See https://help.github.com/articles/creating-releases/
ospd-21.4.4/doc/ 0000775 0000000 0000000 00000000000 14131311270 0013305 5 ustar 00root root 0000000 0000000 ospd-21.4.4/doc/HTML.xsl 0000664 0000000 0000000 00000062760 14131311270 0014614 0 ustar 00root root 0000000 0000000
0
1
0
="
"
="
"
0
...
<
/>
<
>
</
>
<
>
</
>
|
.
|
|
.
|
1 Summary of Data Types
4.
Data Type
In short: .
.1 RNC
4 Data Types Details
|
.
|
2 Summary of Elements
5 Element Details
5.
Element
In short: .
.1 Structure
.2 RNC
.3 Example:
,
or
"
"
text
@
()
.
<>
<_response>
<>
()
.
<>
.
The group
One of
6.
Command
In short: .
.1 Structure
.2 RNC
.3 Example:
|
. |
3 Summary of Commands
6 Command Details
7 Summary of Scanner Parameters Types
8.
8 Compatibility Changes in Version
Protocol definition
ospd-21.4.4/doc/INSTALL-ospd-scanner 0000777 0000000 0000000 00000000000 14131311270 0022745 2INSTALL-ospd-scanner.md ustar 00root root 0000000 0000000 ospd-21.4.4/doc/INSTALL-ospd-scanner.md 0000664 0000000 0000000 00000012701 14131311270 0017330 0 ustar 00root root 0000000 0000000 General Installation Instructions for OSPD-based Scanners
=========================================================
This is a general description about installing an ospd-based scanner wrapper
implementation.
The actual scanner implementation usually has individual installation
instructions and may refer to this general guide.
In the following guide, replace `ospd-scanner` with the name of the actual OSPD
scanner.
Install in a Virtual Environment
--------------------------------
The recommended way to install `ospd-scanner` is to do so inside a virtual
environment (`virtualenv` or `venv`).
This way, the server and its dependencies are well isolated from system-wide
updates, making it easier to upgrade it, delete it, or install dependencies
only for it.
Refer to the Python documentation for setting up virtual environments for
further information.
First you need to create a virtual environment somewhere on your system, for
example with the following command:
virtualenv ospd-scanner
Installing `ospd-scanner` inside your newly created virtual environment could
then be done with the following command:
ospd-scanner/bin/pip install ospd_scanner-x.y.z.tar.gz
Note: As `ospd` is not (yet) available through PyPI, you probably want to
install it manually first inside your virtual environment prior to installing
`ospd-scanner`.
To run `ospd-scanner`, just start the Python script installed inside the
virtual environment:
ospd-scanner/bin/ospd-scanner
Install (Sub-)System-wide
-------------------------
To install `ospd-scanner` into directory `` run this command:
python3 setup.py install --prefix=
The default for `` is `/usr/local`.
Be aware that this might automatically download and install missing
Python packages. To prevent this, you should install the prerequisites
first with the mechanism of your system (for example via `apt` or `rpm`).
You may need to set the `PYTHONPATH` like this before running
the install command:
export PYTHONPATH=/lib/python3.7/site-packages/
The actual value for `PYTHONPATH` depends on your Python version.
Creating certificates
---------------------
An OSPD service can be started using a Unix domain socket (only on
systems that support it) or using a TCP socket. The latter uses TLS-based
encryption and authorization while the former is not encrypted and uses
the standard file access rights for authorization.
For the TCP socket communication it is mandatory to use adequate
TLS certificates, which you need for each of your OSPD services. You may use
the same certificates for all services if you like.
By default, those certificates are used which are also used by GVM
(see paths with `ospd-scanner --help`). Of course this works only
if installed in the same environment.
In case you do not already have a certificate to use, you may quickly
create your own (it can be used for multiple ospd daemons) using the
`gvm-manage-certs` tool provided with `gvmd`
():
gvm-manage-certs -s
Then sign it with the CA that is checked for by the client. The client is usually
the Greenbone Vulnerability Manager, for which a globally trusted CA certificate
can be configured.
Registering an OSP daemon at Greenbone Vulnerability Manager
------------------------------------------------------------
The file [README](../README.md) explains how to control the OSP daemon via
command line.
It is also possible to register an OSP daemon at the Greenbone Vulnerability
Manager and then use GMP clients to control the OSP daemon, for example the
web interface GSA.
You can either register via the GUI (`Configuration -> Scanners`) and create
a new scanner there,
or you can create a scanner via the `gvmd` command line (adjust host,
port, paths, etc. for your daemon):
gvmd --create-scanner="OSP Scanner" --scanner-host=127.0.0.1 --scanner-port=1234 \
--scanner-type="OSP" --scanner-ca-pub=/usr/var/lib/gvm/CA/cacert.pem \
--scanner-key-pub=/usr/var/lib/gvm/CA/clientcert.pem \
--scanner-key-priv=/usr/var/lib/gvm/private/CA/clientkey.pem
or for local running ospd-scanner via file socket:
gvmd --create-scanner="OSP Scanner" --scanner-type="OSP" --scanner-host=/var/run/ospd-scanner.sock
Please note that a scanner created via `gvmd` as above will be created with
read permissions for all pre-configured roles.
Check whether Greenbone Vulnerability Manager can connect to the OSP daemon:
$ gvmd --get-scanners
08b69003-5fc2-4037-a479-93b440211c73 OpenVAS Default
3566ddf1-cecf-4491-8bcc-5d62a87404c3 OSP Scanner
$ gvmd --verify-scanner=3566ddf1-cecf-4491-8bcc-5d62a87404c3
Scanner version: 1.0.
Of course, using GMP via command line tools provided by
[gvm-tools](https://github.com/greenbone/gvm-tools) to register an OSP Scanner
is also possible as a third option.
Documentation
-------------
Source code documentation can be accessed via the usual methods,
for example (replace "scanner" with the actual scanner name):
$ python3
>>> import ospd_scanner.wrapper
>>> help (ospd_scanner.wrapper)
An equivalent to this is:
pydoc3 ospd_scanner.wrapper
To explore the code documentation in a web browser:
$ pydoc3 -p 12345
pydoc server ready at http://localhost:12345/
For further options see the `man` page of `pydoc`.
Creating a source archive
-------------------------
To create a .tar.gz file for the `ospd-scanner` module run this command:
python3 setup.py sdist
This will create the archive file in the subdirectory `dist`.
ospd-21.4.4/doc/OSP.xml 0000664 0000000 0000000 00000147774 14131311270 0014514 0 ustar 00root root 0000000 0000000
Open Scanner Protocol
OSP
The Open Scanner Protocol
21.04
boolean
0 or 1
xsd:token { pattern = "[01]" }
epoch_time
A date, in Unix format
integer
integer
An integer
integer
status
Status code describing the result of a command
xsd:token { pattern = "[1-5][0-9][0-9]" }
string
A string
text
uuid
A Universally Unique Identifier (UUID)
xsd:token { pattern = "[0-9abcdefABCDEF\-]{1,40}" }
vt_id
Identifier for a vulnerability test
xsd:token { pattern = "[0-9a-zA-Z_\-.:]{1,80}" }
credential
A credential consisting of type, service, port, username and password.
type
string
1
service
string
1
port
string
username
password
username
text
password
text
Credential with port
scanuser
mypass
Credential without port
smbuser
mypass
scanner_params
Contains elements that represent scanner specific parameters
e
e
Element that represents a scanner specific parameters
string
string
scanner_params
443
1
fast_scan
targets
List of targets
target
Two targets
...
...
target
A scan target consisting of hosts, a port selection and credentials
hosts
ports
credentials
exclude_hosts
finished_hosts
alive_test_ports
alive_test
alive_test_methods
reverse_lookup_unify
reverse_lookup_only
hosts
One or many hosts. The list is comma-separated. Each entry can be an IP address, a CIDR notation, a hostname or an IP range. IPs can be v4 or v6
string
ports
string
A list of ports that is the same for the given hosts
credentials
One or many credentials containing the credential for the given hosts
credential
exclude_hosts
One or many hosts to exclude. The list is comma-separated. Each entry can be an IP address, a CIDR notation, a hostname or an IP range. IPs can be v4 or v6. Each wrapper must handle the excluded hosts
string
finished_hosts
One or many finished hosts to exclude when the client resumes a task. The list is comma-separated. Each entry can be an IP address, a CIDR notation, a hostname or an IP range. IPs can be v4 or v6. The listed hosts will be set as finished before starting the scan. Each wrapper must handle the finished hosts
string
alive_test
Alive test type to be performed against the target
integer
alive_test_methods
Alive test methods to be performed against the target
icmp
icmp
ICMP ping
boolean
tcp_syn
tcp_syn
TCP-SYN ping
boolean
tcp_ack
tcp_ack
TCP-ACK ping
boolean
arp
arp
ARP ping
boolean
consider_alive
consider_alive
Consider the target to be alive
boolean
alive_test_ports
Dedicated port list for alive detection. Used for TCP-SYN and TCP-ACK ping when Boreas (scanner preference test_alive_hosts_only) is enabled. If no port list is provided ports 80, 137, 587, 3128, 8081 are used as defaults
string
reverse_lookup_only
Only scan IP addresses that can be resolved into a DNS name
string
reverse_lookup_unify
If multiple IP addresses resolve to the same DNS name the DNS name will only get scanned once
string
Target without credentials
example.org
T:22,U:5060
0
22,80,123
0
0
Target with two credentials
192.168.1.0/24
1,2,3,80,443
...
...
192.168.1.10-15
192.168.1.1-3
vt_group
Collection of Vulnerability Test
filter
string
1
VT group filtered by family name
vt_selection
Contains elements that represent a Vulnerability Test or a collection of Vulnerability Tests to be executed and their parameters
vt_single
vt_group
VT with parameters and VT group
200
yes
vt_single
Elements that represent Vulnerability Tests
vt_id
Identifier for a vulnerability test
vt_id
1
vt_value
VT with parameters
200
yes
vt_value
Vulnerability Test parameter
id
string
1
string
Parameters for a single VT
200
yes
help
Get the help text
format
Help format
xml
text
status
status
1
status_text
text
1
text
Get the help text
ID of scan to delete
Delete a finished scan
Help format. Could be text or xml
Print the commands help
Return various versions
ID of scan to stop.
Stop a currently running scan.
Return scanner description and parameters
Optional UUID value to set as scan ID
Target hosts to scan in a comma-separated list
Ports list to scan as comma-separated list
Optional number of parallel scans to run
Scan profile
Target port
Use HTTPS
w3af scan timeout
Start a new scan
ID of scan to stop
Stop a currently running scan
Mandatory ID of a specific scan to get
Whether to return the full scan report
Whether to remove the fetched results
Maximum number of results to fetch. Only considered if pop_results is enabled. Default = None, which means that all available results are returned
List the scans in buffer
Return system report
Name of report.
Time of first data point in report.
Time of last data point in report.
get_performance
Return performance information from an external program
start
Interval start
int
end
Interval end
int
titles
Interval title to get
text
status
status
1
status_text
text
1
text
Some output.
get_scans
Get a stored scan in buffer
scan_id
Scan UUID
uuid
details
Whether to get full scan reports
boolean
progress
Whether to return a detailed progress information
boolean
pop_results
Whether to remove the fetched results
boolean
max_results
Maximum number of results to fetch. Only considered if pop_results is enabled. Default = None, which means that all available results are returned
int
status
status
1
status_text
text
1
scan
scan
id
uuid
target
string
start_time
epoch_time
end_time
epoch_time
progress
integer
status
string
results
Get a scan report summary
The URL: "https://192.168.1.252/" has a path
disclosure vulnerability which discloses "/var/www/phpinfo.php"
...
...
Get a scan report summary
The URL: "https://192.168.1.252/" has a path
disclosure vulnerability which discloses "/var/www/phpinfo.php"
...
Get a scan progress summary
delete_scan
Delete a finished scan
scan_id
Scan UUID
uuid
status
status
1
status_text
text
1
Delete a scan successfully
get_version
Return various versions
status
status
1
status_text
text
1
protocol
daemon
scanner
protocol
name
version
version
name
daemon
name
version
version
name
scanner
name
version
version
name
Get protocol, scanner and daemon versions
1.0
OSP
generic version
generic ospd
1.6.0.4
w3af
get_scanner_details
Return scanner description and parameters
list_all
List all available scanner parameters. Not only those visible to the client.
boolean
status
status
1
status_text
text
1
description
scanner_params
description
scanner_params
scanner_param
scanner_param
id
string
type
string
name
description
default
mandatory
name
description
default
mandatory
Get scanner details
...
Scan profile
Scan profiles are predefined set of plugins and customized configurations.
fast_scan|fast_scan|audit_high_risk|full_audit|OWASP_TOP10|bruteforce|empty_profile|web_infrastructure|full_audit_spider_man|sitemap
Show HTTP request status
Whether to show the HTTP request's status in results
0
Dry Run
Whether to dry run scan.
0
Show HTTP response status
Whether to show the HTTP response's status in results
0
Seed path
Path to start with
/
Debug Mode
Whether to get extra scan debug information.
0
Target port
Port on target host to scan
80
Use HTTPS
Whether the target application is running over HTTPS
0
get_vts
Return information about vulnerability tests, if offered by scanner
vt_id
Identifier for vulnerability test
vt_id
filter
Filter to get a sub group of a VT collection
string
details
Return more details about vulnerability tests, if offered by the scanner
string
status
status
1
status_text
text
1
vts
vts
vt
vt
id
vt_id
name
creation_time
modification_time
params
refs
dependencies
summary
impact
affected
insight
solution
detection
severities
custom
name
creation_time
modification_time
params
param
param
type
string
id
string
refs
ref
ref
type
string
id
string
dependencies
dependency
dependency
vt_id
vt_id
summary
impact
affected
insight
solution
type
Solution type, for example "VendorFix"
string
method
Solution method, for example "DebianAPTUpgrade"
string
detection
severities
severity
severity
type
string
origin
date
value
origin
Optional reference to the origin of the severity
string
date
Optional timestamp in seconds since epoch. Defaults to VT creation date
string
value
The actual value, the format must correspond with the type
string
custom
Get information for all available vulnerability tests
Check for presence of vulnerability X
Check for presence of vulnerability Y
Get information for a single vulnerability test
Check for presence of vulnerability X
1200603949
1567429142
Check the version of App
App in OS v2
App is a small but very powerful app.
Please Install the Updated Packages.
Get the installed version with the help of detect NVT and check if the version is vulnerable or not.
CVE-2014-9116
1200603949
AV:N/AC:L/Au:N/C:N/I:N/A:P
Get information for a filtered collection of vulnerability test without details
Check for presence of vulnerability X
1200603949
1567429142
Check the version of App
App in OS v2
App is a small but very powerful app.
Please Install the Updated Packages.
Get the installed version with the help of detect NVT and check if the version is vulnerable or not.
CVE-2014-9116
1200603949
AV:N/AC:L/Au:N/C:N/I:N/A:P
Get information for a vulnerability test with custom data
Check for presence of vulnerability X
First custom element
second custom element
Get information for a vulnerability test with VT parameters data
Check for presence of vulnerability X
Timeout
Vulnerability Test Timeout
300
Scan UDP
1
First custom element
second custom element
start_scan
Start a new scan
target
Target hosts to scan in a comma-separated list
string
ports
Ports list to scan as comma-separated list
string
scan_id
Optional UUID value to use as scan ID
uuid
parallel
Optional number of parallel scan to run
integer
scanner_params
vt_selection
targets
scanner_params
Contains elements that represent scanner specific parameters
vt_selection
Contains elements that represent a Vulnerability Test or a collection of Vulnerability Tests to be executed and their parameters
targets
Contains elements that represent a target to execute a scan against. If target and port attributes are present this element is not taken into account
status
status
1
status_text
text
1
id
id
New scan's UUID
Start a new scan. Legacy mode
443
1
fast_scan
2f616d53-595f-4785-9b97-4395116ca118
Start a new scan with multi-targets running simultaneously. Each one has a different port list and one of them has credentials for authenticated scans.
...
....
...
192.168.1.0/24
1,2,3,80,443
...
192.168.1.10-15
192.168.1.1-3
2f616d53-595f-4785-9b97-4395116ca118
stop_scan
Stop a currently running scan
scan_id
ID of scan to stop
uuid
status
status
1
status_text
text
1
Stop a scan
get_memory_usage
Return memory usage information of the osp daemon
unit
Size unit for the memory. b for bytes, kb for kilobytes and mb for megabytes.
text
status
status
1
status_text
text
1
processes
List of running processes
process
Single running processes
name
Name of the process
string
pid
Process ID
int
rss
Resident Set Size. Non-swapped physical memory of the process
int
vms
Virtual Memory Size. Total amount of virtual memory used by the process.
int
shared
Memory shared with other processes
int
127182
239616
135168
...
integer
An integer value
string
A string
boolean
0 or 1
selection
A value out of the | separated values list
credential_up
The username:password of a credential
file
A file's content
ovaldef_file
An ovaldef file's content that is base64 encoded
START_SCAN
Add support for dedicated port list for alive detection
Target element received new target option alive_test_ports.
21.04
START_SCAN
Add support for supplying alive test methods via separate elements.
Target element received new optional target option alive_test_methods with subelements icmp, tcp_ack, tcp_syn, arp and consider_alive.
21.04
GET_VTS
Returned object changes and extends severity information
The element SEVERITY inside SEVERITIES of the returned VT moves the origin and the value
into explicit elements ORIGIN and VALUE. Furthermore, the element DATE is added.
21.04
GET_VTS
Returned object extended with solution method
The element SOLUTION of the returned VT object has a new optional attribute METHOD
that describes the solution method in case the VT offers such a detail.
20.08
GET_VTS
Returned object extended with amount of VTS
The main element vts has the new TOTAL attribute with the amount of vts in the collection and the new optional SENT attribute with the amount of vts which matched a given filter.
20.08
GET_VTS
Add attribute DETAILS
The new attribute DETAILS allows getting more details about a vulnerability test if the scanner supports it. By default it is set to True; for a lighter response it must be explicitly set to False.
20.08
GET_VTS
Add attribute SHA256_HASH
The new attribute SHA256_HASH is calculated from the vulnerability test OID, modification time and VT preferences, sorted by OID. The new attribute is helpful for feed integrity checks.
20.08
GET_VTS
Add attribute VTS_VERSION
The feed version is included in the get_vts command response.
20.08
START_SCAN
Add support for target options
Target element receive new target options reverse_lookup_only, reverse_lookup_unify and alive_test.
20.08
GET_SCANS
Add scan progress details
New attribute PROGRESS to specify whether to return a detailed progress information.
20.08
GET_VTS
command added
Added new command to retrieve information about vulnerability tests a scanner might offer.
1.2
START_SCAN
vts optional element added
Added optional element vts to allow the client to specify a vts list
to use for the scan and their parameters.
1.2
START_SCAN
target optional element added
Added optional element targets to specify different hosts with a different port list and credentials. This is taken into account only if the target and port attributes are not present in the start_scan tag.
1.2
START_SCAN
parallel attribute added
Added optional attribute parallel to specify the number of simultaneous scans to be run.
1.2
STOP_SCAN
command added
Added new command to stop a currently running scan.
1.1
START_SCAN
scan_id attribute added
The scan_id attribute was added to allow the client to specify a UUID as
the ID of the scan.
1.1
START_SCAN
ports attribute added
The ports attribute was added to allow the client to specify a ports list
to use for the scan.
1.1
Scanner Parameters Types
Type credential_up added
Introduce an aggregated type to express a username:password tuple.
1.1
GET_PERFORMANCE
Command added
Added new command to get performance from an external program.
1.2
ospd-21.4.4/doc/USAGE-ospd-scanner 0000777 0000000 0000000 00000000000 14131311270 0022241 2USAGE-ospd-scanner.md ustar 00root root 0000000 0000000 ospd-21.4.4/doc/USAGE-ospd-scanner.md 0000664 0000000 0000000 00000006730 14131311270 0017073 0 ustar 00root root 0000000 0000000 General Usage Instructions for ospd-based Scanners
--------------------------------------------------
This is a general description about using an ospd-based scanner wrapper
implementation.
The actual scanner implementation has individual usage instructions for anything
that goes beyond this general guide.
In the following description replace `ospd-scanner` with the name of the actual
OSPD scanner.
See the documentation of your ospd-based scanner and the general instructions in
the [INSTALL-ospd-scanner.md](INSTALL-ospd-scanner.md) file on how to hand over
full control to the Greenbone Vulnerability Manager.
This usage guide explains how to use an OSP scanner independently of Greenbone
Vulnerability Manager, for example when developing a new ospd-based scanner or
for testing purposes.
Open Scanner Protocol
---------------------
Using an ospd-based scanner means using the Open Scanner Protocol (OSP). This is
what Greenbone Vulnerability Manager does. See the ospd module for the original
specification available in [ospd/doc/OSP.xml](OSP.xml).
There is also an online version available at
.
gvm-tools
---------
The `gvm-tools` help to make accessing the OSP interface easier.
They can be obtained from .
This module provides the commands `gvm-cli` and `gvm-pyshell`.
Starting an ospd-based scanner
------------------------------
All ospd-based scanners share a set of command-line options such as
`--help`, `--bind-address`, `--port`, `--key-file`, `--timeout`, etc.
For example, to see the command line options you can run:
ospd-scanner --help
To run an instance of `ospd-scanner` listening on Unix domain socket:
ospd-scanner -u /var/run/ospd-scanner.sock &
To run a test instance of `ospd-scanner` on local TCP port 1234:
ospd-scanner -b 127.0.0.1 -p 1234 &
Add `--log-level=DEBUG` to enable maximum debugging output.
Parameter for `--log-level` can be one of `DEBUG`, `INFO`, `WARNING`, `ERROR` or
`CRITICAL` (in order of priority).
Controlling an OSP scanner
--------------------------
You can use command line tools provided by the `gvm-tools` module to interact
with an OSP scanner.
To get a description of the interface:
gvm-cli socket --sockpath /var/run/ospd-scanner.sock --xml ""
Starting a scan (scanner parameters can be added according to the description
printed as response to the `` command):
gvm-cli socket --sockpath /var/run/ospd-scanner.sock --xml=""
Start a scan for ospd-based scanners that use the built-in support for SSH
authentication:
gvm-cli socket --sockpath /var/run/ospd-scanner.sock --xml="myuser:mypassword"
Start a scan for two vulnerability tests `vt_id_1` and `vt_id_2` of an ospd-based
scanner:
gvm-cli socket --sockpath /var/run/ospd-scanner.sock --xml="vt_id_1, vt_id_2"
Show the list of scans with status and results:
gvm-cli socket --sockpath /var/run/ospd-scanner.sock --xml=""
Delete a scan from this list (only finished scans can be deleted):
gvm-cli socket --sockpath /var/run/ospd-scanner.sock --xml=""
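Besides `gvm-cli`, any client that can write OSP XML to the socket can control the daemon. The following minimal sketch (assuming the daemon listens on the Unix domain socket path used in the examples above) sends a `get_version` command using only the Python standard library and prints the raw XML response; adjust the socket path to your setup:
```python
# Minimal OSP client sketch using only the Python standard library.
# Assumes an ospd-based scanner listens on the Unix domain socket below.
import socket

SOCKET_PATH = '/var/run/ospd-scanner.sock'  # adjust to your setup

with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
    sock.connect(SOCKET_PATH)
    sock.sendall(b'<get_version/>')

    # OSP uses non-permanent connections: read until the daemon closes
    # the stream, then decode the collected XML response.
    response = b''
    while True:
        chunk = sock.recv(4096)
        if not chunk:
            break
        response += chunk

print(response.decode())
```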
ospd-21.4.4/doc/example-ospd-logging.conf 0000664 0000000 0000000 00000002433 14131311270 0020200 0 ustar 00root root 0000000 0000000 ### If a custom logging configuration is required, the following
### items under General must be present in the configuration file.
### Examples for customization are given. Just comment/uncomment
### the corresponding lines and do necessary adjustments.
### For official documentation visit
### https://docs.python.org/3.7/library/logging.config.html#configuration-file-format
## General
####################
[loggers]
keys=root
#keys=root, ospd_openvas
[logger_root]
level=NOTSET
handlers=default_handler
### There is already an existing default_handler.
### Uncomment the following to extend the existing handler list
####################
#[handlers]
#keys=default_handler, custom_syslog
### Example for a custom handler. The custom handler must be added to the handlers list.
####################
#[handler_custom]
#class=FileHandler
#level=DEBUG
#formatter=file
#args=('some_path_to_log_file.log', 'a')
#[handler_custom_syslog]
#class=handlers.SysLogHandler
#level=DEBUG
#formatter=file
#args=('/dev/log', handlers.SysLogHandler.LOG_USER)
### Specific logging configuration for a single module. In the following
### example, the ospd_openvas.lock module will log with debug level.
####################
#[logger_ospd_openvas]
#level=DEBUG
#handlers=custom_syslog
#qualname=ospd_openvas.lock
#propagate=0 ospd-21.4.4/doc/example-ospd.conf 0000664 0000000 0000000 00000001635 14131311270 0016557 0 ustar 00root root 0000000 0000000 [OSPD - openvas]
## General
pid_file = install-prefix/var/run/ospd/openvas.pid
lock_file_dir = install-prefix/var/run/
stream_timeout = 1
max_scans = 3
min_free_mem_scan_queue = 1000
max_queued_scans = 0
# Log config
log_level = DEBUG
log_file = install-prefix/var/log/gvm/openvas.log
log_config = install-prefix/.config/ospd-logging.conf
## Unix socket settings
socket_mode = 0o770
unix_socket = install-prefix/var/run/ospd/openvas.sock
## TLS socket settings and certificates.
#port = 9390
#bind_address = 0.0.0.0
#key_file = install-prefix/var/lib/gvm/private/CA/serverkey.pem
#cert_file = install-prefix/var/lib/gvm/CA/servercert.pem
#ca_file = install-prefix/var/lib/gvm/CA/cacert.pem
[OSPD - some wrapper]
log_level = DEBUG
socket_mode = 0o770
unix_socket = install-prefix/var/run/ospd/ospd-wrapper.sock
pid_file = install-prefix/var/run/ospd/ospd-wrapper.pid
log_file = install-prefix/var/log/gvm/ospd-wrapper.log
ospd-21.4.4/doc/generate 0000775 0000000 0000000 00000001713 14131311270 0015027 0 ustar 00root root 0000000 0000000 #!/bin/bash
# Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
# Schema generator script: HTML.
DOCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
xsltproc ${DOCDIR}/HTML.xsl ${DOCDIR}/OSP.xml > ${DOCDIR}/osp.html
ospd-21.4.4/doc/rnc.xsl 0000664 0000000 0000000 00000037730 14131311270 0014631 0 ustar 00root root 0000000 0000000
↵
↵
### Preamble
start = command | response
command
=
|
response
=
_response
|
|
|
" }
xsd:token { pattern = "
#
.
attribute
{
}
?
_response
|
|
" }
xsd:token { pattern = "
*
(
&
)
?
(
|
)
ERROR
""
text # RNC limitation:
&
&
#
.
= element
# type
{
}
= element
{
}
= element
{
}
ospd-21.4.4/ospd/ 0000775 0000000 0000000 00000000000 14131311270 0013505 5 ustar 00root root 0000000 0000000 ospd-21.4.4/ospd/__init__.py 0000664 0000000 0000000 00000001512 14131311270 0015615 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
# Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" OSPd core module. """
from .__version__ import __version__
ospd-21.4.4/ospd/__version__.py 0000664 0000000 0000000 00000000147 14131311270 0016342 0 ustar 00root root 0000000 0000000 # pylint: disable=invalid-name
# THIS IS AN AUTOGENERATED FILE. DO NOT TOUCH!
__version__ = "21.4.4"
ospd-21.4.4/ospd/command/ 0000775 0000000 0000000 00000000000 14131311270 0015123 5 ustar 00root root 0000000 0000000 ospd-21.4.4/ospd/command/__init__.py 0000664 0000000 0000000 00000001527 14131311270 0017241 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import ospd.command.command # required to register all commands
from .registry import get_commands
ospd-21.4.4/ospd/command/command.py 0000664 0000000 0000000 00000052354 14131311270 0017124 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import multiprocessing
import re
import logging
import subprocess
from decimal import Decimal
from typing import Optional, Dict, Any, Union, Iterator
from xml.etree.ElementTree import Element, SubElement
import psutil
from ospd.errors import OspdCommandError
from ospd.misc import valid_uuid, create_process
from ospd.protocol import OspRequest, OspResponse
from ospd.xml import (
simple_response_str,
get_elements_from_dict,
XmlStringHelper,
)
from .initsubclass import InitSubclassMeta
from .registry import register_command
logger = logging.getLogger(__name__)
class BaseCommand(metaclass=InitSubclassMeta):
name = None
description = None
attributes = None
elements = None
must_be_initialized = None
def __init_subclass__(cls, **kwargs):
super_cls = super()
if hasattr(super_cls, '__init_subclass__'):
super_cls.__init_subclass__(**kwargs)
register_command(cls)
def __init__(self, daemon):
self._daemon = daemon
def get_name(self) -> str:
return self.name
def get_description(self) -> str:
return self.description
def get_attributes(self) -> Optional[Dict[str, Any]]:
return self.attributes
def get_elements(self) -> Optional[Dict[str, Any]]:
return self.elements
def handle_xml(self, xml: Element) -> Union[bytes, Iterator[bytes]]:
raise NotImplementedError()
def as_dict(self):
return {
'name': self.get_name(),
'attributes': self.get_attributes(),
'description': self.get_description(),
'elements': self.get_elements(),
}
def __repr__(self):
return '<{} description="{}" attributes={} elements={}>'.format(
self.name, self.description, self.attributes, self.elements
)
class HelpCommand(BaseCommand):
name = "help"
description = 'Print the commands help.'
attributes = {'format': 'Help format. Could be text or xml.'}
must_be_initialized = False
def handle_xml(self, xml: Element) -> bytes:
help_format = xml.get('format')
if help_format is None or help_format == "text":
# Default help format is text.
return simple_response_str(
'help', 200, 'OK', self._daemon.get_help_text()
)
elif help_format == "xml":
text = get_elements_from_dict(
{k: v.as_dict() for k, v in self._daemon.commands.items()}
)
return simple_response_str('help', 200, 'OK', text)
raise OspdCommandError('Bogus help format', 'help')
class GetVersion(BaseCommand):
name = "get_version"
description = 'Return various version information'
must_be_initialized = False
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
Return:
Response string for command.
"""
protocol = Element('protocol')
for name, value in [
('name', 'OSP'),
('version', self._daemon.get_protocol_version()),
]:
elem = SubElement(protocol, name)
elem.text = value
daemon = Element('daemon')
for name, value in [
('name', self._daemon.get_daemon_name()),
('version', self._daemon.get_daemon_version()),
]:
elem = SubElement(daemon, name)
elem.text = value
scanner = Element('scanner')
for name, value in [
('name', self._daemon.get_scanner_name()),
('version', self._daemon.get_scanner_version()),
]:
elem = SubElement(scanner, name)
elem.text = value
content = [protocol, daemon, scanner]
vts_version = self._daemon.get_vts_version()
if vts_version:
vts = Element('vts')
elem = SubElement(vts, 'version')
elem.text = vts_version
content.append(vts)
return simple_response_str('get_version', 200, 'OK', content)
GVMCG_TITLES = [
'cpu-.*',
'proc',
'mem',
'swap',
'load',
'df-.*',
'disk-sd[a-z][0-9]-rw',
'disk-sd[a-z][0-9]-load',
'disk-sd[a-z][0-9]-io-load',
'interface-eth.*-traffic',
'interface-eth.*-err-rate',
'interface-eth.*-err',
'sensors-.*_temperature-.*',
'sensors-.*_fanspeed-.*',
'sensors-.*_voltage-.*',
'titles',
] # type: List[str]
class GetPerformance(BaseCommand):
name = "get_performance"
description = 'Return system report'
attributes = {
'start': 'Time of first data point in report.',
'end': 'Time of last data point in report.',
'title': 'Name of report.',
}
must_be_initialized = False
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
@return: Response string for command.
"""
start = xml.attrib.get('start')
end = xml.attrib.get('end')
titles = xml.attrib.get('titles')
cmd = ['gvmcg']
if start:
try:
int(start)
except ValueError:
raise OspdCommandError(
'Start argument must be integer.', 'get_performance'
) from None
cmd.append(start)
if end:
try:
int(end)
except ValueError:
raise OspdCommandError(
'End argument must be integer.', 'get_performance'
) from None
cmd.append(end)
if titles:
combined = "(" + ")|(".join(GVMCG_TITLES) + ")"
forbidden = "^[^|&;]+$"
if re.match(combined, titles) and re.match(forbidden, titles):
cmd.append(titles)
else:
raise OspdCommandError(
'Arguments not allowed', 'get_performance'
)
try:
output = subprocess.check_output(cmd)
except (subprocess.CalledProcessError, OSError) as e:
raise OspdCommandError(
'Bogus get_performance format. %s' % e, 'get_performance'
) from None
return simple_response_str(
'get_performance', 200, 'OK', output.decode()
)
class GetScannerDetails(BaseCommand):
name = 'get_scanner_details'
description = 'Return scanner description and parameters'
must_be_initialized = True
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
@return: Response string for command.
"""
list_all = xml.get('list_all')
list_all = True if list_all == '1' else False
desc_xml = Element('description')
desc_xml.text = self._daemon.get_scanner_description()
scanner_params = self._daemon.get_scanner_params()
if not list_all:
scanner_params = {
key: value
for (key, value) in scanner_params.items()
if value.get('visible_for_client')
}
details = [
desc_xml,
OspResponse.create_scanner_params_xml(scanner_params),
]
return simple_response_str('get_scanner_details', 200, 'OK', details)
class DeleteScan(BaseCommand):
name = 'delete_scan'
description = 'Delete a finished scan.'
attributes = {'scan_id': 'ID of scan to delete.'}
must_be_initialized = False
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
@return: Response string for command.
"""
scan_id = xml.get('scan_id')
if scan_id is None:
return simple_response_str(
'delete_scan', 404, 'No scan_id attribute'
)
if not self._daemon.scan_exists(scan_id):
text = "Failed to find scan '{0}'".format(scan_id)
return simple_response_str('delete_scan', 404, text)
self._daemon.check_scan_process(scan_id)
if self._daemon.delete_scan(scan_id):
return simple_response_str('delete_scan', 200, 'OK')
raise OspdCommandError('Scan in progress', 'delete_scan')
class GetVts(BaseCommand):
name = 'get_vts'
description = 'List of available vulnerability tests.'
attributes = {
'vt_id': 'ID of a specific vulnerability test to get.',
        'filter': 'Optional filter to get a specific vt collection.',
}
must_be_initialized = True
def handle_xml(self, xml: Element) -> Iterator[bytes]:
"""Handles command.
        Writes the vt collection to the stream.
        The element accepts two optional arguments.
        The vt_id argument receives a single vt id.
        The filter argument receives a filter selecting a subset of vts.
        If both arguments are given, the vts which match the filter
        are returned.
@return: Response string for command on fail.
"""
self._daemon.vts.is_cache_available = False
xml_helper = XmlStringHelper()
vt_id = xml.get('vt_id')
vt_filter = xml.get('filter')
_details = xml.get('details')
version_only = xml.get('version_only')
vt_details = False if _details == '0' else True
if self._daemon.vts and vt_id and vt_id not in self._daemon.vts:
self._daemon.vts.is_cache_available = True
text = "Failed to find vulnerability test '{0}'".format(vt_id)
raise OspdCommandError(text, 'get_vts', 404)
filtered_vts = None
if vt_filter and not version_only:
try:
filtered_vts = self._daemon.vts_filter.get_filtered_vts_list(
self._daemon.vts, vt_filter
)
except OspdCommandError as filter_error:
self._daemon.vts.is_cache_available = True
raise filter_error
if not version_only:
vts_selection = self._daemon.get_vts_selection_list(
vt_id, filtered_vts
)
        # The response is streamed as XML pieces yielded by this generator.
yield xml_helper.create_response('get_vts')
begin_vts_tag = xml_helper.create_element('vts')
begin_vts_tag = xml_helper.add_attr(
begin_vts_tag, "vts_version", self._daemon.get_vts_version()
)
val = len(self._daemon.vts)
begin_vts_tag = xml_helper.add_attr(begin_vts_tag, "total", val)
if filtered_vts and not version_only:
val = len(filtered_vts)
begin_vts_tag = xml_helper.add_attr(begin_vts_tag, "sent", val)
if self._daemon.vts.sha256_hash is not None:
begin_vts_tag = xml_helper.add_attr(
begin_vts_tag, "sha256_hash", self._daemon.vts.sha256_hash
)
yield begin_vts_tag
if not version_only:
for vt in self._daemon.get_vt_iterator(vts_selection, vt_details):
yield xml_helper.add_element(self._daemon.get_vt_xml(vt))
yield xml_helper.create_element('vts', end=True)
yield xml_helper.create_response('get_vts', end=True)
self._daemon.vts.is_cache_available = True
class StopScan(BaseCommand):
name = 'stop_scan'
description = 'Stop a currently running scan.'
attributes = {'scan_id': 'ID of scan to stop.'}
must_be_initialized = True
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
@return: Response string for command.
"""
scan_id = xml.get('scan_id')
if scan_id is None or scan_id == '':
raise OspdCommandError('No scan_id attribute', 'stop_scan')
self._daemon.stop_scan(scan_id)
# Don't send response until the scan is stopped.
try:
self._daemon.scan_processes[scan_id].join()
except KeyError:
pass
return simple_response_str('stop_scan', 200, 'OK')
class GetScans(BaseCommand):
name = 'get_scans'
description = 'Get information about a scan in buffer.'
attributes = {
'scan_id': 'Mandatory ID of a specific scan to get.',
'details': 'Whether to return the full scan report.',
'pop_results': 'Whether to remove the fetched results.',
'max_results': 'Maximum number of results to fetch.',
'progress': 'Whether to return a detailed scan progress',
}
must_be_initialized = False
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
@return: Response string for command.
"""
scan_id = xml.get('scan_id')
if scan_id is None or scan_id == '':
raise OspdCommandError('No scan_id attribute', 'get_scans')
details = xml.get('details')
pop_res = xml.get('pop_results')
max_res = xml.get('max_results')
progress = xml.get('progress')
if details and details == '0':
details = False
else:
details = True
pop_res = pop_res and pop_res == '1'
if max_res:
max_res = int(max_res)
progress = progress and progress == '1'
responses = []
if scan_id in self._daemon.scan_collection.ids_iterator():
self._daemon.check_scan_process(scan_id)
scan = self._daemon.get_scan_xml(
scan_id, details, pop_res, max_res, progress
)
responses.append(scan)
else:
text = "Failed to find scan '{0}'".format(scan_id)
return simple_response_str('get_scans', 404, text)
return simple_response_str('get_scans', 200, 'OK', responses)
class StartScan(BaseCommand):
name = 'start_scan'
description = 'Start a new scan.'
attributes = {
'target': 'Target host to scan',
'ports': 'Ports list to scan',
'scan_id': 'Optional UUID value to use as scan ID',
        'parallel': 'Optional number of parallel targets to scan',
}
must_be_initialized = False
def get_elements(self):
elements = {}
if self.elements:
elements.update(self.elements)
scanner_params = elements.get('scanner_params', {}).copy()
elements['scanner_params'] = scanner_params
scanner_params.update(
{
k: v['description']
for k, v in self._daemon.scanner_params.items()
}
)
return elements
def handle_xml(self, xml: Element) -> bytes:
"""Handles command.
Return:
Response string for command.
"""
current_queued_scans = self._daemon.get_count_queued_scans()
if (
self._daemon.max_queued_scans
and current_queued_scans >= self._daemon.max_queued_scans
):
logger.info(
'Maximum number of queued scans set to %d reached.',
self._daemon.max_queued_scans,
)
raise OspdCommandError(
'Maximum number of queued scans set to %d reached.'
% self._daemon.max_queued_scans,
'start_scan',
)
target_str = xml.get('target')
ports_str = xml.get('ports')
# For backward compatibility, if target and ports attributes are set,
        # the <targets> element is ignored.
if target_str is None or ports_str is None:
target_element = xml.find('targets/target')
if target_element is None:
raise OspdCommandError('No targets or ports', 'start_scan')
else:
scan_target = OspRequest.process_target_element(target_element)
else:
scan_target = {
'hosts': target_str,
'ports': ports_str,
'credentials': {},
'exclude_hosts': '',
'finished_hosts': '',
'options': {},
}
logger.warning(
"Legacy start scan command format is being used, which "
"is deprecated since 20.08. Please read the documentation "
"for start scan command."
)
scan_id = xml.get('scan_id')
if scan_id is not None and scan_id != '' and not valid_uuid(scan_id):
raise OspdCommandError('Invalid scan_id UUID', 'start_scan')
if xml.get('parallel'):
logger.warning(
"parallel attribute of start_scan will be ignored, sice "
"parallel scan is not supported by OSPd."
)
scanner_params = xml.find('scanner_params')
if scanner_params is None:
raise OspdCommandError('No scanner_params element', 'start_scan')
# params are the parameters we got from the XML.
params = self._daemon.preprocess_scan_params(scanner_params)
        # VTS is an optional element. If present, it should not be empty.
vt_selection = {} # type: Dict
scanner_vts = xml.find('vt_selection')
if scanner_vts is not None:
if len(scanner_vts) == 0:
raise OspdCommandError('VTs list is empty', 'start_scan')
else:
vt_selection = OspRequest.process_vts_params(scanner_vts)
# Dry run case.
dry_run = 'dry_run' in params and int(params['dry_run'])
if dry_run:
scan_params = None
else:
scan_params = self._daemon.process_scan_params(params)
scan_id_aux = scan_id
scan_id = self._daemon.create_scan(
scan_id, scan_target, scan_params, vt_selection
)
if not scan_id:
id_ = Element('id')
id_.text = scan_id_aux
return simple_response_str('start_scan', 100, 'Continue', id_)
logger.info(
'Scan %s added to the queue in position %d.',
scan_id,
current_queued_scans + 1,
)
if dry_run:
scan_func = self._daemon.dry_run_scan
scan_process = create_process(
func=scan_func, args=(scan_id, scan_target)
)
self._daemon.scan_processes[scan_id] = scan_process
scan_process.start()
id_ = Element('id')
id_.text = scan_id
return simple_response_str('start_scan', 200, 'OK', id_)
class GetMemoryUsage(BaseCommand):
name = "get_memory_usage"
description = "print the memory consumption of all processes"
attributes = {
'unit': 'Unit for displaying memory consumption (b = bytes, '
'kb = kilobytes, mb = megabytes). Defaults to b.'
}
must_be_initialized = False
@staticmethod
def _get_memory(value: int, unit: str = None) -> str:
if not unit:
return str(value)
unit = unit.lower()
if unit == 'kb':
return str(Decimal(value) / 1024)
if unit == 'mb':
return str(Decimal(value) / (1024 * 1024))
return str(value)
@staticmethod
def _create_process_element(name: str, pid: int):
process_element = Element('process')
process_element.set('name', name)
process_element.set('pid', str(pid))
return process_element
@classmethod
def _add_memory_info(
cls, process_element: Element, pid: int, unit: str = None
):
try:
ps_process = psutil.Process(pid)
except psutil.NoSuchProcess:
return
memory = ps_process.memory_info()
rss_element = Element('rss')
rss_element.text = cls._get_memory(memory.rss, unit)
process_element.append(rss_element)
vms_element = Element('vms')
vms_element.text = cls._get_memory(memory.vms, unit)
process_element.append(vms_element)
shared_element = Element('shared')
shared_element.text = cls._get_memory(memory.shared, unit)
process_element.append(shared_element)
def handle_xml(self, xml: Element) -> bytes:
processes_element = Element('processes')
unit = xml.get('unit')
current_process = multiprocessing.current_process()
process_element = self._create_process_element(
current_process.name, current_process.pid
)
self._add_memory_info(process_element, current_process.pid, unit)
processes_element.append(process_element)
for proc in multiprocessing.active_children():
process_element = self._create_process_element(proc.name, proc.pid)
self._add_memory_info(process_element, proc.pid, unit)
processes_element.append(process_element)
return simple_response_str('get_memory', 200, 'OK', processes_element)
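if __name__ == '__main__':
    # Small conversion demo for the _get_memory helper above (example
    # values only, not part of the module's behaviour):
    print(GetMemoryUsage._get_memory(2 * 1024 * 1024, 'mb'))  # prints 2
    print(GetMemoryUsage._get_memory(2048, 'kb'))  # prints 2
    print(GetMemoryUsage._get_memory(2048))  # prints 2048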
ospd-21.4.4/ospd/command/initsubclass.py 0000664 0000000 0000000 00000003242 14131311270 0020201 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=bad-mcs-classmethod-argument, no-member
_has_init_subclass = hasattr( # pylint: disable=invalid-name
type, "__init_subclass__"
)
if not _has_init_subclass:
class InitSubclassMeta(type):
"""Metaclass that implements PEP 487 protocol"""
def __new__(cls, name, bases, ns, **kwargs):
__init_subclass__ = ns.pop("__init_subclass__", None)
if __init_subclass__:
__init_subclass__ = classmethod(__init_subclass__)
ns["__init_subclass__"] = __init_subclass__
return super().__new__(cls, name, bases, ns, **kwargs)
def __init__(cls, name, bases, ns, **kwargs):
super().__init__(name, bases, ns)
super_class = super(cls, cls)
if hasattr(super_class, "__init_subclass__"):
super_class.__init_subclass__.__func__(cls, **kwargs)
else:
InitSubclassMeta = type # type: ignore
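# Tiny illustration (not used by ospd itself): a base class using this
# metaclass gets notified whenever it is subclassed, with or without
# native PEP 487 support.
if __name__ == '__main__':
    class Plugin(metaclass=InitSubclassMeta):
        registry = []
        def __init_subclass__(cls, **kwargs):
            Plugin.registry.append(cls)
    class ExamplePlugin(Plugin):
        pass
    print(Plugin.registry)  # [<class '__main__.ExamplePlugin'>]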
ospd-21.4.4/ospd/command/registry.py 0000664 0000000 0000000 00000002165 14131311270 0017351 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from typing import List
__COMMANDS = []
def register_command(command: object) -> None:
"""Register a command class"""
__COMMANDS.append(command)
def remove_command(command: object) -> None:
"""Unregister a command class"""
__COMMANDS.remove(command)
def get_commands() -> List[object]:
"""Return the list of registered command classes"""
return __COMMANDS
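# Minimal usage sketch (the class below is hypothetical): command classes
# are normally registered implicitly by BaseCommand.__init_subclass__ and
# later instantiated by the daemon via get_commands().
if __name__ == '__main__':
    class ExampleCommand:
        name = 'example'
    register_command(ExampleCommand)
    print(ExampleCommand in get_commands())  # True
    remove_command(ExampleCommand)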
ospd-21.4.4/ospd/config.py 0000664 0000000 0000000 00000003147 14131311270 0015331 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Module to store ospd configuration settings
"""
import configparser
import logging
from pathlib import Path
from typing import Dict
logger = logging.getLogger(__name__)
class Config:
def __init__(self, section: str = 'main') -> None:
self._parser = configparser.ConfigParser(default_section=section)
self._config = {} # type: Dict
self._defaults = {} # type: Dict
def load(self, filepath: Path, def_section: str = 'main') -> None:
path = filepath.expanduser()
parser = configparser.ConfigParser(default_section=def_section)
with path.open() as f:
parser.read_file(f)
self._defaults.update(parser.defaults())
for key, value in parser.items(def_section):
self._config.setdefault(def_section, dict())[key] = value
def defaults(self) -> Dict:
return self._defaults
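if __name__ == '__main__':
    # Minimal usage sketch (section and option names are examples): write a
    # small ini-style file and load its default section.
    import tempfile
    with tempfile.NamedTemporaryFile('w', suffix='.conf') as tmp:
        tmp.write('[OSPD - example]\nlog_level = DEBUG\n')
        tmp.flush()
        config = Config()
        config.load(Path(tmp.name), def_section='OSPD - example')
        print(config.defaults())  # {'log_level': 'DEBUG'}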
ospd-21.4.4/ospd/cvss.py 0000664 0000000 0000000 00000011241 14131311270 0015034 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
""" Common Vulnerability Scoring System handling class. """
import math
from typing import List, Dict, Optional
CVSS_V2_METRICS = {
'AV': {'L': 0.395, 'A': 0.646, 'N': 1.0},
'AC': {'H': 0.35, 'M': 0.61, 'L': 0.71},
'Au': {'M': 0.45, 'S': 0.56, 'N': 0.704},
'C': {'N': 0.0, 'P': 0.275, 'C': 0.660},
'I': {'N': 0.0, 'P': 0.275, 'C': 0.660},
'A': {'N': 0.0, 'P': 0.275, 'C': 0.660},
} # type: Dict
CVSS_V3_METRICS = {
'AV': {'N': 0.85, 'A': 0.62, 'L': 0.55, 'P': 0.2},
'AC': {'L': 0.77, 'H': 0.44},
'PR_SU': {'N': 0.85, 'L': 0.62, 'H': 0.27},
'PR_SC': {'N': 0.85, 'L': 0.68, 'H': 0.50},
'UI': {'N': 0.85, 'R': 0.62},
'S': {'U': False, 'C': True},
'C': {'H': 0.56, 'L': 0.22, 'N': 0},
'I': {'H': 0.56, 'L': 0.22, 'N': 0},
'A': {'H': 0.56, 'L': 0.22, 'N': 0},
} # type: Dict
class CVSS(object):
""" Handle cvss vectors and calculate the cvss scoring"""
@staticmethod
def roundup(value: float) -> float:
"""It rounds up to 1 decimal. """
return math.ceil(value * 10) / 10
@staticmethod
def _parse_cvss_base_vector(cvss_vector: str) -> List:
"""Parse a string containing a cvss base vector.
Arguments:
cvss_vector (str): cvss base vector to be parsed.
Return list with the string values of each vector element.
"""
vector_as_list = cvss_vector.split('/')
return [item.split(':')[1] for item in vector_as_list]
@classmethod
def cvss_base_v2_value(cls, cvss_base_vector: str) -> Optional[float]:
"""Calculate the cvss base score from a cvss base vector
for cvss version 2.
Arguments:
cvss_base_vector (str) Cvss base vector v2.
Return the calculated score
"""
if not cvss_base_vector:
return None
_av, _ac, _au, _c, _i, _a = cls._parse_cvss_base_vector(
cvss_base_vector
)
_impact = 10.41 * (
1
- (1 - CVSS_V2_METRICS['C'].get(_c))
* (1 - CVSS_V2_METRICS['I'].get(_i))
* (1 - CVSS_V2_METRICS['A'].get(_a))
)
_exploitability = (
20
* CVSS_V2_METRICS['AV'].get(_av)
* CVSS_V2_METRICS['AC'].get(_ac)
* CVSS_V2_METRICS['Au'].get(_au)
)
f_impact = 0 if _impact == 0 else 1.176
cvss_base = ((0.6 * _impact) + (0.4 * _exploitability) - 1.5) * f_impact
return round(cvss_base, 1)
@classmethod
def cvss_base_v3_value(cls, cvss_base_vector: str) -> Optional[float]:
"""Calculate the cvss base score from a cvss base vector
for cvss version 3.
Arguments:
cvss_base_vector (str) Cvss base vector v3.
Return the calculated score, None on fail.
"""
if not cvss_base_vector:
return None
_ver, _av, _ac, _pr, _ui, _s, _c, _i, _a = cls._parse_cvss_base_vector(
cvss_base_vector
)
scope_changed = CVSS_V3_METRICS['S'].get(_s)
isc_base = 1 - (
(1 - CVSS_V3_METRICS['C'].get(_c))
* (1 - CVSS_V3_METRICS['I'].get(_i))
* (1 - CVSS_V3_METRICS['A'].get(_a))
)
if scope_changed:
_priv_req = CVSS_V3_METRICS['PR_SC'].get(_pr)
else:
_priv_req = CVSS_V3_METRICS['PR_SU'].get(_pr)
_exploitability = (
8.22
* CVSS_V3_METRICS['AV'].get(_av)
* CVSS_V3_METRICS['AC'].get(_ac)
* _priv_req
* CVSS_V3_METRICS['UI'].get(_ui)
)
if scope_changed:
_impact = 7.52 * (isc_base - 0.029) - 3.25 * pow(
isc_base - 0.02, 15
)
_base_score = min(1.08 * (_impact + _exploitability), 10)
else:
_impact = 6.42 * isc_base
_base_score = min(_impact + _exploitability, 10)
if _impact > 0:
return cls.roundup(_base_score)
return 0
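if __name__ == '__main__':
    # Illustrative vectors (example inputs, not taken from the module):
    print(CVSS.cvss_base_v2_value('AV:N/AC:L/Au:N/C:P/I:P/A:P'))  # 7.5
    print(
        CVSS.cvss_base_v3_value(
            'CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H'
        )
    )  # 9.8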
ospd-21.4.4/ospd/datapickler.py 0000664 0000000 0000000 00000011473 14131311270 0016350 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
""" Pickle Handler class
"""
import logging
import pickle
import os
from hashlib import sha256
from pathlib import Path
from typing import BinaryIO, Any
from ospd.errors import OspdCommandError
logger = logging.getLogger(__name__)
OWNER_ONLY_RW_PERMISSION = 0o600
class DataPickler:
def __init__(self, storage_path: str):
self._storage_path = storage_path
self._storage_fd = None
def _fd_opener(self, path: str, flags: int) -> BinaryIO:
os.umask(0)
flags = os.O_CREAT | os.O_WRONLY
self._storage_fd = os.open(path, flags, mode=OWNER_ONLY_RW_PERMISSION)
return self._storage_fd
def _fd_close(self) -> None:
try:
self._storage_fd.close()
self._storage_fd = None
except Exception: # pylint: disable=broad-except
pass
def remove_file(self, filename: str) -> None:
""" Remove the file containing a scan_info pickled object """
storage_file_path = Path(self._storage_path) / filename
try:
storage_file_path.unlink()
except Exception as e: # pylint: disable=broad-except
logger.error('Not possible to delete %s. %s', filename, e)
def store_data(self, filename: str, data_object: Any) -> str:
""" Pickle a object and store it in a file named"""
storage_file_path = Path(self._storage_path) / filename
try:
# create parent directories recursively
parent_dir = storage_file_path.parent
parent_dir.mkdir(parents=True, exist_ok=True)
except Exception as e:
raise OspdCommandError(
'Not possible to access dir for %s. %s' % (filename, e),
'start_scan',
) from e
try:
pickled_data = pickle.dumps(data_object)
except pickle.PicklingError as e:
raise OspdCommandError(
'Not possible to pickle scan info for %s. %s' % (filename, e),
'start_scan',
) from e
try:
with open(
str(storage_file_path), 'wb', opener=self._fd_opener
) as scan_info_f:
scan_info_f.write(pickled_data)
except Exception as e: # pylint: disable=broad-except
self._fd_close()
raise OspdCommandError(
'Not possible to store scan info for %s. %s' % (filename, e),
'start_scan',
) from e
self._fd_close()
return self._pickled_data_hash_generator(pickled_data)
def load_data(self, filename: str, original_data_hash: str) -> Any:
"""Unpickle the stored data in the filename. Perform an
intengrity check of the read data with the the hash generated
with the original data.
Return:
Dictionary containing the scan info. None otherwise.
"""
storage_file_path = Path(self._storage_path) / filename
pickled_data = None
try:
with storage_file_path.open('rb') as scan_info_f:
pickled_data = scan_info_f.read()
except Exception as e: # pylint: disable=broad-except
logger.error(
'Not possible to read pickled data from %s. %s', filename, e
)
return
unpickled_scan_info = None
try:
unpickled_scan_info = pickle.loads(pickled_data)
except pickle.UnpicklingError as e:
logger.error(
'Not possible to read pickled data from %s. %s', filename, e
)
return
pickled_scan_info_hash = self._pickled_data_hash_generator(pickled_data)
if original_data_hash != pickled_scan_info_hash:
logger.error('Unpickled data from %s corrupted.', filename)
return
return unpickled_scan_info
def _pickled_data_hash_generator(self, pickled_data: bytes) -> str:
""" Calculate the sha256 hash of a pickled data """
if not pickled_data:
return
hash_sha256 = sha256()
hash_sha256.update(pickled_data)
return hash_sha256.hexdigest()
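if __name__ == '__main__':
    # Rough usage sketch (file name and payload are examples): store_data()
    # returns a sha256 hash which load_data() uses as an integrity check.
    import tempfile
    with tempfile.TemporaryDirectory() as tmp_dir:
        pickler = DataPickler(tmp_dir)
        data_hash = pickler.store_data(
            'scan-info-example', {'target': '127.0.0.1'}
        )
        print(pickler.load_data('scan-info-example', data_hash))
        pickler.remove_file('scan-info-example')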
ospd-21.4.4/ospd/errors.py 0000664 0000000 0000000 00000003532 14131311270 0015376 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
""" OSP class for handling errors.
"""
from ospd.xml import simple_response_str
class OspdError(Exception):
""" Base error class for all Ospd related errors """
class RequiredArgument(OspdError):
"""Raised if a required argument/parameter is missing
Derives from :py:class:`OspdError`
"""
def __init__(self, function: str, argument: str) -> None:
# pylint: disable=super-init-not-called
self.function = function
self.argument = argument
def __str__(self) -> str:
return "{}: Argument {} is required".format(
self.function, self.argument
)
class OspdCommandError(OspdError):
"""This is an exception that will result in an error message to the
client"""
def __init__(
self, message: str, command: str = 'osp', status: int = 400
) -> None:
super().__init__(message)
self.message = message
self.command = command
self.status = status
def as_xml(self) -> str:
""" Return the error in xml format. """
return simple_response_str(self.command, self.status, self.message)
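if __name__ == '__main__':
    # Illustration only: a command handler raises OspdCommandError and the
    # daemon serializes it into an OSP error response via as_xml().
    error = OspdCommandError('Bogus help format', 'help', 400)
    print(error.as_xml())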
ospd-21.4.4/ospd/logger.py 0000664 0000000 0000000 00000005563 14131311270 0015347 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging
import os
import configparser
from logging.config import fileConfig
from pathlib import Path
from typing import Optional
DEFAULT_HANDLER_CONSOLE = {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'file',
'args': 'sys.stdout,',
}
DEFAULT_HANDLER_FILE = {
'class': 'FileHandler',
'level': 'INFO',
'formatter': 'file',
}
DEFAULT_HANDLER_SYSLOG = {
'class': 'handlers.SysLogHandler',
'level': 'INFO',
'formatter': 'syslog',
'args': '("/dev/log", handlers.SysLogHandler.LOG_USER)',
}
DEFAULT_HANDLERS = {'keys': 'default_handler'}
DEFAULT_FORMATTERS = {'keys': 'file,syslog'}
DEFAULT_FORMATTER_FILE = {
'format': 'OSPD['
+ str(os.getpid())
+ '] %(asctime)s: %(levelname)s: (%(name)s) %(message)s',
'datefmt': '',
}
DEFAULT_FORMATTER_SYSLOG = {
'format': 'OSPD['
+ str(os.getpid())
+ '] %(levelname)s: (%(name)s) %(message)s',
'datefmt': '',
}
DEFAULT_LOGGERS = {'keys': 'root'}
DEFAULT_ROOT_LOGGER = {
'level': 'NOTSET',
'handlers': 'default_handler',
'propagate': '0',
}
def init_logging(
log_level: int,
*,
log_file: Optional[str] = None,
log_config: Optional[str] = None,
foreground: Optional[bool] = False,
):
config = configparser.ConfigParser()
config['handlers'] = DEFAULT_HANDLERS
config['formatters'] = DEFAULT_FORMATTERS
config['formatter_file'] = DEFAULT_FORMATTER_FILE
config['formatter_syslog'] = DEFAULT_FORMATTER_SYSLOG
if foreground:
config['handler_default_handler'] = DEFAULT_HANDLER_CONSOLE
elif log_file:
config['handler_default_handler'] = DEFAULT_HANDLER_FILE
config['handler_default_handler']['args'] = "('" + log_file + "', 'a')"
else:
config['handler_default_handler'] = DEFAULT_HANDLER_SYSLOG
config['handler_default_handler']['level'] = log_level
log_config_path = Path(log_config)
if log_config_path.exists():
config.read(log_config)
else:
config['loggers'] = DEFAULT_LOGGERS
config['logger_root'] = DEFAULT_ROOT_LOGGER
fileConfig(config, disable_existing_loggers=False)
logging.getLogger()
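if __name__ == '__main__':
    # Rough usage sketch: the config path below is an example and does not
    # need to exist; the level is given as a name ('INFO') here because the
    # value ends up as a config option consumed by fileConfig, despite the
    # int annotation in the signature.
    init_logging(
        'INFO', log_config='/tmp/ospd-logging.conf', foreground=True
    )
    logging.getLogger(__name__).info('Logging initialized')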
ospd-21.4.4/ospd/main.py 0000664 0000000 0000000 00000010455 14131311270 0015010 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging
import os
import sys
import atexit
import signal
from functools import partial
from typing import Type, Optional
from pathlib import Path
from ospd.misc import go_to_background, create_pid
from ospd.ospd import OSPDaemon
from ospd.parser import create_parser, ParserType
from ospd.server import TlsServer, UnixSocketServer, BaseServer
from ospd.logger import init_logging
COPYRIGHT = """Copyright (C) 2014-2021 Greenbone Networks GmbH
License GPLv2+: GNU GPL version 2 or later
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law."""
LOGGER = logging.getLogger(__name__)
def print_version(daemon: OSPDaemon, file=sys.stdout):
""" Prints the server version and license information."""
scanner_name = daemon.get_scanner_name()
server_version = daemon.get_server_version()
protocol_version = daemon.get_protocol_version()
daemon_name = daemon.get_daemon_name()
daemon_version = daemon.get_daemon_version()
print(
"OSP Server for {0}: {1}".format(scanner_name, server_version),
file=file,
)
print("OSP: {0}".format(protocol_version), file=file)
print("{0}: {1}".format(daemon_name, daemon_version), file=file)
print(file=file)
print(COPYRIGHT, file=file)
def exit_cleanup(
pidfile: str,
server: BaseServer,
daemon: OSPDaemon,
_signum=None,
_frame=None,
) -> None:
""" Removes the pidfile before ending the daemon. """
signal.signal(signal.SIGINT, signal.SIG_IGN)
pidpath = Path(pidfile)
if not pidpath.is_file():
return
with pidpath.open() as f:
if int(f.read()) == os.getpid():
LOGGER.debug("Performing exit clean up")
daemon.daemon_exit_cleanup()
LOGGER.info("Shutting-down server ...")
server.close()
LOGGER.debug("Finishing daemon process")
pidpath.unlink()
sys.exit()
def main(
name: str,
daemon_class: Type[OSPDaemon],
parser: Optional[ParserType] = None,
):
""" OSPD Main function. """
if not parser:
parser = create_parser(name)
args = parser.parse_arguments()
if args.version:
args.foreground = True
init_logging(
args.log_level,
log_file=args.log_file,
log_config=args.log_config,
foreground=args.foreground,
)
if args.port == 0:
server = UnixSocketServer(
args.unix_socket, args.socket_mode, args.stream_timeout
)
else:
server = TlsServer(
args.address,
args.port,
args.cert_file,
args.key_file,
args.ca_file,
args.stream_timeout,
)
daemon = daemon_class(**vars(args))
if args.version:
print_version(daemon)
sys.exit()
if args.list_commands:
print(daemon.get_help_text())
sys.exit()
if not args.foreground:
go_to_background()
if not create_pid(args.pid_file):
sys.exit()
# Set signal handler and cleanup
atexit.register(
exit_cleanup, pidfile=args.pid_file, server=server, daemon=daemon
)
signal.signal(
signal.SIGTERM, partial(exit_cleanup, args.pid_file, server, daemon)
)
signal.signal(
signal.SIGINT, partial(exit_cleanup, args.pid_file, server, daemon)
)
if not daemon.check():
return 1
LOGGER.info(
"Starting %s version %s.",
daemon.daemon_info['name'],
daemon.daemon_info['version'],
)
daemon.init(server)
daemon.run()
return 0
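# Sketch of how a concrete scanner wrapper wires up this entry point
# (module, class and scanner names below are hypothetical):
#
#     from ospd.main import main as ospd_main
#     from ospd_example.daemon import OSPDexample
#
#     def main():
#         ospd_main('ospd-example', OSPDexample)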
ospd-21.4.4/ospd/misc.py 0000664 0000000 0000000 00000010251 14131311270 0015011 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=too-many-lines
""" Miscellaneous classes and functions related to OSPD.
"""
import logging
import os
import sys
import uuid
import multiprocessing
from typing import Any, Callable, Iterable
from pathlib import Path
import psutil
LOGGER = logging.getLogger(__name__)
def create_process(
func: Callable, *, args: Iterable[Any] = None
) -> multiprocessing.Process:
return multiprocessing.Process(target=func, args=args)
class ResultType(object):
""" Various scan results types values. """
ALARM = 0
LOG = 1
ERROR = 2
HOST_DETAIL = 3
@classmethod
def get_str(cls, result_type: int) -> str:
""" Return string name of a result type. """
if result_type == cls.ALARM:
return "Alarm"
elif result_type == cls.LOG:
return "Log Message"
elif result_type == cls.ERROR:
return "Error Message"
elif result_type == cls.HOST_DETAIL:
return "Host Detail"
else:
assert False, "Erroneous result type {0}.".format(result_type)
@classmethod
def get_type(cls, result_name: str) -> int:
""" Return string name of a result type. """
if result_name == "Alarm":
return cls.ALARM
elif result_name == "Log Message":
return cls.LOG
elif result_name == "Error Message":
return cls.ERROR
elif result_name == "Host Detail":
return cls.HOST_DETAIL
else:
assert False, "Erroneous result name {0}.".format(result_name)
def valid_uuid(value) -> bool:
""" Check if value is a valid UUID. """
try:
uuid.UUID(value, version=4)
return True
except (TypeError, ValueError, AttributeError):
return False
def go_to_background() -> None:
""" Daemonize the running process. """
try:
if os.fork():
sys.exit()
except OSError as errmsg:
LOGGER.error('Fork failed: %s', errmsg)
sys.exit(1)
def create_pid(pidfile: str) -> bool:
"""Check if there is an already running daemon and creates the pid file.
Otherwise gives an error."""
pid = str(os.getpid())
new_process = psutil.Process(int(pid))
new_process_name = new_process.name()
pidpath = Path(pidfile)
if pidpath.is_file():
process_name = None
with pidpath.open('r') as pidfile:
current_pid = pidfile.read()
try:
process = psutil.Process(int(current_pid))
process_name = process.name()
except psutil.NoSuchProcess:
pass
if process_name == new_process_name:
LOGGER.error(
"There is an already running process. See %s.",
str(pidpath.absolute()),
)
return False
else:
LOGGER.debug(
"There is an existing pid file '%s', but the PID %s belongs to "
"the process %s. It seems that %s was abruptly stopped. "
"Removing the pid file.",
str(pidpath.absolute()),
current_pid,
process_name,
new_process_name,
)
try:
with pidpath.open(mode='w') as f:
f.write(pid)
except (FileNotFoundError, PermissionError) as e:
LOGGER.error(
"Failed to create pid file %s. %s", str(pidpath.absolute()), e
)
return False
return True
ospd-21.4.4/ospd/network.py 0000664 0000000 0000000 00000033343 14131311270 0015556 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
""" Helper module for network related functions
"""
import binascii
import collections
import itertools
import logging
import re
import socket
import struct
from typing import List, Optional, Tuple
__LOGGER = logging.getLogger(__name__)
def target_to_ipv4(target: str) -> Optional[List]:
""" Attempt to return a single IPv4 host list from a target string. """
try:
socket.inet_pton(socket.AF_INET, target)
return [target]
except socket.error:
return None
def target_to_ipv6(target: str) -> Optional[List]:
""" Attempt to return a single IPv6 host list from a target string. """
try:
socket.inet_pton(socket.AF_INET6, target)
return [target]
except socket.error:
return None
def ipv4_range_to_list(start_packed, end_packed) -> Optional[List]:
""" Return a list of IPv4 entries from start_packed to end_packed. """
new_list = list()
start = struct.unpack('!L', start_packed)[0]
end = struct.unpack('!L', end_packed)[0]
for value in range(start, end + 1):
new_ip = socket.inet_ntoa(struct.pack('!L', value))
new_list.append(new_ip)
return new_list
def target_to_ipv4_short(target: str) -> Optional[List]:
""" Attempt to return a IPv4 short range list from a target string. """
splitted = target.split('-')
if len(splitted) != 2:
return None
try:
start_packed = socket.inet_pton(socket.AF_INET, splitted[0])
end_value = int(splitted[1])
except (socket.error, ValueError):
return None
# For subnet with mask lower than /24, ip addresses ending in .0 are
# allowed.
    # The next code checks for a range starting with an A.B.C.0 address.
# For the octet equal to 0, bytes() returns an empty binary b'',
    # which must be handled in a special way.
_start_value = bytes(start_packed[3])
if _start_value:
start_value = int(binascii.hexlify(_start_value), 16)
elif _start_value == b'':
start_value = 0
else:
return None
if end_value < 0 or end_value > 255 or end_value < start_value:
return None
end_packed = start_packed[0:3] + struct.pack('B', end_value)
return ipv4_range_to_list(start_packed, end_packed)
def target_to_ipv4_cidr(target: str) -> Optional[List]:
""" Attempt to return a IPv4 CIDR list from a target string. """
splitted = target.split('/')
if len(splitted) != 2:
return None
try:
start_packed = socket.inet_pton(socket.AF_INET, splitted[0])
block = int(splitted[1])
except (socket.error, ValueError):
return None
if block <= 0 or block > 30:
return None
start_value = int(binascii.hexlify(start_packed), 16) >> (32 - block)
start_value = (start_value << (32 - block)) + 1
end_value = (start_value | (0xFFFFFFFF >> block)) - 1
start_packed = struct.pack('!I', start_value)
end_packed = struct.pack('!I', end_value)
return ipv4_range_to_list(start_packed, end_packed)
def target_to_ipv6_cidr(target: str) -> Optional[List]:
""" Attempt to return a IPv6 CIDR list from a target string. """
splitted = target.split('/')
if len(splitted) != 2:
return None
try:
start_packed = socket.inet_pton(socket.AF_INET6, splitted[0])
block = int(splitted[1])
except (socket.error, ValueError):
return None
if block <= 0 or block > 126:
return None
start_value = int(binascii.hexlify(start_packed), 16) >> (128 - block)
start_value = (start_value << (128 - block)) + 1
end_value = (start_value | (int('ff' * 16, 16) >> block)) - 1
high = start_value >> 64
low = start_value & ((1 << 64) - 1)
start_packed = struct.pack('!QQ', high, low)
high = end_value >> 64
low = end_value & ((1 << 64) - 1)
end_packed = struct.pack('!QQ', high, low)
return ipv6_range_to_list(start_packed, end_packed)
def target_to_ipv4_long(target: str) -> Optional[List]:
""" Attempt to return a IPv4 long-range list from a target string. """
splitted = target.split('-')
if len(splitted) != 2:
return None
try:
start_packed = socket.inet_pton(socket.AF_INET, splitted[0])
end_packed = socket.inet_pton(socket.AF_INET, splitted[1])
except socket.error:
return None
if end_packed < start_packed:
return None
return ipv4_range_to_list(start_packed, end_packed)
def ipv6_range_to_list(start_packed, end_packed) -> List:
""" Return a list of IPv6 entries from start_packed to end_packed. """
new_list = list()
start = int(binascii.hexlify(start_packed), 16)
end = int(binascii.hexlify(end_packed), 16)
for value in range(start, end + 1):
high = value >> 64
low = value & ((1 << 64) - 1)
new_ip = socket.inet_ntop(
socket.AF_INET6, struct.pack('!2Q', high, low)
)
new_list.append(new_ip)
return new_list
def target_to_ipv6_short(target: str) -> Optional[List]:
""" Attempt to return a IPv6 short-range list from a target string. """
splitted = target.split('-')
if len(splitted) != 2:
return None
try:
start_packed = socket.inet_pton(socket.AF_INET6, splitted[0])
end_value = int(splitted[1], 16)
except (socket.error, ValueError):
return None
start_value = int(binascii.hexlify(start_packed[14:]), 16)
if end_value < 0 or end_value > 0xFFFF or end_value < start_value:
return None
end_packed = start_packed[:14] + struct.pack('!H', end_value)
return ipv6_range_to_list(start_packed, end_packed)
def target_to_ipv6_long(target: str) -> Optional[List]:
""" Attempt to return a IPv6 long-range list from a target string. """
splitted = target.split('-')
if len(splitted) != 2:
return None
try:
start_packed = socket.inet_pton(socket.AF_INET6, splitted[0])
end_packed = socket.inet_pton(socket.AF_INET6, splitted[1])
except socket.error:
return None
if end_packed < start_packed:
return None
return ipv6_range_to_list(start_packed, end_packed)
def target_to_hostname(target: str) -> Optional[List]:
""" Attempt to return a single hostname list from a target string. """
if len(target) == 0 or len(target) > 255:
return None
if not re.match(r'^[\w.-]+$', target):
return None
return [target]
def target_to_list(target: str) -> Optional[List]:
""" Attempt to return a list of single hosts from a target string. """
# Is it an IPv4 address ?
new_list = target_to_ipv4(target)
# Is it an IPv6 address ?
if not new_list:
new_list = target_to_ipv6(target)
# Is it an IPv4 CIDR ?
if not new_list:
new_list = target_to_ipv4_cidr(target)
# Is it an IPv6 CIDR ?
if not new_list:
new_list = target_to_ipv6_cidr(target)
# Is it an IPv4 short-range ?
if not new_list:
new_list = target_to_ipv4_short(target)
# Is it an IPv4 long-range ?
if not new_list:
new_list = target_to_ipv4_long(target)
# Is it an IPv6 short-range ?
if not new_list:
new_list = target_to_ipv6_short(target)
# Is it an IPv6 long-range ?
if not new_list:
new_list = target_to_ipv6_long(target)
# Is it a hostname ?
if not new_list:
new_list = target_to_hostname(target)
return new_list
def target_str_to_list(target_str: str) -> Optional[List]:
"""Parses a targets string into a list of individual targets.
Return a list of hosts, None if supplied target_str is None or
empty, or an empty list in case of malformed target.
"""
new_list = list()
if not target_str:
return None
target_str = target_str.strip(',')
for target in target_str.split(','):
target = target.strip()
target_list = target_to_list(target)
if target_list:
new_list.extend(target_list)
else:
__LOGGER.info("%s: Invalid target value", target)
return []
return list(collections.OrderedDict.fromkeys(new_list))
def resolve_hostname(hostname: str) -> Optional[str]:
""" Returns IP of a hostname. """
assert hostname
try:
return socket.gethostbyname(hostname)
except socket.gaierror:
return None
def is_valid_address(address: str) -> bool:
if not address:
return False
try:
socket.inet_pton(socket.AF_INET, address)
except OSError:
# invalid IPv4 address
try:
socket.inet_pton(socket.AF_INET6, address)
except OSError:
# invalid IPv6 address
return False
return True
def get_hostname_by_address(address: str) -> str:
""" Returns hostname of an address. """
if not is_valid_address(address):
return ''
try:
hostname = socket.getfqdn(address)
except (socket.gaierror, socket.herror):
return ''
if hostname == address:
return ''
return hostname
def port_range_expand(portrange: str) -> Optional[List]:
"""
    Receive a port range and expand it into individual ports.
@input Port range.
e.g. "4-8"
@return List of integers.
e.g. [4, 5, 6, 7, 8]
"""
if not portrange or '-' not in portrange:
__LOGGER.info("Invalid port range format")
return None
port_list = list()
for single_port in range(
int(portrange[: portrange.index('-')]),
int(portrange[portrange.index('-') + 1 :]) + 1,
):
port_list.append(single_port)
return port_list
def port_str_arrange(ports: str) -> str:
"""Gives a str in the format (always tcp listed first).
    T:<tcp ports>,U:<udp ports>
"""
b_tcp = ports.find("T")
b_udp = ports.find("U")
if (b_udp != -1 and b_tcp != -1) and b_udp < b_tcp:
return ports[b_tcp:] + ports[b_udp:b_tcp]
return ports
def ports_str_check_failed(port_str: str) -> bool:
"""
Check if the port string is well formed.
    Return True if it is malformed, False otherwise.
"""
pattern = r'[^TU:0-9, \-]'
if (
re.search(pattern, port_str)
or port_str.count('T') > 1
or port_str.count('U') > 1
or port_str.count(':') < (port_str.count('T') + port_str.count('U'))
):
return True
return False
def ports_as_list(port_str: str) -> Tuple[Optional[List], Optional[List]]:
"""
    Parses a ports string into two lists of individual tcp and udp ports.
    @input string containing a port list
    e.g. T:1,2,3,5-8 U:22,80,600-1024
    @return two lists of sorted integers, for tcp and udp ports respectively.
"""
if not port_str:
__LOGGER.info("Invalid port value")
return [None, None]
if ports_str_check_failed(port_str):
__LOGGER.info("{0}: Port list malformed.")
return [None, None]
tcp_list = list()
udp_list = list()
ports = port_str.replace(' ', '')
b_tcp = ports.find("T")
b_udp = ports.find("U")
if ports[b_tcp - 1] == ',':
ports = ports[: b_tcp - 1] + ports[b_tcp:]
if ports[b_udp - 1] == ',':
ports = ports[: b_udp - 1] + ports[b_udp:]
ports = port_str_arrange(ports)
tports = ''
uports = ''
# TCP ports listed first, then UDP ports
if b_udp != -1 and b_tcp != -1:
tports = ports[ports.index('T:') + 2 : ports.index('U:')]
uports = ports[ports.index('U:') + 2 :]
# Only UDP ports
elif b_tcp == -1 and b_udp != -1:
uports = ports[ports.index('U:') + 2 :]
# Only TCP ports
elif b_udp == -1 and b_tcp != -1:
tports = ports[ports.index('T:') + 2 :]
else:
tports = ports
if tports:
for port in tports.split(','):
if '-' in port:
tcp_list.extend(port_range_expand(port))
else:
tcp_list.append(int(port))
tcp_list.sort()
if uports:
for port in uports.split(','):
if '-' in port:
udp_list.extend(port_range_expand(port))
else:
udp_list.append(int(port))
udp_list.sort()
return (tcp_list, udp_list)
def get_tcp_port_list(port_str: str) -> Optional[List]:
""" Return a list with tcp ports from a given port list in string format """
return ports_as_list(port_str)[0]
def get_udp_port_list(port_str: str) -> Optional[List]:
""" Return a list with udp ports from a given port list in string format """
return ports_as_list(port_str)[1]
def port_list_compress(port_list: List) -> str:
""" Compress a port list and return a string. """
if not port_list or len(port_list) == 0:
__LOGGER.info("Invalid or empty port list.")
return ''
port_list = sorted(set(port_list))
compressed_list = []
for _key, group in itertools.groupby(
enumerate(port_list), lambda t: t[1] - t[0]
):
group = list(group)
if group[0][1] == group[-1][1]:
compressed_list.append(str(group[0][1]))
else:
compressed_list.append(str(group[0][1]) + '-' + str(group[-1][1]))
return ','.join(compressed_list)
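if __name__ == '__main__':
    # Quick illustration of the target and port helpers (example values):
    print(target_str_to_list('192.168.1.1, 192.168.1.2-4'))
    # ['192.168.1.1', '192.168.1.2', '192.168.1.3', '192.168.1.4']
    print(ports_as_list('T:1,2,3,5-8 U:22,80'))
    # ([1, 2, 3, 5, 6, 7, 8], [22, 80])
    print(port_list_compress([1, 2, 3, 5, 6, 7, 8]))
    # 1-3,5-8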
ospd-21.4.4/ospd/ospd.py 0000664 0000000 0000000 00000152603 14131311270 0015033 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=too-many-lines
""" OSP Daemon core class.
"""
import logging
import socket
import ssl
import multiprocessing
import time
import os
from pprint import pformat
from typing import List, Any, Iterator, Dict, Optional, Iterable, Tuple, Union
from xml.etree.ElementTree import Element, SubElement
import defusedxml.ElementTree as secET
import psutil
from ospd import __version__
from ospd.command import get_commands
from ospd.errors import OspdCommandError
from ospd.misc import ResultType, create_process
from ospd.network import resolve_hostname, target_str_to_list
from ospd.protocol import RequestParser
from ospd.scan import ScanCollection, ScanStatus, ScanProgress
from ospd.server import BaseServer, Stream
from ospd.vtfilter import VtsFilter
from ospd.vts import Vts
from ospd.xml import elements_as_text, get_result_xml, get_progress_xml
logger = logging.getLogger(__name__)
PROTOCOL_VERSION = __version__
SCHEDULER_CHECK_PERIOD = 10 # in seconds
MIN_TIME_BETWEEN_START_SCAN = 60 # in seconds
BASE_SCANNER_PARAMS = {
'debug_mode': {
'type': 'boolean',
'name': 'Debug Mode',
'default': 0,
'mandatory': 0,
'description': 'Whether to get extra scan debug information.',
},
'dry_run': {
'type': 'boolean',
'name': 'Dry Run',
'default': 0,
'mandatory': 0,
'description': 'Whether to dry run scan.',
},
} # type: Dict
def _terminate_process_group(process: multiprocessing.Process) -> None:
os.killpg(os.getpgid(process.pid), 15)
class OSPDaemon:
"""Daemon class for OSP traffic handling.
Every scanner wrapper should subclass it and make necessary additions and
changes.
* Add any needed parameters in __init__.
* Implement check() method which verifies scanner availability and other
environment related conditions.
* Implement process_scan_params and exec_scan methods which are
specific to handling the command, executing the wrapped
scanner and storing the results.
* Implement other methods that assert to False such as get_scanner_name,
get_scanner_version.
    * Call set_command_attributes at init time to add scanner command
    specific options, e.g. the w3af profile for the w3af wrapper.
"""
def __init__(
self,
*,
customvtfilter=None,
storage=None,
max_scans=0,
min_free_mem_scan_queue=0,
file_storage_dir='/var/run/ospd',
max_queued_scans=0,
**kwargs,
): # pylint: disable=unused-argument
""" Initializes the daemon's internal data. """
self.scan_collection = ScanCollection(file_storage_dir)
self.scan_processes = dict()
self.daemon_info = dict()
self.daemon_info['name'] = "OSPd"
self.daemon_info['version'] = __version__
self.daemon_info['description'] = "No description"
self.scanner_info = dict()
self.scanner_info['name'] = 'No name'
self.scanner_info['version'] = 'No version'
self.scanner_info['description'] = 'No description'
self.server_version = None # Set by the subclass.
self.initialized = None # Set after initialization finished
self.max_scans = max_scans
self.min_free_mem_scan_queue = min_free_mem_scan_queue
self.max_queued_scans = max_queued_scans
self.last_scan_start_time = 0
self.scaninfo_store_time = kwargs.get('scaninfo_store_time')
self.protocol_version = PROTOCOL_VERSION
self.commands = {}
for command_class in get_commands():
command = command_class(self)
self.commands[command.get_name()] = command
self.scanner_params = dict()
for name, params in BASE_SCANNER_PARAMS.items():
self.set_scanner_param(name, params)
self.vts = Vts(storage)
self.vts_version = None
if customvtfilter:
self.vts_filter = customvtfilter
else:
self.vts_filter = VtsFilter()
def init(self, server: BaseServer) -> None:
"""Should be overridden by a subclass if the initialization is costly.
Will be called after check.
"""
self.scan_collection.init()
server.start(self.handle_client_stream)
self.initialized = True
def set_command_attributes(self, name: str, attributes: Dict) -> None:
""" Sets the xml attributes of a specified command. """
if self.command_exists(name):
command = self.commands.get(name)
command.attributes = attributes
def set_scanner_param(self, name: str, scanner_params: Dict) -> None:
""" Set a scanner parameter. """
assert name
assert scanner_params
self.scanner_params[name] = scanner_params
def get_scanner_params(self) -> Dict:
return self.scanner_params
def add_vt(
self,
vt_id: str,
name: str = None,
vt_params: str = None,
vt_refs: str = None,
custom: str = None,
vt_creation_time: str = None,
vt_modification_time: str = None,
vt_dependencies: str = None,
summary: str = None,
impact: str = None,
affected: str = None,
insight: str = None,
solution: str = None,
solution_t: str = None,
solution_m: str = None,
detection: str = None,
qod_t: str = None,
qod_v: str = None,
severities: str = None,
) -> None:
"""Add a vulnerability test information.
IMPORTANT: The VT's Data Manager will store the vts collection.
        If the collection is considerably big and will be consulted
        intensively during a routine, consider doing a deepcopy(), since
        accessing the shared memory in the data manager is very expensive.
        At the end of the routine, the temporary copy must be set to None
        and deleted.
"""
self.vts.add(
vt_id,
name=name,
vt_params=vt_params,
vt_refs=vt_refs,
custom=custom,
vt_creation_time=vt_creation_time,
vt_modification_time=vt_modification_time,
vt_dependencies=vt_dependencies,
summary=summary,
impact=impact,
affected=affected,
insight=insight,
solution=solution,
solution_t=solution_t,
solution_m=solution_m,
detection=detection,
qod_t=qod_t,
qod_v=qod_v,
severities=severities,
)
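    # Example call from a wrapper's VT loading routine (OID and values are
    # invented for illustration):
    #
    #     self.add_vt(
    #         '1.3.6.1.4.1.25623.1.0.100001',
    #         name='Example VT',
    #         vt_modification_time='1533906565',
    #     )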
def set_vts_version(self, vts_version: str) -> None:
"""Add into the vts dictionary an entry to identify the
vts version.
Parameters:
vts_version (str): Identifies a unique vts version.
"""
if not vts_version:
raise OspdCommandError(
'A vts_version parameter is required', 'set_vts_version'
)
self.vts_version = vts_version
def get_vts_version(self) -> Optional[str]:
"""Return the vts version."""
return self.vts_version
def command_exists(self, name: str) -> bool:
""" Checks if a commands exists. """
return name in self.commands
def get_scanner_name(self) -> str:
""" Gives the wrapped scanner's name. """
return self.scanner_info['name']
def get_scanner_version(self) -> str:
""" Gives the wrapped scanner's version. """
return self.scanner_info['version']
def get_scanner_description(self) -> str:
""" Gives the wrapped scanner's description. """
return self.scanner_info['description']
def get_server_version(self) -> str:
""" Gives the specific OSP server's version. """
assert self.server_version
return self.server_version
def get_protocol_version(self) -> str:
""" Gives the OSP's version. """
return self.protocol_version
def preprocess_scan_params(self, xml_params):
""" Processes the scan parameters. """
params = {}
for param in xml_params:
params[param.tag] = param.text or ''
# Validate values.
for key in params:
param_type = self.get_scanner_param_type(key)
if not param_type:
continue
if param_type in ['integer', 'boolean']:
try:
params[key] = int(params[key])
except ValueError:
raise OspdCommandError(
'Invalid %s value' % key, 'start_scan'
) from None
if param_type == 'boolean':
if params[key] not in [0, 1]:
raise OspdCommandError(
'Invalid %s value' % key, 'start_scan'
)
elif param_type == 'selection':
selection = self.get_scanner_param_default(key).split('|')
if params[key] not in selection:
raise OspdCommandError(
'Invalid %s value' % key, 'start_scan'
)
if self.get_scanner_param_mandatory(key) and params[key] == '':
raise OspdCommandError(
'Mandatory %s value is missing' % key, 'start_scan'
)
return params
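# Illustrative sketch (an assumption, not part of the original module): how a
# parameter registered via set_scanner_param() is validated by
# preprocess_scan_params() above. The 'dry_run' parameter and its values are
# made up for this example only.
#
#     daemon.set_scanner_param(
#         'dry_run',
#         {
#             'type': 'boolean',  # coerced with int() and checked against [0, 1]
#             'name': 'Dry run',
#             'default': 0,
#             'mandatory': 0,
#             'description': 'Whether to only simulate the scan.',
#         },
#     )
#     # A <dry_run>1</dry_run> element inside <scanner_params> then yields
#     # {'dry_run': 1} after validation.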
def process_scan_params(self, params: Dict) -> Dict:
"""This method is to be overridden by the child classes if necessary"""
return params
def stop_scan(self, scan_id: str) -> None:
if (
scan_id in self.scan_collection.ids_iterator()
and self.get_scan_status(scan_id) == ScanStatus.QUEUED
):
logger.info('Scan %s has been removed from the queue.', scan_id)
self.scan_collection.remove_file_pickled_scan_info(scan_id)
self.set_scan_status(scan_id, ScanStatus.STOPPED)
return
scan_process = self.scan_processes.get(scan_id)
if not scan_process:
raise OspdCommandError(
'Scan not found {0}.'.format(scan_id), 'stop_scan'
)
if not scan_process.is_alive():
raise OspdCommandError(
'Scan already stopped or finished.', 'stop_scan'
)
self.set_scan_status(scan_id, ScanStatus.STOPPED)
logger.info(
'%s: Stopping Scan with the PID %s.', scan_id, scan_process.ident
)
try:
scan_process.terminate()
except AttributeError:
logger.debug('%s: The scanner task stopped unexpectedly.', scan_id)
try:
logger.debug(
'%s: Terminating process group after stopping.', scan_id
)
_terminate_process_group(scan_process)
except ProcessLookupError:
logger.info(
'%s: Scan with the PID %s is already stopped.',
scan_id,
scan_process.pid,
)
if scan_process.ident != os.getpid():
scan_process.join(0)
logger.info('%s: Scan stopped.', scan_id)
def exec_scan(self, scan_id: str):
""" Asserts to False. Should be implemented by subclass. """
raise NotImplementedError
def finish_scan(self, scan_id: str) -> None:
""" Sets a scan as finished. """
self.scan_collection.set_progress(scan_id, ScanProgress.FINISHED.value)
self.set_scan_status(scan_id, ScanStatus.FINISHED)
logger.info("%s: Scan finished.", scan_id)
def interrupt_scan(self, scan_id: str) -> None:
""" Set scan status as interrupted. """
self.set_scan_status(scan_id, ScanStatus.INTERRUPTED)
logger.info("%s: Scan interrupted.", scan_id)
def daemon_exit_cleanup(self) -> None:
""" Perform a cleanup before exiting """
self.scan_collection.clean_up_pickled_scan_info()
# Stop scans which are not already stopped.
for scan_id in self.scan_collection.ids_iterator():
status = self.get_scan_status(scan_id)
if (
status != ScanStatus.STOPPED
and status != ScanStatus.FINISHED
and status != ScanStatus.INTERRUPTED
):
logger.debug("%s: Stopping scan before daemon exit.", scan_id)
self.stop_scan(scan_id)
# Wait for scans to be in some stopped state.
while True:
all_stopped = True
for scan_id in self.scan_collection.ids_iterator():
status = self.get_scan_status(scan_id)
if (
status != ScanStatus.STOPPED
and status != ScanStatus.FINISHED
and status != ScanStatus.INTERRUPTED
):
all_stopped = False
if all_stopped:
logger.debug(
"All scans stopped and daemon clean and ready to exit"
)
return
logger.debug("Waiting for running scans before daemon exit. ")
time.sleep(1)
def get_daemon_name(self) -> str:
""" Gives osp daemon's name. """
return self.daemon_info['name']
def get_daemon_version(self) -> str:
""" Gives osp daemon's version. """
return self.daemon_info['version']
def get_scanner_param_type(self, param: str):
""" Returns type of a scanner parameter. """
assert isinstance(param, str)
entry = self.scanner_params.get(param)
if not entry:
return None
return entry.get('type')
def get_scanner_param_mandatory(self, param: str):
""" Returns if a scanner parameter is mandatory. """
assert isinstance(param, str)
entry = self.scanner_params.get(param)
if not entry:
return False
return entry.get('mandatory')
def get_scanner_param_default(self, param: str):
""" Returns default value of a scanner parameter. """
assert isinstance(param, str)
entry = self.scanner_params.get(param)
if not entry:
return None
return entry.get('default')
def handle_client_stream(self, stream: Stream) -> None:
""" Handles stream of data received from client. """
data = b''
request_parser = RequestParser()
while True:
try:
buf = stream.read()
if not buf:
break
data += buf
if request_parser.has_ended(buf):
break
except (AttributeError, ValueError) as message:
logger.error(message)
return
except (ssl.SSLError) as exception:
logger.debug('Error: %s', exception)
break
except (socket.timeout) as exception:
logger.debug('Request timeout: %s', exception)
break
if len(data) <= 0:
logger.debug("Empty client stream")
return
response = None
try:
self.handle_command(data, stream)
except OspdCommandError as exception:
response = exception.as_xml()
logger.debug('Command error: %s', exception.message)
except Exception: # pylint: disable=broad-except
logger.exception('While handling client command:')
exception = OspdCommandError('Fatal error', 'error')
response = exception.as_xml()
if response:
stream.write(response)
stream.close()
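# Illustrative sketch (an assumption, not part of the original module): a
# request read by handle_client_stream() and dispatched by handle_command(),
# together with a response of roughly the shape a wrapper would send back.
#
#     request:  <get_version/>
#     response: <get_version_response status="200" status_text="OK">
#                   <protocol><name>OSP</name><version>...</version></protocol>
#                   <daemon><name>...</name><version>...</version></daemon>
#                   <scanner><name>...</name><version>...</version></scanner>
#               </get_version_response>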
def process_finished_hosts(self, scan_id: str) -> None:
""" Process the finished hosts before launching the scans."""
finished_hosts = self.scan_collection.get_finished_hosts(scan_id)
if not finished_hosts:
return
exc_finished_hosts_list = target_str_to_list(finished_hosts)
self.scan_collection.set_host_finished(scan_id, exc_finished_hosts_list)
def start_scan(self, scan_id: str) -> None:
""" Starts the scan with scan_id. """
os.setsid()
self.process_finished_hosts(scan_id)
try:
self.set_scan_status(scan_id, ScanStatus.RUNNING)
self.exec_scan(scan_id)
except Exception as e: # pylint: disable=broad-except
self.add_scan_error(
scan_id,
name='',
host=self.get_scan_host(scan_id),
value='Host process failure (%s).' % e,
)
logger.exception('%s: Exception %s while scanning', scan_id, e)
else:
logger.info("%s: Host scan finished.", scan_id)
status = self.get_scan_status(scan_id)
is_stopped = status == ScanStatus.STOPPED
self.set_scan_progress(scan_id)
progress = self.get_scan_progress(scan_id)
if not is_stopped and progress == ScanProgress.FINISHED:
self.finish_scan(scan_id)
elif not is_stopped:
logger.info(
"%s: Host scan got interrupted. Progress: %d, Status: %s",
scan_id,
progress,
status.name,
)
self.interrupt_scan(scan_id)
# For debug purposes
self._get_scan_progress_raw(scan_id)
def dry_run_scan(self, scan_id: str, target: Dict) -> None:
""" Dry runs a scan. """
os.setsid()
host = resolve_hostname(target.get('hosts'))
if host is None:
logger.info("Couldn't resolve %s.", self.get_scan_host(scan_id))
port = self.get_scan_ports(scan_id)
logger.info("%s:%s: Dry run mode.", host, port)
self.add_scan_log(scan_id, name='', host=host, value='Dry run result')
self.finish_scan(scan_id)
def handle_timeout(self, scan_id: str, host: str) -> None:
""" Handles scanner reaching timeout error. """
self.add_scan_error(
scan_id,
host=host,
name="Timeout",
value="{0} exec timeout.".format(self.get_scanner_name()),
)
def sort_host_finished(
self, scan_id: str, finished_hosts: Union[List[str], str]
) -> None:
"""Check if the finished host in the list was alive or dead
and update the corresponding alive_count or dead_count."""
if isinstance(finished_hosts, str):
finished_hosts = [finished_hosts]
alive_hosts = []
dead_hosts = []
current_hosts = self.scan_collection.get_current_target_progress(
scan_id
)
for finished_host in finished_hosts:
progress = current_hosts.get(finished_host)
if progress == ScanProgress.FINISHED:
alive_hosts.append(finished_host)
elif progress == ScanProgress.DEAD_HOST:
dead_hosts.append(finished_host)
else:
logger.debug(
'The host %s is considered dead or finished, but '
'its progress is still %d. This can lead to '
'an interrupted scan.',
finished_host,
progress,
)
self.scan_collection.set_host_dead(scan_id, dead_hosts)
self.scan_collection.set_host_finished(scan_id, alive_hosts)
self.scan_collection.remove_hosts_from_target_progress(
scan_id, finished_hosts
)
def set_scan_progress(self, scan_id: str):
"""Calculate the target progress with the current host states
and stores in the scan table."""
# Get current scan progress for debugging purposes
logger.debug("Calculating scan progress with the following data:")
self._get_scan_progress_raw(scan_id)
scan_progress = self.scan_collection.calculate_target_progress(scan_id)
self.scan_collection.set_progress(scan_id, scan_progress)
def set_scan_progress_batch(
self, scan_id: str, host_progress: Dict[str, int]
):
self.scan_collection.set_host_progress(scan_id, host_progress)
self.set_scan_progress(scan_id)
def set_scan_host_progress(
self, scan_id: str, host: str = None, progress: int = None
) -> None:
"""Sets host's progress which is part of target.
Each time a host progress is updated, the scan progress
is updated too.
"""
if host is None or progress is None:
return
if not isinstance(progress, int):
try:
progress = int(progress)
except (TypeError, ValueError):
return
host_progress = {host: progress}
self.set_scan_progress_batch(scan_id, host_progress)
def get_scan_host_progress(self, scan_id: str, host: str = None) -> int:
""" Get host's progress which is part of target."""
current_progress = self.scan_collection.get_current_target_progress(
scan_id
)
return current_progress.get(host)
def set_scan_status(self, scan_id: str, status: ScanStatus) -> None:
""" Set the scan's status."""
logger.debug('%s: Set scan status %s,', scan_id, status.name)
self.scan_collection.set_status(scan_id, status)
def get_scan_status(self, scan_id: str) -> ScanStatus:
""" Get scan_id scans's status."""
status = self.scan_collection.get_status(scan_id)
logger.debug('%s: Current scan status: %s,', scan_id, status.name)
return status
def scan_exists(self, scan_id: str) -> bool:
"""Checks if a scan with ID scan_id is in collection.
Returns:
True if scan exists, False otherwise.
"""
return self.scan_collection.id_exists(scan_id)
def get_help_text(self) -> str:
""" Returns the help output in plain text format."""
txt = ''
for name, info in self.commands.items():
description = info.get_description()
attributes = info.get_attributes()
elements = info.get_elements()
command_txt = "\t{0: <22} {1}\n".format(name, description)
if attributes:
command_txt = ''.join([command_txt, "\t Attributes:\n"])
for attrname, attrdesc in attributes.items():
attr_txt = "\t {0: <22} {1}\n".format(attrname, attrdesc)
command_txt = ''.join([command_txt, attr_txt])
if elements:
command_txt = ''.join(
[command_txt, "\t Elements:\n", elements_as_text(elements)]
)
txt += command_txt
return txt
def delete_scan(self, scan_id: str) -> int:
"""Deletes scan_id scan from collection.
Returns:
1 if scan deleted, 0 otherwise.
"""
if self.get_scan_status(scan_id) == ScanStatus.RUNNING:
return 0
# Don't delete the scan until the process stops
exitcode = None
try:
self.scan_processes[scan_id].join()
exitcode = self.scan_processes[scan_id].exitcode
except KeyError:
logger.debug('Scan process for %s never started,', scan_id)
if exitcode or exitcode == 0:
del self.scan_processes[scan_id]
return self.scan_collection.delete_scan(scan_id)
def get_scan_results_xml(
self, scan_id: str, pop_res: bool, max_res: Optional[int]
):
"""Gets scan_id scan's results in XML format.
Returns:
String of scan results in xml.
"""
results = Element('results')
for result in self.scan_collection.results_iterator(
scan_id, pop_res, max_res
):
results.append(get_result_xml(result))
logger.debug('Returning %d results', len(results))
return results
def _get_scan_progress_raw(self, scan_id: str) -> Dict:
"""Returns a dictionary with scan_id scan's progress information."""
current_progress = dict()
current_progress[
'current_hosts'
] = self.scan_collection.get_current_target_progress(scan_id)
current_progress['overall'] = self.get_scan_progress(scan_id)
current_progress['count_alive'] = self.scan_collection.get_count_alive(
scan_id
)
current_progress['count_dead'] = self.scan_collection.get_count_dead(
scan_id
)
current_progress[
'count_excluded'
] = self.scan_collection.get_simplified_exclude_host_count(scan_id)
current_progress['count_total'] = self.scan_collection.get_count_total(
scan_id
)
logging.debug(
"%s: Current progress: \n%s", scan_id, pformat(current_progress)
)
return current_progress
def _get_scan_progress_xml(self, scan_id: str):
"""Gets scan_id scan's progress in XML format.
Returns:
String of scan progress in xml.
"""
current_progress = self._get_scan_progress_raw(scan_id)
return get_progress_xml(current_progress)
def get_scan_xml(
self,
scan_id: str,
detailed: bool = True,
pop_res: bool = False,
max_res: int = 0,
progress: bool = False,
):
"""Gets scan in XML format.
Returns:
String of scan in XML format.
"""
if not scan_id:
return Element('scan')
if self.get_scan_status(scan_id) == ScanStatus.QUEUED:
target = ''
scan_progress = 0
status = self.get_scan_status(scan_id)
start_time = 0
end_time = 0
response = Element('scan')
detailed = False
progress = False
response.append(Element('results'))
else:
target = self.get_scan_host(scan_id)
scan_progress = self.get_scan_progress(scan_id)
status = self.get_scan_status(scan_id)
start_time = self.get_scan_start_time(scan_id)
end_time = self.get_scan_end_time(scan_id)
response = Element('scan')
for name, value in [
('id', scan_id),
('target', target),
('progress', scan_progress),
('status', status.name.lower()),
('start_time', start_time),
('end_time', end_time),
]:
response.set(name, str(value))
if detailed:
response.append(
self.get_scan_results_xml(scan_id, pop_res, max_res)
)
if progress:
response.append(self._get_scan_progress_xml(scan_id))
return response
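# Illustrative sketch (an assumption, not part of the original module): a
# <scan> element as produced by get_scan_xml() could serialize roughly as
#
#     <scan id="..." target="192.168.0.0/24" progress="100" status="finished"
#           start_time="1625040000" end_time="1625043600">
#         <results>...</results>
#     </scan>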
@staticmethod
def get_custom_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, custom: Dict
) -> str:
"""Create a string representation of the XML object from the
custom data object.
This needs to be implemented by each ospd wrapper, in case
custom elements for VTs are used.
The custom XML object which is returned will be embedded
into a <custom></custom> element.
Returns:
XML object as string for custom data.
"""
return ''
@staticmethod
def get_params_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, vt_params
) -> str:
"""Create a string representation of the XML object from the
vt_params data object.
This needs to be implemented by each ospd wrapper, in case
vt_params elements for VTs are used.
The params XML object which is returned will be embedded
into a <params></params> element.
Returns:
XML object as string for vt parameters data.
"""
return ''
@staticmethod
def get_refs_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, vt_refs
) -> str:
"""Create a string representation of the XML object from the
refs data object.
This needs to be implemented by each ospd wrapper, in case
refs elements for VTs are used.
The refs XML object which is returned will be embedded
into a <refs></refs> element.
Returns:
XML object as string for vt references data.
"""
return ''
@staticmethod
def get_dependencies_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, vt_dependencies
) -> str:
"""Create a string representation of the XML object from the
vt_dependencies data object.
This needs to be implemented by each ospd wrapper, in case
vt_dependencies elements for VTs are used.
The vt_dependencies XML object which is returned will be embedded
into a <dependencies></dependencies> element.
Returns:
XML object as string for vt dependencies data.
"""
return ''
@staticmethod
def get_creation_time_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, vt_creation_time
) -> str:
"""Create a string representation of the XML object from the
vt_creation_time data object.
This needs to be implemented by each ospd wrapper, in case
vt_creation_time elements for VTs are used.
The vt_creation_time XML object which is returned will be embedded
into a <creation_time></creation_time> element.
Returns:
XML object as string for vt creation time data.
"""
return ''
@staticmethod
def get_modification_time_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, vt_modification_time
) -> str:
"""Create a string representation of the XML object from the
vt_modification_time data object.
This needs to be implemented by each ospd wrapper, in case
vt_modification_time elements for VTs are used.
The vt_modification_time XML object which is returned will be embedded
into a <modification_time></modification_time> element.
Returns:
XML object as string for vt modification time data.
"""
return ''
@staticmethod
def get_summary_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, summary
) -> str:
"""Create a string representation of the XML object from the
summary data object.
This needs to be implemented by each ospd wrapper, in case
summary elements for VTs are used.
The summary XML object which is returned will be embedded
into a <summary></summary> element.
Returns:
XML object as string for summary data.
"""
return ''
@staticmethod
def get_impact_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, impact
) -> str:
"""Create a string representation of the XML object from the
impact data object.
This needs to be implemented by each ospd wrapper, in case
impact elements for VTs are used.
The impact XML object which is returned will be embedded
into an <impact></impact> element.
Returns:
XML object as string for impact data.
"""
return ''
@staticmethod
def get_affected_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, affected
) -> str:
"""Create a string representation of the XML object from the
affected data object.
This needs to be implemented by each ospd wrapper, in case
affected elements for VTs are used.
The affected XML object which is returned will be embedded
into an <affected></affected> element.
Returns:
XML object as string for affected data.
"""
return ''
@staticmethod
def get_insight_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, insight
) -> str:
"""Create a string representation of the XML object from the
insight data object.
This needs to be implemented by each ospd wrapper, in case
insight elements for VTs are used.
The insight XML object which is returned will be embedded
into an <insight></insight> element.
Returns:
XML object as string for insight data.
"""
return ''
@staticmethod
def get_solution_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, solution, solution_type=None, solution_method=None
) -> str:
"""Create a string representation of the XML object from the
solution data object.
This needs to be implemented by each ospd wrapper, in case
solution elements for VTs are used.
The solution XML object which is returned will be embedded
into a <solution></solution> element.
Returns:
XML object as string for solution data.
"""
return ''
@staticmethod
def get_detection_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, detection=None, qod_type=None, qod=None
) -> str:
"""Create a string representation of the XML object from the
detection data object.
This needs to be implemented by each ospd wrapper, in case
detection elements for VTs are used.
The detection XML object which is returned is an element with
tag <detection></detection>.
Returns:
XML object as string for detection data.
"""
return ''
@staticmethod
def get_severities_vt_as_xml_str( # pylint: disable=unused-argument
vt_id: str, severities
) -> str:
"""Create a string representation of the XML object from the
severities data object.
This needs to be implemented by each ospd wrapper, in case
severities elements for VTs are used.
The severities XML objects which are returned will be embedded
into a <severities></severities> element.
Returns:
XML object as string for severities data.
"""
return ''
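# Illustrative sketch (an assumption, not part of the original module): a
# wrapper that stores VT metadata as plain strings could override the helpers
# above like this; get_vt_xml() parses the returned string and appends it to
# the <vt> element.
#
#     @staticmethod
#     def get_summary_vt_as_xml_str(vt_id, summary):
#         return '<summary>%s</summary>' % summary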
def get_vt_iterator( # pylint: disable=unused-argument
self, vt_selection: List[str] = None, details: bool = True
) -> Iterator[Tuple[str, Dict]]:
"""Return iterator object for getting elements
from the VTs dictionary."""
return self.vts.items()
def get_vt_xml(self, single_vt: Tuple[str, Dict]) -> Element:
"""Gets a single vulnerability test information in XML format.
Returns:
String of single vulnerability test information in XML format.
"""
if not single_vt or single_vt[1] is None:
return Element('vt')
vt_id, vt = single_vt
name = vt.get('name')
vt_xml = Element('vt')
vt_xml.set('id', vt_id)
for name, value in [('name', name)]:
elem = SubElement(vt_xml, name)
elem.text = str(value)
if vt.get('vt_params'):
params_xml_str = self.get_params_vt_as_xml_str(
vt_id, vt.get('vt_params')
)
vt_xml.append(secET.fromstring(params_xml_str))
if vt.get('vt_refs'):
refs_xml_str = self.get_refs_vt_as_xml_str(vt_id, vt.get('vt_refs'))
vt_xml.append(secET.fromstring(refs_xml_str))
if vt.get('vt_dependencies'):
dependencies = self.get_dependencies_vt_as_xml_str(
vt_id, vt.get('vt_dependencies')
)
vt_xml.append(secET.fromstring(dependencies))
if vt.get('creation_time'):
vt_ctime = self.get_creation_time_vt_as_xml_str(
vt_id, vt.get('creation_time')
)
vt_xml.append(secET.fromstring(vt_ctime))
if vt.get('modification_time'):
vt_mtime = self.get_modification_time_vt_as_xml_str(
vt_id, vt.get('modification_time')
)
vt_xml.append(secET.fromstring(vt_mtime))
if vt.get('summary'):
summary_xml_str = self.get_summary_vt_as_xml_str(
vt_id, vt.get('summary')
)
vt_xml.append(secET.fromstring(summary_xml_str))
if vt.get('impact'):
impact_xml_str = self.get_impact_vt_as_xml_str(
vt_id, vt.get('impact')
)
vt_xml.append(secET.fromstring(impact_xml_str))
if vt.get('affected'):
affected_xml_str = self.get_affected_vt_as_xml_str(
vt_id, vt.get('affected')
)
vt_xml.append(secET.fromstring(affected_xml_str))
if vt.get('insight'):
insight_xml_str = self.get_insight_vt_as_xml_str(
vt_id, vt.get('insight')
)
vt_xml.append(secET.fromstring(insight_xml_str))
if vt.get('solution'):
solution_xml_str = self.get_solution_vt_as_xml_str(
vt_id,
vt.get('solution'),
vt.get('solution_type'),
vt.get('solution_method'),
)
vt_xml.append(secET.fromstring(solution_xml_str))
if vt.get('detection') or vt.get('qod_type') or vt.get('qod'):
detection_xml_str = self.get_detection_vt_as_xml_str(
vt_id, vt.get('detection'), vt.get('qod_type'), vt.get('qod')
)
vt_xml.append(secET.fromstring(detection_xml_str))
if vt.get('severities'):
severities_xml_str = self.get_severities_vt_as_xml_str(
vt_id, vt.get('severities')
)
vt_xml.append(secET.fromstring(severities_xml_str))
if vt.get('custom'):
custom_xml_str = self.get_custom_vt_as_xml_str(
vt_id, vt.get('custom')
)
vt_xml.append(secET.fromstring(custom_xml_str))
return vt_xml
def get_vts_selection_list(
self, vt_id: str = None, filtered_vts: Dict = None
) -> Iterable[str]:
"""
Get list of VT's OID.
If vt_id is specified, the collection will contain only this vt, if
found.
If no vt_id is specified or filtered_vts is None (default), the
collection will contain all vts. Otherwise those vts passed
in filtered_vts or vt_id are returned. If both vt_id and
filtered_vts are given, filtered_vts has priority.
Arguments:
vt_id (vt_id, optional): ID of the vt to get.
filtered_vts (list, optional): Filtered VTs collection.
Returns:
List of selected VT's OID.
"""
vts_xml = []
# No match for the filter
if filtered_vts is not None and len(filtered_vts) == 0:
return vts_xml
if filtered_vts:
vts_list = filtered_vts
elif vt_id:
vts_list = [vt_id]
else:
vts_list = self.vts.keys()
return vts_list
def handle_command(self, data: bytes, stream: Stream) -> None:
"""Handles an osp command in a string."""
try:
tree = secET.fromstring(data)
except secET.ParseError as e:
logger.debug("Erroneous client input: %s", data)
raise OspdCommandError('Invalid data') from e
command_name = tree.tag
logger.debug('Handling %s command request.', command_name)
command = self.commands.get(command_name, None)
if not command and command_name != "authenticate":
raise OspdCommandError('Bogus command name')
if not self.initialized and command.must_be_initialized:
exception = OspdCommandError(
'%s is still starting' % self.daemon_info['name'], 'error'
)
response = exception.as_xml()
stream.write(response)
return
response = command.handle_xml(tree)
write_success = True
if isinstance(response, bytes):
write_success = stream.write(response)
else:
for data in response:
write_success = stream.write(data)
if not write_success:
break
scan_id = tree.get('scan_id')
if self.scan_exists(scan_id) and command_name == "get_scans":
if write_success:
logger.debug(
'%s: Results sent successfully to the client. Cleaning '
'temporary result list.',
scan_id,
)
self.scan_collection.clean_temp_result_list(scan_id)
else:
logger.debug(
'%s: Failed sending results to the client. Restoring '
'result list into the cache.',
scan_id,
)
self.scan_collection.restore_temp_result_list(scan_id)
def check(self):
""" Asserts to False. Should be implemented by subclass. """
raise NotImplementedError
def run(self) -> None:
"""Starts the Daemon, handling commands until interrupted."""
try:
while True:
time.sleep(SCHEDULER_CHECK_PERIOD)
self.scheduler()
self.clean_forgotten_scans()
self.start_queued_scans()
self.wait_for_children()
except KeyboardInterrupt:
logger.info("Received Ctrl-C shutting-down ...")
def start_queued_scans(self) -> None:
""" Starts a queued scan if it is allowed """
current_queued_scans = self.get_count_queued_scans()
if not current_queued_scans:
return
if not self.initialized:
logger.info(
"Queued task can not be started because a feed "
"update is being performed."
)
return
logger.info('Currently %d queued scans.', current_queued_scans)
for scan_id in self.scan_collection.ids_iterator():
scan_allowed = (
self.is_new_scan_allowed() and self.is_enough_free_memory()
)
scan_is_queued = self.get_scan_status(scan_id) == ScanStatus.QUEUED
if scan_is_queued and scan_allowed:
try:
self.scan_collection.unpickle_scan_info(scan_id)
except OspdCommandError as e:
logger.error("Start scan error %s", e)
self.stop_scan(scan_id)
continue
scan_func = self.start_scan
scan_process = create_process(func=scan_func, args=(scan_id,))
self.scan_processes[scan_id] = scan_process
scan_process.start()
self.set_scan_status(scan_id, ScanStatus.INIT)
current_queued_scans = current_queued_scans - 1
self.last_scan_start_time = time.time()
logger.info('Starting scan %s.', scan_id)
elif scan_is_queued and not scan_allowed:
return
def is_new_scan_allowed(self) -> bool:
"""Check if max_scans has been reached.
Returns:
True if a new scan can be launched.
"""
if (self.max_scans != 0) and (
len(self.scan_processes) >= self.max_scans
):
logger.info(
'Not possible to run a new scan. Max scan limit set '
'to %d reached.',
self.max_scans,
)
return False
return True
def is_enough_free_memory(self) -> bool:
"""Check if there is enough free memory in the system to run
a new scan. The necessary memory is estimated roughly and very
conservatively.
Returns:
True if there is enough memory for a new scan.
"""
if not self.min_free_mem_scan_queue:
return True
# If min_free_mem_scan_queue option is set, also wait some time
# between scans. Consider the case in which the last scan
# finished in a few seconds and there is no need to wait.
time_between_start_scan = time.time() - self.last_scan_start_time
if (
time_between_start_scan < MIN_TIME_BETWEEN_START_SCAN
and self.get_count_running_scans()
):
logger.debug(
'Not possible to run a new scan right now, a scan has '
'just been started.'
)
return False
free_mem = psutil.virtual_memory().available / (1024 * 1024)
if free_mem > self.min_free_mem_scan_queue:
return True
logger.info(
'Not possible to run a new scan. Not enough free memory. '
'Only %d MB available but at least %d MB are required',
free_mem,
self.min_free_mem_scan_queue,
)
return False
def scheduler(self):
"""Should be implemented by subclass in case of need
to run tasks periodically."""
def wait_for_children(self):
""" Join the zombie process to releases resources."""
for scan_id, _ in self.scan_processes.items():
self.scan_processes[scan_id].join(0)
def create_scan(
self,
scan_id: str,
targets: Dict,
options: Optional[Dict],
vt_selection: Dict,
) -> Optional[str]:
"""Creates a new scan.
Arguments:
target: Target to scan.
options: Miscellaneous scan options supplied via the
<scanner_params> XML element.
Returns:
New scan's ID. None if the scan_id already exists.
"""
status = None
scan_exists = self.scan_exists(scan_id)
if scan_id and scan_exists:
status = self.get_scan_status(scan_id)
logger.info(
"Scan %s exists with status %s.", scan_id, status.name.lower()
)
return
return self.scan_collection.create_scan(
scan_id, targets, options, vt_selection
)
def get_scan_options(self, scan_id: str) -> str:
""" Gives a scan's list of options. """
return self.scan_collection.get_options(scan_id)
def set_scan_option(self, scan_id: str, name: str, value: Any) -> None:
""" Sets a scan's option to a provided value. """
return self.scan_collection.set_option(scan_id, name, value)
def set_scan_total_hosts(self, scan_id: str, count_total: int) -> None:
"""Sets a scan's total hosts. Allow the scanner to update
the total count of host to be scanned."""
self.scan_collection.update_count_total(scan_id, count_total)
def clean_forgotten_scans(self) -> None:
"""Check for old stopped or finished scans which have not been
deleted and delete them if the are older than the set value."""
if not self.scaninfo_store_time:
return
for scan_id in list(self.scan_collection.ids_iterator()):
end_time = int(self.get_scan_end_time(scan_id))
scan_status = self.get_scan_status(scan_id)
if (
scan_status == ScanStatus.STOPPED
or scan_status == ScanStatus.FINISHED
or scan_status == ScanStatus.INTERRUPTED
) and end_time:
stored_time = int(time.time()) - end_time
if stored_time > self.scaninfo_store_time * 3600:
logger.debug(
'Scan %s is older than %d hours and seems to have been '
'forgotten. Scan info will be deleted from the '
'scan table',
scan_id,
self.scaninfo_store_time,
)
self.delete_scan(scan_id)
def check_scan_process(self, scan_id: str) -> None:
""" Check the scan's process, and terminate the scan if not alive. """
status = self.get_scan_status(scan_id)
if status == ScanStatus.QUEUED:
return
scan_process = self.scan_processes.get(scan_id)
progress = self.get_scan_progress(scan_id)
if (
progress < ScanProgress.FINISHED
and scan_process
and not scan_process.is_alive()
):
if not status == ScanStatus.STOPPED:
self.add_scan_error(
scan_id, name="", host="", value="Scan process Failure"
)
logger.info(
"%s: Scan process is dead and its progress is %d",
scan_id,
progress,
)
self.interrupt_scan(scan_id)
elif progress == ScanProgress.FINISHED:
scan_process.join(0)
logger.debug(
"%s: Check scan process: \n\tProgress %d\n\t Status: %s",
scan_id,
progress,
status.name,
)
def get_count_queued_scans(self) -> int:
""" Get the amount of scans with queued status """
count = 0
for scan_id in self.scan_collection.ids_iterator():
if self.get_scan_status(scan_id) == ScanStatus.QUEUED:
count += 1
return count
def get_count_running_scans(self) -> int:
""" Get the amount of scans with INIT/RUNNING status """
count = 0
for scan_id in self.scan_collection.ids_iterator():
status = self.get_scan_status(scan_id)
if status == ScanStatus.RUNNING or status == ScanStatus.INIT:
count += 1
return count
def get_scan_progress(self, scan_id: str) -> int:
""" Gives a scan's current progress value. """
progress = self.scan_collection.get_progress(scan_id)
logger.debug('%s: Current scan progress: %s,', scan_id, progress)
return progress
def get_scan_host(self, scan_id: str) -> str:
""" Gives a scan's target. """
return self.scan_collection.get_host_list(scan_id)
def get_scan_ports(self, scan_id: str) -> str:
""" Gives a scan's ports list. """
return self.scan_collection.get_ports(scan_id)
def get_scan_exclude_hosts(self, scan_id: str):
"""Gives a scan's exclude host list. If a target is passed gives
the exclude host list for the given target."""
return self.scan_collection.get_exclude_hosts(scan_id)
def get_scan_credentials(self, scan_id: str) -> Dict:
"""Gives a scan's credential list. If a target is passed gives
the credential list for the given target."""
return self.scan_collection.get_credentials(scan_id)
def get_scan_target_options(self, scan_id: str) -> Dict:
"""Gives a scan's target option dict. If a target is passed gives
the credential list for the given target."""
return self.scan_collection.get_target_options(scan_id)
def get_scan_vts(self, scan_id: str) -> Dict:
""" Gives a scan's vts. """
return self.scan_collection.get_vts(scan_id)
def get_scan_start_time(self, scan_id: str) -> str:
""" Gives a scan's start time. """
return self.scan_collection.get_start_time(scan_id)
def get_scan_end_time(self, scan_id: str) -> str:
""" Gives a scan's end time. """
return self.scan_collection.get_end_time(scan_id)
def add_scan_log(
self,
scan_id: str,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id: str = '',
qod: str = '',
uri: str = '',
) -> None:
""" Adds a log result to scan_id scan. """
self.scan_collection.add_result(
scan_id,
ResultType.LOG,
host,
hostname,
name,
value,
port,
test_id,
'0.0',
qod,
uri,
)
def add_scan_error(
self,
scan_id: str,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id='',
uri: str = '',
) -> None:
""" Adds an error result to scan_id scan. """
self.scan_collection.add_result(
scan_id,
ResultType.ERROR,
host,
hostname,
name,
value,
port,
test_id,
uri,
)
def add_scan_host_detail(
self,
scan_id: str,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
uri: str = '',
) -> None:
""" Adds a host detail result to scan_id scan. """
self.scan_collection.add_result(
scan_id, ResultType.HOST_DETAIL, host, hostname, name, value, uri
)
def add_scan_alarm(
self,
scan_id: str,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id: str = '',
severity: str = '',
qod: str = '',
uri: str = '',
) -> None:
""" Adds an alarm result to scan_id scan. """
self.scan_collection.add_result(
scan_id,
ResultType.ALARM,
host,
hostname,
name,
value,
port,
test_id,
severity,
qod,
uri,
)
ospd-21.4.4/ospd/ospd_ssh.py 0000664 0000000 0000000 00000012620 14131311270 0015702 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" OSP Daemon class for simple remote SSH-based command execution.
"""
# This is needed for older pythons as our current module is called the same
# as the ospd package
# Another solution would be to rename that file.
from __future__ import absolute_import
import socket
from typing import Optional, Dict
from ospd.ospd import OSPDaemon
try:
import paramiko
except ImportError:
paramiko = None
SSH_SCANNER_PARAMS = {
'username_password': {
'type': 'credential_up',
'name': 'SSH credentials',
'default': '',
'mandatory': 0,
'description': 'The SSH credentials in username:password format. Used'
' to log into the target and to run the commands on'
' that target. This should not be a privileged user'
' like "root", a regular privileged user account'
' should be sufficient in most cases.',
},
'port': {
'type': 'integer',
'name': 'SSH Port',
'default': 22,
'mandatory': 0,
'description': 'The SSH port to use for logging in with the'
' given username_password.',
},
'ssh_timeout': {
'type': 'integer',
'name': 'SSH timeout',
'default': 30,
'mandatory': 0,
'description': 'Timeout when communicating with the target via SSH.',
},
} # type: Dict
# pylint: disable=abstract-method
class OSPDaemonSimpleSSH(OSPDaemon):
"""
OSP Daemon class for simple remote SSH-based command execution.
This class automatically adds scanner parameters to handle remote
ssh login into the target systems: username, password, port and
ssh_timeout.
The method run_command can be used to execute a single command
on the given remote system. The stdout result is returned as
an array.
"""
def __init__(self, **kwargs):
"""Initializes the daemon and add parameters needed to remote SSH
execution."""
super().__init__(**kwargs)
self._niceness = kwargs.get('niceness', None)
if paramiko is None:
raise ImportError(
'paramiko needs to be installed in order to use'
' the %s class.' % self.__class__.__name__
)
for name, param in SSH_SCANNER_PARAMS.items():
self.set_scanner_param(name, param)
def run_command(self, scan_id: str, host: str, cmd: str) -> Optional[str]:
"""
Run a single command via SSH and return the content of stdout or
None in case of an Error. A scan error is issued in the latter
case.
For logging into 'host', the scan options 'port', 'username',
'password' and 'ssh_timeout' are used.
"""
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
options = self.get_scan_options(scan_id)
port = int(options['port'])
timeout = int(options['ssh_timeout'])
# For backward compatibility, consider the legacy mode to get
# credentials as scan_option.
# First and second modes should be removed in future releases.
# In the third case it receives the credentials as a subelement of
# the <target> element.
credentials = self.get_scan_credentials(scan_id)
if (
'username_password' in options
and ':' in options['username_password']
):
username, password = options['username_password'].split(':', 1)
elif 'username' in options and options['username']:
username = options['username']
password = options['password']
elif credentials:
cred_params = credentials.get('ssh')
username = cred_params.get('username', '')
password = cred_params.get('password', '')
else:
self.add_scan_error(
scan_id, host=host, value='Erroneous username_password value'
)
raise ValueError('Erroneous username_password value')
try:
ssh.connect(
hostname=host,
username=username,
password=password,
timeout=timeout,
port=port,
)
except (
paramiko.ssh_exception.AuthenticationException,
socket.error,
) as err:
# Errors: No route to host, connection timeout, authentication
# failure etc,.
self.add_scan_error(scan_id, host=host, value=str(err))
return None
if self._niceness is not None:
cmd = "nice -n %s %s" % (self._niceness, cmd)
_, stdout, _ = ssh.exec_command(cmd)
result = stdout.readlines()
ssh.close()
return result
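# Illustrative sketch (an assumption, not part of upstream ospd): a minimal
# wrapper built on OSPDaemonSimpleSSH. The class name, the 'uname -a' command
# and the naive host list handling are made up for this example only.
class _ExampleSSHDaemon(OSPDaemonSimpleSSH):
    def exec_scan(self, scan_id: str):
        # get_scan_host() returns the scan's target host specification.
        for host in str(self.get_scan_host(scan_id)).split(','):
            output = self.run_command(scan_id, host, 'uname -a')
            if output is not None:
                # run_command() returns the lines read from stdout.
                self.add_scan_log(
                    scan_id, host=host, name='uname', value=''.join(output)
                )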
ospd-21.4.4/ospd/parser.py 0000664 0000000 0000000 00000020642 14131311270 0015357 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import argparse
import logging
from pathlib import Path
from ospd.config import Config
# Default file locations as used by a OpenVAS default installation
DEFAULT_KEY_FILE = "/var/lib/gvm/private/CA/serverkey.pem"
DEFAULT_CERT_FILE = "/var/lib/gvm/CA/servercert.pem"
DEFAULT_CA_FILE = "/var/lib/gvm/CA/cacert.pem"
DEFAULT_PORT = 0
DEFAULT_ADDRESS = "0.0.0.0"
DEFAULT_NICENESS = 10
DEFAULT_UNIX_SOCKET_MODE = "0o770"
DEFAULT_CONFIG_PATH = "~/.config/ospd.conf"
DEFAULT_LOG_CONFIG_PATH = "~/.config/ospd-logging.conf"
DEFAULT_UNIX_SOCKET_PATH = "/run/ospd/ospd.sock"
DEFAULT_PID_PATH = "/run/ospd/ospd.pid"
DEFAULT_LOCKFILE_DIR_PATH = "/run/ospd"
DEFAULT_STREAM_TIMEOUT = 10 # ten seconds
DEFAULT_SCANINFO_STORE_TIME = 0 # in hours
DEFAULT_MAX_SCAN = 0 # 0 = disable
DEFAULT_MIN_FREE_MEM_SCAN_QUEUE = 0 # 0 = Disable
DEFAULT_MAX_QUEUED_SCANS = 0 # 0 = Disable
ParserType = argparse.ArgumentParser
Arguments = argparse.Namespace
logger = logging.getLogger(__name__)
class CliParser:
def __init__(self, description: str) -> None:
""" Create a command-line arguments parser for OSPD. """
self._name = description
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
'--version', action='store_true', help='Print version then exit.'
)
parser.add_argument(
'-s',
'--config',
nargs='?',
default=DEFAULT_CONFIG_PATH,
help='Configuration file path (default: %(default)s)',
)
parser.add_argument(
'--log-config',
nargs='?',
default=DEFAULT_LOG_CONFIG_PATH,
help='Log configuration file path (default: %(default)s)',
)
parser.add_argument(
'-p',
'--port',
default=DEFAULT_PORT,
type=self.network_port,
help='TCP Port to listen on. Default: %(default)s',
)
parser.add_argument(
'-b',
'--bind-address',
default=DEFAULT_ADDRESS,
dest='address',
help='Address to listen on. Default: %(default)s',
)
parser.add_argument(
'-u',
'--unix-socket',
default=DEFAULT_UNIX_SOCKET_PATH,
help='Unix file socket to listen on. Default: %(default)s',
)
parser.add_argument(
'--pid-file',
default=DEFAULT_PID_PATH,
help='Location of the file for the process ID. '
'Default: %(default)s',
)
parser.add_argument(
'--lock-file-dir',
default=DEFAULT_LOCKFILE_DIR_PATH,
help='Directory where lock files are placed. Default: %(default)s',
)
parser.add_argument(
'-m',
'--socket-mode',
default=DEFAULT_UNIX_SOCKET_MODE,
help='Unix file socket mode. Default: %(default)s',
)
parser.add_argument(
'-k',
'--key-file',
default=DEFAULT_KEY_FILE,
help='Server key file. Default: %(default)s',
)
parser.add_argument(
'-c',
'--cert-file',
default=DEFAULT_CERT_FILE,
help='Server cert file. Default: %(default)s',
)
parser.add_argument(
'--ca-file',
default=DEFAULT_CA_FILE,
help='CA cert file. Default: %(default)s',
)
parser.add_argument(
'-L',
'--log-level',
default='INFO',
type=self.log_level,
help='Desired level of logging. Default: %(default)s',
)
parser.add_argument(
'-f',
'--foreground',
action='store_true',
help='Run in foreground and logs all messages to console.',
)
parser.add_argument(
'-t',
'--stream-timeout',
default=DEFAULT_STREAM_TIMEOUT,
type=int,
help='Stream timeout. Default: %(default)s',
)
parser.add_argument(
'-l', '--log-file', help='Path to the logging file.'
)
parser.add_argument(
'--niceness',
default=DEFAULT_NICENESS,
type=int,
help='Start the scan with the given niceness. Default %(default)s',
)
parser.add_argument(
'--scaninfo-store-time',
default=DEFAULT_SCANINFO_STORE_TIME,
type=int,
help='Time in hours a scan is stored before being considered '
'forgotten and being deleted from the scan table. '
'Default %(default)s, disabled.',
)
parser.add_argument(
'--list-commands',
action='store_true',
help='Display all protocol commands',
)
parser.add_argument(
'--max-scans',
default=DEFAULT_MAX_SCAN,
type=int,
help='Maximum number of parallel scans that can be started. '
'Default %(default)s, disabled',
)
parser.add_argument(
'--min-free-mem-scan-queue',
default=DEFAULT_MIN_FREE_MEM_SCAN_QUEUE,
type=int,
help='Minimum free memory in MB required to run the scan. '
'If not enough free memory is available, the scan is queued. '
'Default %(default)s, disabled',
)
parser.add_argument(
'--max-queued-scans',
default=DEFAULT_MAX_QUEUED_SCANS,
type=int,
help='Maximum number of queued scans allowed before '
'new scans are rejected. '
'Default %(default)s, disabled',
)
self.parser = parser
def network_port(self, string: str) -> int:
""" Check if provided string is a valid network port. """
value = int(string)
if not 0 < value <= 65535:
raise argparse.ArgumentTypeError(
'port must be in ]0,65535] interval'
)
return value
def log_level(self, string: str) -> str:
""" Check if provided string is a valid log level. """
if not hasattr(logging, string.upper()):
raise argparse.ArgumentTypeError(
'log level must be one of {debug,info,warning,error,critical}'
)
return string.upper()
def _set_defaults(self, configfilename=None) -> None:
self._config = self._load_config(configfilename)
self.parser.set_defaults(**self._config.defaults())
def _load_config(self, configfile: str) -> Config:
config = Config()
if not configfile:
return config
configpath = Path(configfile)
if not configpath.expanduser().resolve().exists():
logger.debug('Ignoring non existing config file %s', configfile)
return config
try:
config.load(configpath, def_section=self._name)
logger.debug('Loaded config %s', configfile)
except Exception as e: # pylint: disable=broad-except
raise RuntimeError(
'Error while parsing config file {config}. Error was '
'{message}'.format(config=configfile, message=e)
) from None
return config
def parse_arguments(self, args=None):
# Parse args to get the config file path passed as option
_args, _ = self.parser.parse_known_args(args)
# Load the defaults from the config file if it exists.
# This also overrides what was passed as a command line option.
self._set_defaults(_args.config)
args, _ = self.parser.parse_known_args(args)
return args
def create_parser(description: str) -> CliParser:
return CliParser(description)
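# Illustrative sketch (an assumption, not part of upstream ospd): how a
# wrapper's entry point would typically consume this parser. The block only
# runs when the module is executed directly.
if __name__ == '__main__':
    _example_parser = create_parser('Example OSPD wrapper')
    _example_args = _example_parser.parse_arguments()
    logger.info(
        'Would listen on unix socket %s with log level %s',
        _example_args.unix_socket,
        _example_args.log_level,
    )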
ospd-21.4.4/ospd/protocol.py 0000664 0000000 0000000 00000026216 14131311270 0015727 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" Helper classes for parsing and creating OSP XML requests and responses
"""
from typing import Dict, Union, List, Any
from xml.etree.ElementTree import SubElement, Element, XMLPullParser
from ospd.errors import OspdError
class RequestParser:
def __init__(self):
self._parser = XMLPullParser(['start', 'end'])
self._root_element = None
def has_ended(self, data: bytes) -> bool:
self._parser.feed(data)
for event, element in self._parser.read_events():
if event == 'start' and self._root_element is None:
self._root_element = element
elif event == 'end' and self._root_element is not None:
if element.tag == self._root_element.tag:
return True
return False
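# Illustrative sketch (an assumption, not part of upstream ospd): feeding a
# request to RequestParser chunk by chunk, the way the daemon's stream handler
# does while reading from a client socket.
def _example_request_parser_usage() -> bool:
    parser = RequestParser()
    chunks = [b'<start_scan scan_id="1">', b'<targets/>', b'</start_scan>']
    finished = False
    for chunk in chunks:
        # has_ended() returns True once the root element has been closed.
        finished = parser.has_ended(chunk)
    return finished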
class OspRequest:
@staticmethod
def process_vts_params(
scanner_vts: Element,
) -> Dict[str, Union[Dict[str, str], List]]:
"""Receive an XML object with the Vulnerability Tests an their
parameters to be use in a scan and return a dictionary.
@param: XML element with vt subelements. Each vt has an
id attribute. Optional parameters can be included
as vt child.
Example form:
value
@return: Dictionary containing the vts attribute and subelements,
like the VT's id and VT's parameters.
Example form:
{'vt1': {},
'vt2': {'value_id': 'value'},
'vt_groups': ['family=debian', 'family=general']}
"""
vt_selection = {} # type: Dict
filters = []
for vt in scanner_vts:
if vt.tag == 'vt_single':
vt_id = vt.attrib.get('id')
vt_selection[vt_id] = {}
for vt_value in vt:
if not vt_value.attrib.get('id'):
raise OspdError(
'Invalid VT preference. No attribute id'
)
vt_value_id = vt_value.attrib.get('id')
vt_value_value = vt_value.text if vt_value.text else ''
vt_selection[vt_id][vt_value_id] = vt_value_value
if vt.tag == 'vt_group':
vts_filter = vt.attrib.get('filter', None)
if vts_filter is None:
raise OspdError('Invalid VT group. No filter given.')
filters.append(vts_filter)
vt_selection['vt_groups'] = filters
return vt_selection
@staticmethod
def process_credentials_elements(cred_tree: Element) -> Dict:
"""Receive an XML object with the credentials to run
a scan against a given target.
@param:
<credentials>
<credential type="up" service="ssh" port="22">
<username>scanuser</username>
<password>mypass</password>
</credential>
<credential type="up" service="smb">
<username>smbuser</username>
<password>mypass</password>
</credential>
</credentials>
@return: Dictionary containing the credentials for a given target.
Example form:
{'ssh': {'type': type,
'port': port,
'username': username,
'password': pass,
},
'smb': {'type': type,
'username': username,
'password': pass,
},
}
"""
credentials = {} # type: Dict
for credential in cred_tree:
service = credential.attrib.get('service')
credentials[service] = {}
credentials[service]['type'] = credential.attrib.get('type')
if service == 'ssh':
credentials[service]['port'] = credential.attrib.get('port')
for param in credential:
credentials[service][param.tag] = (
param.text if param.text else ""
)
return credentials
@staticmethod
def process_alive_test_methods(
alive_test_tree: Element, options: Dict
) -> None:
"""Receive an XML object with the alive test methods to run
a scan with. Methods are added to the options Dict.
@param
<alive_test_methods>
<icmp>boolean(1 or 0)</icmp>
<tcp_ack>boolean(1 or 0)</tcp_ack>
<tcp_syn>boolean(1 or 0)</tcp_syn>
<arp>boolean(1 or 0)</arp>
<consider_alive>boolean(1 or 0)</consider_alive>
</alive_test_methods>
"""
for child in alive_test_tree:
if child.tag == 'icmp':
if child.text is not None:
options['icmp'] = child.text
if child.tag == 'tcp_ack':
if child.text is not None:
options['tcp_ack'] = child.text
if child.tag == 'tcp_syn':
if child.text is not None:
options['tcp_syn'] = child.text
if child.tag == 'arp':
if child.text is not None:
options['arp'] = child.text
if child.tag == 'consider_alive':
if child.text is not None:
options['consider_alive'] = child.text
@classmethod
def process_target_element(cls, scanner_target: Element) -> Dict:
"""Receive an XML object with the target, ports and credentials to run
a scan against.
Arguments:
Single XML target element. The target has <hosts> and <ports>
subelements. Hosts can be a single host, a host range, a
comma-separated host list or a network address.
<ports> and <credentials> are optional. Therefore each
ospd-scanner should check for valid ones if needed.
Example form:
<target>
<hosts>192.168.0.0/24</hosts>
<ports>22</ports>
<credentials>
<credential type="up" service="ssh" port="22">
<username>scanuser</username>
<password>mypass</password>
</credential>
<credential type="up" service="smb">
<username>smbuser</username>
<password>mypass</password>
</credential>
</credentials>
<reverse_lookup_only>1</reverse_lookup_only>
<reverse_lookup_unify>0</reverse_lookup_unify>
</target>
Return:
A Dict with hosts, ports, credentials, exclude_hosts, finished_hosts and options.
Example form:
{
'hosts': '192.168.0.0/24',
'port': '22',
'credentials': {'smb': {'type': type,
'port': port,
'username': username,
'password': pass,
}
},
'exclude_hosts': '',
'finished_hosts': '',
'options': {'alive_test': 'ALIVE_TEST_CONSIDER_ALIVE',
'alive_test_ports: '22,80,123',
'reverse_lookup_only': '1',
'reverse_lookup_unify': '0',
},
}
"""
if scanner_target:
exclude_hosts = ''
finished_hosts = ''
ports = ''
hosts = None
credentials = {} # type: Dict
options = {}
for child in scanner_target:
if child.tag == 'hosts':
hosts = child.text
if child.tag == 'exclude_hosts':
exclude_hosts = child.text
if child.tag == 'finished_hosts':
finished_hosts = child.text
if child.tag == 'ports':
ports = child.text
if child.tag == 'credentials':
credentials = cls.process_credentials_elements(child)
if child.tag == 'alive_test_methods':
options['alive_test_methods'] = '1'
cls.process_alive_test_methods(child, options)
if child.tag == 'alive_test':
options['alive_test'] = child.text
if child.tag == 'alive_test_ports':
options['alive_test_ports'] = child.text
if child.tag == 'reverse_lookup_unify':
options['reverse_lookup_unify'] = child.text
if child.tag == 'reverse_lookup_only':
options['reverse_lookup_only'] = child.text
if hosts:
return {
'hosts': hosts,
'ports': ports,
'credentials': credentials,
'exclude_hosts': exclude_hosts,
'finished_hosts': finished_hosts,
'options': options,
}
else:
raise OspdError('No target to scan')
class OspResponse:
@staticmethod
def create_scanner_params_xml(scanner_params: Dict[str, Any]) -> Element:
""" Returns the OSP Daemon's scanner params in xml format. """
scanner_params_xml = Element('scanner_params')
for param_id, param in scanner_params.items():
param_xml = SubElement(scanner_params_xml, 'scanner_param')
for name, value in [('id', param_id), ('type', param['type'])]:
param_xml.set(name, value)
for name, value in [
('name', param['name']),
('description', param['description']),
('default', param['default']),
('mandatory', param['mandatory']),
]:
elem = SubElement(param_xml, name)
elem.text = str(value)
return scanner_params_xml
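# Illustrative sketch (an assumption, not part of upstream ospd): turning a
# scanner parameter definition into its XML representation. The 'dry_run'
# parameter is made up for this example only.
def _example_scanner_params_xml() -> Element:
    params = {
        'dry_run': {
            'type': 'boolean',
            'name': 'Dry run',
            'description': 'Whether to only simulate the scan.',
            'default': 0,
            'mandatory': 0,
        }
    }
    # Produces a <scanner_params> element with one <scanner_param id="dry_run"
    # type="boolean"> child carrying name/description/default/mandatory.
    return OspResponse.create_scanner_params_xml(params)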
ospd-21.4.4/ospd/resultlist.py 0000664 0000000 0000000 00000007341 14131311270 0016276 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
# pylint: disable=too-many-lines
""" Class for handling list of resutls.
"""
from collections import OrderedDict
from typing import Dict
from ospd.misc import ResultType
class ResultList:
""" Class for handling list of resutls."""
def __init__(self):
self._result_list = list()
def __len__(self):
return len(self._result_list)
def add_scan_host_detail_to_list(
self,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
uri: str = '',
) -> None:
""" Adds a host detail result to result list. """
self.add_result_to_list(
ResultType.HOST_DETAIL, host, hostname, name, value, uri
)
def add_scan_error_to_list(
self,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id='',
uri: str = '',
) -> None:
""" Adds an error result to result list. """
self.add_result_to_list(
ResultType.ERROR, host, hostname, name, value, port, test_id, uri
)
def add_scan_log_to_list(
self,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id: str = '',
qod: str = '',
uri: str = '',
) -> None:
""" Adds log result to a list of results. """
self.add_result_to_list(
ResultType.LOG,
host,
hostname,
name,
value,
port,
test_id,
'0.0',
qod,
uri,
)
def add_scan_alarm_to_list(
self,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id: str = '',
severity: str = '',
qod: str = '',
uri: str = '',
) -> None:
""" Adds an alarm result to a result list. """
self.add_result_to_list(
ResultType.ALARM,
host,
hostname,
name,
value,
port,
test_id,
severity,
qod,
uri,
)
def add_result_to_list(
self,
result_type: int,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id: str = '',
severity: str = '',
qod: str = '',
uri: str = '',
) -> None:
result = OrderedDict() # type: Dict
result['type'] = result_type
result['name'] = name
result['severity'] = severity
result['test_id'] = test_id
result['value'] = value
result['host'] = host
result['hostname'] = hostname
result['port'] = port
result['qod'] = qod
result['uri'] = uri
self._result_list.append(result)
def __iter__(self):
return iter(self._result_list)
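# Illustrative sketch (an assumption, not part of upstream ospd): collecting a
# batch of results in a ResultList before handing it over to the scan
# collection (e.g. via ScanCollection.add_result_list()).
def _example_result_list() -> ResultList:
    results = ResultList()
    results.add_scan_log_to_list(
        host='192.0.2.1', name='example', value='An example log entry'
    )
    results.add_scan_alarm_to_list(
        host='192.0.2.1',
        name='Example vulnerability',
        value='Something looks wrong',
        severity='5.0',
        qod='80',
    )
    return results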
ospd-21.4.4/ospd/scan.py 0000664 0000000 0000000 00000050026 14131311270 0015006 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import logging
import multiprocessing
import time
import uuid
from pprint import pformat
from collections import OrderedDict
from enum import Enum, IntEnum
from typing import List, Any, Dict, Iterator, Optional, Iterable, Union
from ospd.network import target_str_to_list
from ospd.datapickler import DataPickler
from ospd.errors import OspdCommandError
LOGGER = logging.getLogger(__name__)
class ScanStatus(Enum):
"""Scan status. """
QUEUED = 0
INIT = 1
RUNNING = 2
STOPPED = 3
FINISHED = 4
INTERRUPTED = 5
class ScanProgress(IntEnum):
"""Scan or host progress. """
FINISHED = 100
INIT = 0
DEAD_HOST = -1
INTERRUPTED = -2
class ScanCollection:
"""Scans collection, managing scans and results read and write, exposing
only needed information.
Each scan has meta-information such as scan ID, current progress (from 0 to
100), start time, end time, scan target and options and a list of results.
There are 4 types of results: Alarms, Logs, Errors and Host Details.
Todo:
- Better checking for Scan ID existence and handling otherwise.
- More data validation.
- Mutex access per table/scan_info.
"""
def __init__(self, file_storage_dir: str) -> None:
""" Initialize the Scan Collection. """
self.data_manager = (
None
) # type: Optional[multiprocessing.managers.SyncManager]
self.scans_table = dict() # type: Dict
self.file_storage_dir = file_storage_dir
def init(self):
self.data_manager = multiprocessing.Manager()
def add_result(
self,
scan_id: str,
result_type: int,
host: str = '',
hostname: str = '',
name: str = '',
value: str = '',
port: str = '',
test_id: str = '',
severity: str = '',
qod: str = '',
uri: str = '',
) -> None:
""" Add a result to a scan in the table. """
assert scan_id
assert len(name) or len(value)
result = OrderedDict() # type: Dict
result['type'] = result_type
result['name'] = name
result['severity'] = severity
result['test_id'] = test_id
result['value'] = value
result['host'] = host
result['hostname'] = hostname
result['port'] = port
result['qod'] = qod
result['uri'] = uri
results = self.scans_table[scan_id]['results']
results.append(result)
# Set scan_info's results to propagate results to parent process.
self.scans_table[scan_id]['results'] = results
def add_result_list(
self, scan_id: str, result_list: Iterable[Dict[str, str]]
) -> None:
"""
        Add a batch of results to the results table for the corresponding
scan_id
"""
results = self.scans_table[scan_id]['results']
results.extend(result_list)
# Set scan_info's results to propagate results to parent process.
self.scans_table[scan_id]['results'] = results
def remove_hosts_from_target_progress(
self, scan_id: str, hosts: List
) -> None:
"""Remove a list of hosts from the main scan progress table to avoid
the hosts to be included in the calculation of the scan progress"""
if not hosts:
return
LOGGER.debug(
'%s: Remove the following hosts from the target list, '
'as they are already finished or are dead: %s',
scan_id,
pformat(hosts),
)
target = self.scans_table[scan_id].get('target_progress')
for host in hosts:
if host in target:
del target[host]
# Set scan_info's target_progress to propagate progresses
# to parent process.
self.scans_table[scan_id]['target_progress'] = target
def set_progress(self, scan_id: str, progress: int) -> None:
""" Sets scan_id scan's progress. """
if progress > ScanProgress.INIT and progress <= ScanProgress.FINISHED:
self.scans_table[scan_id]['progress'] = progress
if progress == ScanProgress.FINISHED:
self.scans_table[scan_id]['end_time'] = int(time.time())
def set_host_progress(
self, scan_id: str, host_progress_batch: Dict[str, int]
) -> None:
""" Sets scan_id scan's progress. """
host_progresses = self.scans_table[scan_id].get('target_progress')
host_progresses.update(host_progress_batch)
# Set scan_info's target_progress to propagate progresses
# to parent process.
self.scans_table[scan_id]['target_progress'] = host_progresses
def set_host_finished(self, scan_id: str, hosts: List[str]) -> None:
""" Increase the amount of finished hosts which were alive."""
LOGGER.debug(
'%s: Setting the following hosts as finished: %s',
scan_id,
pformat(hosts),
)
total_finished = len(hosts)
count_alive = (
self.scans_table[scan_id].get('count_alive') + total_finished
)
self.scans_table[scan_id]['count_alive'] = count_alive
def set_host_dead(self, scan_id: str, hosts: List[str]) -> None:
""" Increase the amount of dead hosts. """
LOGGER.debug(
'%s: Setting the following hosts as dead: %s',
scan_id,
pformat(hosts),
)
total_dead = len(hosts)
count_dead = self.scans_table[scan_id].get('count_dead') + total_dead
self.scans_table[scan_id]['count_dead'] = count_dead
def set_amount_dead_hosts(self, scan_id: str, total_dead: int) -> None:
""" Increase the amount of dead hosts. """
count_dead = self.scans_table[scan_id].get('count_dead') + total_dead
self.scans_table[scan_id]['count_dead'] = count_dead
def clean_temp_result_list(self, scan_id):
""" Clean the results stored in the temporary list. """
self.scans_table[scan_id]['temp_results'] = list()
def restore_temp_result_list(self, scan_id):
"""Add the results stored in the temporary list into the results
list again."""
result_aux = self.scans_table[scan_id].get('results', list())
result_aux.extend(self.scans_table[scan_id].get('temp_results', list()))
# Propagate results
self.scans_table[scan_id]['results'] = result_aux
self.clean_temp_result_list(scan_id)
def results_iterator(
self, scan_id: str, pop_res: bool = False, max_res: int = None
) -> Iterator[Any]:
"""Returns an iterator over scan_id scan's results. If pop_res is True,
        it removes the fetched results from the list.
If max_res is None, return all the results.
Otherwise, if max_res = N > 0 return N as maximum number of results.
        max_res works only together with pop_res.
"""
if pop_res and max_res:
result_aux = self.scans_table[scan_id].get('results', list())
self.scans_table[scan_id]['results'] = result_aux[max_res:]
self.scans_table[scan_id]['temp_results'] = result_aux[:max_res]
return iter(self.scans_table[scan_id]['temp_results'])
elif pop_res:
self.scans_table[scan_id]['temp_results'] = self.scans_table[
scan_id
].get('results', list())
self.scans_table[scan_id]['results'] = list()
return iter(self.scans_table[scan_id]['temp_results'])
return iter(self.scans_table[scan_id]['results'])
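    # Illustrative sketch (assumed usage): fetch and pop at most 10 results.
    # If delivering them to the client fails, restore_temp_result_list() puts
    # the popped batch back into the results list.
    #
    #   for result in scan_collection.results_iterator(
    #       scan_id, pop_res=True, max_res=10
    #   ):
    #       send_to_client(result)      # hypothetical delivery helper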
def ids_iterator(self) -> Iterator[str]:
""" Returns an iterator over the collection's scan IDS. """
# Do not iterate over the scans_table because it can change
# during iteration, since it is accessed by multiple processes.
scan_id_list = list(self.scans_table)
return iter(scan_id_list)
def clean_up_pickled_scan_info(self) -> None:
""" Remove files of pickled scan info """
for scan_id in self.ids_iterator():
if self.get_status(scan_id) == ScanStatus.QUEUED:
self.remove_file_pickled_scan_info(scan_id)
def remove_file_pickled_scan_info(self, scan_id: str) -> None:
pickler = DataPickler(self.file_storage_dir)
pickler.remove_file(scan_id)
def unpickle_scan_info(self, scan_id: str) -> None:
"""Unpickle a stored scan_inf corresponding to the scan_id
and store it in the scan_table"""
scan_info = self.scans_table.get(scan_id)
scan_info_hash = scan_info.pop('scan_info_hash')
pickler = DataPickler(self.file_storage_dir)
unpickled_scan_info = pickler.load_data(scan_id, scan_info_hash)
if not unpickled_scan_info:
pickler.remove_file(scan_id)
raise OspdCommandError(
'Not possible to unpickle stored scan info for %s' % scan_id,
'start_scan',
)
scan_info['results'] = list()
scan_info['temp_results'] = list()
scan_info['progress'] = ScanProgress.INIT.value
scan_info['target_progress'] = dict()
scan_info['count_alive'] = 0
scan_info['count_dead'] = 0
scan_info['count_total'] = None
scan_info['excluded_simplified'] = None
scan_info['target'] = unpickled_scan_info.pop('target')
scan_info['vts'] = unpickled_scan_info.pop('vts')
scan_info['options'] = unpickled_scan_info.pop('options')
scan_info['start_time'] = int(time.time())
scan_info['end_time'] = 0
self.scans_table[scan_id] = scan_info
pickler.remove_file(scan_id)
def create_scan(
self,
scan_id: str = '',
target: Dict = None,
options: Optional[Dict] = None,
vts: Dict = None,
) -> str:
"""Creates a new scan with provided scan information.
@target: Target to scan.
        @options: Miscellaneous scan options supplied via <scanner_params>
                  XML element.
@return: Scan's ID. None if error occurs.
"""
if not options:
options = dict()
credentials = target.pop('credentials')
scan_info = self.data_manager.dict() # type: Dict
scan_info['status'] = ScanStatus.QUEUED
scan_info['credentials'] = credentials
scan_info['start_time'] = int(time.time())
scan_info['end_time'] = 0
scan_info_to_pickle = {'target': target, 'options': options, 'vts': vts}
if scan_id is None or scan_id == '':
scan_id = str(uuid.uuid4())
pickler = DataPickler(self.file_storage_dir)
scan_info_hash = None
try:
scan_info_hash = pickler.store_data(scan_id, scan_info_to_pickle)
except OspdCommandError as e:
LOGGER.error(e)
return
scan_info['scan_id'] = scan_id
scan_info['scan_info_hash'] = scan_info_hash
self.scans_table[scan_id] = scan_info
return scan_id
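    # Illustrative sketch (assumed usage, not from the original source):
    # queueing a new scan. The target dictionary is expected to carry at least
    # the 'credentials', 'hosts', 'ports', 'exclude_hosts', 'finished_hosts'
    # and 'options' entries used by the getters in this class; the storage
    # directory below is hypothetical.
    #
    #   scan_collection = ScanCollection('/var/run/ospd')
    #   scan_collection.init()
    #   scan_id = scan_collection.create_scan(
    #       target={
    #           'hosts': '192.0.2.0/30',
    #           'ports': 'T:80,443',
    #           'credentials': {},
    #           'exclude_hosts': '',
    #           'finished_hosts': '',
    #           'options': {},
    #       },
    #       options={'max_hosts': '3'},
    #       vts={'1.3.6.1.4.1.25623.1.0.100001': {}},
    #   )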
def set_status(self, scan_id: str, status: ScanStatus) -> None:
""" Sets scan_id scan's status. """
self.scans_table[scan_id]['status'] = status
if status == ScanStatus.STOPPED or status == ScanStatus.INTERRUPTED:
self.scans_table[scan_id]['end_time'] = int(time.time())
def get_status(self, scan_id: str) -> ScanStatus:
""" Get scan_id scans's status."""
return self.scans_table[scan_id].get('status')
def get_options(self, scan_id: str) -> Dict:
""" Get scan_id scan's options list. """
return self.scans_table[scan_id].get('options')
def set_option(self, scan_id, name: str, value: Any) -> None:
""" Set a scan_id scan's name option to value. """
self.scans_table[scan_id]['options'][name] = value
def get_progress(self, scan_id: str) -> int:
""" Get a scan's current progress value. """
return self.scans_table[scan_id].get('progress', ScanProgress.INIT)
def get_count_dead(self, scan_id: str) -> int:
""" Get a scan's current dead host count. """
return self.scans_table[scan_id]['count_dead']
def get_count_alive(self, scan_id: str) -> int:
""" Get a scan's current alive host count. """
return self.scans_table[scan_id]['count_alive']
    def update_count_total(self, scan_id: str, count_total: int) -> None:
        """ Sets a scan's total host count. """
self.scans_table[scan_id]['count_total'] = count_total
def get_count_total(self, scan_id: str) -> int:
""" Get a scan's total host count. """
count_total = self.scans_table[scan_id]['count_total']
# The value set by the server has priority over the value
# calculated from the original target list by ospd.
        # As ospd is not able to check the amount of valid hosts or to detect
        # duplicated or invalid hosts, the server may set a negative value
        # in case it detects an invalid target string or an amount different
        # from the original amount in the target list.
if count_total == -1:
count_total = 0
        # If the server does not set the total host count,
        # ospd sets the amount of hosts from the original host list.
elif count_total is None:
count_total = self.get_host_count(scan_id)
self.update_count_total(scan_id, count_total)
return count_total
def get_current_target_progress(self, scan_id: str) -> Dict[str, int]:
""" Get a scan's current hosts progress """
return self.scans_table[scan_id]['target_progress']
def simplify_exclude_host_count(self, scan_id: str) -> int:
"""Remove from exclude_hosts the received hosts in the finished_hosts
list sent by the client.
The finished hosts are sent also as exclude hosts for backward
compatibility purposses.
Return:
Count of excluded host.
"""
exc_hosts_list = target_str_to_list(self.get_exclude_hosts(scan_id))
finished_hosts_list = target_str_to_list(
self.get_finished_hosts(scan_id)
)
# Remove finished hosts from excluded host list
if finished_hosts_list and exc_hosts_list:
for finished in finished_hosts_list:
if finished in exc_hosts_list:
exc_hosts_list.remove(finished)
# Remove excluded hosts which don't belong to the target list
host_list = target_str_to_list(self.get_host_list(scan_id))
excluded_simplified = 0
invalid_exc_hosts = 0
if exc_hosts_list:
for exc_host in exc_hosts_list:
if exc_host in host_list:
excluded_simplified += 1
else:
invalid_exc_hosts += 1
if invalid_exc_hosts > 0:
LOGGER.warning(
"Please check the excluded host list. It contains hosts which "
"do not belong to the target. This warning can be ignored if "
"this was done on purpose (e.g. to exclude specific hostname)."
)
# Set scan_info's excluded simplified to propagate excluded count
# to parent process.
self.scans_table[scan_id]['excluded_simplified'] = excluded_simplified
return excluded_simplified
def get_simplified_exclude_host_count(self, scan_id: str) -> int:
""" Get a scan's excluded host count. """
excluded_simplified = self.scans_table[scan_id]['excluded_simplified']
# Check for None because it is the init value, as excluded can be 0
# as well
if excluded_simplified is not None:
return excluded_simplified
return self.simplify_exclude_host_count(scan_id)
def calculate_target_progress(self, scan_id: str) -> int:
"""Get a target's current progress value.
The value is calculated with the progress of each single host
in the target."""
total_hosts = self.get_count_total(scan_id)
exc_hosts = self.get_simplified_exclude_host_count(scan_id)
count_alive = self.get_count_alive(scan_id)
count_dead = self.get_count_dead(scan_id)
host_progresses = self.get_current_target_progress(scan_id)
try:
t_prog = int(
(sum(host_progresses.values()) + 100 * count_alive)
/ (total_hosts - exc_hosts - count_dead)
)
except ZeroDivisionError:
# Consider the case in which all hosts are dead or excluded
LOGGER.debug('%s: All hosts dead or excluded.', scan_id)
t_prog = ScanProgress.FINISHED.value
return t_prog
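    # Worked example (illustrative, not part of the original source): with
    # total_hosts=10, exc_hosts=2, count_dead=1, count_alive=4 finished hosts
    # and three hosts still running at 50% each, the progress is
    #   int((3 * 50 + 100 * 4) / (10 - 2 - 1)) = int(550 / 7) = 78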
    def get_start_time(self, scan_id: str) -> int:
""" Get a scan's start time. """
return self.scans_table[scan_id]['start_time']
    def get_end_time(self, scan_id: str) -> int:
""" Get a scan's end time. """
return self.scans_table[scan_id]['end_time']
def get_host_list(self, scan_id: str) -> Dict:
""" Get a scan's host list. """
target = None
try:
target = self.scans_table[scan_id]['target'].get('hosts')
except KeyError:
LOGGER.warning(
'%s: Scan ID is in the scan table, but it was '
'not initialized.',
scan_id,
)
return target
def get_host_count(self, scan_id: str) -> int:
""" Get total host count in the target. """
host = self.get_host_list(scan_id)
total_hosts = 0
if host:
total_hosts = len(target_str_to_list(host))
return total_hosts
def get_ports(self, scan_id: str) -> str:
"""Get a scan's ports list."""
target = self.scans_table[scan_id].get('target')
ports = target.pop('ports')
self.scans_table[scan_id]['target'] = target
return ports
def get_exclude_hosts(self, scan_id: str) -> str:
"""Get an exclude host list for a given target."""
return self.scans_table[scan_id]['target'].get('exclude_hosts')
def get_finished_hosts(self, scan_id: str) -> str:
"""Get the finished host list sent by the client for a given target."""
return self.scans_table[scan_id]['target'].get('finished_hosts')
def get_credentials(self, scan_id: str) -> Dict[str, Dict[str, str]]:
"""Get a scan's credential list. It return dictionary with
the corresponding credential for a given target.
"""
return self.scans_table[scan_id].get('credentials')
def get_target_options(self, scan_id: str) -> Dict[str, str]:
"""Get a scan's target option dictionary.
        It returns a dictionary with the corresponding options for
a given target.
"""
return self.scans_table[scan_id]['target'].get('options')
def get_vts(self, scan_id: str) -> Dict[str, Union[Dict[str, str], List]]:
""" Get a scan's vts. """
scan_info = self.scans_table[scan_id]
vts = scan_info.pop('vts')
self.scans_table[scan_id] = scan_info
return vts
def id_exists(self, scan_id: str) -> bool:
""" Check whether a scan exists in the table. """
return self.scans_table.get(scan_id) is not None
def delete_scan(self, scan_id: str) -> bool:
""" Delete a scan if fully finished. """
if self.get_status(scan_id) == ScanStatus.RUNNING:
return False
scans_table = self.scans_table
del scans_table[scan_id]
self.scans_table = scans_table
return True
ospd-21.4.4/ospd/server.py 0000664 0000000 0000000 00000022065 14131311270 0015372 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module for serving and streaming data
"""
import logging
import socket
import ssl
import time
import threading
import socketserver
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Callable, Optional, Tuple, Union
from ospd.errors import OspdError
logger = logging.getLogger(__name__)
DEFAULT_BUFSIZE = 1024
class Stream:
def __init__(self, sock: socket.socket, stream_timeout: int):
self.socket = sock
self.socket.settimeout(stream_timeout)
def close(self):
"""Close the stream"""
try:
self.socket.shutdown(socket.SHUT_RDWR)
except OSError as e:
logger.debug(
"Ignoring error while shutting down the connection. %s", e
)
self.socket.close()
def read(self, bufsize: Optional[int] = DEFAULT_BUFSIZE) -> bytes:
"""Read at maximum bufsize data from the stream"""
data = self.socket.recv(bufsize)
if not data:
logger.debug('Client closed the connection')
return data
def write(self, data: bytes) -> bool:
"""Send data in chunks of DEFAULT_BUFSIZE to the client"""
b_start = 0
b_end = DEFAULT_BUFSIZE
ret_success = True
while True:
if b_end > len(data):
try:
self.socket.send(data[b_start:])
except (socket.error, BrokenPipeError) as e:
logger.error("Error sending data to the client. %s", e)
ret_success = False
finally:
return ret_success # pylint: disable=lost-exception
try:
b_sent = self.socket.send(data[b_start:b_end])
except (socket.error, BrokenPipeError) as e:
logger.error("Error sending data to the client. %s", e)
return False
b_start = b_end
b_end += b_sent
return ret_success
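    # Illustrative sketch (assumed usage): the server side wraps an accepted
    # socket in a Stream and writes a response; write() splits payloads larger
    # than DEFAULT_BUFSIZE into chunks before sending them.
    #
    #   stream = Stream(client_socket, stream_timeout=10)
    #   ok = stream.write(b'<get_version_response status="200" status_text="OK"/>')
    #   stream.close()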
StreamCallbackType = Callable[[Stream], None]
InetAddress = Tuple[str, int]
def validate_cacert_file(cacert: str):
""" Check if provided file is a valid CA Certificate """
try:
context = ssl.create_default_context(cafile=cacert)
except AttributeError:
# Python version < 2.7.9
return
except IOError:
raise OspdError('CA Certificate not found') from None
try:
not_after = context.get_ca_certs()[0]['notAfter']
not_after = ssl.cert_time_to_seconds(not_after)
not_before = context.get_ca_certs()[0]['notBefore']
not_before = ssl.cert_time_to_seconds(not_before)
except (KeyError, IndexError):
raise OspdError('CA Certificate is erroneous') from None
now = int(time.time())
if not_after < now:
raise OspdError('CA Certificate expired')
if not_before > now:
raise OspdError('CA Certificate not active yet')
class RequestHandler(socketserver.BaseRequestHandler):
""" Class to handle the request."""
def handle(self):
self.server.handle_request(self.request, self.client_address)
class BaseServer(ABC):
def __init__(self, stream_timeout: int):
self.server = None
self.stream_timeout = stream_timeout
@abstractmethod
def start(self, stream_callback: StreamCallbackType):
"""Starts a server with capabilities to handle multiple client
connections simultaneously.
If a new client connects the stream_callback is called with a Stream
Arguments:
stream_callback (function): Callback function to be called when
a stream is ready
"""
def close(self):
""" Shutdown the server"""
self.server.shutdown()
self.server.server_close()
@abstractmethod
def handle_request(self, request, client_address):
""" Handle an incoming client request"""
def _start_threading_server(self):
server_thread = threading.Thread(target=self.server.serve_forever)
server_thread.daemon = True
server_thread.start()
class SocketServerMixin:
    # Use daemon mode to circumvent a memory leak
    # (reported at https://bugs.python.org/issue37193).
    #
    # Daemonic threads are killed immediately by the python interpreter without
    # waiting until they are finished.
#
# Maybe block_on_close = True could work too.
# In that case the interpreter waits for the threads to finish but doesn't
# track them in the _threads list.
daemon_threads = True
def __init__(self, server: BaseServer, address: Union[str, InetAddress]):
self.server = server
super().__init__(address, RequestHandler, bind_and_activate=True)
def handle_request(self, request, client_address):
self.server.handle_request(request, client_address)
class ThreadedUnixSocketServer(
SocketServerMixin, socketserver.ThreadingUnixStreamServer
):
pass
class ThreadedTlsSocketServer(
SocketServerMixin, socketserver.ThreadingTCPServer
):
pass
class UnixSocketServer(BaseServer):
"""Server for accepting connections via a Unix domain socket"""
def __init__(self, socket_path: str, socket_mode: str, stream_timeout: int):
super().__init__(stream_timeout)
self.socket_path = Path(socket_path)
self.socket_mode = int(socket_mode, 8)
def _cleanup_socket(self):
if self.socket_path.exists():
self.socket_path.unlink()
def _create_parent_dirs(self):
# create all parent directories for the socket path
parent = self.socket_path.parent
parent.mkdir(parents=True, exist_ok=True)
def start(self, stream_callback: StreamCallbackType):
self._cleanup_socket()
self._create_parent_dirs()
try:
self.stream_callback = stream_callback
self.server = ThreadedUnixSocketServer(self, str(self.socket_path))
self._start_threading_server()
except OSError as e:
logger.error("Couldn't bind socket on %s", str(self.socket_path))
raise OspdError(
"Couldn't bind socket on {}. {}".format(
str(self.socket_path), e
)
) from e
if self.socket_path.exists():
self.socket_path.chmod(self.socket_mode)
def close(self):
super().close()
self._cleanup_socket()
def handle_request(self, request, client_address):
logger.debug("New request from %s", str(self.socket_path))
stream = Stream(request, self.stream_timeout)
self.stream_callback(stream)
class TlsServer(BaseServer):
"""Server for accepting TLS encrypted connections via a TCP socket"""
def __init__(
self,
address: str,
port: int,
cert_file: str,
key_file: str,
ca_file: str,
stream_timeout: int,
):
super().__init__(stream_timeout)
self.socket = (address, port)
if not Path(cert_file).exists():
raise OspdError('cert file {} not found'.format(cert_file))
if not Path(key_file).exists():
raise OspdError('key file {} not found'.format(key_file))
if not Path(ca_file).exists():
raise OspdError('CA file {} not found'.format(ca_file))
validate_cacert_file(ca_file)
protocol = ssl.PROTOCOL_SSLv23
self.tls_context = ssl.SSLContext(protocol)
self.tls_context.verify_mode = ssl.CERT_REQUIRED
self.tls_context.load_cert_chain(cert_file, keyfile=key_file)
self.tls_context.load_verify_locations(ca_file)
def start(self, stream_callback: StreamCallbackType):
try:
self.stream_callback = stream_callback
self.server = ThreadedTlsSocketServer(self, self.socket)
self._start_threading_server()
except OSError as e:
logger.error(
"Couldn't bind socket on %s:%s", self.socket[0], self.socket[1]
)
raise OspdError(
"Couldn't bind socket on {}:{}. {}".format(
self.socket[0], str(self.socket[1]), e
)
) from e
def handle_request(self, request, client_address):
logger.debug("New connection from %s", client_address)
req_socket = self.tls_context.wrap_socket(request, server_side=True)
stream = Stream(req_socket, self.stream_timeout)
self.stream_callback(stream)
ospd-21.4.4/ospd/timer.py 0000664 0000000 0000000 00000003530 14131311270 0015200 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
import logging
from ospd.errors import OspdError
logger = logging.getLogger(__name__)
class TimerError(OspdError):
""" Timer errors """
class Timer:
def __init__(
self,
name: str = None,
text: str = "{}: Elapsed time: {:0.4f} seconds",
logger=logger.debug, # pylint: disable=redefined-outer-name
):
self._start_time = None
self._name = name
self._text = text
self._logger = logger
def __enter__(self):
self.start()
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.stop()
@staticmethod
def create(name) -> "Timer":
timer = Timer(name)
timer.start()
return timer
def start(self):
"""Start a new timer"""
self._start_time = time.perf_counter()
def stop(self):
if not self._start_time:
raise TimerError('Timer is not running.')
duration = time.perf_counter() - self._start_time
if self._logger:
self._logger(self._text.format(self._name, duration))
return duration
ospd-21.4.4/ospd/vtfilter.py 0000664 0000000 0000000 00000011231 14131311270 0015714 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Vulnerability Test Filter class.
"""
import re
import operator
from typing import Dict, List, Optional
from ospd.errors import OspdCommandError
from .vts import Vts
class VtsFilter:
""" Helper class to filter Vulnerability Tests """
def __init__(self) -> None:
""" Initialize filter operator and allowed filters. """
self.filter_operator = {
'<': operator.lt,
'>': operator.gt,
'=': operator.eq,
}
self.allowed_filter = {
'creation_time': self.format_vt_creation_time,
'modification_time': self.format_vt_modification_time,
}
def parse_filters(self, vt_filter: str) -> List:
"""Parse a string containing one or more filters
and return a list of filters
Arguments:
vt_filter (string): String containing filters separated with
semicolon.
Return:
            List with filters. Each filter is a list with 3 elements
e.g. [arg, operator, value]
"""
filter_list = vt_filter.split(';')
filters = list()
for single_filter in filter_list:
filter_aux = re.split(r'(\W)', single_filter, 1)
if len(filter_aux) < 3:
raise OspdCommandError(
"Invalid number of argument in the filter", "get_vts"
)
_element, _oper, _val = filter_aux
if _element not in self.allowed_filter:
raise OspdCommandError("Invalid filter element", "get_vts")
if _oper not in self.filter_operator:
raise OspdCommandError("Invalid filter operator", "get_vts")
filters.append(filter_aux)
return filters
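    # Illustrative example: two conditions separated by a semicolon parse into
    # [element, operator, value] triples.
    #
    #   VtsFilter().parse_filters(
    #       'modification_time>2020-01-01;creation_time<2021-05-01'
    #   )
    #   # -> [['modification_time', '>', '2020-01-01'],
    #   #     ['creation_time', '<', '2021-05-01']]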
def format_vt_creation_time(self, value):
"""In case the given creationdatetime value must be formatted,
this function must be implemented by the wrapper
"""
return value
def format_vt_modification_time(self, value):
"""In case the given modification datetime value must be formatted,
this function must be implemented by the wrapper
"""
return value
def format_filter_value(self, element: str, value: Dict):
"""Calls the specific function to format value,
depending on the given element.
Arguments:
element (string): The element of the VT to be formatted.
value (dictionary): The element value.
Returns:
Returns a formatted value.
"""
format_func = self.allowed_filter.get(element)
return format_func(value)
def get_filtered_vts_list(
self, vts: Vts, vt_filter: str
) -> Optional[List[str]]:
"""Gets a collection of vulnerability test from the vts dictionary,
which match the filter.
Arguments:
vt_filter: Filter to apply to the vts collection.
vts: The complete vts collection.
Returns:
List with filtered vulnerability tests. The list can be empty.
None in case of filter parse failure.
"""
if not vt_filter:
raise OspdCommandError('vt_filter: A valid filter is required.')
filters = self.parse_filters(vt_filter)
if not filters:
return None
vt_oid_list = list(vts)
for _element, _oper, _filter_val in filters:
for vt_oid in vts:
if vt_oid not in vt_oid_list:
continue
vt = vts.get(vt_oid)
if vt is None or not vt.get(_element):
vt_oid_list.remove(vt_oid)
continue
_elem_val = vt.get(_element)
_val = self.format_filter_value(_element, _elem_val)
if self.filter_operator[_oper](_val, _filter_val):
continue
else:
vt_oid_list.remove(vt_oid)
return vt_oid_list
ospd-21.4.4/ospd/vts.py 0000664 0000000 0000000 00000014323 14131311270 0014676 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Classes for storing VTs
"""
import logging
import multiprocessing
from hashlib import sha256
import re
from copy import deepcopy
from typing import Dict, Any, Type, Iterator, Iterable, Tuple
from ospd.errors import OspdError
logger = logging.getLogger(__name__)
DEFAULT_VT_ID_PATTERN = re.compile("[0-9a-zA-Z_\\-:.]{1,80}")
class Vts:
def __init__(
self, storage: Type[Dict] = None, vt_id_pattern=DEFAULT_VT_ID_PATTERN
):
self.storage = storage
self.vt_id_pattern = vt_id_pattern
self._vts = None
self.sha256_hash = None
self.is_cache_available = True
def __contains__(self, key: str) -> bool:
return key in self._vts
def __iter__(self) -> Iterator[str]:
if hasattr(self.vts, '__iter__'):
return self.vts.__iter__()
def __getitem__(self, key):
return self.vts[key]
def items(self) -> Iterator[Tuple[str, Dict]]:
return iter(self.vts.items())
def __len__(self) -> int:
return len(self.vts)
def __init_vts(self):
if self.storage:
self._vts = self.storage()
else:
self._vts = multiprocessing.Manager().dict()
@property
def vts(self) -> Dict[str, Any]:
if self._vts is None:
self.__init_vts()
return self._vts
def add(
self,
vt_id: str,
name: str = None,
vt_params: str = None,
vt_refs: str = None,
custom: str = None,
vt_creation_time: str = None,
vt_modification_time: str = None,
vt_dependencies: str = None,
summary: str = None,
impact: str = None,
affected: str = None,
insight: str = None,
solution: str = None,
solution_t: str = None,
solution_m: str = None,
detection: str = None,
qod_t: str = None,
qod_v: str = None,
severities: str = None,
) -> None:
"""Add a vulnerability test information.
IMPORTANT: The VT's Data Manager will store the vts collection.
        If the collection is considerably big and it will be consulted
        intensively during a routine, consider doing a deepcopy(), since
        accessing the shared memory in the data manager is very expensive.
        At the end of the routine, the temporary copy must be set to None
        and deleted.
"""
if not vt_id:
raise OspdError('Invalid vt_id {}'.format(vt_id))
if self.vt_id_pattern.fullmatch(vt_id) is None:
raise OspdError('Invalid vt_id {}'.format(vt_id))
if vt_id in self.vts:
raise OspdError('vt_id {} already exists'.format(vt_id))
if name is None:
name = ''
vt = {'name': name}
if custom is not None:
vt["custom"] = custom
if vt_params is not None:
vt["vt_params"] = vt_params
if vt_refs is not None:
vt["vt_refs"] = vt_refs
if vt_dependencies is not None:
vt["vt_dependencies"] = vt_dependencies
if vt_creation_time is not None:
vt["creation_time"] = vt_creation_time
if vt_modification_time is not None:
vt["modification_time"] = vt_modification_time
if summary is not None:
vt["summary"] = summary
if impact is not None:
vt["impact"] = impact
if affected is not None:
vt["affected"] = affected
if insight is not None:
vt["insight"] = insight
if solution is not None:
vt["solution"] = solution
if solution_t is not None:
vt["solution_type"] = solution_t
if solution_m is not None:
vt["solution_method"] = solution_m
if detection is not None:
vt["detection"] = detection
if qod_t is not None:
vt["qod_type"] = qod_t
elif qod_v is not None:
vt["qod"] = qod_v
if severities is not None:
vt["severities"] = severities
self.vts[vt_id] = vt
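    # Illustrative sketch: registering and looking up a single VT. Only vt_id
    # is mandatory and it must match DEFAULT_VT_ID_PATTERN; all other
    # arguments are optional metadata. The OID below is a made-up example.
    #
    #   vts = Vts()
    #   vts.add(
    #       '1.3.6.1.4.1.25623.1.0.100001',
    #       name='Example VT',
    #       vt_modification_time='1609459200',
    #   )
    #   vts.get('1.3.6.1.4.1.25623.1.0.100001')['name']   # -> 'Example VT'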
def get(self, vt_id: str) -> Dict[str, Any]:
return self.vts.get(vt_id)
def keys(self) -> Iterable[str]:
return self.vts.keys()
def clear(self) -> None:
self._vts.clear()
self._vts = None
def copy(self) -> "Vts":
copy = Vts(self.storage, vt_id_pattern=self.vt_id_pattern)
copy._vts = deepcopy(self._vts) # pylint: disable=protected-access
return copy
def calculate_vts_collection_hash(self, include_vt_params: bool = True):
""" Calculate the vts collection sha256 hash. """
if not self._vts:
logger.debug(
"Error calculating VTs collection hash. Cache is empty"
)
return
m = sha256() # pylint: disable=invalid-name
# for a reproducible hash calculation
# the vts must already be sorted in the dictionary.
for vt_id, vt in self.vts.items():
param_chain = ""
vt_params = vt.get('vt_params')
if include_vt_params and vt_params:
for _, param in sorted(vt_params.items()):
param_chain += (
param.get('id')
+ param.get('name')
+ param.get('default')
)
m.update(
(vt_id + vt.get('modification_time')).encode('utf-8')
+ param_chain.encode('utf-8')
)
self.sha256_hash = m.hexdigest()
ospd-21.4.4/ospd/xml.py 0000664 0000000 0000000 00000022133 14131311270 0014660 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" OSP XML utils class.
"""
import re
from typing import List, Dict, Any, Union
from xml.sax.saxutils import escape, quoteattr
from xml.etree.ElementTree import tostring, Element
from ospd.misc import ResultType
r = re.compile( # pylint: disable=invalid-name
r'(.*?)(?:([^\x09\x0A\x0D\x20-\x7E\x85\xA0-\xFF'
+ r'\u0100-\uD7FF\uE000-\uFDCF\uFDE0-\uFFFD])|([\n])|$)'
)
def split_invalid_xml(result_text: str) -> Union[List[Union[str, int]], str]:
"""Search for occurrence of non printable chars and replace them
with the integer representation the Unicode code. The original string
is splitted where a non printable char is found.
"""
splitted_string = []
def replacer(match):
regex_g1 = match.group(1)
if len(regex_g1) > 0:
splitted_string.append(regex_g1)
regex_g2 = match.group(2)
if regex_g2 is not None:
splitted_string.append(ord(regex_g2))
regex_g3 = match.group(3)
if regex_g3 is not None:
splitted_string.append(regex_g3)
return ""
re.sub(r, replacer, result_text)
return splitted_string
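# Illustrative example: a string containing the control character '\x07' is
# split around it and the character is reported as its integer code point.
#
#   split_invalid_xml('foo\x07bar')   # -> ['foo', 7, 'bar']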
def escape_ctrl_chars(result_text):
"""Replace non printable chars in result_text with an hexa code
in string format.
"""
escaped_str = ''
for fragment in split_invalid_xml(result_text):
if isinstance(fragment, int):
escaped_str += '\\x%04X' % fragment
else:
escaped_str += fragment
return escaped_str
def get_result_xml(result):
"""Formats a scan result to XML format.
Arguments:
result (dict): Dictionary with a scan result.
Return:
Result as xml element object.
"""
result_xml = Element('result')
for name, value in [
('name', result['name']),
('type', ResultType.get_str(result['type'])),
('severity', result['severity']),
('host', result['host']),
('hostname', result['hostname']),
('test_id', result['test_id']),
('port', result['port']),
('qod', result['qod']),
('uri', result['uri']),
]:
result_xml.set(name, escape(str(value)))
if result['value'] is not None:
result_xml.text = escape_ctrl_chars(result['value'])
return result_xml
def get_progress_xml(progress: Dict[str, int]):
"""Formats a scan progress to XML format.
Arguments:
progress (dict): Dictionary with a scan progress.
Return:
Progress as xml element object.
"""
progress_xml = Element('progress')
for progress_item, value in progress.items():
elem = None
if progress_item == 'current_hosts':
for host, h_progress in value.items():
elem = Element('host')
elem.set('name', host)
elem.text = str(h_progress)
progress_xml.append(elem)
else:
elem = Element(progress_item)
elem.text = str(value)
progress_xml.append(elem)
return progress_xml
def simple_response_str(
command: str,
status: int,
status_text: str,
content: Union[str, Element, List[str], List[Element]] = "",
) -> bytes:
"""Creates an OSP response XML string.
Arguments:
command (str): OSP Command to respond to.
status (int): Status of the response.
status_text (str): Status text of the response.
content (str): Text part of the response XML element.
Return:
String of response in xml format.
"""
response = Element('%s_response' % command)
for name, value in [('status', str(status)), ('status_text', status_text)]:
response.set(name, escape(str(value)))
if isinstance(content, list):
for elem in content:
if isinstance(elem, Element):
response.append(elem)
elif isinstance(content, Element):
response.append(content)
elif content is not None:
response.text = escape_ctrl_chars(content)
return tostring(response, encoding='utf-8')
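# Illustrative example (assumed usage): a minimal OSP response. The returned
# bytes consist of an XML declaration followed by
#   <get_version_response status="200" status_text="OK">content</get_version_response>
#
#   simple_response_str('get_version', 200, 'OK', 'content')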
def get_elements_from_dict(data: Dict[str, Any]) -> List[Element]:
"""Creates a list of etree elements from a dictionary
Args:
Dictionary of tags and their elements.
Return:
List of xml elements.
"""
responses = []
for tag, value in data.items():
elem = Element(tag)
if isinstance(value, dict):
for val in get_elements_from_dict(value):
elem.append(val)
elif isinstance(value, list):
elem.text = ', '.join(value)
elif value is not None:
elem.text = escape_ctrl_chars(value)
responses.append(elem)
return responses
def elements_as_text(
elements: Dict[str, Union[str, Dict]], indent: int = 2
) -> str:
""" Returns the elements dictionary as formatted plain text. """
text = ""
for elename, eledesc in elements.items():
if isinstance(eledesc, dict):
desc_txt = elements_as_text(eledesc, indent + 2)
desc_txt = ''.join(['\n', desc_txt])
elif isinstance(eledesc, str):
desc_txt = ''.join([eledesc, '\n'])
else:
assert False, "Only string or dictionary"
ele_txt = "\t{0}{1: <22} {2}".format(' ' * indent, elename, desc_txt)
text = ''.join([text, ele_txt])
return text
class XmlStringHelper:
"""Class with methods to help the creation of a xml object in
string format.
"""
def create_element(self, elem_name: str, end: bool = False) -> bytes:
"""Get a name and create the open element of an entity.
Arguments:
elem_name (str): The name of the tag element.
            end (bool): Create an initial tag if False, otherwise the end tag.
Return:
Encoded string representing a part of an xml element.
"""
if end:
ret = "%s>" % elem_name
else:
ret = "<%s>" % elem_name
return ret.encode('utf-8')
def create_response(self, command: str, end: bool = False) -> bytes:
"""Create or end an xml response.
Arguments:
command (str): The name of the command for the response element.
            end (bool): Create an initial tag if False, otherwise the end tag.
Return:
Encoded string representing a part of an xml element.
"""
if not command:
return
if end:
            return ('</%s_response>' % command).encode('utf-8')
return ('<%s_response status="200" status_text="OK">' % command).encode(
'utf-8'
)
def add_element(
self,
content: Union[Element, str, list],
xml_str: bytes = None,
end: bool = False,
) -> bytes:
"""Create the initial or ending tag for a subelement, or add
one or many xml elements
Arguments:
content (Element, str, list): Content to add.
xml_str (bytes): Initial string where content to be added to.
            end (bool): Create an initial tag if False, otherwise the end tag.
It will be added to the xml_str.
Return:
Encoded string representing a part of an xml element.
"""
if not xml_str:
xml_str = b''
if content:
if isinstance(content, list):
for elem in content:
xml_str = xml_str + tostring(elem, encoding='utf-8')
elif isinstance(content, Element):
xml_str = xml_str + tostring(content, encoding='utf-8')
else:
if end:
                    xml_str = xml_str + self.create_element(content, end=True)
else:
xml_str = xml_str + self.create_element(content)
return xml_str
def add_attr(
self, tag: bytes, attribute: str, value: Union[str, int] = None
) -> bytes:
"""Add an attribute to the beginning tag of an xml element.
Arguments:
tag (bytes): Tag to add the attribute to.
attribute (str): Attribute name
value (str): Attribute value
Return:
Tag in encoded string format with the given attribute
"""
if not tag:
return None
if not attribute:
return tag
if not value:
value = ''
return tag[:-1] + (
" %s=%s>" % (attribute, quoteattr(str(value)))
).encode('utf-8')
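    # Illustrative sketch (assumed usage): streaming a large response piece by
    # piece instead of building the whole XML tree in memory.
    #
    #   xml_helper = XmlStringHelper()
    #   buf = xml_helper.create_response('get_vts')        # opening tag
    #   buf = xml_helper.add_element('vts', buf)           # <vts>
    #   buf = xml_helper.add_attr(buf, 'vts_version', '1')
    #   buf = buf + xml_helper.create_element('vts', end=True)   # </vts>
    #   buf = buf + xml_helper.create_response('get_vts', end=True)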
ospd-21.4.4/poetry.lock 0000664 0000000 0000000 00000156066 14131311270 0014752 0 ustar 00root root 0000000 0000000 [[package]]
name = "appdirs"
version = "1.4.4"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "astroid"
version = "2.6.5"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = "~=3.6"
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
wrapt = ">=1.11,<1.13"
[[package]]
name = "autohooks"
version = "21.7.0"
description = "Library for managing git hooks"
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
[package.dependencies]
colorful = ">=0.5.4,<0.6.0"
packaging = ">=20.3,<21.0"
tomlkit = ">=0.5.11"
[[package]]
name = "autohooks-plugin-black"
version = "1.2.0"
description = "Autohooks plugin for code formatting via black"
category = "dev"
optional = false
python-versions = ">=3.5"
[package.dependencies]
autohooks = ">=1.1"
black = "*"
[[package]]
name = "autohooks-plugin-pylint"
version = "1.2.0"
description = "Autohooks plugin for code linting via pylint"
category = "dev"
optional = false
python-versions = ">=3.5"
[package.dependencies]
autohooks = ">=1.1"
pylint = "*"
[[package]]
name = "bcrypt"
version = "3.2.0"
description = "Modern password hashing for your software and your servers"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
cffi = ">=1.1"
six = ">=1.4.1"
[package.extras]
tests = ["pytest (>=3.2.1,!=3.3.0)"]
typecheck = ["mypy"]
[[package]]
name = "black"
version = "20.8b1"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
appdirs = "*"
click = ">=7.1.2"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.6,<1"
regex = ">=2020.1.8"
toml = ">=0.10.1"
typed-ast = ">=1.4.0"
typing-extensions = ">=3.7.4"
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
[[package]]
name = "certifi"
version = "2021.5.30"
description = "Python package for providing Mozilla's CA Bundle."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "cffi"
version = "1.14.6"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
pycparser = "*"
[[package]]
name = "charset-normalizer"
version = "2.0.4"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
optional = false
python-versions = ">=3.5.0"
[package.extras]
unicode_backport = ["unicodedata2"]
[[package]]
name = "click"
version = "8.0.1"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[[package]]
name = "colorama"
version = "0.4.4"
description = "Cross-platform colored terminal text."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "colorful"
version = "0.5.4"
description = "Terminal string styling done right, in Python."
category = "dev"
optional = false
python-versions = "*"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "cryptography"
version = "3.4.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
sdist = ["setuptools-rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
[[package]]
name = "defusedxml"
version = "0.7.1"
description = "XML bomb protection for Python stdlib modules"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "deprecated"
version = "1.2.12"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
wrapt = ">=1.10,<2"
[package.extras]
dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
[[package]]
name = "idna"
version = "3.2"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "dev"
optional = false
python-versions = ">=3.5"
[[package]]
name = "importlib-metadata"
version = "4.6.3"
description = "Read metadata from Python packages"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
perf = ["ipython"]
testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
[[package]]
name = "isort"
version = "5.9.3"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6.1,<4.0"
[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
name = "lazy-object-proxy"
version = "1.6.0"
description = "A fast and thorough lazy object proxy."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[[package]]
name = "lxml"
version = "4.6.3"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
[package.extras]
cssselect = ["cssselect (>=0.7)"]
html5 = ["html5lib"]
htmlsoup = ["beautifulsoup4"]
source = ["Cython (>=0.29.7)"]
[[package]]
name = "mccabe"
version = "0.6.1"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "mypy-extensions"
version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "packaging"
version = "20.9"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
pyparsing = ">=2.0.2"
[[package]]
name = "paramiko"
version = "2.7.2"
description = "SSH2 protocol library"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
bcrypt = ">=3.1.3"
cryptography = ">=2.5"
pynacl = ">=1.0.1"
[package.extras]
all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"]
gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
invoke = ["invoke (>=1.3)"]
[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "pontos"
version = "21.7.4"
description = "Common utilities and tools maintained by Greenbone Networks"
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
[package.dependencies]
colorful = ">=0.5.4,<0.6.0"
packaging = ">=20.3,<21.0"
requests = ">=2.24.0,<3.0.0"
tomlkit = ">=0.5.11"
[[package]]
name = "psutil"
version = "5.8.0"
description = "Cross-platform lib for process and system monitoring in Python."
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.extras]
test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"]
[[package]]
name = "pycparser"
version = "2.20"
description = "C parser in Python"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pylint"
version = "2.9.6"
description = "python code static checker"
category = "dev"
optional = false
python-versions = "~=3.6"
[package.dependencies]
astroid = ">=2.6.5,<2.7"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.7"
toml = ">=0.7.1"
[[package]]
name = "pynacl"
version = "1.4.0"
description = "Python binding to the Networking and Cryptography (NaCl) library"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
cffi = ">=1.4.1"
six = "*"
[package.extras]
docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
[[package]]
name = "pyparsing"
version = "2.4.7"
description = "Python parsing module"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "regex"
version = "2021.7.6"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "requests"
version = "2.26.0"
description = "Python HTTP for Humans."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
name = "rope"
version = "0.18.0"
description = "a python refactoring library..."
category = "dev"
optional = false
python-versions = "*"
[package.extras]
dev = ["pytest"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tomlkit"
version = "0.7.2"
description = "Style preserving TOML library"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "typed-ast"
version = "1.4.3"
description = "a fork of Python 2 and 3 ast modules with type comment support"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "typing-extensions"
version = "3.10.0.0"
description = "Backported and Experimental Type Hints for Python 3.5+"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "urllib3"
version = "1.26.6"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "wrapt"
version = "1.12.1"
description = "Module for decorators, wrappers and monkey patching."
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "zipp"
version = "3.5.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.extras]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "402d995fc719012bfc1873b506ae8db9ab76818c42a763cbad0f986bdf90129d"
[metadata.files]
appdirs = [
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
astroid = [
{file = "astroid-2.6.5-py3-none-any.whl", hash = "sha256:7b963d1c590d490f60d2973e57437115978d3a2529843f160b5003b721e1e925"},
{file = "astroid-2.6.5.tar.gz", hash = "sha256:83e494b02d75d07d4e347b27c066fd791c0c74fc96c613d1ea3de0c82c48168f"},
]
autohooks = [
{file = "autohooks-21.7.0-py3-none-any.whl", hash = "sha256:54bda89bbd5b19d83538a110bca0088b38378a4c0e1cf14581792f3e41a5a260"},
{file = "autohooks-21.7.0.tar.gz", hash = "sha256:8d09ee52f67cedd5873578a9b4d97b6b80fd9db6dca6d1844621d02a197a12e2"},
]
autohooks-plugin-black = [
{file = "autohooks-plugin-black-1.2.0.tar.gz", hash = "sha256:e69a18209f3fd584da903e87faf0e3685caa4d51a9e870ddb3c2b1aef93387b8"},
{file = "autohooks_plugin_black-1.2.0-py3-none-any.whl", hash = "sha256:5099f620e01d3fcfd70195e63f101d003528b40475e921829a9ebde000f2c5ee"},
]
autohooks-plugin-pylint = [
{file = "autohooks-plugin-pylint-1.2.0.tar.gz", hash = "sha256:b1acc77b7845622d969ea2bfe5d8a95ae16a80c2282ca43888f522c9be479d10"},
{file = "autohooks_plugin_pylint-1.2.0-py3-none-any.whl", hash = "sha256:f08cef7ce4374661a2087e621e8fda411f3b2c19ebb7c92c7ac47b57892f7a3d"},
]
bcrypt = [
{file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"},
{file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"},
{file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"},
{file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"},
]
black = [
{file = "black-20.8b1-py3-none-any.whl", hash = "sha256:70b62ef1527c950db59062cda342ea224d772abdf6adc58b86a45421bab20a6b"},
{file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
]
certifi = [
{file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"},
{file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"},
]
cffi = [
{file = "cffi-1.14.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c"},
{file = "cffi-1.14.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99"},
{file = "cffi-1.14.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819"},
{file = "cffi-1.14.6-cp27-cp27m-win32.whl", hash = "sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20"},
{file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = "sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"},
{file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"},
{file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"},
{file = "cffi-1.14.6-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534"},
{file = "cffi-1.14.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a"},
{file = "cffi-1.14.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5"},
{file = "cffi-1.14.6-cp35-cp35m-win32.whl", hash = "sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca"},
{file = "cffi-1.14.6-cp35-cp35m-win_amd64.whl", hash = "sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218"},
{file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"},
{file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"},
{file = "cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"},
{file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d"},
{file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b"},
{file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb"},
{file = "cffi-1.14.6-cp36-cp36m-win32.whl", hash = "sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a"},
{file = "cffi-1.14.6-cp36-cp36m-win_amd64.whl", hash = "sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e"},
{file = "cffi-1.14.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5"},
{file = "cffi-1.14.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf"},
{file = "cffi-1.14.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"},
{file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56"},
{file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c"},
{file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762"},
{file = "cffi-1.14.6-cp37-cp37m-win32.whl", hash = "sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771"},
{file = "cffi-1.14.6-cp37-cp37m-win_amd64.whl", hash = "sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a"},
{file = "cffi-1.14.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0"},
{file = "cffi-1.14.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e"},
{file = "cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346"},
{file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc"},
{file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd"},
{file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc"},
{file = "cffi-1.14.6-cp38-cp38-win32.whl", hash = "sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548"},
{file = "cffi-1.14.6-cp38-cp38-win_amd64.whl", hash = "sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156"},
{file = "cffi-1.14.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d"},
{file = "cffi-1.14.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e"},
{file = "cffi-1.14.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c"},
{file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202"},
{file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f"},
{file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87"},
{file = "cffi-1.14.6-cp39-cp39-win32.whl", hash = "sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728"},
{file = "cffi-1.14.6-cp39-cp39-win_amd64.whl", hash = "sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2"},
{file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"},
]
charset-normalizer = [
{file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"},
{file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"},
]
click = [
{file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"},
{file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
colorful = [
{file = "colorful-0.5.4-py2.py3-none-any.whl", hash = "sha256:8d264b52a39aae4c0ba3e2a46afbaec81b0559a99be0d2cfe2aba4cf94531348"},
{file = "colorful-0.5.4.tar.gz", hash = "sha256:86848ad4e2eda60cd2519d8698945d22f6f6551e23e95f3f14dfbb60997807ea"},
]
cryptography = [
{file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"},
{file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"},
{file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"},
{file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"},
{file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"},
{file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"},
{file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"},
{file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"},
{file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"},
{file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"},
{file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"},
{file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"},
]
defusedxml = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
]
deprecated = [
{file = "Deprecated-1.2.12-py2.py3-none-any.whl", hash = "sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771"},
{file = "Deprecated-1.2.12.tar.gz", hash = "sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1"},
]
idna = [
{file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"},
{file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"},
]
importlib-metadata = [
{file = "importlib_metadata-4.6.3-py3-none-any.whl", hash = "sha256:51c6635429c77cf1ae634c997ff9e53ca3438b495f10a55ba28594dd69764a8b"},
{file = "importlib_metadata-4.6.3.tar.gz", hash = "sha256:0645585859e9a6689c523927a5032f2ba5919f1f7d0e84bd4533312320de1ff9"},
]
isort = [
{file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
{file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
]
lazy-object-proxy = [
{file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"},
]
lxml = [
{file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"},
{file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"},
{file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"},
{file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"},
{file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"},
{file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"},
{file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"},
{file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"},
{file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"},
{file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"},
{file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"},
{file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"},
{file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"},
{file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"},
{file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"},
{file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"},
{file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"},
{file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"},
{file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"},
{file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"},
{file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"},
{file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"},
{file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"},
{file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"},
{file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"},
{file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"},
{file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"},
{file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"},
{file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"},
{file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"},
{file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"},
{file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"},
{file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"},
{file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"},
{file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"},
{file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
{file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
{file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
]
paramiko = [
{file = "paramiko-2.7.2-py2.py3-none-any.whl", hash = "sha256:4f3e316fef2ac628b05097a637af35685183111d4bc1b5979bd397c2ab7b5898"},
{file = "paramiko-2.7.2.tar.gz", hash = "sha256:7f36f4ba2c0d81d219f4595e35f70d56cc94f9ac40a6acdf51d6ca210ce65035"},
]
pathspec = [
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
pontos = [
{file = "pontos-21.7.4-py3-none-any.whl", hash = "sha256:780647abc41235c382cf1735319eb8d9e98607b46608b321787303019e6b4443"},
{file = "pontos-21.7.4.tar.gz", hash = "sha256:342a5939fbbed1ca4b9b34991987b6a8d1c52627047319c8ce73b69f432cc9a5"},
]
psutil = [
{file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"},
{file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"},
{file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"},
{file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"},
{file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"},
{file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"},
{file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"},
{file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"},
{file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"},
{file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"},
{file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"},
{file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"},
{file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"},
{file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"},
{file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"},
{file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"},
{file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"},
{file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"},
{file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"},
{file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"},
{file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"},
{file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"},
{file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"},
{file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"},
{file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"},
{file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"},
{file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"},
{file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"},
]
pycparser = [
{file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
]
pylint = [
{file = "pylint-2.9.6-py3-none-any.whl", hash = "sha256:2e1a0eb2e8ab41d6b5dbada87f066492bb1557b12b76c47c2ee8aa8a11186594"},
{file = "pylint-2.9.6.tar.gz", hash = "sha256:8b838c8983ee1904b2de66cce9d0b96649a91901350e956d78f289c3bc87b48e"},
]
pynacl = [
{file = "PyNaCl-1.4.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff"},
{file = "PyNaCl-1.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514"},
{file = "PyNaCl-1.4.0-cp27-cp27m-win32.whl", hash = "sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574"},
{file = "PyNaCl-1.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80"},
{file = "PyNaCl-1.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7"},
{file = "PyNaCl-1.4.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122"},
{file = "PyNaCl-1.4.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d"},
{file = "PyNaCl-1.4.0-cp35-cp35m-win32.whl", hash = "sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4"},
{file = "PyNaCl-1.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25"},
{file = "PyNaCl-1.4.0-cp36-cp36m-win32.whl", hash = "sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4"},
{file = "PyNaCl-1.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6"},
{file = "PyNaCl-1.4.0-cp37-cp37m-win32.whl", hash = "sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f"},
{file = "PyNaCl-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f"},
{file = "PyNaCl-1.4.0-cp38-cp38-win32.whl", hash = "sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96"},
{file = "PyNaCl-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420"},
{file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"},
]
pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
]
regex = [
{file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"},
{file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"},
{file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"},
{file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"},
{file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"},
{file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"},
{file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"},
{file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"},
{file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"},
{file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"},
{file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"},
{file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"},
{file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"},
{file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"},
{file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"},
{file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"},
{file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"},
{file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"},
{file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"},
{file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"},
{file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"},
{file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"},
{file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"},
{file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"},
{file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"},
{file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"},
{file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"},
{file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"},
{file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"},
]
requests = [
{file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
{file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
]
rope = [
{file = "rope-0.18.0.tar.gz", hash = "sha256:786b5c38c530d4846aa68a42604f61b4e69a493390e3ca11b88df0fbfdc3ed04"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomlkit = [
{file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"},
{file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"},
]
typed-ast = [
{file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
{file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
{file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
{file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
{file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
{file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
{file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
{file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
{file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
{file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
{file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
{file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
{file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
{file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
{file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
{file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
{file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
{file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
{file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
{file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
{file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
{file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
{file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
{file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
{file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
{file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
{file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
{file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
{file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
{file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
]
typing-extensions = [
{file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"},
{file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"},
{file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"},
]
urllib3 = [
{file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"},
{file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"},
]
wrapt = [
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
]
zipp = [
{file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"},
{file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"},
]
ospd-21.4.4/poetry.toml 0000664 0000000 0000000 00000000040 14131311270 0014751 0 ustar 00root root 0000000 0000000 [virtualenvs]
in-project = true
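# Note: with in-project = true, Poetry places the virtual environment in a
# .venv/ directory inside the checkout (the same .venv that is excluded from
# formatting in pyproject.toml).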
ospd-21.4.4/pyproject.toml 0000664 0000000 0000000 00000003743 14131311270 0015463 0 ustar 00root root 0000000 0000000 [build-system]
requires = ["setuptools", "wheel"]
[tool.poetry]
name = "ospd"
version = "21.4.4"
description = "OSPD is a base for scanner wrappers which share the same communication protocol: OSP (Open Scanner Protocol)"
authors = ["Greenbone Networks GmbH <info@greenbone.net>"]
license = "AGPL-3.0-or-later"
readme = "README.md"
homepage = "https://github.com/greenbone/ospd"
repository = "https://github.com/greenbone/ospd"
classifiers = [
# Full list: https://pypi.org/pypi?%3Aaction=list_classifiers
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
]
keywords = [
"Greenbone Vulnerability Management",
"Vulnerability Scanner",
"Open Scanner Protocol",
"OSP",
]
packages = [
{ include = "ospd"},
{ include = "tests", format = "sdist" },
{ include = "COPYING", format = "sdist"},
{ include = "CHANGELOG.md", format = "sdist"},
{ include = "poetry.lock", format = "sdist"},
{ include = "poetry.toml", format = "sdist"},
{ include = "setup.py", format = "sdist"},
]
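# Entries marked format = "sdist" are only included in source distributions;
# wheels built from this configuration ship the ospd package alone.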
[tool.poetry.dependencies]
python = "^3.7"
psutil = "^5.5.1"
lxml = "^4.5.2"
defusedxml = ">=0.6,<0.8"
paramiko = "^2.7.1"
deprecated = "^1.2.10"
[tool.poetry.dev-dependencies]
pylint = "^2.7.2"
autohooks-plugin-pylint = "^1.2.0"
autohooks-plugin-black = {version = "^1.2.0", python = "^3.7"}
black = {version = "20.8b1", python = "^3.7"}
rope = "^0.18.0"
pontos = "^21.6.7"
[tool.black]
line-length = 80
target-version = ['py37', 'py38']
skip-string-normalization = true
exclude = '''
/(
\.git
| \.hg
| \.venv
| \.circleci
| \.github
| \.vscode
| _build
| build
| dist
| docs
)/
'''
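# With the settings above, the formatter can also be run by hand, for example
# via the Poetry environment: poetry run black ospd tests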
[tool.autohooks]
mode = "poetry"
pre-commit = ['autohooks.plugins.black', 'autohooks.plugins.pylint']
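# autohooks runs black and pylint from the Poetry environment as a git
# pre-commit hook. The hook itself is usually installed once via the autohooks
# CLI (e.g. poetry run autohooks activate); the exact invocation may differ per
# autohooks version.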
[tool.pontos.version]
version-module-file = "ospd/__version__.py"
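# Used by the pontos version tooling: version bumps for a release update the
# version string kept in ospd/__version__.py so it stays in sync with
# tool.poetry.version above.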
ospd-21.4.4/setup.py 0000664 0000000 0000000 00000010221 14131311270 0014246 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# pylint: disable=invalid-name
""" Setup configuration and management for module ospd
Standard Python setup configuration, including support for PyPI.
"""
from os import path
from setuptools import (
setup,
find_packages,
) # Always prefer setuptools over distutils
from ospd import __version__
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ospd',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# http://packaging.python.org/en/latest/tutorial.html#version
version=__version__,
description=(
'OSPD is a base for scanner wrappers which share the '
'same communication protocol: OSP (Open Scanner '
'Protocol)'
),
long_description=long_description,
long_description_content_type='text/markdown',
# The project's main homepage.
url='http://www.openvas.org',
# Author
author='Greenbone Networks GmbH',
author_email='info@greenbone.net',
# License
license='AGPL-3.0-or-later',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', # pylint: disable=line-too-long
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
# What does your project relate to?
keywords=['Greenbone Vulnerability Manager OSP'],
python_requires='>=3.7',
# List run-time dependencies here. These will be installed by pip when your
# project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files
install_requires=['paramiko', 'defusedxml', 'lxml', 'deprecated', 'psutil'],
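# These runtime dependencies mirror [tool.poetry.dependencies] in
# pyproject.toml; exact versions are pinned there via poetry.lock, while this
# list is intentionally left unpinned for pip-based installs.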
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['tests*']),
# If there are data files included in your packages that need to be
# installed, specify them here.
include_package_data=True,
package_data={'': []},
# Scripts. Define scripts here which should be installed in the
# sys.prefix/bin directory. You can define an alternative place for
# installation by setting the --install-scripts option of setup.py
# scripts = [''],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
# entry_points={
# 'console_scripts': [
# 'sample=sample:main',
# ],
# },
test_suite="tests",
)
ospd-21.4.4/tests/ 0000775 0000000 0000000 00000000000 14131311270 0013702 5 ustar 00root root 0000000 0000000 ospd-21.4.4/tests/__init__.py 0000664 0000000 0000000 00000001361 14131311270 0016014 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
ospd-21.4.4/tests/command/ 0000775 0000000 0000000 00000000000 14131311270 0015320 5 ustar 00root root 0000000 0000000 ospd-21.4.4/tests/command/__init__.py 0000664 0000000 0000000 00000001361 14131311270 0017432 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
ospd-21.4.4/tests/command/test_command.py 0000664 0000000 0000000 00000005144 14131311270 0020353 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from unittest import TestCase
from ospd.command.registry import get_commands, remove_command
from ospd.command.command import BaseCommand
class BaseCommandTestCase(TestCase):
def test_auto_register(self):
commands = get_commands()
before = len(commands)
class Foo(BaseCommand):
name = "foo"
def handle_xml(self, xml):
pass
after = len(commands)
try:
self.assertEqual(before + 1, after)
c_dict = {c.name: c for c in commands}
self.assertIn('foo', c_dict)
self.assertIs(c_dict['foo'], Foo)
finally:
remove_command(Foo)
def test_basic_properties(self):
class Foo(BaseCommand):
name = "foo"
attributes = {'lorem': 'ipsum'}
elements = {'foo': 'bar'}
description = 'bar'
def handle_xml(self, xml):
pass
try:
f = Foo({})
self.assertEqual(f.get_name(), 'foo')
self.assertEqual(f.get_description(), 'bar')
self.assertEqual(f.get_attributes(), {'lorem': 'ipsum'})
self.assertEqual(f.get_elements(), {'foo': 'bar'})
finally:
remove_command(Foo)
def test_as_dict(self):
class Foo(BaseCommand):
name = "foo"
attributes = {'lorem': 'ipsum'}
elements = {'foo': 'bar'}
description = 'bar'
def handle_xml(self, xml):
pass
try:
f = Foo({})
f_dict = f.as_dict()
self.assertEqual(f_dict['name'], 'foo')
self.assertEqual(f_dict['description'], 'bar')
self.assertEqual(f_dict['attributes'], {'lorem': 'ipsum'})
self.assertEqual(f_dict['elements'], {'foo': 'bar'})
finally:
remove_command(Foo)
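# Taken together, these cases document the BaseCommand contract: a subclass
# only needs to set a `name` and implement handle_xml(). Defining the class is
# enough to register it in the global command registry, which is why every
# test removes its Foo command again via remove_command().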
ospd-21.4.4/tests/command/test_commands.py 0000664 0000000 0000000 00000040707 14131311270 0020542 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import time
from unittest import TestCase
from unittest.mock import patch
from xml.etree import ElementTree as et
from ospd.command.command import (
GetPerformance,
StartScan,
StopScan,
GetMemoryUsage,
)
from ospd.errors import OspdCommandError, OspdError
from ospd.misc import create_process
from ..helper import DummyWrapper, assert_called, FakeStream, FakeDataManager
class GetPerformanceTestCase(TestCase):
@patch('ospd.command.command.subprocess')
def test_get_performance(self, mock_subproc):
cmd = GetPerformance(None)
mock_subproc.check_output.return_value = b'foo'
response = et.fromstring(
cmd.handle_xml(
et.fromstring(
''
)
)
)
self.assertEqual(response.get('status'), '200')
self.assertEqual(response.tag, 'get_performance_response')
def test_get_performance_fail_int(self):
cmd = GetPerformance(None)
request = et.fromstring(
''
)
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
def test_get_performance_fail_regex(self):
cmd = GetPerformance(None)
request = et.fromstring(
''
)
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
def test_get_performance_fail_cmd(self):
cmd = GetPerformance(None)
request = et.fromstring(
''
)
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
class StartScanTestCase(TestCase):
def test_scan_with_vts_empty_vt_list(self):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
)
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
@patch("ospd.ospd.create_process")
def test_scan_with_vts(self, mock_create_process):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
''
''
''
)
# With one vt, without params
response = et.fromstring(cmd.handle_xml(request))
daemon.start_queued_scans()
scan_id = response.findtext('id')
vts_collection = daemon.get_scan_vts(scan_id)
self.assertEqual(vts_collection, {'1.2.3.4': {}, 'vt_groups': []})
self.assertNotEqual(vts_collection, {'1.2.3.6': {}})
daemon.start_queued_scans()
assert_called(mock_create_process)
def test_scan_pop_vts(self):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
''
''
''
)
# With one vt, without params
response = et.fromstring(cmd.handle_xml(request))
scan_id = response.findtext('id')
daemon.start_queued_scans()
vts_collection = daemon.get_scan_vts(scan_id)
self.assertEqual(vts_collection, {'1.2.3.4': {}, 'vt_groups': []})
self.assertRaises(KeyError, daemon.get_scan_vts, scan_id)
def test_scan_pop_ports(self):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
''
''
''
)
# With one vt, without params
response = et.fromstring(cmd.handle_xml(request))
daemon.start_queued_scans()
scan_id = response.findtext('id')
ports = daemon.scan_collection.get_ports(scan_id)
self.assertEqual(ports, '80, 443')
self.assertRaises(KeyError, daemon.scan_collection.get_ports, scan_id)
@patch("ospd.ospd.create_process")
def test_scan_without_vts(self, mock_create_process):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
# With out vts
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
)
response = et.fromstring(cmd.handle_xml(request))
daemon.start_queued_scans()
scan_id = response.findtext('id')
self.assertEqual(daemon.get_scan_vts(scan_id), {})
assert_called(mock_create_process)
def test_scan_with_vts_and_param_missing_vt_param_id(self):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
# Raise because no vt_param id attribute
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
'200'
''
''
)
with self.assertRaises(OspdError):
cmd.handle_xml(request)
@patch("ospd.ospd.create_process")
def test_scan_with_vts_and_param(self, mock_create_process):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
# No error
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
''
'200'
''
''
''
)
response = et.fromstring(cmd.handle_xml(request))
daemon.start_queued_scans()
scan_id = response.findtext('id')
self.assertEqual(
daemon.get_scan_vts(scan_id),
{'1234': {'ABC': '200'}, 'vt_groups': []},
)
daemon.start_queued_scans()
assert_called(mock_create_process)
def test_scan_with_vts_and_param_missing_vt_group_filter(self):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
# Raise because no vtgroup filter attribute
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
''
)
daemon.start_queued_scans()
with self.assertRaises(OspdError):
cmd.handle_xml(request)
@patch("ospd.ospd.create_process")
def test_scan_with_vts_and_param_with_vt_group_filter(
self, mock_create_process
):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
# No error
request = et.fromstring(
''
''
''
'localhost'
'80, 443'
''
''
''
''
''
''
''
)
response = et.fromstring(cmd.handle_xml(request))
daemon.start_queued_scans()
scan_id = response.findtext('id')
self.assertEqual(daemon.get_scan_vts(scan_id), {'vt_groups': ['a']})
assert_called(mock_create_process)
@patch("ospd.ospd.create_process")
@patch("ospd.command.command.logger")
def test_scan_ignore_multi_target(self, mock_logger, mock_create_process):
daemon = DummyWrapper([])
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
'localhosts'
'22'
''
''
''
''
)
cmd.handle_xml(request)
daemon.start_queued_scans()
assert_called(mock_logger.warning)
assert_called(mock_create_process)
def test_max_queued_scans_reached(self):
daemon = DummyWrapper([])
daemon.max_queued_scans = 1
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
'localhosts'
'22'
''
''
''
''
)
# create first scan
response = et.fromstring(cmd.handle_xml(request))
scan_id_1 = response.findtext('id')
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
daemon.scan_collection.remove_file_pickled_scan_info(scan_id_1)
@patch("ospd.ospd.create_process")
@patch("ospd.command.command.logger")
def test_scan_use_legacy_target_and_port(
self, mock_logger, mock_create_process
):
daemon = DummyWrapper([])
daemon.scan_collection.datamanager = FakeDataManager()
cmd = StartScan(daemon)
request = et.fromstring(
''
''
''
)
response = et.fromstring(cmd.handle_xml(request))
daemon.start_queued_scans()
scan_id = response.findtext('id')
self.assertIsNotNone(scan_id)
self.assertEqual(daemon.get_scan_host(scan_id), 'localhost')
self.assertEqual(daemon.get_scan_ports(scan_id), '22')
assert_called(mock_logger.warning)
assert_called(mock_create_process)
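# The "legacy" request exercised here presumably carries the target directly
# on the <start_scan> element (roughly <start_scan target="localhost"
# ports="22">), which is why a deprecation warning is expected; the attribute
# names are an assumption inferred from the asserted host and port values.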
class StopCommandTestCase(TestCase):
@patch("ospd.ospd.os")
@patch("ospd.ospd.create_process")
def test_stop_scan(self, mock_create_process, mock_os):
mock_process = mock_create_process.return_value
mock_process.is_alive.return_value = True
mock_process.pid = "foo"
fs = FakeStream()
daemon = DummyWrapper([])
daemon.scan_collection.datamanager = FakeDataManager()
request = (
''
''
''
'localhosts'
'22'
''
''
''
''
)
daemon.handle_command(request, fs)
response = fs.get_response()
daemon.start_queued_scans()
assert_called(mock_create_process)
assert_called(mock_process.start)
scan_id = response.findtext('id')
request = et.fromstring('' % scan_id)
cmd = StopScan(daemon)
cmd.handle_xml(request)
assert_called(mock_process.terminate)
mock_os.getpgid.assert_called_with('foo')
def test_unknown_scan_id(self):
daemon = DummyWrapper([])
cmd = StopScan(daemon)
request = et.fromstring('')
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
def test_missing_scan_id(self):
request = et.fromstring('')
cmd = StopScan(None)
with self.assertRaises(OspdCommandError):
cmd.handle_xml(request)
class GetMemoryUsageTestCase(TestCase):
def test_with_main_process_only(self):
cmd = GetMemoryUsage(None)
request = et.fromstring('')
response = et.fromstring(cmd.handle_xml(request))
processes_element = response.find('processes')
process_elements = processes_element.findall('process')
self.assertTrue(len(process_elements), 1)
main_process_element = process_elements[0]
rss_element = main_process_element.find('rss')
vms_element = main_process_element.find('vms')
shared_element = main_process_element.find('shared')
self.assertIsNotNone(rss_element)
self.assertIsNotNone(rss_element.text)
self.assertIsNotNone(vms_element)
self.assertIsNotNone(vms_element.text)
self.assertIsNotNone(shared_element)
self.assertIsNotNone(shared_element.text)
def test_with_subprocess(self):
cmd = GetMemoryUsage(None)
def foo(): # pylint: disable=blacklisted-name
time.sleep(60)
create_process(foo, args=[])
request = et.fromstring('')
response = et.fromstring(cmd.handle_xml(request))
processes_element = response.find('processes')
process_elements = processes_element.findall('process')
self.assertTrue(len(process_elements), 2)
for process_element in process_elements:
rss_element = process_element.find('rss')
vms_element = process_element.find('vms')
shared_element = process_element.find('shared')
self.assertIsNotNone(rss_element)
self.assertIsNotNone(rss_element.text)
self.assertIsNotNone(vms_element)
self.assertIsNotNone(vms_element.text)
self.assertIsNotNone(shared_element)
self.assertIsNotNone(shared_element.text)
def test_with_subsubprocess(self):
cmd = GetMemoryUsage(None)
def bar(): # pylint: disable=blacklisted-name
create_process(foo, args=[])
def foo(): # pylint: disable=blacklisted-name
time.sleep(60)
create_process(bar, args=[])
request = et.fromstring('')
response = et.fromstring(cmd.handle_xml(request))
processes_element = response.find('processes')
process_elements = processes_element.findall('process')
# sub-sub-processes aren't listed
self.assertTrue(len(process_elements), 2)
for process_element in process_elements:
rss_element = process_element.find('rss')
vms_element = process_element.find('vms')
shared_element = process_element.find('shared')
self.assertIsNotNone(rss_element)
self.assertIsNotNone(rss_element.text)
self.assertIsNotNone(vms_element)
self.assertIsNotNone(vms_element.text)
self.assertIsNotNone(shared_element)
self.assertIsNotNone(shared_element.text)
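# Shape of the <get_memory_usage> response implied by the assertions in this
# class (element names are taken from the find()/findall() calls above;
# everything else, such as the status attribute, is an assumption):
#
#   <get_memory_usage_response status="200">
#     <processes>
#       <process>
#         <rss>...</rss>
#         <vms>...</vms>
#         <shared>...</shared>
#       </process>
#     </processes>
#   </get_memory_usage_response>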
ospd-21.4.4/tests/command/test_registry.py 0000664 0000000 0000000 00000003766 14131311270 0020615 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
from unittest import TestCase
from ospd.command.registry import get_commands, register_command, remove_command
COMMAND_NAMES = [
"help",
"get_version",
"get_performance",
"get_scanner_details",
"delete_scan",
"get_vts",
"stop_scan",
"get_scans",
"start_scan",
"get_memory_usage",
]
class RegistryTestCase(TestCase):
def test_available_commands(self):
commands = get_commands()
self.assertEqual(len(COMMAND_NAMES), len(commands))
c_list = [c.name for c in commands]
self.assertListEqual(COMMAND_NAMES, c_list)
def test_register_command(self):
commands = get_commands()
before = len(commands)
class Foo:
name = 'foo'
register_command(Foo)
commands = get_commands()
after = len(commands)
try:
self.assertEqual(before + 1, after)
c_dict = {c.name: c for c in commands}
self.assertIn('foo', c_dict)
self.assertIs(c_dict['foo'], Foo)
finally:
remove_command(Foo)
commands = get_commands()
after2 = len(commands)
self.assertEqual(before, after2)
c_dict = {c.name: c for c in commands}
self.assertNotIn('foo', c_dict)
ospd-21.4.4/tests/helper.py 0000664 0000000 0000000 00000014532 14131311270 0015540 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import time
import multiprocessing
from unittest.mock import Mock
from xml.etree import ElementTree as et
from ospd.ospd import OSPDaemon
def assert_called(mock: Mock):
if hasattr(mock, 'assert_called'):
return mock.assert_called()
if not mock.call_count == 1:
msg = "Expected '%s' to have been called once. Called %s times.%s" % (
mock._mock_name or 'mock', # pylint: disable=protected-access
mock.call_count,
mock._calls_repr(), # pylint: disable=protected-access
)
raise AssertionError(msg)
class FakePsutil:
def __init__(self, available=None):
self.available = available
class FakeStream:
def __init__(self, return_value=True):
self.response = b''
self.return_value = return_value
def write(self, data):
self.response = self.response + data
return self.return_value
def get_response(self):
return et.fromstring(self.response)
class FakeDataManager:
def __init__(self):
pass
def dict(self):
return dict()
class DummyWrapper(OSPDaemon):
def __init__(self, results, checkresult=True):
super().__init__()
self.checkresult = checkresult
self.results = results
self.initialized = True
self.scan_collection.data_manager = FakeDataManager()
self.scan_collection.file_storage_dir = '/tmp'
def check(self):
return self.checkresult
@staticmethod
def get_custom_vt_as_xml_str(vt_id, custom):
return 'static test'
@staticmethod
def get_params_vt_as_xml_str(vt_id, vt_params):
return (
''
'ABCTest ABC'
'yes'
''
'DEFTest DEF'
'no'
)
@staticmethod
def get_refs_vt_as_xml_str(vt_id, vt_refs):
response = (
''
''
)
return response
@staticmethod
def get_dependencies_vt_as_xml_str(vt_id, vt_dependencies):
response = (
''
''
''
''
)
return response
@staticmethod
def get_severities_vt_as_xml_str(vt_id, severities):
response = (
'AV:N/AC:L/Au:N/C:N/I:N/'
'A:P'
)
return response
@staticmethod
def get_detection_vt_as_xml_str(
vt_id, detection=None, qod_type=None, qod=None
):
response = 'some detection'
return response
@staticmethod
def get_summary_vt_as_xml_str(vt_id, summary):
response = 'Some summary'
return response
@staticmethod
def get_affected_vt_as_xml_str(vt_id, affected):
response = 'Some affected'
return response
@staticmethod
def get_impact_vt_as_xml_str(vt_id, impact):
response = 'Some impact'
return response
@staticmethod
def get_insight_vt_as_xml_str(vt_id, insight):
response = 'Some insight'
return response
@staticmethod
def get_solution_vt_as_xml_str(
vt_id, solution, solution_type=None, solution_method=None
):
response = 'Some solution'
return response
@staticmethod
def get_creation_time_vt_as_xml_str(
vt_id, creation_time
): # pylint: disable=arguments-differ, arguments-renamed
response = '%s' % creation_time
return response
@staticmethod
def get_modification_time_vt_as_xml_str(
vt_id, modification_time
): # pylint: disable=arguments-differ, arguments-renamed
response = (
'%s' % modification_time
)
return response
def exec_scan(self, scan_id):
time.sleep(0.01)
for res in self.results:
if res.result_type == 'log':
self.add_scan_log(
scan_id,
res.host,
res.hostname,
res.name,
res.value,
res.port,
)
if res.result_type == 'error':
self.add_scan_error(
scan_id,
res.host,
res.hostname,
res.name,
res.value,
res.port,
)
elif res.result_type == 'host-detail':
self.add_scan_host_detail(
scan_id, res.host, res.hostname, res.name, res.value
)
elif res.result_type == 'alarm':
self.add_scan_alarm(
scan_id,
res.host,
res.hostname,
res.name,
res.value,
res.port,
res.test_id,
res.severity,
res.qod,
)
else:
raise ValueError(res.result_type)
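# Minimal usage sketch for these helpers, mirroring how the test modules use
# them; the '<help/>' payload is only a placeholder command, not a quotation
# of any particular test request:
#
#   daemon = DummyWrapper(results=[])
#   daemon.scan_collection.datamanager = FakeDataManager()
#   fs = FakeStream()
#   daemon.handle_command('<help/>', fs)
#   response = fs.get_response()  # parsed xml.etree Element of the reply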
ospd-21.4.4/tests/test_argument_parser.py 0000664 0000000 0000000 00000006743 14131311270 0020523 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" Test module for command line arguments.
"""
import unittest
from unittest.mock import patch
from io import StringIO
from typing import List
from ospd.parser import (
create_parser,
Arguments,
DEFAULT_ADDRESS,
DEFAULT_PORT,
DEFAULT_KEY_FILE,
DEFAULT_NICENESS,
DEFAULT_SCANINFO_STORE_TIME,
DEFAULT_CONFIG_PATH,
DEFAULT_UNIX_SOCKET_PATH,
DEFAULT_PID_PATH,
DEFAULT_LOCKFILE_DIR_PATH,
)
class ArgumentParserTestCase(unittest.TestCase):
def setUp(self):
self.parser = create_parser('Wrapper name')
def parse_args(self, args: List[str]) -> Arguments:
return self.parser.parse_arguments(args)
@patch('sys.stderr', new_callable=StringIO)
def test_port_interval(self, _mock_stderr):
with self.assertRaises(SystemExit):
self.parse_args(['--port=65536'])
with self.assertRaises(SystemExit):
self.parse_args(['--port=0'])
args = self.parse_args(['--port=3353'])
self.assertEqual(3353, args.port)
@patch('sys.stderr', new_callable=StringIO)
def test_port_as_string(self, _mock_stderr):
with self.assertRaises(SystemExit):
self.parse_args(['--port=abcd'])
def test_address_param(self):
args = self.parse_args('-b 1.2.3.4'.split())
self.assertEqual('1.2.3.4', args.address)
def test_correct_lower_case_log_level(self):
args = self.parse_args('-L error'.split())
self.assertEqual('ERROR', args.log_level)
def test_correct_upper_case_log_level(self):
args = self.parse_args('-L INFO'.split())
self.assertEqual('INFO', args.log_level)
@patch('sys.stderr', new_callable=StringIO)
def test_correct_log_level(self, _mock_stderr):
with self.assertRaises(SystemExit):
self.parse_args('-L blah'.split())
def test_non_existing_key(self):
args = self.parse_args('-k foo'.split())
self.assertEqual('foo', args.key_file)
def test_existing_key(self):
args = self.parse_args('-k /etc/passwd'.split())
self.assertEqual('/etc/passwd', args.key_file)
def test_defaults(self):
args = self.parse_args([])
self.assertEqual(args.key_file, DEFAULT_KEY_FILE)
self.assertEqual(args.niceness, DEFAULT_NICENESS)
self.assertEqual(args.log_level, 'INFO')
self.assertEqual(args.address, DEFAULT_ADDRESS)
self.assertEqual(args.port, DEFAULT_PORT)
self.assertEqual(args.scaninfo_store_time, DEFAULT_SCANINFO_STORE_TIME)
self.assertEqual(args.config, DEFAULT_CONFIG_PATH)
self.assertEqual(args.unix_socket, DEFAULT_UNIX_SOCKET_PATH)
self.assertEqual(args.pid_file, DEFAULT_PID_PATH)
self.assertEqual(args.lock_file_dir, DEFAULT_LOCKFILE_DIR_PATH)
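# Usage sketch derived from the cases above (only the flags exercised by
# these tests are shown; nothing beyond them is implied):
#
#   parser = create_parser('Wrapper name')
#   args = parser.parse_arguments(['-b', '1.2.3.4', '--port', '3353', '-L', 'info'])
#   # args.address == '1.2.3.4', args.port == 3353, args.log_level == 'INFO'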
ospd-21.4.4/tests/test_cvss.py 0000664 0000000 0000000 00000002332 14131311270 0016271 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" Test module for cvss scoring calculation
"""
import unittest
from ospd.cvss import CVSS
class CvssTestCase(unittest.TestCase):
def test_cvssv2(self):
vector = 'AV:A/AC:L/Au:S/C:P/I:P/A:P'
cvss_base = CVSS.cvss_base_v2_value(vector)
self.assertEqual(cvss_base, 5.2)
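# Worked example for the vector above using the public CVSS v2 base-score
# equations (a sanity check of the expected 5.2, not a description of the
# internals of ospd.cvss):
#   AV:A = 0.646, AC:L = 0.71, Au:S = 0.56, C/I/A:P = 0.275 each
#   Impact         = 10.41 * (1 - (1 - 0.275)**3)  ~= 6.44
#   Exploitability = 20 * 0.646 * 0.71 * 0.56      ~= 5.14
#   Base           = ((0.6 * 6.44) + (0.4 * 5.14) - 1.5) * 1.176 ~= 5.2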
def test_cvssv3(self):
vector = 'CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:U/C:L/I:L/A:N'
cvss_base = CVSS.cvss_base_v3_value(vector)
self.assertEqual(cvss_base, 3.8)
ospd-21.4.4/tests/test_datapickler.py 0000664 0000000 0000000 00000010235 14131311270 0017577 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import pickle
from pathlib import Path
from hashlib import sha256
from unittest import TestCase
from unittest.mock import patch
from ospd.errors import OspdCommandError
from ospd.datapickler import DataPickler
from .helper import assert_called
class DataPicklerTestCase(TestCase):
def test_store_data(self):
data = {'foo', 'bar'}
filename = 'scan_info_1'
pickled_data = pickle.dumps(data)
tmp_hash = sha256()
tmp_hash.update(pickled_data)
data_pickler = DataPickler('/tmp')
ret = data_pickler.store_data(filename, data)
self.assertEqual(ret, tmp_hash.hexdigest())
data_pickler.remove_file(filename)
def test_store_data_failed(self):
data = {'foo', 'bar'}
filename = 'scan_info_1'
data_pickler = DataPickler('/root')
self.assertRaises(
OspdCommandError, data_pickler.store_data, filename, data
)
def test_store_data_check_permission(self):
OWNER_ONLY_RW_PERMISSION = '0o100600' # pylint: disable=invalid-name
data = {'foo', 'bar'}
filename = 'scan_info_1'
data_pickler = DataPickler('/tmp')
data_pickler.store_data(filename, data)
file_path = (
Path(data_pickler._storage_path) # pylint: disable=protected-access
/ filename
)
self.assertEqual(
oct(file_path.stat().st_mode), OWNER_ONLY_RW_PERMISSION
)
data_pickler.remove_file(filename)
def test_load_data(self):
data_pickler = DataPickler('/tmp')
data = {'foo', 'bar'}
filename = 'scan_info_1'
pickled_data = pickle.dumps(data)
tmp_hash = sha256()
tmp_hash.update(pickled_data)
pickled_data_hash = tmp_hash.hexdigest()
ret = data_pickler.store_data(filename, data)
self.assertEqual(ret, pickled_data_hash)
original_data = data_pickler.load_data(filename, pickled_data_hash)
self.assertIsNotNone(original_data)
self.assertIn('foo', original_data)
@patch("ospd.datapickler.logger")
def test_remove_file_failed(self, mock_logger):
filename = 'nonexistent_file'
data_pickler = DataPickler('/root')
data_pickler.remove_file(filename)
assert_called(mock_logger.error)
@patch("ospd.datapickler.logger")
def test_load_data_no_file(self, mock_logger):
filename = 'scan_info_1'
data_pickler = DataPickler('/tmp')
data_loaded = data_pickler.load_data(filename, "1234")
assert_called(mock_logger.error)
self.assertIsNone(data_loaded)
data_pickler.remove_file(filename)
def test_load_data_corrupted(self):
data_pickler = DataPickler('/tmp')
data = {'foo', 'bar'}
filename = 'scan_info_1'
pickled_data = pickle.dumps(data)
tmp_hash = sha256()
tmp_hash.update(pickled_data)
pickled_data_hash = tmp_hash.hexdigest()
ret = data_pickler.store_data(filename, data)
self.assertEqual(ret, pickled_data_hash)
# corrupt data
file_to_corrupt = (
Path(data_pickler._storage_path) # pylint: disable=protected-access
/ filename
)
with file_to_corrupt.open('ab') as f:
f.write(b'bar2')
original_data = data_pickler.load_data(filename, pickled_data_hash)
self.assertIsNone(original_data)
data_pickler.remove_file(filename)
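# Round-trip sketch of the pattern exercised in this class: store_data()
# pickles the payload into <storage_path>/<filename> with owner-only 0600
# permissions and returns its sha256 hex digest, which load_data() requires
# back as an integrity check (returning None on any mismatch):
#
#   pickler = DataPickler('/tmp')
#   digest = pickler.store_data('scan_info_1', {'foo', 'bar'})
#   data = pickler.load_data('scan_info_1', digest)  # None if the hash differs
#   pickler.remove_file('scan_info_1')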
ospd-21.4.4/tests/test_errors.py 0000664 0000000 0000000 00000004556 14131311270 0016641 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" Test module for OspdCommandError class
"""
import unittest
from ospd.errors import OspdError, OspdCommandError, RequiredArgument
class OspdCommandErrorTestCase(unittest.TestCase):
def test_is_ospd_error(self):
e = OspdCommandError('message')
self.assertIsInstance(e, OspdError)
def test_default_params(self):
e = OspdCommandError('message')
self.assertEqual('message', e.message)
self.assertEqual(400, e.status)
self.assertEqual('osp', e.command)
def test_constructor(self):
e = OspdCommandError('message', 'command', 304)
self.assertEqual('message', e.message)
self.assertEqual('command', e.command)
self.assertEqual(304, e.status)
def test_string_conversion(self):
e = OspdCommandError('message foo bar', 'command', 304)
self.assertEqual('message foo bar', str(e))
def test_as_xml(self):
e = OspdCommandError('message')
self.assertEqual(
b'', e.as_xml()
)
class RequiredArgumentTestCase(unittest.TestCase):
def test_raise_exception(self):
with self.assertRaises(RequiredArgument) as cm:
raise RequiredArgument('foo', 'bar')
ex = cm.exception
self.assertEqual(ex.function, 'foo')
self.assertEqual(ex.argument, 'bar')
def test_string_conversion(self):
ex = RequiredArgument('foo', 'bar')
self.assertEqual(str(ex), 'foo: Argument bar is required')
def test_is_ospd_error(self):
e = RequiredArgument('foo', 'bar')
self.assertIsInstance(e, OspdError)
ospd-21.4.4/tests/test_port_convert.py 0000664 0000000 0000000 00000011027 14131311270 0020040 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
""" Test suites for Port manipulation.
"""
import unittest
from ospd.network import (
ports_as_list,
get_udp_port_list,
get_tcp_port_list,
port_list_compress,
)
class ConvertPortTestCase(unittest.TestCase):
def test_tcp_ports(self):
"""Test only tcp ports."""
tports, uports = ports_as_list('T:1-10,30,31')
self.assertIsNotNone(tports)
self.assertEqual(len(uports), 0)
self.assertEqual(len(tports), 12)
for i in range(1, 10):
self.assertIn(i, tports)
self.assertIn(30, tports)
self.assertIn(31, tports)
def test_udp_ports(self):
"""Test only udp ports."""
tports, uports = ports_as_list('U:1-10')
self.assertIsNotNone(uports)
self.assertEqual(len(tports), 0)
self.assertEqual(len(uports), 10)
for i in range(1, 10):
self.assertIn(i, uports)
def test_both_ports(self):
"""Test tcp und udp ports."""
tports, uports = ports_as_list('T:1-10, U:1-10')
self.assertIsNotNone(tports)
self.assertIsNotNone(uports)
self.assertEqual(len(tports), 10)
self.assertEqual(len(uports), 10)
for i in range(1, 10):
self.assertIn(i, tports)
self.assertIn(i, uports)
self.assertNotIn(0, uports)
def test_both_ports_udp_first(self):
"""Test tcp und udp ports, but udp listed first."""
tports, uports = ports_as_list('U:20-30, T:1-10')
self.assertIsNotNone(tports)
self.assertIsNotNone(uports)
self.assertEqual(len(tports), 10)
self.assertEqual(len(uports), 11)
for i in range(1, 10):
self.assertIn(i, tports)
for i in range(20, 30):
self.assertIn(i, uports)
def test_not_spec_type_ports(self):
"""Test port list without specific type."""
tports, uports = ports_as_list('51-60')
self.assertIsNotNone(tports)
self.assertEqual(len(uports), 0)
self.assertEqual(len(tports), 10)
for i in range(51, 60):
self.assertIn(i, tports)
def test_invalid_char_port(self):
"""Test list with a false char."""
tports, uports = ports_as_list('R:51-60')
self.assertIsNone(tports)
self.assertIsNone(uports)
def test_empty_port(self):
"""Test an empty port list."""
tports, uports = ports_as_list('')
self.assertIsNone(tports)
self.assertIsNone(uports)
def test_get_spec_type_ports(self):
"""Test get specific type ports."""
uports = get_udp_port_list('U:9392,9393T:22')
self.assertEqual(len(uports), 2)
self.assertIn(9392, uports)
tports = get_tcp_port_list('U:9392T:80,22,443')
self.assertEqual(len(tports), 3)
self.assertIn(22, tports)
self.assertIn(80, tports)
self.assertIn(443, tports)
def test_malformed_port_string(self):
"""Test different malformed port list."""
tports, uports = ports_as_list('TU:1-2')
self.assertIsNone(tports)
self.assertIsNone(uports)
tports, uports = ports_as_list('U1-2')
self.assertIsNone(tports)
self.assertIsNone(uports)
tports, uports = ports_as_list('U:1-2t:22')
self.assertIsNone(tports)
self.assertIsNone(uports)
tports, uports = ports_as_list('U1-2,T22')
self.assertIsNone(tports)
self.assertIsNone(uports)
tports, uports = ports_as_list('U:1-2,U:22')
self.assertIsNone(tports)
self.assertIsNone(uports)
def test_compress_list(self):
"""Test different malformed port list."""
port_list = [1, 2, 3, 4, 5, 8, 9, 10, 22, 24, 29, 30]
string = port_list_compress(port_list)
self.assertEqual(string, '1-5,8-10,22,24,29-30')
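# Combined usage sketch, consistent with the assertions above:
#
#   tcp, udp = ports_as_list('T:1-10,30,31')   # tcp has 1-10, 30, 31; udp is empty
#   get_tcp_port_list('U:9392T:80,22,443')     # contains 22, 80 and 443
#   get_udp_port_list('U:9392,9393T:22')       # contains 9392 and 9393
#   port_list_compress([1, 2, 3, 4, 5, 8, 9, 10, 22, 24, 29, 30])
#   # -> '1-5,8-10,22,24,29-30'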
ospd-21.4.4/tests/test_protocol.py 0000664 0000000 0000000 00000002075 14131311270 0017160 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
import unittest
from ospd.protocol import RequestParser
class RequestParserTestCase(unittest.TestCase):
def test_parse(self):
parser = RequestParser()
self.assertFalse(parser.has_ended(b''))
self.assertFalse(parser.has_ended(b''))
self.assertTrue(parser.has_ended(b''))
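# The byte strings fed to has_ended() are XML fragments; the parser is
# expected to report True only once the root element has been closed across
# the accumulated chunks. A hedged illustration (tag names are placeholders,
# not the original literals):
#
#   parser = RequestParser()
#   parser.has_ended(b'<start_scan>')   # False, root still open
#   parser.has_ended(b'<targets/>')     # False
#   parser.has_ended(b'</start_scan>')  # True, root closed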
ospd-21.4.4/tests/test_scan_and_result.py 0000664 0000000 0000000 00000152456 14131311270 0020474 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
# pylint: disable=too-many-lines
""" Test module for scan runs
"""
import time
import unittest
from unittest.mock import patch, MagicMock, Mock
import logging
import xml.etree.ElementTree as ET
from defusedxml.common import EntitiesForbidden
from ospd.resultlist import ResultList
from ospd.errors import OspdCommandError
from ospd.scan import ScanStatus
from .helper import (
DummyWrapper,
assert_called,
FakeStream,
FakeDataManager,
FakePsutil,
)
class FakeStartProcess:
def __init__(self):
self.run_mock = MagicMock()
self.call_mock = MagicMock()
self.func = None
self.args = None
self.kwargs = None
def __call__(self, func, *, args=None, kwargs=None):
self.func = func
self.args = args or []
self.kwargs = kwargs or {}
return self.call_mock
def run(self):
self.func(*self.args, **self.kwargs)
return self.run_mock
def __repr__(self):
return "".format(
self.func, self.args, self.kwargs
)
class Result(object):
def __init__(self, type_, **kwargs):
self.result_type = type_
self.host = ''
self.hostname = ''
self.name = ''
self.value = ''
self.port = ''
self.test_id = ''
self.severity = ''
self.qod = ''
self.uri = ''
for name, value in kwargs.items():
setattr(self, name, value)
class ScanTestCase(unittest.TestCase):
def setUp(self):
self.daemon = DummyWrapper([])
self.daemon.scan_collection.datamanager = FakeDataManager()
self.daemon.scan_collection.file_storage_dir = '/tmp'
def test_get_default_scanner_params(self):
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
# The status of the response must be success (i.e. 200)
self.assertEqual(response.get('status'), '200')
# The response root element must have the correct name
self.assertEqual(response.tag, 'get_scanner_details_response')
# The response must contain a 'scanner_params' element
self.assertIsNotNone(response.find('scanner_params'))
def test_get_default_help(self):
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
self.assertEqual(response.tag, 'help_response')
def test_get_default_scanner_version(self):
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
self.assertIsNotNone(response.find('protocol'))
def test_get_vts_no_vt(self):
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
self.assertIsNotNone(response.find('vts'))
def test_get_vt_xml_no_dict(self):
single_vt = ('1234', None)
vt = self.daemon.get_vt_xml(single_vt)
self.assertFalse(vt.get('id'))
def test_get_vts_single_vt(self):
fs = FakeStream()
self.daemon.add_vt('1.2.3.4', 'A vulnerability test')
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
vts = response.find('vts')
self.assertIsNotNone(vts.find('vt'))
vt = vts.find('vt')
self.assertEqual(vt.get('id'), '1.2.3.4')
def test_get_vts_version(self):
fs = FakeStream()
self.daemon.add_vt('1.2.3.4', 'A vulnerability test')
self.daemon.set_vts_version('today')
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
vts_version = response.find('vts').attrib['vts_version']
self.assertEqual(vts_version, self.daemon.get_vts_version())
vts = response.find('vts')
self.assertIsNotNone(vts.find('vt'))
vt = vts.find('vt')
self.assertEqual(vt.get('id'), '1.2.3.4')
def test_get_vts_version_only(self):
fs = FakeStream()
self.daemon.add_vt('1.2.3.4', 'A vulnerability test')
self.daemon.set_vts_version('today')
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
vts_version = response.find('vts').attrib['vts_version']
self.assertEqual(vts_version, self.daemon.get_vts_version())
vts = response.find('vts')
self.assertIsNone(vts.find('vt'))
def test_get_vts_still_not_init(self):
fs = FakeStream()
self.daemon.initialized = False
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '400')
def test_get_help_still_not_init(self):
fs = FakeStream()
self.daemon.initialized = False
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
def test_get_vts_filter_positive(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
vt_modification_time='19000202',
)
fs = FakeStream()
self.daemon.handle_command(
'', fs
)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
vts = response.find('vts')
vt = vts.find('vt')
self.assertIsNotNone(vt)
self.assertEqual(vt.get('id'), '1.2.3.4')
modification_time = response.findall('vts/vt/modification_time')
self.assertEqual(
'19000202',
ET.tostring(modification_time[0]).decode('utf-8'),
)
def test_get_vts_filter_negative(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
vt_modification_time='19000202',
)
fs = FakeStream()
self.daemon.handle_command(
'', fs
)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
vts = response.find('vts')
vt = vts.find('vt')
self.assertIsNotNone(vt)
self.assertEqual(vt.get('id'), '1.2.3.4')
modification_time = response.findall('vts/vt/modification_time')
self.assertEqual(
'19000202',
ET.tostring(modification_time[0]).decode('utf-8'),
)
def test_get_vts_bad_filter(self):
fs = FakeStream()
cmd = ''
self.assertRaises(OspdCommandError, self.daemon.handle_command, cmd, fs)
self.assertTrue(self.daemon.vts.is_cache_available)
def test_get_vts_multiple_vts(self):
self.daemon.add_vt('1.2.3.4', 'A vulnerability test')
self.daemon.add_vt('1.2.3.5', 'Another vulnerability test')
self.daemon.add_vt('123456789', 'Yet another vulnerability test')
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
vts = response.find('vts')
self.assertIsNotNone(vts.find('vt'))
def test_get_vts_multiple_vts_with_custom(self):
self.daemon.add_vt('1.2.3.4', 'A vulnerability test', custom='b')
self.daemon.add_vt(
'4.3.2.1', 'Another vulnerability test with custom info', custom='b'
)
self.daemon.add_vt(
'123456789', 'Yet another vulnerability test', custom='b'
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
custom = response.findall('vts/vt/custom')
self.assertEqual(3, len(custom))
def test_get_vts_vts_with_params(self):
self.daemon.add_vt(
'1.2.3.4', 'A vulnerability test', vt_params="a", custom="b"
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
# The status of the response must be success (i.e. 200)
self.assertEqual(response.get('status'), '200')
# The response root element must have the correct name
self.assertEqual(response.tag, 'get_vts_response')
# The response must contain a 'vts' element
self.assertIsNotNone(response.find('vts'))
vt_params = response[0][0].findall('params')
self.assertEqual(1, len(vt_params))
custom = response[0][0].findall('custom')
self.assertEqual(1, len(custom))
params = response.findall('vts/vt/params/param')
self.assertEqual(2, len(params))
def test_get_vts_vts_with_refs(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
vt_refs="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
# The status of the response must be success (i.e. 200)
self.assertEqual(response.get('status'), '200')
# The response root element must have the correct name
self.assertEqual(response.tag, 'get_vts_response')
# The response must contain a 'vts' element
self.assertIsNotNone(response.find('vts'))
vt_params = response[0][0].findall('params')
self.assertEqual(1, len(vt_params))
custom = response[0][0].findall('custom')
self.assertEqual(1, len(custom))
refs = response.findall('vts/vt/refs/ref')
self.assertEqual(2, len(refs))
def test_get_vts_vts_with_dependencies(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
vt_dependencies="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
deps = response.findall('vts/vt/dependencies/dependency')
self.assertEqual(2, len(deps))
def test_get_vts_vts_with_severities(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
severities="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
severity = response.findall('vts/vt/severities/severity')
self.assertEqual(1, len(severity))
def test_get_vts_vts_with_detection_qodt(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
detection="c",
qod_t="d",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
detection = response.findall('vts/vt/detection')
self.assertEqual(1, len(detection))
def test_get_vts_vts_with_detection_qodv(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
detection="c",
qod_v="d",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
detection = response.findall('vts/vt/detection')
self.assertEqual(1, len(detection))
def test_get_vts_vts_with_summary(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
summary="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
summary = response.findall('vts/vt/summary')
self.assertEqual(1, len(summary))
def test_get_vts_vts_with_impact(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
impact="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
impact = response.findall('vts/vt/impact')
self.assertEqual(1, len(impact))
def test_get_vts_vts_with_affected(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
affected="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
affect = response.findall('vts/vt/affected')
self.assertEqual(1, len(affect))
def test_get_vts_vts_with_insight(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
insight="c",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
insight = response.findall('vts/vt/insight')
self.assertEqual(1, len(insight))
def test_get_vts_vts_with_solution(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
custom="b",
solution="c",
solution_t="d",
solution_m="e",
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
solution = response.findall('vts/vt/solution')
self.assertEqual(1, len(solution))
def test_get_vts_vts_with_ctime(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
vt_creation_time='01-01-1900',
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
creation_time = response.findall('vts/vt/creation_time')
self.assertEqual(
'01-01-1900',
ET.tostring(creation_time[0]).decode('utf-8'),
)
def test_get_vts_vts_with_mtime(self):
self.daemon.add_vt(
'1.2.3.4',
'A vulnerability test',
vt_params="a",
vt_modification_time='02-01-1900',
)
fs = FakeStream()
self.daemon.handle_command('', fs)
response = fs.get_response()
modification_time = response.findall('vts/vt/modification_time')
self.assertEqual(
'02-01-1900',
ET.tostring(modification_time[0]).decode('utf-8'),
)
def test_clean_forgotten_scans(self):
fs = FakeStream()
self.daemon.handle_command(
'',
fs,
)
response = fs.get_response()
scan_id = response.findtext('id')
finished = False
self.daemon.start_queued_scans()
while not finished:
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
scans = response.findall('scan')
self.assertEqual(1, len(scans))
scan = scans[0]
if scan.get('end_time') != '0':
finished = True
else:
time.sleep(0.01)
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
self.assertEqual(
len(list(self.daemon.scan_collection.ids_iterator())), 1
)
# Set an old end_time
self.daemon.scan_collection.scans_table[scan_id]['end_time'] = 123456
# Run the check
self.daemon.clean_forgotten_scans()
# Not removed
self.assertEqual(
len(list(self.daemon.scan_collection.ids_iterator())), 1
)
# Set the max time and run again
self.daemon.scaninfo_store_time = 1
self.daemon.clean_forgotten_scans()
# Now is removed
self.assertEqual(
len(list(self.daemon.scan_collection.ids_iterator())), 0
)
def test_scan_with_error(self):
fs = FakeStream()
self.daemon.handle_command(
'',
fs,
)
response = fs.get_response()
scan_id = response.findtext('id')
finished = False
self.daemon.start_queued_scans()
self.daemon.add_scan_error(
scan_id, host='a', value='something went wrong'
)
while not finished:
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
scans = response.findall('scan')
self.assertEqual(1, len(scans))
scan = scans[0]
status = scan.get('status')
if status == "init" or status == "running":
self.assertEqual('0', scan.get('end_time'))
time.sleep(0.010)
else:
finished = True
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
self.assertEqual(
response.findtext('scan/results/result'), 'something went wrong'
)
fs = FakeStream()
self.daemon.handle_command('' % scan_id, fs)
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
def test_get_scan_pop(self):
fs = FakeStream()
self.daemon.handle_command(
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.add_scan_host_detail(
scan_id, host='a', value='Some Host Detail'
)
time.sleep(1)
fs = FakeStream()
self.daemon.handle_command('' % scan_id, fs)
response = fs.get_response()
self.assertEqual(
response.findtext('scan/results/result'), 'Some Host Detail'
)
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
self.assertEqual(
response.findtext('scan/results/result'), 'Some Host Detail'
)
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id,
fs,
)
response = fs.get_response()
self.assertEqual(response.findtext('scan/results/result'), None)
def test_get_scan_pop_max_res(self):
fs = FakeStream()
self.daemon.handle_command(
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.add_scan_log(scan_id, host='a', name='a')
self.daemon.add_scan_log(scan_id, host='c', name='c')
self.daemon.add_scan_log(scan_id, host='b', name='b')
fs = FakeStream()
self.daemon.handle_command(
''
% scan_id,
fs,
)
response = fs.get_response()
self.assertEqual(len(response.findall('scan/results/result')), 1)
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
self.assertEqual(len(response.findall('scan/results/result')), 2)
def test_get_scan_results_clean(self):
fs = FakeStream()
self.daemon.handle_command(
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.add_scan_log(scan_id, host='a', name='a')
self.daemon.add_scan_log(scan_id, host='c', name='c')
self.daemon.add_scan_log(scan_id, host='b', name='b')
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
res_len = len(
self.daemon.scan_collection.scans_table[scan_id]['results']
)
self.assertEqual(res_len, 0)
res_len = len(
self.daemon.scan_collection.scans_table[scan_id]['temp_results']
)
self.assertEqual(res_len, 0)
def test_get_scan_results_restore(self):
fs = FakeStream()
self.daemon.handle_command(
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.add_scan_log(scan_id, host='a', name='a')
self.daemon.add_scan_log(scan_id, host='c', name='c')
self.daemon.add_scan_log(scan_id, host='b', name='b')
fs = FakeStream(return_value=False)
self.daemon.handle_command(
'' % scan_id, fs
)
res_len = len(
self.daemon.scan_collection.scans_table[scan_id]['results']
)
self.assertEqual(res_len, 3)
res_len = len(
self.daemon.scan_collection.scans_table[scan_id]['temp_results']
)
self.assertEqual(res_len, 0)
def test_billion_laughs(self):
# pylint: disable=line-too-long
lol = (
''
''
' '
' '
' '
' '
' '
' '
' '
' '
' '
' '
']>'
)
fs = FakeStream()
self.assertRaises(
EntitiesForbidden, self.daemon.handle_command, lol, fs
)
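# The payload built above is a classic "billion laughs" entity-expansion
# document, roughly of the following shape (a sketch embedded in an OSP
# command, not the exact literal); defusedxml refuses to expand the nested
# entities and raises EntitiesForbidden, which is what the assertion checks:
#
#   <?xml version="1.0"?>
#   <!DOCTYPE lolz [
#     <!ENTITY lol "lol">
#     <!ENTITY lol2 "&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;">
#     <!ENTITY lol3 "&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;">
#     ...
#   ]>
#   <lolz>&lol9;</lolz>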
def test_target_with_credentials(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
''
'192.168.0.0/2422'
''
''
'scanuser'
'mypass'
''
'smbuser'
'mypass'
''
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
cred_dict = {
'ssh': {
'type': 'up',
'password': 'mypass',
'port': '22',
'username': 'scanuser',
},
'smb': {'type': 'up', 'password': 'mypass', 'username': 'smbuser'},
}
scan_id = response.findtext('id')
response = self.daemon.get_scan_credentials(scan_id)
self.assertEqual(response, cred_dict)
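# In OSP XML the credential block parsed into cred_dict above would look
# roughly like this (element and attribute names are an assumption chosen to
# match the dictionary keys, not a quotation of the stripped request):
#
#   <credentials>
#     <credential type="up" service="ssh" port="22">
#       <username>scanuser</username>
#       <password>mypass</password>
#     </credential>
#     <credential type="up" service="smb">
#       <username>smbuser</username>
#       <password>mypass</password>
#     </credential>
#   </credentials>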
def test_target_with_credential_empty_community(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
''
'192.168.0.0/2422'
''
''
''
''
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
self.assertEqual(response.get('status'), '200')
cred_dict = {'snmp': {'type': 'up', 'community': ''}}
scan_id = response.findtext('id')
response = self.daemon.get_scan_credentials(scan_id)
self.assertEqual(response, cred_dict)
def test_scan_get_target(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
''
'localhosts,192.168.0.0/24'
'80,443'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
fs = FakeStream()
self.daemon.handle_command('' % scan_id, fs)
response = fs.get_response()
scan_res = response.find('scan')
self.assertEqual(scan_res.get('target'), 'localhosts,192.168.0.0/24')
def test_scan_get_target_options(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
''
'192.168.0.1'
'220'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
time.sleep(1)
target_options = self.daemon.get_scan_target_options(scan_id)
self.assertEqual(target_options, {'alive_test': '0'})
def test_scan_get_target_options_alive_test_methods(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
''
'192.168.0.1'
'22'
''
'1'
'1'
'1'
'1'
'1'
''
''
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
time.sleep(1)
target_options = self.daemon.get_scan_target_options(scan_id)
self.assertEqual(
target_options,
{
'alive_test_methods': '1',
'icmp': '1',
'tcp_syn': '1',
'tcp_ack': '1',
'arp': '1',
'consider_alive': '1',
},
)
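# Sketch of the alive-test options implied by the expected dictionary (tag
# names mirror the dictionary keys and are an assumption, not the stripped
# request text):
#
#   <alive_test_methods>
#     <icmp>1</icmp>
#     <tcp_syn>1</tcp_syn>
#     <tcp_ack>1</tcp_ack>
#     <arp>1</arp>
#     <consider_alive>1</consider_alive>
#   </alive_test_methods>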
def test_scan_get_target_options_alive_test_methods_dont_add_empty_or_missing( # pylint: disable=line-too-long
self,
):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
''
'192.168.0.1'
'22'
''
'1'
''
''
''
''
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
time.sleep(1)
target_options = self.daemon.get_scan_target_options(scan_id)
self.assertEqual(
target_options, {'alive_test_methods': '1', 'icmp': '1'}
)
def test_progress(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.set_scan_host_progress(scan_id, 'localhost1', 75)
self.daemon.set_scan_host_progress(scan_id, 'localhost2', 25)
self.assertEqual(
self.daemon.scan_collection.calculate_target_progress(scan_id), 50
)
def test_progress_all_host_dead(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.set_scan_host_progress(scan_id, 'localhost1', -1)
self.daemon.set_scan_host_progress(scan_id, 'localhost2', -1)
self.daemon.sort_host_finished(scan_id, ['localhost1', 'localhost2'])
self.assertEqual(
self.daemon.scan_collection.calculate_target_progress(scan_id), 100
)
@patch('ospd.ospd.os')
def test_interrupted_scan(self, mock_os):
mock_os.setsid.return_value = None
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.exec_scan = Mock(return_value=None)
self.daemon.set_scan_host_progress(scan_id, 'localhost1', 5)
self.daemon.set_scan_host_progress(scan_id, 'localhost2', 14)
while self.daemon.get_scan_status(scan_id) == ScanStatus.INIT:
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id,
fs,
)
response = fs.get_response()
status = response.find('scan').attrib['status']
self.assertEqual(status, ScanStatus.INTERRUPTED.name.lower())
def test_sort_host_finished(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.set_scan_host_progress(scan_id, 'localhost3', -1)
self.daemon.set_scan_host_progress(scan_id, 'localhost1', 75)
self.daemon.set_scan_host_progress(scan_id, 'localhost4', 100)
self.daemon.set_scan_host_progress(scan_id, 'localhost2', 25)
self.daemon.sort_host_finished(scan_id, ['localhost3', 'localhost4'])
rounded_progress = self.daemon.scan_collection.calculate_target_progress(  # pylint: disable=line-too-long
scan_id
)
self.assertEqual(rounded_progress, 66)
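# Expected arithmetic for the 66 above, consistent with the other progress
# tests in this class: hosts reported dead (progress -1) are dropped from the
# denominator, so (75 + 25 + 100) / (4 - 1) = 66.6..., truncated to 66.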
def test_set_status_interrupted(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
end_time = self.daemon.scan_collection.get_end_time(scan_id)
self.assertEqual(end_time, 0)
self.daemon.interrupt_scan(scan_id)
end_time = self.daemon.scan_collection.get_end_time(scan_id)
self.assertNotEqual(end_time, 0)
def test_set_status_stopped(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
end_time = self.daemon.scan_collection.get_end_time(scan_id)
self.assertEqual(end_time, 0)
self.daemon.set_scan_status(scan_id, ScanStatus.STOPPED)
end_time = self.daemon.scan_collection.get_end_time(scan_id)
self.assertNotEqual(end_time, 0)
def test_calculate_progress_without_current_hosts(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.set_scan_host_progress(scan_id)
self.daemon.set_scan_host_progress(scan_id, 'localhost3', -1)
self.daemon.set_scan_host_progress(scan_id, 'localhost4', 100)
self.daemon.sort_host_finished(scan_id, ['localhost3', 'localhost4'])
float_progress = self.daemon.scan_collection.calculate_target_progress(
scan_id
)
self.assertEqual(int(float_progress), 33)
self.daemon.scan_collection.set_progress(scan_id, float_progress)
progress = self.daemon.get_scan_progress(scan_id)
self.assertEqual(progress, 33)
def test_get_scan_host_progress(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.set_scan_host_progress(scan_id, 'localhost', 45)
self.assertEqual(
self.daemon.get_scan_host_progress(scan_id, 'localhost'), 45
)
def test_get_scan_without_scanid(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
fs = FakeStream()
self.assertRaises(
OspdCommandError,
self.daemon.handle_command,
'',
fs,
)
def test_set_scan_total_hosts(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
count = self.daemon.scan_collection.get_count_total(scan_id)
self.assertEqual(count, 4)
self.daemon.set_scan_total_hosts(scan_id, 3)
count = self.daemon.scan_collection.get_count_total(scan_id)
self.assertEqual(count, 3)
def test_set_scan_total_hosts_zero(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
# Default calculated by ospd with the hosts in the target
count = self.daemon.scan_collection.get_count_total(scan_id)
self.assertEqual(count, 4)
# Set to 0 (all hosts unresolved, dead, invalid target) via
# the server. This one has priority and must be still 0 and
# never overwritten with the calculation from host list
self.daemon.set_scan_total_hosts(scan_id, 0)
count = self.daemon.scan_collection.get_count_total(scan_id)
self.assertEqual(count, 0)
def test_set_scan_total_hosts_invalid_target(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
count = self.daemon.scan_collection.get_count_total(scan_id)
self.assertEqual(count, 4)
# The total host is set by the server as -1, because invalid target
self.daemon.set_scan_total_hosts(scan_id, -1)
count = self.daemon.scan_collection.get_count_total(scan_id)
self.assertEqual(count, 0)
def test_scan_invalid_excluded_hosts(self):
logging.Logger.warning = Mock()
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'192.168.0.0/24'
'192.168.0.1-192.168.0.200,10.0.0.0/24'
''
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
# Count only the excluded hosts present in the original target.
count = self.daemon.scan_collection.get_simplified_exclude_host_count(
scan_id
)
self.assertEqual(count, 200)
logging.Logger.warning.assert_called_with( # pylint: disable=no-member
"Please check the excluded host list. It contains hosts "
"which do not belong to the target. This warning can be ignored if "
"this was done on purpose (e.g. to exclude specific hostname)."
)
def test_get_scan_progress_xml(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost1, localhost2, localhost3, localhost4'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.set_scan_host_progress(scan_id, 'localhost3', -1)
self.daemon.set_scan_host_progress(scan_id, 'localhost4', 100)
self.daemon.sort_host_finished(scan_id, ['localhost3', 'localhost4'])
self.daemon.set_scan_host_progress(scan_id, 'localhost1', 75)
self.daemon.set_scan_host_progress(scan_id, 'localhost2', 25)
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
progress = response.find('scan/progress')
overall = float(progress.findtext('overall'))
self.assertEqual(int(overall), 66)
count_alive = progress.findtext('count_alive')
self.assertEqual(count_alive, '1')
count_dead = progress.findtext('count_dead')
self.assertEqual(count_dead, '1')
current_hosts = progress.findall('host')
self.assertEqual(len(current_hosts), 2)
count_excluded = progress.findtext('count_excluded')
self.assertEqual(count_excluded, '0')
def test_set_get_vts_version(self):
self.daemon.set_vts_version('1234')
version = self.daemon.get_vts_version()
self.assertEqual('1234', version)
def test_set_get_vts_version_error(self):
self.assertRaises(TypeError, self.daemon.set_vts_version)
@patch("ospd.ospd.os")
@patch("ospd.ospd.create_process")
def test_scan_exists(self, mock_create_process, _mock_os):
fp = FakeStartProcess()
mock_create_process.side_effect = fp
mock_process = fp.call_mock
mock_process.start.side_effect = fp.run
mock_process.is_alive.return_value = True
mock_process.pid = "main-scan-process"
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'localhost'
'22'
''
'',
fs,
)
response = fs.get_response()
scan_id = response.findtext('id')
self.assertIsNotNone(scan_id)
status = response.get('status_text')
self.assertEqual(status, 'OK')
self.daemon.start_queued_scans()
assert_called(mock_create_process)
assert_called(mock_process.start)
self.daemon.handle_command('' % scan_id, fs)
fs = FakeStream()
cmd = (
''
''
''
'localhost'
'22'
''
''
)
self.daemon.handle_command(cmd, fs)
self.daemon.start_queued_scans()
response = fs.get_response()
status = response.get('status_text')
self.assertEqual(status, 'Continue')
def test_result_order(self):
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'a'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
self.daemon.add_scan_log(scan_id, host='a', name='a')
self.daemon.add_scan_log(scan_id, host='c', name='c')
self.daemon.add_scan_log(scan_id, host='b', name='b')
hosts = ['a', 'c', 'b']
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
results = response.findall("scan/results/")
for idx, res in enumerate(results):
att_dict = res.attrib
self.assertEqual(hosts[idx], att_dict['name'])
def test_batch_result(self):
reslist = ResultList()
fs = FakeStream()
self.daemon.handle_command(
''
''
''
'a'
'22'
''
'',
fs,
)
self.daemon.start_queued_scans()
response = fs.get_response()
scan_id = response.findtext('id')
reslist.add_scan_log_to_list(host='a', name='a')
reslist.add_scan_log_to_list(host='c', name='c')
reslist.add_scan_log_to_list(host='b', name='b')
self.daemon.scan_collection.add_result_list(scan_id, reslist)
hosts = ['a', 'c', 'b']
fs = FakeStream()
self.daemon.handle_command(
'' % scan_id, fs
)
response = fs.get_response()
results = response.findall("scan/results/")
for idx, res in enumerate(results):
att_dict = res.attrib
self.assertEqual(hosts[idx], att_dict['name'])
def test_is_new_scan_allowed_false(self):
self.daemon.scan_processes = { # pylint: disable=protected-access
'a': 1,
'b': 2,
}
self.daemon.max_scans = 1
self.assertFalse(self.daemon.is_new_scan_allowed())
def test_is_new_scan_allowed_true(self):
self.daemon.scan_processes = { # pylint: disable=protected-access
'a': 1,
'b': 2,
}
self.daemon.max_scans = 3
self.assertTrue(self.daemon.is_new_scan_allowed())
def test_start_queue_scan_daemon_not_init(self):
self.daemon.get_count_queued_scans = MagicMock(return_value=10)
self.daemon.initialized = False
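# While the daemon is not yet initialized (e.g. a feed update is running),
# queued scans must not be started.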
logging.Logger.info = Mock()
self.daemon.start_queued_scans()
logging.Logger.info.assert_called_with( # pylint: disable=no-member
"Queued task can not be started because a "
"feed update is being performed."
)
@patch("ospd.ospd.psutil")
def test_free_memory_true(self, mock_psutil):
self.daemon.min_free_mem_scan_queue = 1000
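# Require at least 1000 MB of free memory before starting a queued scan.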
# 1.5 GB free
mock_psutil.virtual_memory.return_value = FakePsutil(
available=1500000000
)
self.assertTrue(self.daemon.is_enough_free_memory())
@patch("ospd.ospd.psutil")
def test_wait_between_scan_no_scans(self, mock_psutil):
# Enable option
self.daemon.min_free_mem_scan_queue = 1000
# 1.5 GB free
mock_psutil.virtual_memory.return_value = FakePsutil(
available=1500000000
)
# Not enough time between scans, but no running scan
self.daemon.last_scan_start_time = time.time() - 20
self.assertTrue(self.daemon.is_enough_free_memory())
@patch("ospd.ospd.psutil")
def test_wait_between_scan_run_scans_not_allow(self, mock_psutil):
# Enable option
self.daemon.min_free_mem_scan_queue = 1000
# 1.5 GB free
mock_psutil.virtual_memory.return_value = FakePsutil(
available=1500000000
)
fs = FakeStream()
self.daemon.handle_command(
'<start_scan>'
'<scanner_params />'
'<targets><target>'
'<hosts>localhosts,192.168.0.0/24</hosts>'
'<ports>80,443</ports>'
'</target></targets>'
'</start_scan>',
fs,
)
# There is a running scan
self.daemon.start_queued_scans()
# Not enough time between scans
self.daemon.last_scan_start_time = time.time() - 20
self.assertFalse(self.daemon.is_enough_free_memory())
@patch("ospd.ospd.psutil")
def test_wait_between_scan_allow(self, mock_psutil):
# Enable option
self.daemon.min_free_mem_scan_queue = 1000
# 1.5 GB free
mock_psutil.virtual_memory.return_value = FakePsutil(
available=1500000000
)
fs = FakeStream()
self.daemon.handle_command(
'<start_scan>'
'<scanner_params />'
'<targets><target>'
'<hosts>localhosts,192.168.0.0/24</hosts>'
'<ports>80,443</ports>'
'</target></targets>'
'</start_scan>',
fs,
)
# There is a running scan, enough memory and enough time
# in between
self.daemon.start_queued_scans()
self.daemon.last_scan_start_time = time.time() - 65
self.assertTrue(self.daemon.is_enough_free_memory())
@patch("ospd.ospd.psutil")
def test_free_memory_false(self, mock_psutil):
self.daemon.min_free_mem_scan_queue = 2000
# 1.5 GB free
mock_psutil.virtual_memory.return_value = FakePsutil(
available=1500000000
)
self.assertFalse(self.daemon.is_enough_free_memory())
def test_count_queued_scans(self):
fs = FakeStream()
self.daemon.handle_command(
'<start_scan>'
'<scanner_params />'
'<targets><target>'
'<hosts>localhosts,192.168.0.0/24</hosts>'
'<ports>80,443</ports>'
'</target></targets>'
'</start_scan>',
fs,
)
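# The scan stays queued until start_queued_scans() spawns its process.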
self.assertEqual(self.daemon.get_count_queued_scans(), 1)
self.daemon.start_queued_scans()
self.assertEqual(self.daemon.get_count_queued_scans(), 0)
def test_count_running_scans(self):
fs = FakeStream()
self.daemon.handle_command(
'<start_scan>'
'<scanner_params />'
'<targets><target>'
'<hosts>localhosts,192.168.0.0/24</hosts>'
'<ports>80,443</ports>'
'</target></targets>'
'</start_scan>',
fs,
)
self.assertEqual(self.daemon.get_count_running_scans(), 0)
self.daemon.start_queued_scans()
self.assertEqual(self.daemon.get_count_running_scans(), 1)
def test_ids_iterator_dict_modified(self):
self.daemon.scan_collection.scans_table = {'a': 1, 'b': 2}
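# Adding an entry to the scans table while iterating must not break the
# iterator (no RuntimeError), i.e. it must not iterate the live dict view.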
for _ in self.daemon.scan_collection.ids_iterator():
self.daemon.scan_collection.scans_table['c'] = 3
self.assertEqual(len(self.daemon.scan_collection.scans_table), 3)
ospd-21.4.4/tests/test_ssh_daemon.py 0000664 0000000 0000000 00000013077 14131311270 0017443 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
""" Test module for ospd ssh support.
"""
import unittest
from ospd import ospd_ssh
from ospd.ospd_ssh import OSPDaemonSimpleSSH
from .helper import FakeDataManager
class FakeFile(object):
def __init__(self, content):
self.content = content
def readlines(self):
return self.content.split('\n')
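# Module-level list that records every command run through FakeSSHClient.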
commands = None # pylint: disable=invalid-name
class FakeSSHClient(object):
def __init__(self):
global commands # pylint: disable=global-statement,invalid-name
commands = []
def set_missing_host_key_policy(self, policy):
pass
def connect(self, **kwargs):
pass
def exec_command(self, cmd):
commands.append(cmd)
return None, FakeFile(''), None
def close(self):
pass
class FakeExceptions(object):
AuthenticationException = None # pylint: disable=invalid-name
class fakeparamiko(object): # pylint: disable=invalid-name
@staticmethod
def SSHClient(*args): # pylint: disable=invalid-name
return FakeSSHClient(*args)
@staticmethod
def AutoAddPolicy(): # pylint: disable=invalid-name
pass
ssh_exception = FakeExceptions
class DummyWrapper(OSPDaemonSimpleSSH):
def __init__(self, niceness=10):
super().__init__(niceness=niceness)
self.scan_collection.data_manager = FakeDataManager()
self.scan_collection.file_storage_dir = '/tmp'
self.initialized = True
def check(self):
return True
def exec_scan(self, scan_id: str):
return
class SSHDaemonTestCase(unittest.TestCase):
def test_no_paramiko(self):
ospd_ssh.paramiko = None
with self.assertRaises(ImportError):
OSPDaemonSimpleSSH()
def test_run_command(self):
ospd_ssh.paramiko = fakeparamiko
daemon = DummyWrapper(niceness=10)
scanid = daemon.create_scan(
None,
{
'target': 'host.example.com',
'ports': '80, 443',
'credentials': {},
'exclude_hosts': '',
'finished_hosts': '',
'options': {},
},
dict(port=5, ssh_timeout=15, username_password='dummy:pw'),
'',
)
daemon.start_queued_scans()
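# The remote command must be prefixed with the configured niceness value.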
res = daemon.run_command(scanid, 'host.example.com', 'cat /etc/passwd')
self.assertIsInstance(res, list)
self.assertEqual(commands, ['nice -n 10 cat /etc/passwd'])
def test_run_command_legacy_credential(self):
ospd_ssh.paramiko = fakeparamiko
daemon = DummyWrapper(niceness=10)
scanid = daemon.create_scan(
None,
{
'target': 'host.example.com',
'ports': '80, 443',
'credentials': {},
'exclude_hosts': '',
'finished_hosts': '',
'options': {},
},
dict(port=5, ssh_timeout=15, username='dummy', password='pw'),
'',
)
daemon.start_queued_scans()
res = daemon.run_command(scanid, 'host.example.com', 'cat /etc/passwd')
self.assertIsInstance(res, list)
self.assertEqual(commands, ['nice -n 10 cat /etc/passwd'])
def test_run_command_new_credential(self):
ospd_ssh.paramiko = fakeparamiko
daemon = DummyWrapper(niceness=10)
cred_dict = {
'ssh': {
'type': 'up',
'password': 'mypass',
'port': '22',
'username': 'scanuser',
},
'smb': {'type': 'up', 'password': 'mypass', 'username': 'smbuser'},
}
scanid = daemon.create_scan(
None,
{
'target': 'host.example.com',
'ports': '80, 443',
'credentials': cred_dict,
'exclude_hosts': '',
'finished_hosts': '',
'options': {},
},
dict(port=5, ssh_timeout=15),
'',
)
daemon.start_queued_scans()
res = daemon.run_command(scanid, 'host.example.com', 'cat /etc/passwd')
self.assertIsInstance(res, list)
self.assertEqual(commands, ['nice -n 10 cat /etc/passwd'])
def test_run_command_no_credential(self):
ospd_ssh.paramiko = fakeparamiko
daemon = DummyWrapper(niceness=10)
scanid = daemon.create_scan(
None,
{
'target': 'host.example.com',
'ports': '80, 443',
'credentials': {},
'exclude_hosts': '',
'finished_hosts': '',
'options': {},
},
dict(port=5, ssh_timeout=15),
'',
)
daemon.start_queued_scans()
with self.assertRaises(ValueError):
daemon.run_command(scanid, 'host.example.com', 'cat /etc/passwd')
ospd-21.4.4/tests/test_target_convert.py 0000664 0000000 0000000 00000007445 14131311270 0020353 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
""" Test suites for Target manipulations.
"""
import unittest
from unittest.mock import patch
from ospd.network import (
target_str_to_list,
get_hostname_by_address,
is_valid_address,
target_to_ipv4,
socket,
)
class ConvertTargetListsTestCase(unittest.TestCase):
def test_24_net(self):
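# A /24 network expands to 254 usable host addresses; the network and
# broadcast addresses are excluded.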
addresses = target_str_to_list('195.70.81.0/24')
self.assertIsNotNone(addresses)
self.assertEqual(len(addresses), 254)
for i in range(1, 255):
self.assertIn('195.70.81.%d' % i, addresses)
def test_bad_ipv4_cidr(self):
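# /31 and /32 networks contain no usable host addresses and expand to an
# empty list.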
addresses = target_str_to_list('195.70.81.0/32')
self.assertIsNotNone(addresses)
self.assertEqual(len(addresses), 0)
addresses = target_str_to_list('195.70.81.0/31')
self.assertIsNotNone(addresses)
self.assertEqual(len(addresses), 0)
def test_good_ipv4_cidr(self):
addresses = target_str_to_list('195.70.81.0/30')
self.assertIsNotNone(addresses)
self.assertEqual(len(addresses), 2)
def test_range(self):
addresses = target_str_to_list('195.70.81.0-10')
self.assertIsNotNone(addresses)
self.assertEqual(len(addresses), 11)
for i in range(0, 10):
self.assertIn('195.70.81.%d' % i, addresses)
def test_target_str_with_trailing_comma(self):
addresses = target_str_to_list(',195.70.81.1,195.70.81.2,')
self.assertIsNotNone(addresses)
self.assertEqual(len(addresses), 2)
for i in range(1, 2):
self.assertIn('195.70.81.%d' % i, addresses)
def test_get_hostname_by_address(self):
with patch.object(socket, "getfqdn", return_value="localhost"):
hostname = get_hostname_by_address('127.0.0.1')
self.assertEqual(hostname, 'localhost')
hostname = get_hostname_by_address('')
self.assertEqual(hostname, '')
hostname = get_hostname_by_address('127.0.0.1111')
self.assertEqual(hostname, '')
def test_is_valid_address(self):
self.assertFalse(is_valid_address(None))
self.assertFalse(is_valid_address(''))
self.assertFalse(is_valid_address('foo'))
self.assertFalse(is_valid_address('127.0.0.1111'))
self.assertFalse(is_valid_address('127.0.0,1'))
self.assertTrue(is_valid_address('127.0.0.1'))
self.assertTrue(is_valid_address('192.168.0.1'))
self.assertTrue(is_valid_address('::1'))
self.assertTrue(is_valid_address('fc00::'))
self.assertTrue(is_valid_address('fec0::'))
self.assertTrue(
is_valid_address('2001:0db8:85a3:08d3:1319:8a2e:0370:7344')
)
def test_target_to_ipv4(self):
self.assertIsNone(target_to_ipv4('foo'))
self.assertIsNone(target_to_ipv4(''))
self.assertIsNone(target_to_ipv4('127,0,0,1'))
self.assertIsNone(target_to_ipv4('127.0.0'))
self.assertIsNone(target_to_ipv4('127.0.0.11111'))
self.assertEqual(target_to_ipv4('127.0.0.1'), ['127.0.0.1'])
self.assertEqual(target_to_ipv4('192.168.1.1'), ['192.168.1.1'])
ospd-21.4.4/tests/test_vts.py 0000664 0000000 0000000 00000012135 14131311270 0016131 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging
from hashlib import sha256
from unittest import TestCase
from unittest.mock import Mock
from collections import OrderedDict
from ospd.errors import OspdError
from ospd.vts import Vts
class VtsTestCase(TestCase):
def test_add_vt(self):
vts = Vts()
vts.add('id_1', name='foo')
self.assertEqual(len(vts.vts), 1)
def test_add_duplicate_vt(self):
vts = Vts()
vts.add('id_1', name='foo')
with self.assertRaises(OspdError):
vts.add('id_1', name='bar')
self.assertEqual(len(vts.vts), 1)
def test_add_vt_with_empty_id(self):
vts = Vts()
with self.assertRaises(OspdError):
vts.add(None, name='foo')
with self.assertRaises(OspdError):
vts.add('', name='foo')
def test_add_vt_with_invalid_id(self):
vts = Vts()
with self.assertRaises(OspdError):
vts.add('$$$_1', name='foo')
self.assertEqual(len(vts.vts), 0)
def test_contains(self):
vts = Vts()
vts.add('id_1', name='foo')
self.assertIn('id_1', vts)
def test_get(self):
vts = Vts()
vts.add('id_1', name='foo')
vt = vts.get('id_1')
self.assertIsNotNone(vt)
self.assertEqual(vt['name'], 'foo')
self.assertIsNone(vt.get('bar'))
def test_iterator(self):
vts = Vts()
vts.add('id_1', name='foo')
vts.add('id_2', name='bar')
it = iter(vts)
vt_id = next(it)
self.assertIn(vt_id, ['id_1', 'id_2'])
vt_id = next(it)
self.assertIn(vt_id, ['id_1', 'id_2'])
with self.assertRaises(StopIteration):
next(it)
def test_keys(self):
vts = Vts()
vts.add('id_1', name='foo')
vts.add('id_2', name='bar')
self.assertEqual(vts.keys(), ['id_1', 'id_2'])
def test_getitem(self):
vts = Vts()
vts.add('id_1', name='foo')
vt = vts['id_1']
self.assertEqual(vt['name'], 'foo')
with self.assertRaises(KeyError):
vt = vts['foo']
def test_copy(self):
vts = Vts()
vts.add('id_1', name='foo')
vts.add('id_2', name='bar')
vts2 = vts.copy()
self.assertIsNot(vts, vts2)
self.assertIsNot(vts.vts, vts2.vts)
vta = vts.get('id_1')
vtb = vts2.get('id_1')
self.assertEqual(vta['name'], vtb['name'])
self.assertIsNot(vta, vtb)
vta = vts.get('id_2')
vtb = vts2.get('id_2')
self.assertEqual(vta['name'], vtb['name'])
self.assertIsNot(vta, vtb)
def test_calculate_vts_collection_hash(self):
vts = Vts(storage=OrderedDict)
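# An ordered storage keeps the VTs in insertion order, so the expected
# collection hash is deterministic.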
vts.add(
'id_1',
name='foo',
vt_modification_time='01234',
vt_params={
'0': {'id': '0', 'name': 'timeout', 'default': '20'},
'1': {'id': '1', 'name': 'foo_pref:', 'default': 'bar_value'},
},
)
vts.add('id_2', name='bar', vt_modification_time='56789')
vts.calculate_vts_collection_hash()
vt_hash = sha256()
vt_hash.update(
"id_1012340timeout201foo_pref:bar_valueid_256789".encode('utf-8')
)
hash_test = vt_hash.hexdigest()
self.assertEqual(hash_test, vts.sha256_hash)
def test_calculate_vts_collection_hash_no_params(self):
vts = Vts(storage=OrderedDict)
vts.add(
'id_1',
name='foo',
vt_modification_time='01234',
vt_params={
'0': {'id': '0', 'name': 'timeout', 'default': '20'},
'1': {'id': '1', 'name': 'foo_pref:', 'default': 'bar_value'},
},
)
vts.add('id_2', name='bar', vt_modification_time='56789')
vts.calculate_vts_collection_hash(include_vt_params=False)
vt_hash = sha256()
vt_hash.update("id_101234id_256789".encode('utf-8'))
hash_test = vt_hash.hexdigest()
self.assertEqual(hash_test, vts.sha256_hash)
def test_calculate_vts_collection_hash_empty(self):
vts = Vts()
logging.Logger.debug = Mock()
vts.calculate_vts_collection_hash()
self.assertEqual(vts.sha256_hash, None)
logging.Logger.debug.assert_called_with( # pylint: disable=no-member
"Error calculating VTs collection hash. Cache is empty"
)
ospd-21.4.4/tests/test_xml.py 0000664 0000000 0000000 00000007470 14131311270 0016123 0 ustar 00root root 0000000 0000000 # Copyright (C) 2014-2021 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from collections import OrderedDict
from unittest import TestCase
from xml.etree.ElementTree import Element, tostring, fromstring
from ospd.xml import elements_as_text, escape_ctrl_chars
class ElementsAsText(TestCase):
def test_simple_element(self):
elements = {'foo': 'bar'}
text = elements_as_text(elements)
self.assertEqual(text, '\t foo bar\n')
def test_simple_elements(self):
elements = OrderedDict([('foo', 'bar'), ('lorem', 'ipsum')])
text = elements_as_text(elements)
self.assertEqual(
text,
'\t foo bar\n'
'\t lorem ipsum\n',
)
def test_elements(self):
elements = OrderedDict(
[
('foo', 'bar'),
(
'lorem',
OrderedDict(
[
('dolor', 'sit amet'),
('consectetur', 'adipiscing elit'),
]
),
),
]
)
text = elements_as_text(elements)
self.assertEqual(
text,
'\t foo bar\n'
'\t lorem \n'
'\t dolor sit amet\n'
'\t consectetur adipiscing elit\n',
)
class EscapeText(TestCase):
def test_escape_xml_valid_text(self):
text = 'this is a valid xml'
res = escape_ctrl_chars(text)
self.assertEqual(text, res)
def test_escape_xml_invalid_char(self):
text = 'End of transmission is not printable \x04.'
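# \x04 (end of transmission) is a control character and must be escaped.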
res = escape_ctrl_chars(text)
self.assertEqual(res, 'End of transmission is not printable \\x0004.')
# Create element
elem = Element('text')
elem.text = res
self.assertEqual(
tostring(elem),
b'<text>End of transmission is not printable \\x0004.</text>',
)
# The string format of the element does not break the xml.
elem_as_str = tostring(elem, encoding='utf-8')
new_elem = fromstring(elem_as_str)
self.assertEqual(
b'' + new_elem.text.encode('utf-8') + b'', elem_as_str
)
def test_escape_xml_printable_char(self):
text = 'Latin Capital Letter A With Circumflex \xc2 is printable.'
res = escape_ctrl_chars(text)
self.assertEqual(
res, 'Latin Capital Letter A With Circumflex \xc2 is printable.'
)
# Create the element
elem = Element('text')
elem.text = res
self.assertEqual(
tostring(elem),
b'<text>Latin Capital Letter A With Circumflex &#194; is '
b'printable.</text>',
)
# The string format of the element does not break the xml
elem_as_str = tostring(elem, encoding='utf-8')
new_elem = fromstring(elem_as_str)
self.assertEqual(
b'' + new_elem.text.encode('utf-8') + b'', elem_as_str
)