@@ -0,0 +1,20 @@
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

#
# Compat for python2.7
#

# One unittest needs to import builtins via __import__() so we need to have
# the string that represents it
try:
    import __builtin__
except ImportError:
    BUILTINS = 'builtins'
else:
    BUILTINS = '__builtin__'
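As an illustrative sketch (not part of the diff), tests typically combine the BUILTINS string with mock.patch so one patch target works on both Python 2 and 3; the path and read data below are invented:

from ansible_collections.community.general.tests.unit.compat.builtins import BUILTINS
from ansible_collections.community.general.tests.unit.compat.mock import mock_open, patch

# BUILTINS resolves to 'builtins' on Python 3 and '__builtin__' on Python 2,
# so the same patch target string works on both interpreters.
with patch('%s.open' % BUILTINS, mock_open(read_data='hello\n'), create=True):
    with open('/fake/path') as f:
        assert f.read() == 'hello\n'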
@@ -0,0 +1,109 @@
|
||||
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
'''
|
||||
Compat module for Python3.x's unittest.mock module
|
||||
'''
|
||||
import sys
|
||||
|
||||
# Python 2.7
|
||||
|
||||
# Note: Could use the pypi mock library on python3.x as well as python2.x. It
|
||||
# is the same as the python3 stdlib mock library
|
||||
|
||||
try:
|
||||
# Allow wildcard import because we really do want to import all of mock's
|
||||
# symbols into this compat shim
|
||||
# pylint: disable=wildcard-import,unused-wildcard-import
|
||||
from unittest.mock import *
|
||||
except ImportError:
|
||||
# Python 2
|
||||
# pylint: disable=wildcard-import,unused-wildcard-import
|
||||
try:
|
||||
from mock import *
|
||||
except ImportError:
|
||||
print('You need the mock library installed on python2.x to run tests')
|
||||
|
||||
|
||||
# Prior to 3.4.4, mock_open cannot handle binary read_data
|
||||
if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
|
||||
file_spec = None
|
||||
|
||||
def _iterate_read_data(read_data):
|
||||
# Helper for mock_open:
|
||||
# Retrieve lines from read_data via a generator so that separate calls to
|
||||
# readline, read, and readlines are properly interleaved
|
||||
sep = b'\n' if isinstance(read_data, bytes) else '\n'
|
||||
data_as_list = [l + sep for l in read_data.split(sep)]
|
||||
|
||||
if data_as_list[-1] == sep:
|
||||
# If the last line ended in a newline, the list comprehension will have an
|
||||
# extra entry that's just a newline. Remove this.
|
||||
data_as_list = data_as_list[:-1]
|
||||
else:
|
||||
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line, so remove the
# newline that our naive format() added
|
||||
data_as_list[-1] = data_as_list[-1][:-1]
|
||||
|
||||
for line in data_as_list:
|
||||
yield line
|
||||
|
||||
def mock_open(mock=None, read_data=''):
|
||||
"""
|
||||
A helper function to create a mock to replace the use of `open`. It works
|
||||
for `open` called directly or used as a context manager.
|
||||
|
||||
The `mock` argument is the mock object to configure. If `None` (the
|
||||
default) then a `MagicMock` will be created for you, with the API limited
|
||||
to methods or attributes available on standard file handles.
|
||||
|
||||
`read_data` is a string for the `read`, `readline`, and `readlines` methods of the
|
||||
file handle to return. This is an empty string by default.
|
||||
"""
|
||||
def _readlines_side_effect(*args, **kwargs):
|
||||
if handle.readlines.return_value is not None:
|
||||
return handle.readlines.return_value
|
||||
return list(_data)
|
||||
|
||||
def _read_side_effect(*args, **kwargs):
|
||||
if handle.read.return_value is not None:
|
||||
return handle.read.return_value
|
||||
return type(read_data)().join(_data)
|
||||
|
||||
def _readline_side_effect():
|
||||
if handle.readline.return_value is not None:
|
||||
while True:
|
||||
yield handle.readline.return_value
|
||||
for line in _data:
|
||||
yield line
|
||||
|
||||
global file_spec
|
||||
if file_spec is None:
|
||||
import _io
|
||||
file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
|
||||
|
||||
if mock is None:
|
||||
mock = MagicMock(name='open', spec=open)
|
||||
|
||||
handle = MagicMock(spec=file_spec)
|
||||
handle.__enter__.return_value = handle
|
||||
|
||||
_data = _iterate_read_data(read_data)
|
||||
|
||||
handle.write.return_value = None
|
||||
handle.read.return_value = None
|
||||
handle.readline.return_value = None
|
||||
handle.readlines.return_value = None
|
||||
|
||||
handle.read.side_effect = _read_side_effect
|
||||
handle.readline.side_effect = _readline_side_effect()
|
||||
handle.readlines.side_effect = _readlines_side_effect
|
||||
|
||||
mock.return_value = handle
|
||||
return mock
|
||||
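A hedged usage sketch of the mock_open shim above (the file name and contents are invented); the point is that read, readline and readlines share one generator, so calls interleave correctly, including for bytes on the affected Python 3 versions:

from ansible_collections.community.general.tests.unit.compat.builtins import BUILTINS
from ansible_collections.community.general.tests.unit.compat.mock import mock_open, patch

# The shared generator behind read/readline/readlines keeps the calls interleaved.
m = mock_open(read_data=b'line1\nline2\nline3\n')
with patch('%s.open' % BUILTINS, m, create=True):
    with open('/fake/file', 'rb') as f:
        assert f.readline() == b'line1\n'      # consumes the first line
        assert f.read() == b'line2\nline3\n'   # returns only what is left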
@@ -0,0 +1,25 @@
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

'''
Compat module for Python2.7's unittest module
'''

import sys

# Allow wildcard import because we really do want to import all of
# unittest's symbols into this compat shim
# pylint: disable=wildcard-import,unused-wildcard-import
if sys.version_info < (2, 7):
    try:
        # Need unittest2 on python2.6
        from unittest2 import *
    except ImportError:
        print('You need unittest2 installed on python2.6.x to run tests')
else:
    from unittest import *
@@ -0,0 +1,103 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
|
||||
from ansible.errors import AnsibleParserError
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
|
||||
|
||||
class DictDataLoader(DataLoader):
|
||||
|
||||
def __init__(self, file_mapping=None):
|
||||
file_mapping = {} if file_mapping is None else file_mapping
|
||||
assert type(file_mapping) == dict
|
||||
|
||||
super(DictDataLoader, self).__init__()
|
||||
|
||||
self._file_mapping = file_mapping
|
||||
self._build_known_directories()
|
||||
self._vault_secrets = None
|
||||
|
||||
def load_from_file(self, path, cache=True, unsafe=False):
|
||||
path = to_text(path)
|
||||
if path in self._file_mapping:
|
||||
return self.load(self._file_mapping[path], path)
|
||||
return None
|
||||
|
||||
# TODO: the real _get_file_contents returns a bytestring, so we actually convert the
|
||||
# unicode/text it's created with to utf-8
|
||||
def _get_file_contents(self, file_name):
|
||||
path = to_text(file_name)
|
||||
if path in self._file_mapping:
|
||||
return (to_bytes(self._file_mapping[path]), False)
|
||||
else:
|
||||
raise AnsibleParserError("file not found: %s" % path)
|
||||
|
||||
def path_exists(self, path):
|
||||
path = to_text(path)
|
||||
return path in self._file_mapping or path in self._known_directories
|
||||
|
||||
def is_file(self, path):
|
||||
path = to_text(path)
|
||||
return path in self._file_mapping
|
||||
|
||||
def is_directory(self, path):
|
||||
path = to_text(path)
|
||||
return path in self._known_directories
|
||||
|
||||
def list_directory(self, path):
|
||||
ret = []
|
||||
path = to_text(path)
|
||||
for x in (list(self._file_mapping.keys()) + self._known_directories):
|
||||
if x.startswith(path):
|
||||
if os.path.dirname(x) == path:
|
||||
ret.append(os.path.basename(x))
|
||||
return ret
|
||||
|
||||
def is_executable(self, path):
|
||||
# FIXME: figure out a way to make paths return true for this
|
||||
return False
|
||||
|
||||
def _add_known_directory(self, directory):
|
||||
if directory not in self._known_directories:
|
||||
self._known_directories.append(directory)
|
||||
|
||||
def _build_known_directories(self):
|
||||
self._known_directories = []
|
||||
for path in self._file_mapping:
|
||||
dirname = os.path.dirname(path)
|
||||
while dirname not in ('/', ''):
|
||||
self._add_known_directory(dirname)
|
||||
dirname = os.path.dirname(dirname)
|
||||
|
||||
def push(self, path, content):
|
||||
rebuild_dirs = False
|
||||
if path not in self._file_mapping:
|
||||
rebuild_dirs = True
|
||||
|
||||
self._file_mapping[path] = content
|
||||
|
||||
if rebuild_dirs:
|
||||
self._build_known_directories()
|
||||
|
||||
def pop(self, path):
|
||||
if path in self._file_mapping:
|
||||
del self._file_mapping[path]
|
||||
self._build_known_directories()
|
||||
|
||||
def clear(self):
|
||||
self._file_mapping = dict()
|
||||
self._known_directories = []
|
||||
|
||||
def get_basedir(self):
|
||||
return os.getcwd()
|
||||
|
||||
def set_vault_secrets(self, vault_secrets):
|
||||
self._vault_secrets = vault_secrets
|
||||
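A minimal sketch of how DictDataLoader is meant to be used, assuming the file lands at tests/unit/mock/loader.py as in the rest of this commit; the mapped path and YAML content are invented:

from ansible_collections.community.general.tests.unit.mock.loader import DictDataLoader

fake_loader = DictDataLoader({
    '/etc/ansible/roles/x/tasks/main.yml': '- debug: msg=hello\n',
})
# Directories are inferred from the mapped paths; no real filesystem is involved.
assert fake_loader.path_exists('/etc/ansible/roles/x/tasks/main.yml')
assert fake_loader.is_directory('/etc/ansible/roles/x')
assert fake_loader.list_directory('/etc/ansible/roles/x/tasks') == ['main.yml']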
@@ -0,0 +1,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.community.general.tests.unit.compat.mock import MagicMock
from ansible.utils.path import unfrackpath


mock_unfrackpath_noop = MagicMock(spec_set=unfrackpath, side_effect=lambda x, *args, **kwargs: x)
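A small sketch of the intended use of mock_unfrackpath_noop; the patch target ansible.inventory.manager.unfrackpath is an assumed example, not something this commit prescribes:

from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.tests.unit.mock.path import mock_unfrackpath_noop

# With the no-op in place, paths are passed through unchanged instead of being
# expanded/normalised, which keeps inventory-style tests independent of the host.
with patch('ansible.inventory.manager.unfrackpath', mock_unfrackpath_noop):
    pass  # code under test that would otherwise unfrack paths goes here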
@@ -0,0 +1,77 @@
|
||||
# Copyright (c) 2016, Matt Davis <mdavis@ansible.com>
|
||||
# Copyright (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import sys
|
||||
import json
|
||||
|
||||
from contextlib import contextmanager
|
||||
from io import BytesIO, StringIO
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible.module_utils.six import PY3
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
|
||||
|
||||
@contextmanager
|
||||
def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
|
||||
"""
|
||||
context manager that temporarily masks the test runner's values for stdin and argv
|
||||
"""
|
||||
real_stdin = sys.stdin
|
||||
real_argv = sys.argv
|
||||
|
||||
if PY3:
|
||||
fake_stream = StringIO(stdin_data)
|
||||
fake_stream.buffer = BytesIO(to_bytes(stdin_data))
|
||||
else:
|
||||
fake_stream = BytesIO(to_bytes(stdin_data))
|
||||
|
||||
try:
|
||||
sys.stdin = fake_stream
|
||||
sys.argv = argv_data
|
||||
|
||||
yield
|
||||
finally:
|
||||
sys.stdin = real_stdin
|
||||
sys.argv = real_argv
|
||||
|
||||
|
||||
@contextmanager
|
||||
def swap_stdout():
|
||||
"""
|
||||
context manager that temporarily replaces stdout for tests that need to verify output
|
||||
"""
|
||||
old_stdout = sys.stdout
|
||||
|
||||
if PY3:
|
||||
fake_stream = StringIO()
|
||||
else:
|
||||
fake_stream = BytesIO()
|
||||
|
||||
try:
|
||||
sys.stdout = fake_stream
|
||||
|
||||
yield fake_stream
|
||||
finally:
|
||||
sys.stdout = old_stdout
|
||||
|
||||
|
||||
class ModuleTestCase(unittest.TestCase):
|
||||
def setUp(self, module_args=None):
|
||||
if module_args is None:
|
||||
module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
|
||||
|
||||
args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
|
||||
|
||||
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
|
||||
self.stdin_swap = swap_stdin_and_argv(stdin_data=args)
|
||||
self.stdin_swap.__enter__()
|
||||
|
||||
def tearDown(self):
|
||||
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
|
||||
self.stdin_swap.__exit__(None, None, None)
|
||||
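A minimal sketch of what swap_stdin_and_argv enables, assuming the file lands at tests/unit/mock/procenv.py; the argument spec and values are invented:

import json

from ansible.module_utils import basic
from ansible_collections.community.general.tests.unit.mock.procenv import swap_stdin_and_argv

args = json.dumps(dict(ANSIBLE_MODULE_ARGS=dict(msg='hello')))
with swap_stdin_and_argv(stdin_data=args):
    # AnsibleModule reads its parameters from the faked stdin set up above
    module = basic.AnsibleModule(argument_spec=dict(msg=dict(type='str')))
    assert module.params['msg'] == 'hello'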
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) Ansible project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
|
||||
from ansible.parsing.vault import VaultSecret
|
||||
|
||||
|
||||
class TextVaultSecret(VaultSecret):
|
||||
'''A secret piece of text, i.e. a password. Tracks text encoding.

The text encoding of the text may not be the default text encoding, so
we keep track of the encoding in order to encode it back to the same bytes.'''
|
||||
|
||||
def __init__(self, text, encoding=None, errors=None, _bytes=None):
|
||||
super(TextVaultSecret, self).__init__()
|
||||
self.text = text
|
||||
self.encoding = encoding or 'utf-8'
|
||||
self._bytes = _bytes
|
||||
self.errors = errors or 'strict'
|
||||
|
||||
@property
|
||||
def bytes(self):
|
||||
'''The text encoded with encoding, unless we specifically set _bytes.'''
|
||||
return self._bytes or to_bytes(self.text, encoding=self.encoding, errors=self.errors)
|
||||
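A short usage sketch of TextVaultSecret (the import path tests/unit/mock/vault_helper.py is assumed):

from ansible_collections.community.general.tests.unit.mock.vault_helper import TextVaultSecret

secret = TextVaultSecret(u'pässword', encoding='utf-8')
# .bytes re-encodes with the tracked encoding rather than the platform default
assert secret.bytes == u'pässword'.encode('utf-8')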
@@ -0,0 +1,128 @@
|
||||
# Copyright (c) Ansible project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import io
|
||||
import yaml
|
||||
|
||||
from ansible.module_utils.six import PY3
|
||||
from ansible.parsing.yaml.loader import AnsibleLoader
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
|
||||
|
||||
class YamlTestUtils(object):
|
||||
"""Mixin class to combine with a unittest.TestCase subclass."""
|
||||
def _loader(self, stream):
|
||||
"""Vault related tests will want to override this.
|
||||
|
||||
Vault cases should set up an AnsibleLoader that has the vault password."""
|
||||
return AnsibleLoader(stream)
|
||||
|
||||
def _dump_stream(self, obj, stream, dumper=None):
|
||||
"""Dump to a py2-unicode or py3-string stream."""
|
||||
if PY3:
|
||||
return yaml.dump(obj, stream, Dumper=dumper)
|
||||
else:
|
||||
return yaml.dump(obj, stream, Dumper=dumper, encoding=None)
|
||||
|
||||
def _dump_string(self, obj, dumper=None):
|
||||
"""Dump to a py2-unicode or py3-string"""
|
||||
if PY3:
|
||||
return yaml.dump(obj, Dumper=dumper)
|
||||
else:
|
||||
return yaml.dump(obj, Dumper=dumper, encoding=None)
|
||||
|
||||
def _dump_load_cycle(self, obj):
|
||||
# Each pass through a dump or load revs the 'generation'
|
||||
# obj to yaml string
|
||||
string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper)
|
||||
|
||||
# wrap a stream/file like StringIO around that yaml
|
||||
stream_from_object_dump = io.StringIO(string_from_object_dump)
|
||||
loader = self._loader(stream_from_object_dump)
|
||||
# load the yaml stream to create a new instance of the object (gen 2)
|
||||
obj_2 = loader.get_data()
|
||||
|
||||
# dump the gen 2 objects directly to strings
|
||||
string_from_object_dump_2 = self._dump_string(obj_2,
|
||||
dumper=AnsibleDumper)
|
||||
|
||||
# The gen 1 and gen 2 yaml strings should be identical
self.assertEqual(string_from_object_dump, string_from_object_dump_2)
# the gen 1 (orig) and gen 2 py objects should be equal
|
||||
self.assertEqual(obj, obj_2)
|
||||
|
||||
# again! gen 3... load strings into py objects
|
||||
stream_3 = io.StringIO(string_from_object_dump_2)
|
||||
loader_3 = self._loader(stream_3)
|
||||
obj_3 = loader_3.get_data()
|
||||
|
||||
string_from_object_dump_3 = self._dump_string(obj_3, dumper=AnsibleDumper)
|
||||
|
||||
self.assertEqual(obj, obj_3)
|
||||
# should be transitive, but...
|
||||
self.assertEqual(obj_2, obj_3)
|
||||
self.assertEqual(string_from_object_dump, string_from_object_dump_3)
|
||||
|
||||
def _old_dump_load_cycle(self, obj):
|
||||
'''Dump the passed in object to yaml, load it back up, dump again, compare.'''
|
||||
stream = io.StringIO()
|
||||
|
||||
yaml_string = self._dump_string(obj, dumper=AnsibleDumper)
|
||||
self._dump_stream(obj, stream, dumper=AnsibleDumper)
|
||||
|
||||
yaml_string_from_stream = stream.getvalue()
|
||||
|
||||
# reset stream
|
||||
stream.seek(0)
|
||||
|
||||
loader = self._loader(stream)
|
||||
# loader = AnsibleLoader(stream, vault_password=self.vault_password)
|
||||
obj_from_stream = loader.get_data()
|
||||
|
||||
stream_from_string = io.StringIO(yaml_string)
|
||||
loader2 = self._loader(stream_from_string)
|
||||
# loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password)
|
||||
obj_from_string = loader2.get_data()
|
||||
|
||||
stream_obj_from_stream = io.StringIO()
|
||||
stream_obj_from_string = io.StringIO()
|
||||
|
||||
if PY3:
|
||||
yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper)
|
||||
yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper)
|
||||
else:
|
||||
yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper, encoding=None)
|
||||
yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper, encoding=None)
|
||||
|
||||
yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue()
|
||||
yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue()
|
||||
|
||||
stream_obj_from_stream.seek(0)
|
||||
stream_obj_from_string.seek(0)
|
||||
|
||||
if PY3:
|
||||
yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper)
|
||||
yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper)
|
||||
else:
|
||||
yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper, encoding=None)
|
||||
yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper, encoding=None)
|
||||
|
||||
assert yaml_string == yaml_string_obj_from_stream
|
||||
assert yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
|
||||
assert (yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string == yaml_string_stream_obj_from_stream ==
|
||||
yaml_string_stream_obj_from_string)
|
||||
assert obj == obj_from_stream
|
||||
assert obj == obj_from_string
|
||||
assert obj == yaml_string_obj_from_stream
|
||||
assert obj == yaml_string_obj_from_string
|
||||
assert obj == obj_from_stream == obj_from_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
|
||||
return {'obj': obj,
|
||||
'yaml_string': yaml_string,
|
||||
'yaml_string_from_stream': yaml_string_from_stream,
|
||||
'obj_from_stream': obj_from_stream,
|
||||
'obj_from_string': obj_from_string,
|
||||
'yaml_string_obj_from_string': yaml_string_obj_from_string}
|
||||
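A hedged sketch of using the YamlTestUtils mixin from a test case (the class and data are invented):

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.tests.unit.mock.yaml_helper import YamlTestUtils


class TestRoundTrip(unittest.TestCase, YamlTestUtils):
    def test_dict_round_trips(self):
        # dump -> load -> dump again and compare both the strings and the objects
        self._dump_load_cycle({'name': 'demo', 'items': ['a', 'b']})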
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2017 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.cli.arguments import option_helpers as opt_help
|
||||
from ansible.utils import context_objects as co
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def parser():
|
||||
parser = opt_help.create_base_parser('testparser')
|
||||
|
||||
opt_help.add_runas_options(parser)
|
||||
opt_help.add_meta_options(parser)
|
||||
opt_help.add_runtask_options(parser)
|
||||
opt_help.add_vault_options(parser)
|
||||
opt_help.add_async_options(parser)
|
||||
opt_help.add_connect_options(parser)
|
||||
opt_help.add_subset_options(parser)
|
||||
opt_help.add_check_options(parser)
|
||||
opt_help.add_inventory_options(parser)
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def reset_cli_args():
|
||||
co.GlobalCLIArgs._Singleton__instance = None
|
||||
yield
|
||||
co.GlobalCLIArgs._Singleton__instance = None
|
||||
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.errors import AnsibleError
from ansible.plugins.loader import become_loader, get_shell_plugin


def call_become_plugin(task, var_options, cmd, executable=None):
    """Helper function to call a become plugin similarly to how Ansible itself handles this."""
    plugin = become_loader.get(task['become_method'])
    plugin.set_options(task_keys=task, var_options=var_options)
    shell = get_shell_plugin(executable=executable)
    return plugin.build_become_command(cmd, shell)
@@ -0,0 +1,85 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible import context
|
||||
|
||||
from .helper import call_become_plugin
|
||||
|
||||
|
||||
def test_doas_basic(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
doas_exe = 'doas'
|
||||
doas_flags = '-n'
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_method': 'community.general.doas',
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s %s -c 'echo %s; %s'""" % (doas_exe, doas_flags, default_exe, success,
|
||||
default_cmd), cmd) is not None)
|
||||
|
||||
|
||||
def test_doas(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
doas_exe = 'doas'
|
||||
doas_flags = '-n'
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.doas',
|
||||
'become_flags': doas_flags,
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s -u %s %s -c 'echo %s; %s'""" % (doas_exe, doas_flags, task['become_user'], default_exe, success,
|
||||
default_cmd), cmd) is not None)
|
||||
|
||||
|
||||
def test_doas_varoptions(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
doas_exe = 'doas'
|
||||
doas_flags = '-n'
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.doas',
|
||||
'become_flags': 'xxx',
|
||||
}
|
||||
var_options = {
|
||||
'ansible_become_user': 'bar',
|
||||
'ansible_become_flags': doas_flags,
|
||||
}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s -u %s %s -c 'echo %s; %s'""" % (doas_exe, doas_flags, var_options['ansible_become_user'], default_exe, success,
|
||||
default_cmd), cmd) is not None)
|
||||
@@ -0,0 +1,95 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible import context
|
||||
|
||||
from .helper import call_become_plugin
|
||||
|
||||
|
||||
def test_dzdo_basic(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
dzdo_exe = 'dzdo'
|
||||
dzdo_flags = '-H -S -n'
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_method': 'community.general.dzdo',
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s %s -c 'echo %s; %s'""" % (dzdo_exe, dzdo_flags, default_exe,
|
||||
success, default_cmd), cmd) is not None
|
||||
|
||||
|
||||
def test_dzdo(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
dzdo_exe = 'dzdo'
|
||||
dzdo_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.dzdo',
|
||||
'become_flags': dzdo_flags,
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, dzdo_flags, task['become_user'], default_exe,
|
||||
success, default_cmd), cmd) is not None
|
||||
task['become_pass'] = 'testpass'
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s -p %s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, dzdo_flags, r'\"\[dzdo via ansible, key=.+?\] password:\"',
|
||||
task['become_user'], default_exe, success, default_cmd), cmd) is not None
|
||||
|
||||
|
||||
def test_dzdo_varoptions(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
dzdo_exe = 'dzdo'
|
||||
dzdo_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.dzdo',
|
||||
'become_flags': 'xxx',
|
||||
}
|
||||
var_options = {
|
||||
'ansible_become_user': 'bar',
|
||||
'ansible_become_flags': dzdo_flags,
|
||||
}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, dzdo_flags, var_options['ansible_become_user'], default_exe,
|
||||
success, default_cmd), cmd) is not None
|
||||
var_options['ansible_become_pass'] = 'testpass'
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s -p %s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, dzdo_flags, r'\"\[dzdo via ansible, key=.+?\] password:\"',
|
||||
var_options['ansible_become_user'], default_exe, success, default_cmd), cmd) is not None
|
||||
@@ -0,0 +1,86 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible import context
|
||||
|
||||
from .helper import call_become_plugin
|
||||
|
||||
|
||||
def test_ksu_basic(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
ksu_exe = 'ksu'
|
||||
ksu_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.ksu',
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s %s -e %s -c 'echo %s; %s'""" % (ksu_exe, task['become_user'], ksu_flags,
|
||||
default_exe, success, default_cmd), cmd) is not None)
|
||||
|
||||
|
||||
def test_ksu(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
ksu_exe = 'ksu'
|
||||
ksu_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.ksu',
|
||||
'become_flags': ksu_flags,
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s %s -e %s -c 'echo %s; %s'""" % (ksu_exe, task['become_user'], ksu_flags,
|
||||
default_exe, success, default_cmd), cmd) is not None)
|
||||
|
||||
|
||||
def test_ksu_varoptions(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
ksu_exe = 'ksu'
|
||||
ksu_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.ksu',
|
||||
'become_flags': 'xxx',
|
||||
}
|
||||
var_options = {
|
||||
'ansible_become_user': 'bar',
|
||||
'ansible_become_flags': ksu_flags,
|
||||
}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s %s -e %s -c 'echo %s; %s'""" % (ksu_exe, var_options['ansible_become_user'], ksu_flags,
|
||||
default_exe, success, default_cmd), cmd) is not None)
|
||||
@@ -0,0 +1,85 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible import context
|
||||
|
||||
from .helper import call_become_plugin
|
||||
|
||||
|
||||
def test_pbrun_basic(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
pbrun_exe = 'pbrun'
|
||||
pbrun_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_method': 'community.general.pbrun',
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s 'echo %s; %s'""" % (pbrun_exe, pbrun_flags,
|
||||
success, default_cmd), cmd) is not None
|
||||
|
||||
|
||||
def test_pbrun(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
pbrun_exe = 'pbrun'
|
||||
pbrun_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.pbrun',
|
||||
'become_flags': pbrun_flags,
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s -u %s 'echo %s; %s'""" % (pbrun_exe, pbrun_flags, task['become_user'],
|
||||
success, default_cmd), cmd) is not None
|
||||
|
||||
|
||||
def test_pbrun_var_varoptions(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
pbrun_exe = 'pbrun'
|
||||
pbrun_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.pbrun',
|
||||
'become_flags': 'xxx',
|
||||
}
|
||||
var_options = {
|
||||
'ansible_become_user': 'bar',
|
||||
'ansible_become_flags': pbrun_flags,
|
||||
}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s -u %s 'echo %s; %s'""" % (pbrun_exe, pbrun_flags, var_options['ansible_become_user'],
|
||||
success, default_cmd), cmd) is not None
|
||||
@@ -0,0 +1,82 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible import context
|
||||
|
||||
from .helper import call_become_plugin
|
||||
|
||||
|
||||
def test_pfexec_basic(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
pfexec_exe = 'pfexec'
|
||||
pfexec_flags = '-H -S -n'
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_method': 'community.general.pfexec',
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s 'echo %s; %s'""" % (pfexec_exe, pfexec_flags, success, default_cmd), cmd) is not None
|
||||
|
||||
|
||||
def test_pfexec(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
pfexec_exe = 'pfexec'
|
||||
pfexec_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.pfexec',
|
||||
'become_flags': pfexec_flags,
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s 'echo %s; %s'""" % (pfexec_exe, pfexec_flags, success, default_cmd), cmd) is not None
|
||||
|
||||
|
||||
def test_pfexec_varoptions(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
pfexec_exe = 'pfexec'
|
||||
pfexec_flags = ''
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.pfexec',
|
||||
'become_flags': 'xxx',
|
||||
}
|
||||
var_options = {
|
||||
'ansible_become_user': 'bar',
|
||||
'ansible_become_flags': pfexec_flags,
|
||||
}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert re.match("""%s %s 'echo %s; %s'""" % (pfexec_exe, pfexec_flags, success, default_cmd), cmd) is not None
|
||||
@@ -0,0 +1,51 @@
|
||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright (c) 2021 Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible import context
|
||||
|
||||
from .helper import call_become_plugin
|
||||
|
||||
|
||||
def test_sudosu(mocker, parser, reset_cli_args):
|
||||
options = parser.parse_args([])
|
||||
context._init_global_context(options)
|
||||
|
||||
default_cmd = "/bin/foo"
|
||||
default_exe = "/bin/bash"
|
||||
sudo_exe = 'sudo'
|
||||
sudo_flags = '-H -s -n'
|
||||
|
||||
success = 'BECOME-SUCCESS-.+?'
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.sudosu',
|
||||
'become_flags': sudo_flags,
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s su -l %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags, task['become_user'],
|
||||
default_exe, success, default_cmd), cmd) is not None)
|
||||
|
||||
task = {
|
||||
'become_user': 'foo',
|
||||
'become_method': 'community.general.sudosu',
|
||||
'become_flags': sudo_flags,
|
||||
'become_pass': 'testpass',
|
||||
}
|
||||
var_options = {}
|
||||
cmd = call_become_plugin(task, var_options, cmd=default_cmd, executable=default_exe)
|
||||
print(cmd)
|
||||
assert (re.match("""%s %s -p "%s" su -l %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n', ''),
|
||||
r"\[sudo via ansible, key=.+?\] password:", task['become_user'],
|
||||
default_exe, success, default_cmd), cmd) is not None)
|
||||
collections/ansible_collections/community/general/tests/unit/plugins/cache/test_memcached.py (new vendored file, 18 lines)
@@ -0,0 +1,18 @@
# Copyright (c) 2012-2015, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest

pytest.importorskip('memcache')

from ansible.plugins.loader import cache_loader
from ansible_collections.community.general.plugins.cache.memcached import CacheModule as MemcachedCache


def test_memcached_cachemodule():
    assert isinstance(cache_loader.get('community.general.memcached'), MemcachedCache)
collections/ansible_collections/community/general/tests/unit/plugins/cache/test_redis.py (new vendored file, 27 lines)
@@ -0,0 +1,27 @@
# Copyright (c) 2012-2015, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest

pytest.importorskip('redis')

from ansible import constants as C
from ansible.plugins.loader import cache_loader
from ansible_collections.community.general.plugins.cache.redis import CacheModule as RedisCache


def test_redis_cachemodule():
    # The _uri option is required for the redis plugin
    connection = '127.0.0.1:6379:1'
    assert isinstance(cache_loader.get('community.general.redis', **{'_uri': connection}), RedisCache)


def test_redis_cachemodule_ipv6():
    # The _uri option is required for the redis plugin
    connection = '[::1]:6379:1'
    assert isinstance(cache_loader.get('community.general.redis', **{'_uri': connection}), RedisCache)
@@ -0,0 +1,127 @@
|
||||
# Copyright (c) 2021, Victor Martinez <VictorMartinezRubio@gmail.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.playbook.task import Task
|
||||
from ansible.executor.task_result import TaskResult
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock, Mock
|
||||
from ansible_collections.community.general.plugins.callback.elastic import ElasticSource, TaskData
|
||||
from collections import OrderedDict
|
||||
import sys
|
||||
|
||||
ELASTIC_MINIMUM_PYTHON_VERSION = (3, 6)
|
||||
|
||||
|
||||
class TestOpentelemetry(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.callback.elastic.socket')
|
||||
def setUp(self, mock_socket):
|
||||
if sys.version_info < ELASTIC_MINIMUM_PYTHON_VERSION:
|
||||
self.skipTest("Python %s+ is needed for Elastic" %
|
||||
",".join(map(str, ELASTIC_MINIMUM_PYTHON_VERSION)))
|
||||
mock_socket.gethostname.return_value = 'my-host'
|
||||
mock_socket.gethostbyname.return_value = '1.2.3.4'
|
||||
self.elastic = ElasticSource(display=None)
|
||||
self.task_fields = {'args': {}}
|
||||
self.mock_host = Mock('MockHost')
|
||||
self.mock_host.name = 'myhost'
|
||||
self.mock_host._uuid = 'myhost_uuid'
|
||||
self.mock_task = Task()
|
||||
self.mock_task.action = 'myaction'
|
||||
self.mock_task.no_log = False
|
||||
self.mock_task._role = 'myrole'
|
||||
self.mock_task._uuid = 'myuuid'
|
||||
self.mock_task.args = {}
|
||||
self.mock_task.get_name = MagicMock(return_value='mytask')
|
||||
self.mock_task.get_path = MagicMock(return_value='/mypath')
|
||||
self.my_task = TaskData('myuuid', 'mytask', '/mypath', 'myplay', 'myaction', '')
|
||||
self.my_task_result = TaskResult(host=self.mock_host, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
|
||||
def test_start_task(self):
|
||||
tasks_data = OrderedDict()
|
||||
|
||||
self.elastic.start_task(
|
||||
tasks_data,
|
||||
False,
|
||||
'myplay',
|
||||
self.mock_task
|
||||
)
|
||||
|
||||
task_data = tasks_data['myuuid']
|
||||
self.assertEqual(task_data.uuid, 'myuuid')
|
||||
self.assertEqual(task_data.name, 'mytask')
|
||||
self.assertEqual(task_data.path, '/mypath')
|
||||
self.assertEqual(task_data.play, 'myplay')
|
||||
self.assertEqual(task_data.action, 'myaction')
|
||||
self.assertEqual(task_data.args, '')
|
||||
|
||||
def test_finish_task_with_a_host_match(self):
|
||||
tasks_data = OrderedDict()
|
||||
tasks_data['myuuid'] = self.my_task
|
||||
|
||||
self.elastic.finish_task(
|
||||
tasks_data,
|
||||
'ok',
|
||||
self.my_task_result
|
||||
)
|
||||
|
||||
task_data = tasks_data['myuuid']
|
||||
host_data = task_data.host_data['myhost_uuid']
|
||||
self.assertEqual(host_data.uuid, 'myhost_uuid')
|
||||
self.assertEqual(host_data.name, 'myhost')
|
||||
self.assertEqual(host_data.status, 'ok')
|
||||
|
||||
def test_finish_task_without_a_host_match(self):
|
||||
result = TaskResult(host=None, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
tasks_data = OrderedDict()
|
||||
tasks_data['myuuid'] = self.my_task
|
||||
|
||||
self.elastic.finish_task(
|
||||
tasks_data,
|
||||
'ok',
|
||||
result
|
||||
)
|
||||
|
||||
task_data = tasks_data['myuuid']
|
||||
host_data = task_data.host_data['include']
|
||||
self.assertEqual(host_data.uuid, 'include')
|
||||
self.assertEqual(host_data.name, 'include')
|
||||
self.assertEqual(host_data.status, 'ok')
|
||||
|
||||
def test_get_error_message(self):
|
||||
test_cases = (
|
||||
('my-exception', 'my-msg', None, 'my-exception'),
|
||||
(None, 'my-msg', None, 'my-msg'),
|
||||
(None, None, None, 'failed'),
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.elastic.get_error_message(generate_test_data(tc[0], tc[1], tc[2]))
|
||||
self.assertEqual(result, tc[3])
|
||||
|
||||
def test_enrich_error_message(self):
|
||||
test_cases = (
|
||||
('my-exception', 'my-msg', 'my-stderr', 'message: "my-msg"\nexception: "my-exception"\nstderr: "my-stderr"'),
|
||||
('my-exception', None, 'my-stderr', 'message: "failed"\nexception: "my-exception"\nstderr: "my-stderr"'),
|
||||
(None, 'my-msg', 'my-stderr', 'message: "my-msg"\nexception: "None"\nstderr: "my-stderr"'),
|
||||
('my-exception', 'my-msg', None, 'message: "my-msg"\nexception: "my-exception"\nstderr: "None"'),
|
||||
('my-exception', 'my-msg', '\nline1\nline2', 'message: "my-msg"\nexception: "my-exception"\nstderr: "\nline1\nline2"')
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.elastic.enrich_error_message(generate_test_data(tc[0], tc[1], tc[2]))
|
||||
self.assertEqual(result, tc[3])
|
||||
|
||||
|
||||
def generate_test_data(exception=None, msg=None, stderr=None):
|
||||
res_data = OrderedDict()
|
||||
if exception:
|
||||
res_data['exception'] = exception
|
||||
if msg:
|
||||
res_data['msg'] = msg
|
||||
if stderr:
|
||||
res_data['stderr'] = stderr
|
||||
return res_data
|
||||
@@ -0,0 +1,66 @@
|
||||
# Copyright (c) Ansible project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.executor.task_result import TaskResult
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch, call, MagicMock, Mock
|
||||
from ansible_collections.community.general.plugins.callback.loganalytics import AzureLogAnalyticsSource
|
||||
from datetime import datetime
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class TestAzureLogAnalytics(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.callback.loganalytics.socket')
|
||||
def setUp(self, mock_socket):
|
||||
mock_socket.gethostname.return_value = 'my-host'
|
||||
mock_socket.gethostbyname.return_value = '1.2.3.4'
|
||||
self.loganalytics = AzureLogAnalyticsSource()
|
||||
self.mock_task = Mock('MockTask')
|
||||
self.mock_task._role = 'myrole'
|
||||
self.mock_task._uuid = 'myuuid'
|
||||
self.task_fields = {'args': {}}
|
||||
self.mock_host = Mock('MockHost')
|
||||
self.mock_host.name = 'myhost'
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.callback.loganalytics.datetime')
|
||||
@patch('ansible_collections.community.general.plugins.callback.loganalytics.open_url')
|
||||
def test_overall(self, open_url_mock, mock_datetime):
|
||||
mock_datetime.utcnow.return_value = datetime(2020, 12, 1)
|
||||
result = TaskResult(host=self.mock_host, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
|
||||
self.loganalytics.send_event(workspace_id='01234567-0123-0123-0123-01234567890a',
|
||||
shared_key='dZD0kCbKl3ehZG6LHFMuhtE0yHiFCmetzFMc2u+roXIUQuatqU924SsAAAAPemhjbGlAemhjbGktTUJQAQIDBA==',
|
||||
state='OK',
|
||||
result=result,
|
||||
runtime=100)
|
||||
|
||||
args, kwargs = open_url_mock.call_args
|
||||
sent_data = json.loads(args[1])
|
||||
|
||||
self.assertEqual(sent_data['event']['timestamp'], 'Tue, 01 Dec 2020 00:00:00 GMT')
|
||||
self.assertEqual(sent_data['event']['host'], 'my-host')
|
||||
self.assertEqual(sent_data['event']['uuid'], 'myuuid')
|
||||
self.assertEqual(args[0], 'https://01234567-0123-0123-0123-01234567890a.ods.opinsights.azure.com/api/logs?api-version=2016-04-01')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.callback.loganalytics.datetime')
|
||||
@patch('ansible_collections.community.general.plugins.callback.loganalytics.open_url')
|
||||
def test_auth_headers(self, open_url_mock, mock_datetime):
|
||||
mock_datetime.utcnow.return_value = datetime(2020, 12, 1)
|
||||
result = TaskResult(host=self.mock_host, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
|
||||
self.loganalytics.send_event(workspace_id='01234567-0123-0123-0123-01234567890a',
|
||||
shared_key='dZD0kCbKl3ehZG6LHFMuhtE0yHiFCmetzFMc2u+roXIUQuatqU924SsAAAAPemhjbGlAemhjbGktTUJQAQIDBA==',
|
||||
state='OK',
|
||||
result=result,
|
||||
runtime=100)
|
||||
|
||||
args, kwargs = open_url_mock.call_args
|
||||
headers = kwargs['headers']
|
||||
|
||||
self.assertRegexpMatches(headers['Authorization'], r'^SharedKey 01234567-0123-0123-0123-01234567890a:.*=$')
|
||||
self.assertEqual(headers['Log-Type'], 'ansible_playbook')
|
||||
@@ -0,0 +1,212 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2021, Victor Martinez <VictorMartinezRubio@gmail.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.playbook.task import Task
|
||||
from ansible.executor.task_result import TaskResult
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock, Mock
|
||||
from ansible_collections.community.general.plugins.callback.opentelemetry import OpenTelemetrySource, TaskData, CallbackModule
|
||||
from collections import OrderedDict
|
||||
import sys
|
||||
|
||||
OPENTELEMETRY_MINIMUM_PYTHON_VERSION = (3, 7)
|
||||
|
||||
|
||||
class TestOpentelemetry(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.callback.opentelemetry.socket')
|
||||
def setUp(self, mock_socket):
|
||||
# TODO: this python version validation won't be needed as long as the _time_ns call is mocked.
|
||||
if sys.version_info < OPENTELEMETRY_MINIMUM_PYTHON_VERSION:
|
||||
self.skipTest("Python %s+ is needed for OpenTelemetry" %
|
||||
",".join(map(str, OPENTELEMETRY_MINIMUM_PYTHON_VERSION)))
|
||||
|
||||
mock_socket.gethostname.return_value = 'my-host'
|
||||
mock_socket.gethostbyname.return_value = '1.2.3.4'
|
||||
self.opentelemetry = OpenTelemetrySource(display=None)
|
||||
self.task_fields = {'args': {}}
|
||||
self.mock_host = Mock('MockHost')
|
||||
self.mock_host.name = 'myhost'
|
||||
self.mock_host._uuid = 'myhost_uuid'
|
||||
self.mock_task = Task()
|
||||
self.mock_task.action = 'myaction'
|
||||
self.mock_task.no_log = False
|
||||
self.mock_task._role = 'myrole'
|
||||
self.mock_task._uuid = 'myuuid'
|
||||
self.mock_task.args = {}
|
||||
self.mock_task.get_name = MagicMock(return_value='mytask')
|
||||
self.mock_task.get_path = MagicMock(return_value='/mypath')
|
||||
self.my_task = TaskData('myuuid', 'mytask', '/mypath', 'myplay', 'myaction', '')
|
||||
self.my_task_result = TaskResult(host=self.mock_host, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
|
||||
def test_start_task(self):
|
||||
tasks_data = OrderedDict()
|
||||
|
||||
self.opentelemetry.start_task(
|
||||
tasks_data,
|
||||
False,
|
||||
'myplay',
|
||||
self.mock_task
|
||||
)
|
||||
|
||||
task_data = tasks_data['myuuid']
|
||||
self.assertEqual(task_data.uuid, 'myuuid')
|
||||
self.assertEqual(task_data.name, 'mytask')
|
||||
self.assertEqual(task_data.path, '/mypath')
|
||||
self.assertEqual(task_data.play, 'myplay')
|
||||
self.assertEqual(task_data.action, 'myaction')
|
||||
self.assertEqual(task_data.args, {})
|
||||
|
||||
def test_finish_task_with_a_host_match(self):
|
||||
tasks_data = OrderedDict()
|
||||
tasks_data['myuuid'] = self.my_task
|
||||
|
||||
self.opentelemetry.finish_task(
|
||||
tasks_data,
|
||||
'ok',
|
||||
self.my_task_result,
|
||||
""
|
||||
)
|
||||
|
||||
task_data = tasks_data['myuuid']
|
||||
host_data = task_data.host_data['myhost_uuid']
|
||||
self.assertEqual(host_data.uuid, 'myhost_uuid')
|
||||
self.assertEqual(host_data.name, 'myhost')
|
||||
self.assertEqual(host_data.status, 'ok')
|
||||
|
||||
def test_finish_task_without_a_host_match(self):
|
||||
result = TaskResult(host=None, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
tasks_data = OrderedDict()
|
||||
tasks_data['myuuid'] = self.my_task
|
||||
|
||||
self.opentelemetry.finish_task(
|
||||
tasks_data,
|
||||
'ok',
|
||||
result,
|
||||
""
|
||||
)
|
||||
|
||||
task_data = tasks_data['myuuid']
|
||||
host_data = task_data.host_data['include']
|
||||
self.assertEqual(host_data.uuid, 'include')
|
||||
self.assertEqual(host_data.name, 'include')
|
||||
self.assertEqual(host_data.status, 'ok')
|
||||
self.assertEqual(self.opentelemetry.ansible_version, None)
|
||||
|
||||
def test_finish_task_include_with_ansible_version(self):
|
||||
task_fields = {'args': {'_ansible_version': '1.2.3'}}
|
||||
result = TaskResult(host=None, task=self.mock_task, return_data={}, task_fields=task_fields)
|
||||
tasks_data = OrderedDict()
|
||||
tasks_data['myuuid'] = self.my_task
|
||||
|
||||
self.opentelemetry.finish_task(
|
||||
tasks_data,
|
||||
'ok',
|
||||
result,
|
||||
""
|
||||
)
|
||||
|
||||
self.assertEqual(self.opentelemetry.ansible_version, '1.2.3')
|
||||
|
||||
def test_get_error_message(self):
|
||||
test_cases = (
|
||||
('my-exception', 'my-msg', None, 'my-exception'),
|
||||
(None, 'my-msg', None, 'my-msg'),
|
||||
(None, None, None, 'failed'),
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.opentelemetry.get_error_message(generate_test_data(tc[0], tc[1], tc[2]))
|
||||
self.assertEqual(result, tc[3])
|
||||
|
||||
def test_get_error_message_from_results(self):
|
||||
test_cases = (
|
||||
('my-exception', 'my-msg', None, False, None),
|
||||
(None, 'my-msg', None, False, None),
|
||||
(None, None, None, False, None),
|
||||
('my-exception', 'my-msg', None, True, 'shell(none) - my-exception'),
|
||||
(None, 'my-msg', None, True, 'shell(none) - my-msg'),
|
||||
(None, None, None, True, 'shell(none) - failed'),
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.opentelemetry.get_error_message_from_results([generate_test_data(tc[0], tc[1], tc[2], tc[3])], 'shell')
|
||||
self.assertEqual(result, tc[4])
|
||||
|
||||
def test_enrich_error_message(self):
|
||||
test_cases = (
|
||||
('my-exception', 'my-msg', 'my-stderr', 'message: "my-msg"\nexception: "my-exception"\nstderr: "my-stderr"'),
|
||||
('my-exception', None, 'my-stderr', 'message: "failed"\nexception: "my-exception"\nstderr: "my-stderr"'),
|
||||
(None, 'my-msg', 'my-stderr', 'message: "my-msg"\nexception: "None"\nstderr: "my-stderr"'),
|
||||
('my-exception', 'my-msg', None, 'message: "my-msg"\nexception: "my-exception"\nstderr: "None"'),
|
||||
('my-exception', 'my-msg', '\nline1\nline2', 'message: "my-msg"\nexception: "my-exception"\nstderr: "\nline1\nline2"')
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.opentelemetry.enrich_error_message(generate_test_data(tc[0], tc[1], tc[2]))
|
||||
self.assertEqual(result, tc[3])
|
||||
|
||||
def test_enrich_error_message_from_results(self):
|
||||
test_cases = (
|
||||
('my-exception', 'my-msg', 'my-stderr', False, ''),
|
||||
('my-exception', None, 'my-stderr', False, ''),
|
||||
(None, 'my-msg', 'my-stderr', False, ''),
|
||||
('my-exception', 'my-msg', None, False, ''),
|
||||
('my-exception', 'my-msg', '\nline1\nline2', False, ''),
|
||||
('my-exception', 'my-msg', 'my-stderr', True, 'shell(none) - message: "my-msg"\nexception: "my-exception"\nstderr: "my-stderr"\n'),
|
||||
('my-exception', None, 'my-stderr', True, 'shell(none) - message: "failed"\nexception: "my-exception"\nstderr: "my-stderr"\n'),
|
||||
(None, 'my-msg', 'my-stderr', True, 'shell(none) - message: "my-msg"\nexception: "None"\nstderr: "my-stderr"\n'),
|
||||
('my-exception', 'my-msg', None, True, 'shell(none) - message: "my-msg"\nexception: "my-exception"\nstderr: "None"\n'),
|
||||
('my-exception', 'my-msg', '\nline1\nline2', True, 'shell(none) - message: "my-msg"\nexception: "my-exception"\nstderr: "\nline1\nline2"\n')
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.opentelemetry.enrich_error_message_from_results([generate_test_data(tc[0], tc[1], tc[2], tc[3])], 'shell')
|
||||
self.assertEqual(result, tc[4])
|
||||
|
||||
def test_url_from_args(self):
|
||||
test_cases = (
|
||||
({}, ""),
|
||||
({'url': 'my-url'}, 'my-url'),
|
||||
({'url': 'my-url', 'api_url': 'my-api_url'}, 'my-url'),
|
||||
({'api_url': 'my-api_url'}, 'my-api_url'),
|
||||
({'api_url': 'my-api_url', 'chart_repo_url': 'my-chart_repo_url'}, 'my-api_url')
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.opentelemetry.url_from_args(tc[0])
|
||||
self.assertEqual(result, tc[1])
|
||||
|
||||
def test_parse_and_redact_url_if_possible(self):
|
||||
test_cases = (
|
||||
({}, None),
|
||||
({'url': 'wrong'}, None),
|
||||
({'url': 'https://my-url'}, 'https://my-url'),
|
||||
({'url': 'https://user:pass@my-url'}, 'https://my-url'),
|
||||
({'url': 'https://my-url:{{ my_port }}'}, 'https://my-url:{{ my_port }}'),
|
||||
({'url': 'https://{{ my_hostname }}:{{ my_port }}'}, None),
|
||||
({'url': '{{my_schema}}{{ my_hostname }}:{{ my_port }}'}, None)
|
||||
)
|
||||
|
||||
for tc in test_cases:
|
||||
result = self.opentelemetry.parse_and_redact_url_if_possible(tc[0])
|
||||
if tc[1]:
|
||||
self.assertEqual(result.geturl(), tc[1])
|
||||
else:
|
||||
self.assertEqual(result, tc[1])
|
||||
|
||||
|
||||
def generate_test_data(exception=None, msg=None, stderr=None, failed=False):
|
||||
res_data = OrderedDict()
|
||||
if exception:
|
||||
res_data['exception'] = exception
|
||||
if msg:
|
||||
res_data['msg'] = msg
|
||||
if stderr:
|
||||
res_data['stderr'] = stderr
|
||||
res_data['failed'] = failed
|
||||
return res_data
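# Illustrative sketch (not part of the original tests): for example,
# generate_test_data('my-exception', 'my-msg') returns an OrderedDict of the form
#   OrderedDict([('exception', 'my-exception'), ('msg', 'my-msg'), ('failed', False)])
# which is the shape consumed by get_error_message() and enrich_error_message() in the cases above.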
|
||||
@@ -0,0 +1,64 @@
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.executor.task_result import TaskResult
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch, call, MagicMock, Mock
|
||||
from ansible_collections.community.general.plugins.callback.splunk import SplunkHTTPCollectorSource
|
||||
from datetime import datetime
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class TestSplunkClient(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.callback.splunk.socket')
|
||||
def setUp(self, mock_socket):
|
||||
mock_socket.gethostname.return_value = 'my-host'
|
||||
mock_socket.gethostbyname.return_value = '1.2.3.4'
|
||||
self.splunk = SplunkHTTPCollectorSource()
|
||||
self.mock_task = Mock('MockTask')
|
||||
self.mock_task._role = 'myrole'
|
||||
self.mock_task._uuid = 'myuuid'
|
||||
self.task_fields = {'args': {}}
|
||||
self.mock_host = Mock('MockHost')
|
||||
self.mock_host.name = 'myhost'
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.callback.splunk.datetime')
|
||||
@patch('ansible_collections.community.general.plugins.callback.splunk.open_url')
|
||||
def test_timestamp_with_milliseconds(self, open_url_mock, mock_datetime):
|
||||
mock_datetime.utcnow.return_value = datetime(2020, 12, 1)
|
||||
result = TaskResult(host=self.mock_host, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
|
||||
self.splunk.send_event(
|
||||
url='endpoint', authtoken='token', validate_certs=False, include_milliseconds=True,
|
||||
batch="abcefghi-1234-5678-9012-abcdefghijkl", state='OK', result=result, runtime=100
|
||||
)
|
||||
|
||||
args, kwargs = open_url_mock.call_args
|
||||
sent_data = json.loads(args[1])
|
||||
|
||||
self.assertEqual(sent_data['event']['timestamp'], '2020-12-01 00:00:00.000000 +0000')
|
||||
self.assertEqual(sent_data['event']['host'], 'my-host')
|
||||
self.assertEqual(sent_data['event']['ip_address'], '1.2.3.4')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.callback.splunk.datetime')
|
||||
@patch('ansible_collections.community.general.plugins.callback.splunk.open_url')
|
||||
def test_timestamp_without_milliseconds(self, open_url_mock, mock_datetime):
|
||||
mock_datetime.utcnow.return_value = datetime(2020, 12, 1)
|
||||
result = TaskResult(host=self.mock_host, task=self.mock_task, return_data={}, task_fields=self.task_fields)
|
||||
|
||||
self.splunk.send_event(
|
||||
url='endpoint', authtoken='token', validate_certs=False, include_milliseconds=False,
|
||||
batch="abcefghi-1234-5678-9012-abcdefghijkl", state='OK', result=result, runtime=100
|
||||
)
|
||||
|
||||
args, kwargs = open_url_mock.call_args
|
||||
sent_data = json.loads(args[1])
|
||||
|
||||
self.assertEqual(sent_data['event']['timestamp'], '2020-12-01 00:00:00 +0000')
|
||||
self.assertEqual(sent_data['event']['host'], 'my-host')
|
||||
self.assertEqual(sent_data['event']['ip_address'], '1.2.3.4')
|
||||
@@ -0,0 +1,26 @@
|
||||
# Copyright (c) 2020 Red Hat Inc.
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from io import StringIO
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.plugins.connection import lxc
|
||||
from ansible.playbook.play_context import PlayContext
|
||||
|
||||
|
||||
class TestLXCConnectionClass(unittest.TestCase):
|
||||
|
||||
def test_lxc_connection_module(self):
|
||||
play_context = PlayContext()
|
||||
play_context.prompt = (
|
||||
'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
|
||||
)
|
||||
in_stream = StringIO()
|
||||
|
||||
self.assertIsInstance(lxc.Connection(play_context, in_stream), lxc.Connection)
|
||||
@@ -0,0 +1,17 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022, Julien Riou <julien@riou.xyz>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible_collections.community.general.plugins.filter.crc32 import crc32s
|
||||
|
||||
|
||||
class TestFilterCrc32(unittest.TestCase):
|
||||
|
||||
def test_checksum(self):
|
||||
self.assertEqual(crc32s('test'), 'd87f7e0c')
|
||||
@@ -0,0 +1,174 @@
|
||||
{
|
||||
"instances":{
|
||||
"vlantest":{
|
||||
"instances":{
|
||||
"metadata":{
|
||||
"config":{
|
||||
"image.os":"ubuntu",
|
||||
"image.release":"focal",
|
||||
"image.version":"20.04",
|
||||
"volatile.last_state.power":"RUNNING"
|
||||
},
|
||||
"devices":{
|
||||
"eth0":{
|
||||
"name":"eth0",
|
||||
"network":"my-macvlan",
|
||||
"type":"nic"
|
||||
}
|
||||
},
|
||||
"profiles":[
|
||||
"default"
|
||||
],
|
||||
"expanded_devices":{
|
||||
"eth0":{
|
||||
"name":"eth0",
|
||||
"network":"my-macvlan",
|
||||
"type":"nic"
|
||||
}
|
||||
},
|
||||
"name":"vlantest",
|
||||
"status":"Running",
|
||||
"location":"Berlin"
|
||||
}
|
||||
},
|
||||
"state":{
|
||||
"metadata":{
|
||||
"status":"Running",
|
||||
"network":{
|
||||
"eth0":{
|
||||
"addresses":[
|
||||
{
|
||||
"family":"inet",
|
||||
"address":"10.98.143.199",
|
||||
"netmask":"24",
|
||||
"scope":"global"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"fd42:bd00:7b11:2167:216:3eff:fe78:2ef3",
|
||||
"netmask":"64",
|
||||
"scope":"global"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"fe80::216:3eff:fed3:7af3",
|
||||
"netmask":"64",
|
||||
"scope":"link"
|
||||
}
|
||||
]
|
||||
},
|
||||
"lo":{
|
||||
"addresses":[
|
||||
{
|
||||
"family":"inet",
|
||||
"address":"127.0.0.1",
|
||||
"netmask":"8",
|
||||
"scope":"local"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"::1",
|
||||
"netmask":"128",
|
||||
"scope":"local"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"networks":{
|
||||
"my-macvlan":{
|
||||
"state":{
|
||||
"metadata":{
|
||||
"addresses":[
|
||||
{
|
||||
"family":"inet",
|
||||
"address":"192.168.178.199",
|
||||
"netmask":"24",
|
||||
"scope":"global"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"fd42:bd00:7b11:2167:216:3eff:fe78:2ef3",
|
||||
"netmask":"64",
|
||||
"scope":"global"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"fe80::216:3eff:fed3:7af3",
|
||||
"netmask":"64",
|
||||
"scope":"link"
|
||||
}
|
||||
],
|
||||
"vlan":{
|
||||
"lower_device":"eno1",
|
||||
"vid":666
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"lo":{
|
||||
"state":{
|
||||
"metadata":{
|
||||
"addresses":[
|
||||
{
|
||||
"family":"inet",
|
||||
"address":"127.0.0.1",
|
||||
"netmask":"8",
|
||||
"scope":"local"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"::1",
|
||||
"netmask":"128",
|
||||
"scope":"local"
|
||||
}
|
||||
],
|
||||
"vlan":null
|
||||
}
|
||||
}
|
||||
},
|
||||
"eno1":{
|
||||
"state":{
|
||||
"metadata":{
|
||||
"addresses":[
|
||||
{
|
||||
"family":"inet",
|
||||
"address":"192.168.178.126",
|
||||
"netmask":"24",
|
||||
"scope":"global"
|
||||
},
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"fe80::3c0b:7da9:3cc7:9e40",
|
||||
"netmask":"64",
|
||||
"scope":"link"
|
||||
}
|
||||
],
|
||||
"vlan":null
|
||||
}
|
||||
}
|
||||
},
|
||||
"eno1.666":{
|
||||
"state":{
|
||||
"metadata":{
|
||||
"addresses":[
|
||||
{
|
||||
"family":"inet6",
|
||||
"address":"fe80::de4a:3eff:fe8d:f356",
|
||||
"netmask":"64",
|
||||
"scope":"link"
|
||||
}
|
||||
],
|
||||
"vlan":{
|
||||
"lower_device":"eno1",
|
||||
"vid":666
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,222 @@
|
||||
[
|
||||
{
|
||||
"DEPLOY_ID": "bcfec9d9-c0d0-4523-b5e7-62993947e94c",
|
||||
"ETIME": 0,
|
||||
"GID": 105,
|
||||
"GNAME": "SW",
|
||||
"HISTORY_RECORDS": {},
|
||||
"ID": 451,
|
||||
"LAST_POLL": 0,
|
||||
"LCM_STATE": 3,
|
||||
"MONITORING": {},
|
||||
"NAME": "terraform_demo_00",
|
||||
"RESCHED": 0,
|
||||
"STATE": 3,
|
||||
"STIME": 1649886492,
|
||||
"TEMPLATE": {
|
||||
"NIC": [
|
||||
{
|
||||
"AR_ID": "0",
|
||||
"BRIDGE": "mgmt0",
|
||||
"BRIDGE_TYPE": "linux",
|
||||
"CLUSTER_ID": "0",
|
||||
"IP": "192.168.11.248",
|
||||
"MAC": "02:00:c0:a8:2b:bb",
|
||||
"MODEL": "virtio",
|
||||
"NAME": "NIC0",
|
||||
"NETWORK": "Infrastructure",
|
||||
"NETWORK_ID": "0",
|
||||
"NIC_ID": "0",
|
||||
"SECURITY_GROUPS": "0,101",
|
||||
"TARGET": "one-453-0",
|
||||
"VLAN_ID": "12",
|
||||
"VN_MAD": "802.1Q"
|
||||
}
|
||||
],
|
||||
"NIC_DEFAULT": {
|
||||
"MODEL": "virtio"
|
||||
},
|
||||
"TEMPLATE_ID": "28",
|
||||
"TM_MAD_SYSTEM": "shared",
|
||||
"VCPU": "4",
|
||||
"VMID": "453"
|
||||
},
|
||||
"USER_TEMPLATE": {
|
||||
"GUEST_OS": "linux",
|
||||
"INPUTS_ORDER": "",
|
||||
"LABELS": "foo,bench",
|
||||
"LOGO": "images/logos/linux.png",
|
||||
"MEMORY_UNIT_COST": "MB",
|
||||
"SCHED_REQUIREMENTS": "ARCH=\"x86_64\"",
|
||||
"TGROUP": "bench_clients"
|
||||
}
|
||||
},
|
||||
{
|
||||
"DEPLOY_ID": "25895435-5e3a-4d50-a025-e03a7a463abd",
|
||||
"ETIME": 0,
|
||||
"GID": 105,
|
||||
"GNAME": "SW",
|
||||
"HISTORY_RECORDS": {},
|
||||
"ID": 451,
|
||||
"LAST_POLL": 0,
|
||||
"LCM_STATE": 3,
|
||||
"MONITORING": {},
|
||||
"NAME": "terraform_demo_01",
|
||||
"RESCHED": 0,
|
||||
"STATE": 3,
|
||||
"STIME": 1649886492,
|
||||
"TEMPLATE": {
|
||||
"NIC": [
|
||||
{
|
||||
"AR_ID": "0",
|
||||
"BRIDGE": "mgmt0",
|
||||
"BRIDGE_TYPE": "linux",
|
||||
"CLUSTER_ID": "0",
|
||||
"IP": "192.168.11.241",
|
||||
"MAC": "02:00:c0:a8:4b:bb",
|
||||
"MODEL": "virtio",
|
||||
"NAME": "NIC0",
|
||||
"NETWORK": "Infrastructure",
|
||||
"NETWORK_ID": "0",
|
||||
"NIC_ID": "0",
|
||||
"SECURITY_GROUPS": "0,101",
|
||||
"TARGET": "one-451-0",
|
||||
"VLAN_ID": "12",
|
||||
"VN_MAD": "802.1Q"
|
||||
}
|
||||
],
|
||||
"NIC_DEFAULT": {
|
||||
"MODEL": "virtio"
|
||||
},
|
||||
"TEMPLATE_ID": "28",
|
||||
"TM_MAD_SYSTEM": "shared",
|
||||
"VCPU": "4",
|
||||
"VMID": "451"
|
||||
},
|
||||
"USER_TEMPLATE": {
|
||||
"GUEST_OS": "linux",
|
||||
"INPUTS_ORDER": "",
|
||||
"LABELS": "foo,bench",
|
||||
"LOGO": "images/logos/linux.png",
|
||||
"MEMORY_UNIT_COST": "MB",
|
||||
"SCHED_REQUIREMENTS": "ARCH=\"x86_64\"",
|
||||
"TESTATTR": "testvar",
|
||||
"TGROUP": "bench_clients"
|
||||
}
|
||||
},
|
||||
{
|
||||
"DEPLOY_ID": "2b00c379-3601-45ee-acf5-e7b3ff2b7bca",
|
||||
"ETIME": 0,
|
||||
"GID": 105,
|
||||
"GNAME": "SW",
|
||||
"HISTORY_RECORDS": {},
|
||||
"ID": 451,
|
||||
"LAST_POLL": 0,
|
||||
"LCM_STATE": 3,
|
||||
"MONITORING": {},
|
||||
"NAME": "terraform_demo_srv_00",
|
||||
"RESCHED": 0,
|
||||
"STATE": 3,
|
||||
"STIME": 1649886492,
|
||||
"TEMPLATE": {
|
||||
"NIC": [
|
||||
{
|
||||
"AR_ID": "0",
|
||||
"BRIDGE": "mgmt0",
|
||||
"BRIDGE_TYPE": "linux",
|
||||
"CLUSTER_ID": "0",
|
||||
"IP": "192.168.11.247",
|
||||
"MAC": "02:00:c0:a8:0b:cc",
|
||||
"MODEL": "virtio",
|
||||
"NAME": "NIC0",
|
||||
"NETWORK": "Infrastructure",
|
||||
"NETWORK_ID": "0",
|
||||
"NIC_ID": "0",
|
||||
"SECURITY_GROUPS": "0,101",
|
||||
"TARGET": "one-452-0",
|
||||
"VLAN_ID": "12",
|
||||
"VN_MAD": "802.1Q"
|
||||
}
|
||||
],
|
||||
"NIC_DEFAULT": {
|
||||
"MODEL": "virtio"
|
||||
},
|
||||
"TEMPLATE_ID": "28",
|
||||
"TM_MAD_SYSTEM": "shared",
|
||||
"VCPU": "4",
|
||||
"VMID": "452"
|
||||
},
|
||||
"USER_TEMPLATE": {
|
||||
"GUEST_OS": "linux",
|
||||
"INPUTS_ORDER": "",
|
||||
"LABELS": "serv,bench",
|
||||
"LOGO": "images/logos/linux.png",
|
||||
"MEMORY_UNIT_COST": "MB",
|
||||
"SCHED_REQUIREMENTS": "ARCH=\"x86_64\"",
|
||||
"TGROUP": "bench_server"
|
||||
}
|
||||
},
|
||||
{
|
||||
"DEPLOY_ID": "97037f55-dd2c-4549-8d24-561a6569e870",
|
||||
"ETIME": 0,
|
||||
"GID": 105,
|
||||
"GNAME": "SW",
|
||||
"HISTORY_RECORDS": {},
|
||||
"ID": 311,
|
||||
"LAST_POLL": 0,
|
||||
"LCM_STATE": 3,
|
||||
"MONITORING": {},
|
||||
"NAME": "bs-windows",
|
||||
"RESCHED": 0,
|
||||
"STATE": 3,
|
||||
"STIME": 1648076254,
|
||||
"TEMPLATE": {
|
||||
"NIC": [
|
||||
{
|
||||
"AR_ID": "0",
|
||||
"BRIDGE": "mgmt0",
|
||||
"BRIDGE_TYPE": "linux",
|
||||
"CLUSTER_ID": "0",
|
||||
"IP": "192.168.11.209",
|
||||
"MAC": "02:00:c0:a8:0b:dd",
|
||||
"MODEL": "virtio",
|
||||
"NAME": "NIC0",
|
||||
"NETWORK": "Infrastructure",
|
||||
"NETWORK_ID": "0",
|
||||
"NETWORK_UNAME": "admin",
|
||||
"NIC_ID": "0",
|
||||
"SECURITY_GROUPS": "0,101",
|
||||
"TARGET": "one-311-0",
|
||||
"VLAN_ID": "12",
|
||||
"VN_MAD": "802.1Q"
|
||||
},
|
||||
[
|
||||
"TEMPLATE_ID",
|
||||
"23"
|
||||
],
|
||||
[
|
||||
"TM_MAD_SYSTEM",
|
||||
"shared"
|
||||
],
|
||||
[
|
||||
"VCPU",
|
||||
"4"
|
||||
],
|
||||
[
|
||||
"VMID",
|
||||
"311"
|
||||
]
|
||||
]
|
||||
},
|
||||
"UID": 22,
|
||||
"UNAME": "bsanders",
|
||||
"USER_TEMPLATE": {
|
||||
"GUEST_OS": "windows",
|
||||
"INPUTS_ORDER": "",
|
||||
"LABELS": "serv",
|
||||
"HYPERVISOR": "kvm",
|
||||
"SCHED_REQUIREMENTS": "ARCH=\"x86_64\"",
|
||||
"SET_HOSTNAME": "windows"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,33 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2020 Orion Poplawski <orion@nwra.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible_collections.community.general.plugins.inventory.cobbler import InventoryModule
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def inventory():
|
||||
return InventoryModule()
|
||||
|
||||
|
||||
def test_init_cache(inventory):
|
||||
inventory._init_cache()
|
||||
assert inventory._cache[inventory.cache_key] == {}
|
||||
|
||||
|
||||
def test_verify_file(tmp_path, inventory):
|
||||
file = tmp_path / "foobar.cobbler.yml"
|
||||
file.touch()
|
||||
assert inventory.verify_file(str(file)) is True
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.cobbler.yml') is False
|
||||
@@ -0,0 +1,149 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2021, Cliff Hults <cliff.hlts@gmail.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# The API responses used in these tests are modelled on the Icinga2 API.
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible_collections.community.general.plugins.inventory.icinga2 import InventoryModule
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def inventory():
|
||||
r = InventoryModule()
|
||||
r.inventory = InventoryData()
|
||||
return r
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.icinga2.yml') is False
|
||||
|
||||
|
||||
def check_api():
|
||||
return True
|
||||
|
||||
|
||||
# NOTE: when updating/adding replies to this function,
|
||||
# be sure to add only the _contents_ of the 'data' dict in the API reply
|
||||
def query_hosts(hosts=None, attrs=None, joins=None, host_filter=None):
|
||||
# _get_hosts - list of dicts
|
||||
json_host_data = [
|
||||
{
|
||||
'attrs': {
|
||||
'address': 'test-host1.home.local',
|
||||
'groups': ['home_servers', 'servers_dell'],
|
||||
'display_name': 'Test Host 1',
|
||||
'state': 0.0,
|
||||
'state_type': 1.0
|
||||
},
|
||||
'joins': {},
|
||||
'meta': {},
|
||||
'name': 'test-host1',
|
||||
'type': 'Host'
|
||||
},
|
||||
{
|
||||
'attrs': {
|
||||
'address': 'test-host2.home.local',
|
||||
'display_name': 'Test Host 2',
|
||||
'groups': ['home_servers', 'servers_hp'],
|
||||
'state': 1.0,
|
||||
'state_type': 1.0
|
||||
},
|
||||
'joins': {},
|
||||
'meta': {},
|
||||
'name': 'test-host2',
|
||||
'type': 'Host'
|
||||
},
|
||||
{
|
||||
'attrs': {
|
||||
'address': '',
|
||||
'display_name': 'Test Host 3',
|
||||
'groups': ['not_home_servers', 'servers_hp'],
|
||||
'state': 1.0,
|
||||
'state_type': 1.0
|
||||
},
|
||||
'joins': {},
|
||||
'meta': {},
|
||||
'name': 'test-host3.example.com',
|
||||
'type': 'Host'
|
||||
}
|
||||
]
|
||||
return json_host_data
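# Illustrative note (assumption): per the NOTE above, a raw API reply wraps these entries in an
# outer envelope (e.g. under a 'data' key); query_hosts() returns only the unwrapped list so the
# plugin's _query_hosts call can be mocked with this helper directly.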
|
||||
|
||||
|
||||
def get_option(option):
|
||||
if option == 'groups':
|
||||
return {}
|
||||
elif option == 'keyed_groups':
|
||||
return []
|
||||
elif option == 'compose':
|
||||
return {}
|
||||
elif option == 'strict':
|
||||
return False
|
||||
else:
|
||||
return None
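# Illustrative sketch: wiring this stub into the plugin under test looks like
#   inventory.get_option = mocker.MagicMock(side_effect=get_option)
# so inventory.get_option('strict') returns False and unknown options fall back to None,
# which is how test_populate() below bypasses the real configuration loading.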
|
||||
|
||||
|
||||
def test_populate(inventory, mocker):
|
||||
# module settings
|
||||
inventory.icinga2_user = 'ansible'
|
||||
inventory.icinga2_password = 'password'
|
||||
inventory.icinga2_url = 'https://localhost:5665' + '/v1'
|
||||
inventory.inventory_attr = "address"
|
||||
|
||||
# bypass authentication and API fetch calls
|
||||
inventory._check_api = mocker.MagicMock(side_effect=check_api)
|
||||
inventory._query_hosts = mocker.MagicMock(side_effect=query_hosts)
|
||||
inventory.get_option = mocker.MagicMock(side_effect=get_option)
|
||||
inventory._populate()
|
||||
|
||||
# get different hosts
|
||||
host1_info = inventory.inventory.get_host('test-host1.home.local')
|
||||
print(host1_info)
|
||||
host2_info = inventory.inventory.get_host('test-host2.home.local')
|
||||
print(host2_info)
|
||||
host3_info = inventory.inventory.get_host('test-host3.example.com')
|
||||
assert inventory.inventory.get_host('test-host3.example.com') is not None
|
||||
print(host3_info)
|
||||
|
||||
# check if host in the home_servers group
|
||||
assert 'home_servers' in inventory.inventory.groups
|
||||
group1_data = inventory.inventory.groups['home_servers']
|
||||
group1_test_data = [host1_info, host2_info]
|
||||
print(group1_data.hosts)
|
||||
print(group1_test_data)
|
||||
assert group1_data.hosts == group1_test_data
|
||||
# Test servers_hp group
|
||||
group2_data = inventory.inventory.groups['servers_hp']
|
||||
group2_test_data = [host2_info, host3_info]
|
||||
print(group2_data.hosts)
|
||||
print(group2_test_data)
|
||||
assert group2_data.hosts == group2_test_data
|
||||
|
||||
# check if host state rules apply properly
|
||||
assert host1_info.get_vars()['state'] == 'on'
|
||||
assert host1_info.get_vars()['display_name'] == "Test Host 1"
|
||||
assert host2_info.get_vars()['state'] == 'off'
|
||||
assert host3_info.get_vars().get('ansible_host') is None
|
||||
|
||||
# Confirm attribute options switcher
|
||||
inventory.inventory_attr = "name"
|
||||
inventory._populate()
|
||||
assert inventory.inventory.get_host('test-host3.example.com') is not None
|
||||
host2_info = inventory.inventory.get_host('test-host2')
|
||||
assert host2_info is not None
|
||||
assert host2_info.get_vars().get('ansible_host') == 'test-host2.home.local'
|
||||
|
||||
# Confirm attribute options switcher
|
||||
inventory.inventory_attr = "display_name"
|
||||
inventory._populate()
|
||||
assert inventory.inventory.get_host('Test Host 3') is not None
|
||||
host2_info = inventory.inventory.get_host('Test Host 2')
|
||||
assert host2_info is not None
|
||||
assert host2_info.get_vars().get('ansible_host') == 'test-host2.home.local'
|
||||
@@ -0,0 +1,47 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2018 Luke Murphy <lukewm@riseup.net>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
|
||||
linode_apiv4 = pytest.importorskip('linode_api4')
|
||||
mandatory_py_version = pytest.mark.skipif(
|
||||
sys.version_info < (2, 7),
|
||||
reason='The linode_api4 dependency requires python2.7 or higher'
|
||||
)
|
||||
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible.template import Templar
|
||||
from ansible_collections.community.general.plugins.inventory.linode import InventoryModule
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def inventory():
|
||||
plugin = InventoryModule()
|
||||
plugin.templar = Templar(loader=DataLoader())
|
||||
return plugin
|
||||
|
||||
|
||||
def test_missing_access_token_lookup(inventory):
|
||||
loader = DataLoader()
|
||||
inventory._options = {'access_token': None}
|
||||
with pytest.raises(AnsibleError) as error_message:
|
||||
inventory._build_client(loader)
|
||||
assert 'Could not retrieve Linode access token' in error_message
|
||||
|
||||
|
||||
def test_verify_file(tmp_path, inventory):
|
||||
file = tmp_path / "foobar.linode.yml"
|
||||
file.touch()
|
||||
assert inventory.verify_file(str(file)) is True
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.linode.yml') is False
|
||||
@@ -0,0 +1,108 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2021, Frank Dornheim <dornheim@posteo.de>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
import os
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible_collections.community.general.plugins.inventory.lxd import InventoryModule
|
||||
|
||||
|
||||
HOST_COMPARATIVE_DATA = {
|
||||
'ansible_connection': 'ssh', 'ansible_host': '10.98.143.199', 'ansible_lxd_os': 'ubuntu', 'ansible_lxd_release': 'focal',
|
||||
'ansible_lxd_profile': ['default'], 'ansible_lxd_state': 'running', 'ansible_lxd_location': 'Berlin',
|
||||
'ansible_lxd_vlan_ids': {'my-macvlan': 666}, 'inventory_hostname': 'vlantest', 'inventory_hostname_short': 'vlantest'}
|
||||
GROUP_COMPARATIVE_DATA = {
|
||||
'all': [], 'ungrouped': [], 'testpattern': ['vlantest'], 'vlan666': ['vlantest'], 'locationBerlin': ['vlantest'],
|
||||
'osUbuntu': ['vlantest'], 'releaseFocal': ['vlantest'], 'releaseBionic': [], 'profileDefault': ['vlantest'],
|
||||
'profileX11': [], 'netRangeIPv4': ['vlantest'], 'netRangeIPv6': ['vlantest']}
|
||||
GROUP_Config = {
|
||||
'testpattern': {'type': 'pattern', 'attribute': 'test'},
|
||||
'vlan666': {'type': 'vlanid', 'attribute': 666},
|
||||
'locationBerlin': {'type': 'location', 'attribute': 'Berlin'},
|
||||
'osUbuntu': {'type': 'os', 'attribute': 'ubuntu'},
|
||||
'releaseFocal': {'type': 'release', 'attribute': 'focal'},
|
||||
'releaseBionic': {'type': 'release', 'attribute': 'bionic'},
|
||||
'profileDefault': {'type': 'profile', 'attribute': 'default'},
|
||||
'profileX11': {'type': 'profile', 'attribute': 'x11'},
|
||||
'netRangeIPv4': {'type': 'network_range', 'attribute': '10.98.143.0/24'},
|
||||
'netRangeIPv6': {'type': 'network_range', 'attribute': 'fd42:bd00:7b11:2167:216:3eff::/96'}}
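# Illustrative note: each GROUP_Config entry maps a group name to a matching rule
# ({'type': ..., 'attribute': ...}) that the lxd plugin evaluates against the fixture data,
# and GROUP_COMPARATIVE_DATA above lists which of those groups are expected to contain 'vlantest'.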
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def inventory():
|
||||
inv = InventoryModule()
|
||||
inv.inventory = InventoryData()
|
||||
|
||||
# Test Values
|
||||
inv.data = inv.load_json_data('tests/unit/plugins/inventory/fixtures/lxd_inventory.atd') # Load Test Data
|
||||
inv.groupby = GROUP_Config
|
||||
inv.prefered_instance_network_interface = 'eth'
|
||||
inv.prefered_instance_network_family = 'inet'
|
||||
inv.filter = 'running'
|
||||
inv.dump_data = False
|
||||
inv.type_filter = 'both'
|
||||
|
||||
return inv
|
||||
|
||||
|
||||
def test_verify_file(tmp_path, inventory):
|
||||
file = tmp_path / "foobar.lxd.yml"
|
||||
file.touch()
|
||||
assert inventory.verify_file(str(file)) is True
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.lxd.yml') is False
|
||||
|
||||
|
||||
def test_build_inventory_hosts(inventory):
|
||||
"""Load example data and start the inventoryto test the host generation.
|
||||
|
||||
After the inventory plugin has run with the test data, the resulting host variables are checked."""
|
||||
inventory._populate()
|
||||
generated_data = inventory.inventory.get_host('vlantest').get_vars()
|
||||
|
||||
eq = True
|
||||
for key, value in HOST_COMPARATIVE_DATA.items():
|
||||
if generated_data[key] != value:
|
||||
eq = False
|
||||
assert eq
|
||||
|
||||
|
||||
def test_build_inventory_groups(inventory):
|
||||
"""Load example data and start the inventory to test the group generation.
|
||||
|
||||
After the inventory plugin has run with the test data, the resulting groups are checked."""
|
||||
inventory._populate()
|
||||
generated_data = inventory.inventory.get_groups_dict()
|
||||
|
||||
eq = True
|
||||
for key, value in GROUP_COMPARATIVE_DATA.items():
|
||||
if generated_data[key] != value:
|
||||
eq = False
|
||||
assert eq
|
||||
|
||||
|
||||
def test_build_inventory_groups_with_no_groupselection(inventory):
|
||||
"""Load example data and start the inventory to test the group generation with groupby is none.
|
||||
|
||||
After the inventory plugin has run with the test data, the resulting groups are checked."""
|
||||
inventory.groupby = None
|
||||
inventory._populate()
|
||||
generated_data = inventory.inventory.get_groups_dict()
|
||||
group_comparative_data = {'all': [], 'ungrouped': []}
|
||||
|
||||
eq = True
|
||||
print("data: {0}".format(generated_data))
|
||||
for key, value in group_comparative_data.items():
|
||||
if generated_data[key] != value:
|
||||
eq = False
|
||||
assert eq
|
||||
@@ -0,0 +1,342 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2020, FELDSAM s.r.o. - FeldHost™ <support@feldhost.cz>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# The API responses used in these tests were recorded from OpenNebula version 5.10.
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from collections import OrderedDict
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible.template import Templar
|
||||
from ansible_collections.community.general.plugins.inventory.opennebula import InventoryModule
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import create_autospec
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def inventory():
|
||||
r = InventoryModule()
|
||||
r.inventory = InventoryData()
|
||||
return r
|
||||
|
||||
|
||||
def test_verify_file(tmp_path, inventory):
|
||||
file = tmp_path / "foobar.opennebula.yml"
|
||||
file.touch()
|
||||
assert inventory.verify_file(str(file)) is True
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.opennebula.yml') is False
|
||||
|
||||
|
||||
def get_vm_pool_json():
|
||||
with open('tests/unit/plugins/inventory/fixtures/opennebula_inventory.json', 'r') as json_file:
|
||||
jsondata = json.load(json_file)
|
||||
|
||||
data = type('pyone.bindings.VM_POOLSub', (object,), {'VM': []})()
|
||||
|
||||
for fake_server in jsondata:
|
||||
data.VM.append(type('pyone.bindings.VMType90Sub', (object,), fake_server)())
|
||||
|
||||
return data
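# Illustrative note: type('pyone.bindings.VM_POOLSub', (object,), {...})() builds throwaway
# objects whose attributes mimic the pyone binding objects returned by the real OpenNebula API,
# so the inventory code can keep using attribute access (data.VM, vm.NAME, ...) unchanged.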
|
||||
|
||||
|
||||
def get_vm_pool():
|
||||
data = type('pyone.bindings.VM_POOLSub', (object,), {'VM': []})()
|
||||
|
||||
vm = type('pyone.bindings.VMType90Sub', (object,), {
|
||||
'DEPLOY_ID': 'one-7157',
|
||||
'ETIME': 0,
|
||||
'GID': 132,
|
||||
'GNAME': 'CSApparelVDC',
|
||||
'HISTORY_RECORDS': {},
|
||||
'ID': 7157,
|
||||
'LAST_POLL': 1632762935,
|
||||
'LCM_STATE': 3,
|
||||
'MONITORING': {},
|
||||
'NAME': 'sam-691-sam',
|
||||
'RESCHED': 0,
|
||||
'SNAPSHOTS': [],
|
||||
'STATE': 3,
|
||||
'STIME': 1632755245,
|
||||
'TEMPLATE': OrderedDict({
|
||||
'NIC': OrderedDict({
|
||||
'AR_ID': '0',
|
||||
'BRIDGE': 'onebr80',
|
||||
'BRIDGE_TYPE': 'linux',
|
||||
'CLUSTER_ID': '0',
|
||||
'IP': '172.22.4.187',
|
||||
'MAC': '02:00:ac:16:04:bb',
|
||||
'MTU': '8192',
|
||||
'NAME': 'NIC0',
|
||||
'NETWORK': 'Private Net CSApparel',
|
||||
'NETWORK_ID': '80',
|
||||
'NETWORK_UNAME': 'CSApparelVDC-admin',
|
||||
'NIC_ID': '0',
|
||||
'PHYDEV': 'team0',
|
||||
'SECURITY_GROUPS': '0',
|
||||
'TARGET': 'one-7157-0',
|
||||
'VLAN_ID': '480',
|
||||
'VN_MAD': '802.1Q'
|
||||
})
|
||||
}),
|
||||
'USER_TEMPLATE': OrderedDict({
|
||||
'HYPERVISOR': 'kvm',
|
||||
'INPUTS_ORDER': '',
|
||||
'LOGO': 'images/logos/centos.png',
|
||||
'MEMORY_UNIT_COST': 'MB',
|
||||
'SCHED_REQUIREMENTS': 'CLUSTER_ID="0"'
|
||||
})
|
||||
})()
|
||||
data.VM.append(vm)
|
||||
|
||||
vm = type('pyone.bindings.VMType90Sub', (object,), {
|
||||
'DEPLOY_ID': 'one-327',
|
||||
'ETIME': 0,
|
||||
'GID': 0,
|
||||
'GNAME': 'oneadmin',
|
||||
'HISTORY_RECORDS': {},
|
||||
'ID': 327,
|
||||
'LAST_POLL': 1632763543,
|
||||
'LCM_STATE': 3,
|
||||
'MONITORING': {},
|
||||
'NAME': 'zabbix-327',
|
||||
'RESCHED': 0,
|
||||
'SNAPSHOTS': [],
|
||||
'STATE': 3,
|
||||
'STIME': 1575410106,
|
||||
'TEMPLATE': OrderedDict({
|
||||
'NIC': [
|
||||
OrderedDict({
|
||||
'AR_ID': '0',
|
||||
'BRIDGE': 'onerb.103',
|
||||
'BRIDGE_TYPE': 'linux',
|
||||
'IP': '185.165.1.1',
|
||||
'IP6_GLOBAL': '2000:a001::b9ff:feae:aa0d',
|
||||
'IP6_LINK': 'fe80::b9ff:feae:aa0d',
|
||||
'MAC': '02:00:b9:ae:aa:0d',
|
||||
'NAME': 'NIC0',
|
||||
'NETWORK': 'Public',
|
||||
'NETWORK_ID': '7',
|
||||
'NIC_ID': '0',
|
||||
'PHYDEV': 'team0',
|
||||
'SECURITY_GROUPS': '0',
|
||||
'TARGET': 'one-327-0',
|
||||
'VLAN_ID': '100',
|
||||
'VN_MAD': '802.1Q'
|
||||
}),
|
||||
OrderedDict({
|
||||
'AR_ID': '0',
|
||||
'BRIDGE': 'br0',
|
||||
'BRIDGE_TYPE': 'linux',
|
||||
'CLUSTER_ID': '0',
|
||||
'IP': '192.168.1.1',
|
||||
'MAC': '02:00:c0:a8:3b:01',
|
||||
'NAME': 'NIC1',
|
||||
'NETWORK': 'Management',
|
||||
'NETWORK_ID': '11',
|
||||
'NIC_ID': '1',
|
||||
'SECURITY_GROUPS': '0',
|
||||
'TARGET': 'one-327-1',
|
||||
'VN_MAD': 'bridge'
|
||||
})
|
||||
]
|
||||
}),
|
||||
'USER_TEMPLATE': OrderedDict({
|
||||
'HYPERVISOR': 'kvm',
|
||||
'INPUTS_ORDER': '',
|
||||
'LABELS': 'Oracle Linux',
|
||||
'LOGO': 'images/logos/centos.png',
|
||||
'MEMORY_UNIT_COST': 'MB',
|
||||
'SAVED_TEMPLATE_ID': '29'
|
||||
})
|
||||
})()
|
||||
data.VM.append(vm)
|
||||
|
||||
vm = type('pyone.bindings.VMType90Sub', (object,), {
|
||||
'DEPLOY_ID': 'one-107',
|
||||
'ETIME': 0,
|
||||
'GID': 0,
|
||||
'GNAME': 'oneadmin',
|
||||
'HISTORY_RECORDS': {},
|
||||
'ID': 107,
|
||||
'LAST_POLL': 1632764186,
|
||||
'LCM_STATE': 3,
|
||||
'MONITORING': {},
|
||||
'NAME': 'gitlab-107',
|
||||
'RESCHED': 0,
|
||||
'SNAPSHOTS': [],
|
||||
'STATE': 3,
|
||||
'STIME': 1572485522,
|
||||
'TEMPLATE': OrderedDict({
|
||||
'NIC': OrderedDict({
|
||||
'AR_ID': '0',
|
||||
'BRIDGE': 'onerb.103',
|
||||
'BRIDGE_TYPE': 'linux',
|
||||
'IP': '185.165.1.3',
|
||||
'IP6_GLOBAL': '2000:a001::b9ff:feae:aa03',
|
||||
'IP6_LINK': 'fe80::b9ff:feae:aa03',
|
||||
'MAC': '02:00:b9:ae:aa:03',
|
||||
'NAME': 'NIC0',
|
||||
'NETWORK': 'Public',
|
||||
'NETWORK_ID': '7',
|
||||
'NIC_ID': '0',
|
||||
'PHYDEV': 'team0',
|
||||
'SECURITY_GROUPS': '0',
|
||||
'TARGET': 'one-107-0',
|
||||
'VLAN_ID': '100',
|
||||
'VN_MAD': '802.1Q'
|
||||
})
|
||||
}),
|
||||
'USER_TEMPLATE': OrderedDict({
|
||||
'HYPERVISOR': 'kvm',
|
||||
'INPUTS_ORDER': '',
|
||||
'LABELS': 'Gitlab,Centos',
|
||||
'LOGO': 'images/logos/centos.png',
|
||||
'MEMORY_UNIT_COST': 'MB',
|
||||
'SCHED_REQUIREMENTS': 'ID="0" | ID="1" | ID="2"',
|
||||
'SSH_PORT': '8822'
|
||||
})
|
||||
})()
|
||||
data.VM.append(vm)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
options_base_test = {
|
||||
'api_url': 'https://opennebula:2633/RPC2',
|
||||
'api_username': 'username',
|
||||
'api_password': 'password',
|
||||
'api_authfile': '~/.one/one_auth',
|
||||
'hostname': 'v4_first_ip',
|
||||
'group_by_labels': True,
|
||||
'filter_by_label': None,
|
||||
}
|
||||
|
||||
options_constructable_test = options_base_test.copy()
|
||||
options_constructable_test.update({
|
||||
'compose': {'is_linux': "GUEST_OS == 'linux'"},
|
||||
'filter_by_label': 'bench',
|
||||
'groups': {
|
||||
'benchmark_clients': "TGROUP.endswith('clients')",
|
||||
'lin': 'is_linux == True'
|
||||
},
|
||||
'keyed_groups': [{'key': 'TGROUP', 'prefix': 'tgroup'}],
|
||||
|
||||
})
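# Illustrative summary of the constructable options above: 'compose' adds an is_linux variable per
# host, 'filter_by_label' drops hosts without the 'bench' label, 'groups' defines the
# benchmark_clients/lin groups, and 'keyed_groups' yields tgroup_* groups, which is exactly what
# test_populate_constructable_templating() asserts further down.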
|
||||
|
||||
|
||||
# given a dictionary `opts_dict`, return a function that behaves like ansible's inventory get_options
|
||||
def mk_get_options(opts_dict):
|
||||
def inner(opt):
|
||||
return opts_dict.get(opt, False)
|
||||
return inner
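# Illustrative sketch: the tests below plug this in via
#   inventory.get_option = mocker.MagicMock(side_effect=mk_get_options(options_base_test))
# so inventory.get_option('hostname') returns 'v4_first_ip' while unset options fall back to False.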
|
||||
|
||||
|
||||
def test_get_connection_info(inventory, mocker):
|
||||
inventory.get_option = mocker.MagicMock(side_effect=mk_get_options(options_base_test))
|
||||
|
||||
auth = inventory._get_connection_info()
|
||||
assert (auth.username and auth.password)
|
||||
|
||||
|
||||
def test_populate_constructable_templating(inventory, mocker):
|
||||
# bypass API fetch call
|
||||
inventory._get_vm_pool = mocker.MagicMock(side_effect=get_vm_pool_json)
|
||||
inventory.get_option = mocker.MagicMock(side_effect=mk_get_options(options_constructable_test))
|
||||
|
||||
# the templating engine is needed for the constructable groups/vars
|
||||
# so give that some fake data and instantiate it.
|
||||
fake_config_filepath = '/fake/opennebula.yml'
|
||||
fake_cache = {fake_config_filepath: options_constructable_test.copy()}
|
||||
fake_cache[fake_config_filepath]['plugin'] = 'community.general.opennebula'
|
||||
dataloader = create_autospec(DataLoader, instance=True)
|
||||
dataloader._FILE_CACHE = fake_cache
|
||||
inventory.templar = Templar(loader=dataloader)
|
||||
|
||||
inventory._populate()
|
||||
|
||||
# note the vm_pool (and json data file) has four hosts,
|
||||
# but options_constructable_test asks ansible to filter one of them out ('bs-windows' lacks the 'bench' label)
|
||||
assert len(get_vm_pool_json().VM) == 4
|
||||
assert set([vm.NAME for vm in get_vm_pool_json().VM]) == set([
|
||||
'terraform_demo_00',
|
||||
'terraform_demo_01',
|
||||
'terraform_demo_srv_00',
|
||||
'bs-windows',
|
||||
])
|
||||
assert set(inventory.inventory.hosts) == set(['terraform_demo_00', 'terraform_demo_01', 'terraform_demo_srv_00'])
|
||||
|
||||
host_demo00 = inventory.inventory.get_host('terraform_demo_00')
|
||||
host_demo01 = inventory.inventory.get_host('terraform_demo_01')
|
||||
host_demosrv = inventory.inventory.get_host('terraform_demo_srv_00')
|
||||
|
||||
assert 'benchmark_clients' in inventory.inventory.groups
|
||||
assert 'lin' in inventory.inventory.groups
|
||||
assert inventory.inventory.groups['benchmark_clients'].hosts == [host_demo00, host_demo01]
|
||||
assert inventory.inventory.groups['lin'].hosts == [host_demo00, host_demo01, host_demosrv]
|
||||
|
||||
# test group by label:
|
||||
assert 'bench' in inventory.inventory.groups
|
||||
assert 'foo' in inventory.inventory.groups
|
||||
assert inventory.inventory.groups['bench'].hosts == [host_demo00, host_demo01, host_demosrv]
|
||||
assert inventory.inventory.groups['serv'].hosts == [host_demosrv]
|
||||
assert inventory.inventory.groups['foo'].hosts == [host_demo00, host_demo01]
|
||||
|
||||
# test `compose` transforms GUEST_OS == 'linux' into is_linux == True
|
||||
assert host_demo00.get_vars()['GUEST_OS'] == 'linux'
|
||||
assert host_demo00.get_vars()['is_linux'] is True
|
||||
|
||||
# test `keyed_groups`
|
||||
assert inventory.inventory.groups['tgroup_bench_clients'].hosts == [host_demo00, host_demo01]
|
||||
assert inventory.inventory.groups['tgroup_bench_server'].hosts == [host_demosrv]
|
||||
|
||||
|
||||
def test_populate(inventory, mocker):
|
||||
# bypass API fetch call
|
||||
inventory._get_vm_pool = mocker.MagicMock(side_effect=get_vm_pool)
|
||||
inventory.get_option = mocker.MagicMock(side_effect=mk_get_options(options_base_test))
|
||||
inventory._populate()
|
||||
|
||||
# get different hosts
|
||||
host_sam = inventory.inventory.get_host('sam-691-sam')
|
||||
host_zabbix = inventory.inventory.get_host('zabbix-327')
|
||||
host_gitlab = inventory.inventory.get_host('gitlab-107')
|
||||
|
||||
# test if groups exists
|
||||
assert 'Gitlab' in inventory.inventory.groups
|
||||
assert 'Centos' in inventory.inventory.groups
|
||||
assert 'Oracle_Linux' in inventory.inventory.groups
|
||||
|
||||
# check if host_zabbix is in Oracle_Linux group
|
||||
group_oracle_linux = inventory.inventory.groups['Oracle_Linux']
|
||||
assert group_oracle_linux.hosts == [host_zabbix]
|
||||
|
||||
# check if host_gitlab is in Gitlab and Centos group
|
||||
group_gitlab = inventory.inventory.groups['Gitlab']
|
||||
group_centos = inventory.inventory.groups['Centos']
|
||||
assert group_gitlab.hosts == [host_gitlab]
|
||||
assert group_centos.hosts == [host_gitlab]
|
||||
|
||||
# check IPv4 address
|
||||
assert '172.22.4.187' == host_sam.get_vars()['v4_first_ip']
|
||||
|
||||
# check IPv6 address
|
||||
assert '2000:a001::b9ff:feae:aa0d' == host_zabbix.get_vars()['v6_first_ip']
|
||||
|
||||
# check ansible_hosts
|
||||
assert '172.22.4.187' == host_sam.get_vars()['ansible_host']
|
||||
assert '185.165.1.1' == host_zabbix.get_vars()['ansible_host']
|
||||
assert '185.165.1.3' == host_gitlab.get_vars()['ansible_host']
|
||||
|
||||
# check for custom ssh port
|
||||
assert '8822' == host_gitlab.get_vars()['ansible_port']
|
||||
@@ -0,0 +1,745 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2020, Jeffrey van Pelt <jeff@vanpelt.one>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# The API responses used in these tests were recorded from PVE version 6.2.
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible_collections.community.general.plugins.inventory.proxmox import InventoryModule
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def inventory():
|
||||
r = InventoryModule()
|
||||
r.inventory = InventoryData()
|
||||
return r
|
||||
|
||||
|
||||
def test_verify_file(tmp_path, inventory):
|
||||
file = tmp_path / "foobar.proxmox.yml"
|
||||
file.touch()
|
||||
assert inventory.verify_file(str(file)) is True
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.proxmox.yml') is False
|
||||
|
||||
|
||||
def get_auth():
|
||||
return True
|
||||
|
||||
|
||||
# NOTE: when updating/adding replies to this function,
|
||||
# be sure to add only the _contents_ of the 'data' dict in the API reply
|
||||
def get_json(url):
|
||||
if url == "https://localhost:8006/api2/json/nodes":
|
||||
# _get_nodes
|
||||
return [{"type": "node",
|
||||
"cpu": 0.01,
|
||||
"maxdisk": 500,
|
||||
"mem": 500,
|
||||
"node": "testnode",
|
||||
"id": "node/testnode",
|
||||
"maxcpu": 1,
|
||||
"status": "online",
|
||||
"ssl_fingerprint": "xx",
|
||||
"disk": 1000,
|
||||
"maxmem": 1000,
|
||||
"uptime": 10000,
|
||||
"level": ""},
|
||||
{"type": "node",
|
||||
"node": "testnode2",
|
||||
"id": "node/testnode2",
|
||||
"status": "offline",
|
||||
"ssl_fingerprint": "yy"}]
|
||||
elif url == "https://localhost:8006/api2/json/pools":
|
||||
# _get_pools
|
||||
return [{"poolid": "test"}]
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/lxc":
|
||||
# _get_lxc_per_node
|
||||
return [{"cpus": 1,
|
||||
"name": "test-lxc",
|
||||
"cpu": 0.01,
|
||||
"diskwrite": 0,
|
||||
"lock": "",
|
||||
"maxmem": 1000,
|
||||
"template": "",
|
||||
"diskread": 0,
|
||||
"mem": 1000,
|
||||
"swap": 0,
|
||||
"type": "lxc",
|
||||
"maxswap": 0,
|
||||
"maxdisk": "1000",
|
||||
"netout": 1000,
|
||||
"pid": "1000",
|
||||
"netin": 1000,
|
||||
"status": "running",
|
||||
"vmid": "100",
|
||||
"disk": "1000",
|
||||
"uptime": 1000}]
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu":
|
||||
# _get_qemu_per_node
|
||||
return [{"name": "test-qemu",
|
||||
"cpus": 1,
|
||||
"mem": 1000,
|
||||
"template": "",
|
||||
"diskread": 0,
|
||||
"cpu": 0.01,
|
||||
"maxmem": 1000,
|
||||
"diskwrite": 0,
|
||||
"netout": 1000,
|
||||
"pid": "1001",
|
||||
"netin": 1000,
|
||||
"maxdisk": 1000,
|
||||
"vmid": "101",
|
||||
"uptime": 1000,
|
||||
"disk": 0,
|
||||
"status": "running"},
|
||||
{"name": "test-qemu-windows",
|
||||
"cpus": 1,
|
||||
"mem": 1000,
|
||||
"template": "",
|
||||
"diskread": 0,
|
||||
"cpu": 0.01,
|
||||
"maxmem": 1000,
|
||||
"diskwrite": 0,
|
||||
"netout": 1000,
|
||||
"pid": "1001",
|
||||
"netin": 1000,
|
||||
"maxdisk": 1000,
|
||||
"vmid": "102",
|
||||
"uptime": 1000,
|
||||
"disk": 0,
|
||||
"status": "running"},
|
||||
{"name": "test-qemu-multi-nic",
|
||||
"cpus": 1,
|
||||
"mem": 1000,
|
||||
"template": "",
|
||||
"diskread": 0,
|
||||
"cpu": 0.01,
|
||||
"maxmem": 1000,
|
||||
"diskwrite": 0,
|
||||
"netout": 1000,
|
||||
"pid": "1001",
|
||||
"netin": 1000,
|
||||
"maxdisk": 1000,
|
||||
"vmid": "103",
|
||||
"uptime": 1000,
|
||||
"disk": 0,
|
||||
"status": "running"},
|
||||
{"name": "test-qemu-template",
|
||||
"cpus": 1,
|
||||
"mem": 0,
|
||||
"template": 1,
|
||||
"diskread": 0,
|
||||
"cpu": 0,
|
||||
"maxmem": 1000,
|
||||
"diskwrite": 0,
|
||||
"netout": 0,
|
||||
"pid": "1001",
|
||||
"netin": 0,
|
||||
"maxdisk": 1000,
|
||||
"vmid": "9001",
|
||||
"uptime": 0,
|
||||
"disk": 0,
|
||||
"status": "stopped"}]
|
||||
elif url == "https://localhost:8006/api2/json/pools/test":
|
||||
# _get_members_per_pool
|
||||
return {"members": [{"uptime": 1000,
|
||||
"template": 0,
|
||||
"id": "qemu/101",
|
||||
"mem": 1000,
|
||||
"status": "running",
|
||||
"cpu": 0.01,
|
||||
"maxmem": 1000,
|
||||
"diskwrite": 1000,
|
||||
"name": "test-qemu",
|
||||
"netout": 1000,
|
||||
"netin": 1000,
|
||||
"vmid": 101,
|
||||
"node": "testnode",
|
||||
"maxcpu": 1,
|
||||
"type": "qemu",
|
||||
"maxdisk": 1000,
|
||||
"disk": 0,
|
||||
"diskread": 1000}]}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/network":
|
||||
# _get_node_ip
|
||||
return [{"families": ["inet"],
|
||||
"priority": 3,
|
||||
"active": 1,
|
||||
"cidr": "10.1.1.2/24",
|
||||
"iface": "eth0",
|
||||
"method": "static",
|
||||
"exists": 1,
|
||||
"type": "eth",
|
||||
"netmask": "24",
|
||||
"gateway": "10.1.1.1",
|
||||
"address": "10.1.1.2",
|
||||
"method6": "manual",
|
||||
"autostart": 1},
|
||||
{"method6": "manual",
|
||||
"autostart": 1,
|
||||
"type": "OVSPort",
|
||||
"exists": 1,
|
||||
"method": "manual",
|
||||
"iface": "eth1",
|
||||
"ovs_bridge": "vmbr0",
|
||||
"active": 1,
|
||||
"families": ["inet"],
|
||||
"priority": 5,
|
||||
"ovs_type": "OVSPort"},
|
||||
{"type": "OVSBridge",
|
||||
"method": "manual",
|
||||
"iface": "vmbr0",
|
||||
"families": ["inet"],
|
||||
"priority": 4,
|
||||
"ovs_ports": "eth1",
|
||||
"ovs_type": "OVSBridge",
|
||||
"method6": "manual",
|
||||
"autostart": 1,
|
||||
"active": 1}]
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/lxc/100/config":
|
||||
# _get_vm_config (lxc)
|
||||
return {
|
||||
"console": 1,
|
||||
"rootfs": "local-lvm:vm-100-disk-0,size=4G",
|
||||
"cmode": "tty",
|
||||
"description": "A testnode",
|
||||
"cores": 1,
|
||||
"hostname": "test-lxc",
|
||||
"arch": "amd64",
|
||||
"tty": 2,
|
||||
"swap": 0,
|
||||
"cpulimit": "0",
|
||||
"net0": "name=eth0,bridge=vmbr0,gw=10.1.1.1,hwaddr=FF:FF:FF:FF:FF:FF,ip=10.1.1.3/24,type=veth",
|
||||
"ostype": "ubuntu",
|
||||
"digest": "123456789abcdef0123456789abcdef01234567890",
|
||||
"protection": 0,
|
||||
"memory": 1000,
|
||||
"onboot": 0,
|
||||
"cpuunits": 1024,
|
||||
"tags": "one, two, three",
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/101/config":
|
||||
# _get_vm_config (qemu)
|
||||
return {
|
||||
"tags": "one, two, three",
|
||||
"cores": 1,
|
||||
"ide2": "none,media=cdrom",
|
||||
"memory": 1000,
|
||||
"kvm": 1,
|
||||
"digest": "0123456789abcdef0123456789abcdef0123456789",
|
||||
"description": "A test qemu",
|
||||
"sockets": 1,
|
||||
"onboot": 1,
|
||||
"vmgenid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
|
||||
"numa": 0,
|
||||
"bootdisk": "scsi0",
|
||||
"cpu": "host",
|
||||
"name": "test-qemu",
|
||||
"ostype": "l26",
|
||||
"hotplug": "network,disk,usb",
|
||||
"scsi0": "local-lvm:vm-101-disk-0,size=8G",
|
||||
"net0": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr0,firewall=1",
|
||||
"agent": "1,fstrim_cloned_disks=1",
|
||||
"bios": "seabios",
|
||||
"ide0": "local-lvm:vm-101-cloudinit,media=cdrom,size=4M",
|
||||
"boot": "cdn",
|
||||
"scsihw": "virtio-scsi-pci",
|
||||
"smbios1": "uuid=ffffffff-ffff-ffff-ffff-ffffffffffff"
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/102/config":
|
||||
# _get_vm_config (qemu)
|
||||
return {
|
||||
"numa": 0,
|
||||
"digest": "460add1531a7068d2ae62d54f67e8fb9493dece9",
|
||||
"ide2": "none,media=cdrom",
|
||||
"bootdisk": "sata0",
|
||||
"name": "test-qemu-windows",
|
||||
"balloon": 0,
|
||||
"cpulimit": "4",
|
||||
"agent": "1",
|
||||
"cores": 6,
|
||||
"sata0": "storage:vm-102-disk-0,size=100G",
|
||||
"memory": 10240,
|
||||
"smbios1": "uuid=127301fc-0122-48d5-8fc5-c04fa78d8146",
|
||||
"scsihw": "virtio-scsi-pci",
|
||||
"sockets": 1,
|
||||
"ostype": "win8",
|
||||
"net0": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr0",
|
||||
"onboot": 1
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/103/config":
|
||||
# _get_vm_config (qemu)
|
||||
return {
|
||||
'scsi1': 'storage:vm-103-disk-3,size=30G',
|
||||
'sockets': 1,
|
||||
'memory': 8192,
|
||||
'ostype': 'l26',
|
||||
'scsihw': 'virtio-scsi-pci',
|
||||
"net0": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr0",
|
||||
"net1": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr1",
|
||||
'bootdisk': 'scsi0',
|
||||
'scsi0': 'storage:vm-103-disk-0,size=10G',
|
||||
'name': 'test-qemu-multi-nic',
|
||||
'cores': 4,
|
||||
'digest': '51b7599f869b9a3f564804a0aed290f3de803292',
|
||||
'smbios1': 'uuid=863b31c3-42ca-4a92-aed7-4111f342f70a',
|
||||
'agent': '1,type=virtio',
|
||||
'ide2': 'none,media=cdrom',
|
||||
'balloon': 0,
|
||||
'numa': 0,
|
||||
'scsi2': 'storage:vm-103-disk-2,size=10G',
|
||||
'serial0': 'socket',
|
||||
'vmgenid': 'ddfb79b2-b484-4d66-88e7-6e76f2d1be77',
|
||||
'onboot': 1,
|
||||
'tablet': 0
|
||||
}
|
||||
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/101/agent/network-get-interfaces":
|
||||
# _get_agent_network_interfaces
|
||||
return {"result": [
|
||||
{
|
||||
"hardware-address": "00:00:00:00:00:00",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"prefix": 8,
|
||||
"ip-address-type": "ipv4",
|
||||
"ip-address": "127.0.0.1"
|
||||
},
|
||||
{
|
||||
"ip-address-type": "ipv6",
|
||||
"ip-address": "::1",
|
||||
"prefix": 128
|
||||
}],
|
||||
"statistics": {
|
||||
"rx-errs": 0,
|
||||
"rx-bytes": 163244,
|
||||
"rx-packets": 1623,
|
||||
"rx-dropped": 0,
|
||||
"tx-dropped": 0,
|
||||
"tx-packets": 1623,
|
||||
"tx-bytes": 163244,
|
||||
"tx-errs": 0},
|
||||
"name": "lo"},
|
||||
{
|
||||
"statistics": {
|
||||
"rx-packets": 4025,
|
||||
"rx-dropped": 12,
|
||||
"rx-bytes": 324105,
|
||||
"rx-errs": 0,
|
||||
"tx-errs": 0,
|
||||
"tx-bytes": 368860,
|
||||
"tx-packets": 3479,
|
||||
"tx-dropped": 0},
|
||||
"name": "eth0",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"prefix": 24,
|
||||
"ip-address-type": "ipv4",
|
||||
"ip-address": "10.1.2.3"
|
||||
},
|
||||
{
|
||||
"prefix": 64,
|
||||
"ip-address": "fd8c:4687:e88d:1be3:5b70:7b88:c79c:293",
|
||||
"ip-address-type": "ipv6"
|
||||
}],
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff"
|
||||
},
|
||||
{
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"prefix": 16,
|
||||
"ip-address": "10.10.2.3",
|
||||
"ip-address-type": "ipv4"
|
||||
}],
|
||||
"name": "docker0",
|
||||
"statistics": {
|
||||
"rx-bytes": 0,
|
||||
"rx-errs": 0,
|
||||
"rx-dropped": 0,
|
||||
"rx-packets": 0,
|
||||
"tx-packets": 0,
|
||||
"tx-dropped": 0,
|
||||
"tx-errs": 0,
|
||||
"tx-bytes": 0
|
||||
}}]}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/102/agent/network-get-interfaces":
|
||||
# _get_agent_network_interfaces
|
||||
return {"result": {'error': {'desc': 'this feature or command is not currently supported', 'class': 'Unsupported'}}}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/103/agent/network-get-interfaces":
|
||||
# _get_agent_network_interfaces
|
||||
return {
|
||||
"result": [
|
||||
{
|
||||
"statistics": {
|
||||
"tx-errs": 0,
|
||||
"rx-errs": 0,
|
||||
"rx-dropped": 0,
|
||||
"tx-bytes": 48132932372,
|
||||
"tx-dropped": 0,
|
||||
"rx-bytes": 48132932372,
|
||||
"tx-packets": 178578980,
|
||||
"rx-packets": 178578980
|
||||
},
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"ip-address-type": "ipv4",
|
||||
"prefix": 8,
|
||||
"ip-address": "127.0.0.1"
|
||||
}
|
||||
],
|
||||
"name": "lo"
|
||||
},
|
||||
{
|
||||
"name": "eth0",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"ip-address-type": "ipv4",
|
||||
"prefix": 24,
|
||||
"ip-address": "172.16.0.143"
|
||||
}
|
||||
],
|
||||
"statistics": {
|
||||
"rx-errs": 0,
|
||||
"tx-errs": 0,
|
||||
"rx-packets": 660028,
|
||||
"tx-packets": 304599,
|
||||
"tx-dropped": 0,
|
||||
"rx-bytes": 1846743499,
|
||||
"tx-bytes": 1287844926,
|
||||
"rx-dropped": 0
|
||||
},
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff"
|
||||
},
|
||||
{
|
||||
"name": "eth1",
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"statistics": {
|
||||
"rx-bytes": 235717091946,
|
||||
"tx-dropped": 0,
|
||||
"rx-dropped": 0,
|
||||
"tx-bytes": 123411636251,
|
||||
"rx-packets": 540431277,
|
||||
"tx-packets": 468411864,
|
||||
"rx-errs": 0,
|
||||
"tx-errs": 0
|
||||
},
|
||||
"ip-addresses": [
|
||||
{
|
||||
"ip-address": "10.0.0.133",
|
||||
"prefix": 24,
|
||||
"ip-address-type": "ipv4"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "docker0",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"ip-address": "172.17.0.1",
|
||||
"prefix": 16,
|
||||
"ip-address-type": "ipv4"
|
||||
}
|
||||
],
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"statistics": {
|
||||
"rx-errs": 0,
|
||||
"tx-errs": 0,
|
||||
"rx-packets": 0,
|
||||
"tx-packets": 0,
|
||||
"tx-dropped": 0,
|
||||
"rx-bytes": 0,
|
||||
"rx-dropped": 0,
|
||||
"tx-bytes": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"name": "datapath"
|
||||
},
|
||||
{
|
||||
"name": "weave",
|
||||
"ip-addresses": [
|
||||
{
|
||||
"ip-address": "10.42.0.1",
|
||||
"ip-address-type": "ipv4",
|
||||
"prefix": 16
|
||||
}
|
||||
],
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"statistics": {
|
||||
"rx-bytes": 127289123306,
|
||||
"tx-dropped": 0,
|
||||
"rx-dropped": 0,
|
||||
"tx-bytes": 43827573343,
|
||||
"rx-packets": 132750542,
|
||||
"tx-packets": 74218762,
|
||||
"rx-errs": 0,
|
||||
"tx-errs": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "vethwe-datapath",
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff"
|
||||
},
|
||||
{
|
||||
"name": "vethwe-bridge",
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff"
|
||||
},
|
||||
{
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"name": "vxlan-6784"
|
||||
},
|
||||
{
|
||||
"name": "vethwepl0dfe1fe",
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff"
|
||||
},
|
||||
{
|
||||
"name": "vethweplf1e7715",
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff"
|
||||
},
|
||||
{
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"name": "vethwepl9d244a1"
|
||||
},
|
||||
{
|
||||
"hardware-address": "ff:ff:ff:ff:ff:ff",
|
||||
"name": "vethwepl2ca477b"
|
||||
},
|
||||
{
|
||||
"name": "nomacorip",
|
||||
}
|
||||
]
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/lxc/100/status/current":
|
||||
# _get_vm_status (lxc)
|
||||
return {
|
||||
"swap": 0,
|
||||
"name": "test-lxc",
|
||||
"diskread": 0,
|
||||
"vmid": 100,
|
||||
"diskwrite": 0,
|
||||
"pid": 9000,
|
||||
"mem": 89980928,
|
||||
"netin": 1950776396424,
|
||||
"disk": 4998168576,
|
||||
"cpu": 0.00163430613110039,
|
||||
"type": "lxc",
|
||||
"uptime": 6793736,
|
||||
"maxmem": 1073741824,
|
||||
"status": "running",
|
||||
"cpus": "1",
|
||||
"ha": {
|
||||
"group": 'null',
|
||||
"state": "started",
|
||||
"managed": 1
|
||||
},
|
||||
"maxdisk": 3348329267200,
|
||||
"netout": 1947793356037,
|
||||
"maxswap": 1073741824
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/101/status/current":
|
||||
# _get_vm_status (qemu)
|
||||
return {
|
||||
"status": "stopped",
|
||||
"uptime": 0,
|
||||
"maxmem": 5364514816,
|
||||
"maxdisk": 34359738368,
|
||||
"netout": 0,
|
||||
"cpus": 2,
|
||||
"ha": {
|
||||
"managed": 0
|
||||
},
|
||||
"diskread": 0,
|
||||
"vmid": 101,
|
||||
"diskwrite": 0,
|
||||
"name": "test-qemu",
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"netin": 0,
|
||||
"mem": 0,
|
||||
"qmpstatus": "stopped"
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/102/status/current":
|
||||
# _get_vm_status (qemu)
|
||||
return {
|
||||
"status": "stopped",
|
||||
"uptime": 0,
|
||||
"maxmem": 5364514816,
|
||||
"maxdisk": 34359738368,
|
||||
"netout": 0,
|
||||
"cpus": 2,
|
||||
"ha": {
|
||||
"managed": 0
|
||||
},
|
||||
"diskread": 0,
|
||||
"vmid": 102,
|
||||
"diskwrite": 0,
|
||||
"name": "test-qemu-windows",
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"netin": 0,
|
||||
"mem": 0,
|
||||
"qmpstatus": "prelaunch"
|
||||
}
|
||||
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/103/status/current":
|
||||
# _get_vm_status (qemu)
|
||||
return {
|
||||
"status": "stopped",
|
||||
"uptime": 0,
|
||||
"maxmem": 5364514816,
|
||||
"maxdisk": 34359738368,
|
||||
"netout": 0,
|
||||
"cpus": 2,
|
||||
"ha": {
|
||||
"managed": 0
|
||||
},
|
||||
"diskread": 0,
|
||||
"vmid": 103,
|
||||
"diskwrite": 0,
|
||||
"name": "test-qemu-multi-nic",
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"netin": 0,
|
||||
"mem": 0,
|
||||
"qmpstatus": "paused"
|
||||
}
|
||||
|
||||
|
||||
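# Canned snapshot data returned in place of _get_vm_snapshots(); the "current"
# entry mirrors the pseudo-snapshot Proxmox reports for the live VM state.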
def get_vm_snapshots(node, properties, vmtype, vmid, name):
|
||||
return [
|
||||
{"description": "",
|
||||
"name": "clean",
|
||||
"snaptime": 1000,
|
||||
"vmstate": 0
|
||||
},
|
||||
{"name": "current",
|
||||
"digest": "1234689abcdf",
|
||||
"running": 0,
|
||||
"description": "You are here!",
|
||||
"parent": "clean"
|
||||
}]
|
||||
|
||||
|
||||
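# Stand-in for the plugin's get_option(): each option is looked up in the opts
# dict a test supplies, falling back to opts['default'] (False when absent),
# e.g. with the opts used below, fn('want_facts') returns True and an unknown
# option returns False.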
def get_option(opts):
|
||||
def fn(option):
|
||||
default = opts.get('default', False)
|
||||
return opts.get(option, default)
|
||||
return fn
|
||||
|
||||
|
||||
def test_populate(inventory, mocker):
|
||||
# module settings
|
||||
inventory.proxmox_user = 'root@pam'
|
||||
inventory.proxmox_password = 'password'
|
||||
inventory.proxmox_url = 'https://localhost:8006'
|
||||
inventory.group_prefix = 'proxmox_'
|
||||
inventory.facts_prefix = 'proxmox_'
|
||||
inventory.strict = False
|
||||
|
||||
opts = {
|
||||
'group_prefix': 'proxmox_',
|
||||
'facts_prefix': 'proxmox_',
|
||||
'want_facts': True,
|
||||
'want_proxmox_nodes_ansible_host': True,
|
||||
'qemu_extended_statuses': True
|
||||
}
|
||||
|
||||
# bypass authentication and API fetch calls
|
||||
inventory._get_auth = mocker.MagicMock(side_effect=get_auth)
|
||||
inventory._get_json = mocker.MagicMock(side_effect=get_json)
|
||||
inventory._get_vm_snapshots = mocker.MagicMock(side_effect=get_vm_snapshots)
|
||||
inventory.get_option = mocker.MagicMock(side_effect=get_option(opts))
|
||||
inventory._can_add_host = mocker.MagicMock(return_value=True)
|
||||
inventory._populate()
|
||||
|
||||
# get different hosts
|
||||
host_qemu = inventory.inventory.get_host('test-qemu')
|
||||
host_qemu_windows = inventory.inventory.get_host('test-qemu-windows')
|
||||
host_qemu_multi_nic = inventory.inventory.get_host('test-qemu-multi-nic')
|
||||
host_qemu_template = inventory.inventory.get_host('test-qemu-template')
|
||||
host_lxc = inventory.inventory.get_host('test-lxc')
|
||||
|
||||
# check if qemu-test is in the proxmox_pool_test group
|
||||
assert 'proxmox_pool_test' in inventory.inventory.groups
|
||||
group_qemu = inventory.inventory.groups['proxmox_pool_test']
|
||||
assert group_qemu.hosts == [host_qemu]
|
||||
|
||||
# check if qemu-test has eth0 interface in agent_interfaces fact
|
||||
assert 'eth0' in [d['name'] for d in host_qemu.get_vars()['proxmox_agent_interfaces']]
|
||||
|
||||
# check if qemu-multi-nic has multiple network interfaces
|
||||
for iface_name in ['eth0', 'eth1', 'weave']:
|
||||
assert iface_name in [d['name'] for d in host_qemu_multi_nic.get_vars()['proxmox_agent_interfaces']]
|
||||
|
||||
# check if interface with no mac-address or ip-address defaults correctly
|
||||
assert [iface for iface in host_qemu_multi_nic.get_vars()['proxmox_agent_interfaces']
|
||||
if iface['name'] == 'nomacorip'
|
||||
and iface['mac-address'] == ''
|
||||
and iface['ip-addresses'] == []
|
||||
]
|
||||
|
||||
# check to make sure qemu-windows doesn't have proxmox_agent_interfaces
|
||||
assert "proxmox_agent_interfaces" not in host_qemu_windows.get_vars()
|
||||
|
||||
# check if lxc-test has been discovered correctly
|
||||
group_lxc = inventory.inventory.groups['proxmox_all_lxc']
|
||||
assert group_lxc.hosts == [host_lxc]
|
||||
|
||||
# check if qemu template is not present
|
||||
assert host_qemu_template is None
|
||||
|
||||
# check that offline node is in inventory
|
||||
assert inventory.inventory.get_host('testnode2')
|
||||
|
||||
# make sure that ['prelaunch', 'paused'] are in the group list
|
||||
for group in ['paused', 'prelaunch']:
|
||||
assert ('%sall_%s' % (inventory.group_prefix, group)) in inventory.inventory.groups
|
||||
|
||||
# check if qemu-windows is in the prelaunch group
|
||||
group_prelaunch = inventory.inventory.groups['proxmox_all_prelaunch']
|
||||
assert group_prelaunch.hosts == [host_qemu_windows]
|
||||
|
||||
# check if qemu-multi-nic is in the paused group
|
||||
group_paused = inventory.inventory.groups['proxmox_all_paused']
|
||||
assert group_paused.hosts == [host_qemu_multi_nic]
|
||||
|
||||
|
||||
def test_populate_missing_qemu_extended_groups(inventory, mocker):
|
||||
# module settings
|
||||
inventory.proxmox_user = 'root@pam'
|
||||
inventory.proxmox_password = 'password'
|
||||
inventory.proxmox_url = 'https://localhost:8006'
|
||||
inventory.group_prefix = 'proxmox_'
|
||||
inventory.facts_prefix = 'proxmox_'
|
||||
inventory.strict = False
|
||||
|
||||
opts = {
|
||||
'group_prefix': 'proxmox_',
|
||||
'facts_prefix': 'proxmox_',
|
||||
'want_facts': True,
|
||||
'want_proxmox_nodes_ansible_host': True,
|
||||
'qemu_extended_statuses': False
|
||||
}
|
||||
|
||||
# bypass authentication and API fetch calls
|
||||
inventory._get_auth = mocker.MagicMock(side_effect=get_auth)
|
||||
inventory._get_json = mocker.MagicMock(side_effect=get_json)
|
||||
inventory._get_vm_snapshots = mocker.MagicMock(side_effect=get_vm_snapshots)
|
||||
inventory.get_option = mocker.MagicMock(side_effect=get_option(opts))
|
||||
inventory._can_add_host = mocker.MagicMock(return_value=True)
|
||||
inventory._populate()
|
||||
|
||||
# make sure that ['prelaunch', 'paused'] are not in the group list
|
||||
for group in ['paused', 'prelaunch']:
|
||||
assert ('%sall_%s' % (inventory.group_prefix, group)) not in inventory.inventory.groups
|
||||
@@ -0,0 +1,206 @@
|
||||
# Copyright (c) 2020 Shay Rybak <shay.rybak@stackpath.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible_collections.community.general.plugins.inventory.stackpath_compute import InventoryModule
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def inventory():
|
||||
r = InventoryModule()
|
||||
r.inventory = InventoryData()
|
||||
return r
|
||||
|
||||
|
||||
def test_get_stack_slugs(inventory):
|
||||
stacks = [
|
||||
{
|
||||
'status': 'ACTIVE',
|
||||
'name': 'test1',
|
||||
'id': 'XXXX',
|
||||
'updatedAt': '2020-07-08T01:00:00.000000Z',
|
||||
'slug': 'test1',
|
||||
'createdAt': '2020-07-08T00:00:00.000000Z',
|
||||
'accountId': 'XXXX',
|
||||
}, {
|
||||
'status': 'ACTIVE',
|
||||
'name': 'test2',
|
||||
'id': 'XXXX',
|
||||
'updatedAt': '2019-10-22T18:00:00.000000Z',
|
||||
'slug': 'test2',
|
||||
'createdAt': '2019-10-22T18:00:00.000000Z',
|
||||
'accountId': 'XXXX',
|
||||
}, {
|
||||
'status': 'DISABLED',
|
||||
'name': 'test3',
|
||||
'id': 'XXXX',
|
||||
'updatedAt': '2020-01-16T20:00:00.000000Z',
|
||||
'slug': 'test3',
|
||||
'createdAt': '2019-10-15T13:00:00.000000Z',
|
||||
'accountId': 'XXXX',
|
||||
}, {
|
||||
'status': 'ACTIVE',
|
||||
'name': 'test4',
|
||||
'id': 'XXXX',
|
||||
'updatedAt': '2019-11-20T22:00:00.000000Z',
|
||||
'slug': 'test4',
|
||||
'createdAt': '2019-11-20T22:00:00.000000Z',
|
||||
'accountId': 'XXXX',
|
||||
}
|
||||
]
|
||||
inventory._get_stack_slugs(stacks)
|
||||
assert len(inventory.stack_slugs) == 4
|
||||
assert inventory.stack_slugs == [
|
||||
"test1",
|
||||
"test2",
|
||||
"test3",
|
||||
"test4"
|
||||
]
|
||||
|
||||
|
||||
def test_verify_file(tmp_path, inventory):
|
||||
file = tmp_path / "foobar.stackpath_compute.yml"
|
||||
file.touch()
|
||||
assert inventory.verify_file(str(file)) is True
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.stackpath_compute.yml') is False
|
||||
|
||||
|
||||
def test_validate_config(inventory):
|
||||
config = {
|
||||
"client_secret": "short_client_secret",
|
||||
"use_internal_ip": False,
|
||||
"stack_slugs": ["test1"],
|
||||
"client_id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
|
||||
"plugin": "community.general.stackpath_compute",
|
||||
}
|
||||
with pytest.raises(AnsibleError) as error_message:
|
||||
inventory._validate_config(config)
|
||||
assert "client_secret must be 64 characters long" in error_message
|
||||
|
||||
config = {
|
||||
"client_secret": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
|
||||
"use_internal_ip": True,
|
||||
"stack_slugs": ["test1"],
|
||||
"client_id": "short_client_id",
|
||||
"plugin": "community.general.stackpath_compute",
|
||||
}
|
||||
with pytest.raises(AnsibleError) as error_message:
|
||||
inventory._validate_config(config)
|
||||
assert "client_id must be 32 characters long" in error_message
|
||||
|
||||
config = {
|
||||
"use_internal_ip": True,
|
||||
"stack_slugs": ["test1"],
|
||||
"client_id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
|
||||
"plugin": "community.general.stackpath_compute",
|
||||
}
|
||||
with pytest.raises(AnsibleError) as error_message:
|
||||
inventory._validate_config(config)
|
||||
assert "config missing client_secret, a required parameter" in error_message
|
||||
|
||||
config = {
|
||||
"client_secret": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
|
||||
"use_internal_ip": False,
|
||||
"plugin": "community.general.stackpath_compute",
|
||||
}
|
||||
with pytest.raises(AnsibleError) as error_message:
|
||||
inventory._validate_config(config)
|
||||
assert "config missing client_id, a required parameter" in error_message
|
||||
|
||||
|
||||
def test_populate(inventory):
|
||||
instances = [
|
||||
{
|
||||
"name": "instance1",
|
||||
"countryCode": "SE",
|
||||
"workloadSlug": "wokrload1",
|
||||
"continent": "Europe",
|
||||
"workloadId": "id1",
|
||||
"cityCode": "ARN",
|
||||
"externalIpAddress": "20.0.0.1",
|
||||
"target": "target1",
|
||||
"stackSlug": "stack1",
|
||||
"ipAddress": "10.0.0.1",
|
||||
},
|
||||
{
|
||||
"name": "instance2",
|
||||
"countryCode": "US",
|
||||
"workloadSlug": "wokrload2",
|
||||
"continent": "America",
|
||||
"workloadId": "id2",
|
||||
"cityCode": "JFK",
|
||||
"externalIpAddress": "20.0.0.2",
|
||||
"target": "target2",
|
||||
"stackSlug": "stack1",
|
||||
"ipAddress": "10.0.0.2",
|
||||
},
|
||||
{
|
||||
"name": "instance3",
|
||||
"countryCode": "SE",
|
||||
"workloadSlug": "workload3",
|
||||
"continent": "Europe",
|
||||
"workloadId": "id3",
|
||||
"cityCode": "ARN",
|
||||
"externalIpAddress": "20.0.0.3",
|
||||
"target": "target1",
|
||||
"stackSlug": "stack2",
|
||||
"ipAddress": "10.0.0.3",
|
||||
},
|
||||
{
|
||||
"name": "instance4",
|
||||
"countryCode": "US",
|
||||
"workloadSlug": "workload3",
|
||||
"continent": "America",
|
||||
"workloadId": "id4",
|
||||
"cityCode": "JFK",
|
||||
"externalIpAddress": "20.0.0.4",
|
||||
"target": "target2",
|
||||
"stackSlug": "stack2",
|
||||
"ipAddress": "10.0.0.4",
|
||||
},
|
||||
]
|
||||
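# hostname_key selects which instance attribute becomes the inventory hostname;
# with externalIpAddress the hosts below are addressed by their external IPs.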
inventory.hostname_key = "externalIpAddress"
|
||||
inventory._populate(instances)
|
||||
# get different hosts
|
||||
host1 = inventory.inventory.get_host('20.0.0.1')
|
||||
host2 = inventory.inventory.get_host('20.0.0.2')
|
||||
host3 = inventory.inventory.get_host('20.0.0.3')
|
||||
host4 = inventory.inventory.get_host('20.0.0.4')
|
||||
|
||||
# get different groups
|
||||
assert 'citycode_arn' in inventory.inventory.groups
|
||||
group_citycode_arn = inventory.inventory.groups['citycode_arn']
|
||||
assert 'countrycode_se' in inventory.inventory.groups
|
||||
group_countrycode_se = inventory.inventory.groups['countrycode_se']
|
||||
assert 'continent_america' in inventory.inventory.groups
|
||||
group_continent_america = inventory.inventory.groups['continent_america']
|
||||
assert 'name_instance1' in inventory.inventory.groups
|
||||
group_name_instance1 = inventory.inventory.groups['name_instance1']
|
||||
assert 'stackslug_stack1' in inventory.inventory.groups
|
||||
group_stackslug_stack1 = inventory.inventory.groups['stackslug_stack1']
|
||||
assert 'target_target1' in inventory.inventory.groups
|
||||
group_target_target1 = inventory.inventory.groups['target_target1']
|
||||
assert 'workloadslug_workload3' in inventory.inventory.groups
|
||||
group_workloadslug_workload3 = inventory.inventory.groups['workloadslug_workload3']
|
||||
assert 'workloadid_id1' in inventory.inventory.groups
|
||||
group_workloadid_id1 = inventory.inventory.groups['workloadid_id1']
|
||||
|
||||
assert group_citycode_arn.hosts == [host1, host3]
|
||||
assert group_countrycode_se.hosts == [host1, host3]
|
||||
assert group_continent_america.hosts == [host2, host4]
|
||||
assert group_name_instance1.hosts == [host1]
|
||||
assert group_stackslug_stack1.hosts == [host1, host2]
|
||||
assert group_target_target1.hosts == [host1, host3]
|
||||
assert group_workloadslug_workload3.hosts == [host3, host4]
|
||||
assert group_workloadid_id1.hosts == [host1]
|
||||
@@ -0,0 +1,212 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2020, Jeffrey van Pelt <jeff@vanpelt.one>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# The API responses used in these tests were recorded from PVE version 6.2.
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible_collections.community.general.plugins.inventory.xen_orchestra import InventoryModule
|
||||
|
||||
objects = {
|
||||
'vms': {
|
||||
'0e64588-2bea-2d82-e922-881654b0a48f':
|
||||
{
|
||||
'type': 'VM',
|
||||
'addresses': {},
|
||||
'CPUs': {'max': 4, 'number': 4},
|
||||
'memory': {'dynamic': [1073741824, 2147483648], 'static': [536870912, 4294967296], 'size': 2147483648},
|
||||
'name_description': '',
|
||||
'name_label': 'XCP-NG lab 2',
|
||||
'os_version': {},
|
||||
'parent': 'd3af89b2-d846-0874-6acb-031ccf11c560',
|
||||
'power_state': 'Running',
|
||||
'tags': [],
|
||||
'id': '0e645898-2bea-2d82-e922-881654b0a48f',
|
||||
'uuid': '0e645898-2bea-2d82-e922-881654b0a48f',
|
||||
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$container': '222d8594-9426-468a-ad69-7a6f02330fa3'
|
||||
},
|
||||
'b0d25e70-019d-6182-2f7c-b0f5d8ef9331':
|
||||
{
|
||||
'type': 'VM',
|
||||
'addresses': {'0/ipv4/0': '192.168.1.55', '1/ipv4/0': '10.0.90.1'},
|
||||
'CPUs': {'max': 4, 'number': 4},
|
||||
'mainIpAddress': '192.168.1.55',
|
||||
'memory': {'dynamic': [2147483648, 2147483648], 'static': [134217728, 2147483648], 'size': 2147483648},
|
||||
'name_description': '',
|
||||
'name_label': 'XCP-NG lab 3',
|
||||
'os_version': {'name': 'FreeBSD 11.3-STABLE', 'uname': '11.3-STABLE', 'distro': 'FreeBSD'},
|
||||
'power_state': 'Halted',
|
||||
'tags': [],
|
||||
'id': 'b0d25e70-019d-6182-2f7c-b0f5d8ef9331',
|
||||
'uuid': 'b0d25e70-019d-6182-2f7c-b0f5d8ef9331',
|
||||
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$container': 'c96ec4dd-28ac-4df4-b73c-4371bd202728',
|
||||
}
|
||||
},
|
||||
'pools': {
|
||||
'3d315997-73bd-5a74-8ca7-289206cb03ab': {
|
||||
'master': '222d8594-9426-468a-ad69-7a6f02330fa3',
|
||||
'tags': [],
|
||||
'name_description': '',
|
||||
'name_label': 'Storage Lab',
|
||||
'cpus': {'cores': 120, 'sockets': 6},
|
||||
'id': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'type': 'pool',
|
||||
'uuid': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab'
|
||||
}
|
||||
},
|
||||
'hosts': {
|
||||
'c96ec4dd-28ac-4df4-b73c-4371bd202728': {
|
||||
'type': 'host',
|
||||
'uuid': 'c96ec4dd-28ac-4df4-b73c-4371bd202728',
|
||||
'enabled': True,
|
||||
'CPUs': {
|
||||
'cpu_count': '40',
|
||||
'socket_count': '2',
|
||||
'vendor': 'GenuineIntel',
|
||||
'speed': '1699.998',
|
||||
'modelname': 'Intel(R) Xeon(R) CPU E5-2650L v2 @ 1.70GHz',
|
||||
'family': '6',
|
||||
'model': '62',
|
||||
'stepping': '4'
|
||||
},
|
||||
'address': '172.16.210.14',
|
||||
'build': 'release/stockholm/master/7',
|
||||
'cpus': {'cores': 40, 'sockets': 2},
|
||||
'hostname': 'r620-s1',
|
||||
'name_description': 'Default install',
|
||||
'name_label': 'R620-S1',
|
||||
'memory': {'usage': 45283590144, 'size': 137391292416},
|
||||
'power_state': 'Running',
|
||||
'tags': [],
|
||||
'version': '8.2.0',
|
||||
'productBrand': 'XCP-ng',
|
||||
'id': 'c96ec4dd-28ac-4df4-b73c-4371bd202728',
|
||||
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab'
|
||||
},
|
||||
'222d8594-9426-468a-ad69-7a6f02330fa3': {
|
||||
'type': 'host',
|
||||
'uuid': '222d8594-9426-468a-ad69-7a6f02330fa3',
|
||||
'enabled': True,
|
||||
'CPUs': {
|
||||
'cpu_count': '40',
|
||||
'socket_count': '2',
|
||||
'vendor': 'GenuineIntel',
|
||||
'speed': '1700.007',
|
||||
'modelname': 'Intel(R) Xeon(R) CPU E5-2650L v2 @ 1.70GHz',
|
||||
'family': '6',
|
||||
'model': '62',
|
||||
'stepping': '4'
|
||||
},
|
||||
'address': '172.16.210.16',
|
||||
'build': 'release/stockholm/master/7',
|
||||
'cpus': {'cores': 40, 'sockets': 2},
|
||||
'hostname': 'r620-s2',
|
||||
'name_description': 'Default install',
|
||||
'name_label': 'R620-S2',
|
||||
'memory': {'usage': 10636521472, 'size': 137391292416},
|
||||
'power_state': 'Running',
|
||||
'tags': ['foo', 'bar', 'baz'],
|
||||
'version': '8.2.0',
|
||||
'productBrand': 'XCP-ng',
|
||||
'id': '222d8594-9426-468a-ad69-7a6f02330fa3',
|
||||
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
|
||||
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
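# Minimal stand-in for the plugin's get_option(): the group-construction options
# come back empty, 'strict' is False, and any other option returns None.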
def get_option(option):
|
||||
if option == 'groups':
|
||||
return {}
|
||||
elif option == 'keyed_groups':
|
||||
return []
|
||||
elif option == 'compose':
|
||||
return {}
|
||||
elif option == 'strict':
|
||||
return False
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def serialize_groups(groups):
|
||||
return list(map(str, groups))
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def inventory():
|
||||
r = InventoryModule()
|
||||
r.inventory = InventoryData()
|
||||
return r
|
||||
|
||||
|
||||
def test_verify_file_bad_config(inventory):
|
||||
assert inventory.verify_file('foobar.xen_orchestra.yml') is False
|
||||
|
||||
|
||||
def test_populate(inventory, mocker):
|
||||
inventory.get_option = mocker.MagicMock(side_effect=get_option)
|
||||
inventory._populate(objects)
|
||||
actual = sorted(inventory.inventory.hosts.keys())
|
||||
expected = sorted(['c96ec4dd-28ac-4df4-b73c-4371bd202728', '222d8594-9426-468a-ad69-7a6f02330fa3',
|
||||
'0e64588-2bea-2d82-e922-881654b0a48f', 'b0d25e70-019d-6182-2f7c-b0f5d8ef9331'])
|
||||
|
||||
assert actual == expected
|
||||
|
||||
# Host with ip assertions
|
||||
host_with_ip = inventory.inventory.get_host(
|
||||
'b0d25e70-019d-6182-2f7c-b0f5d8ef9331')
|
||||
host_with_ip_vars = host_with_ip.vars
|
||||
|
||||
assert host_with_ip_vars['ansible_host'] == '192.168.1.55'
|
||||
assert host_with_ip_vars['power_state'] == 'halted'
|
||||
assert host_with_ip_vars['type'] == 'VM'
|
||||
|
||||
assert host_with_ip in inventory.inventory.groups['with_ip'].hosts
|
||||
|
||||
# Host without ip
|
||||
host_without_ip = inventory.inventory.get_host(
|
||||
'0e64588-2bea-2d82-e922-881654b0a48f')
|
||||
host_without_ip_vars = host_without_ip.vars
|
||||
|
||||
assert host_without_ip_vars['ansible_host'] is None
|
||||
assert host_without_ip_vars['power_state'] == 'running'
|
||||
|
||||
assert host_without_ip in inventory.inventory.groups['without_ip'].hosts
|
||||
|
||||
assert host_with_ip in inventory.inventory.groups['xo_host_r620_s1'].hosts
|
||||
assert host_without_ip in inventory.inventory.groups['xo_host_r620_s2'].hosts
|
||||
|
||||
r620_s1 = inventory.inventory.get_host(
|
||||
'c96ec4dd-28ac-4df4-b73c-4371bd202728')
|
||||
r620_s2 = inventory.inventory.get_host(
|
||||
'222d8594-9426-468a-ad69-7a6f02330fa3')
|
||||
|
||||
assert r620_s1.vars['address'] == '172.16.210.14'
|
||||
assert r620_s1.vars['tags'] == []
|
||||
assert r620_s2.vars['address'] == '172.16.210.16'
|
||||
assert r620_s2.vars['tags'] == ['foo', 'bar', 'baz']
|
||||
|
||||
storage_lab = inventory.inventory.groups['xo_pool_storage_lab']
|
||||
|
||||
# Check that hosts are in their corresponding pool
|
||||
assert r620_s1 in storage_lab.hosts
|
||||
assert r620_s2 in storage_lab.hosts
|
||||
|
||||
# Check that hosts are in their corresponding pool
|
||||
assert host_without_ip in storage_lab.hosts
|
||||
assert host_with_ip in storage_lab.hosts
|
||||
@@ -0,0 +1,85 @@
|
||||
# Copyright (c) 2022 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
from ansible_collections.community.general.plugins.lookup.onepassword import (
|
||||
OnePassCLIv1,
|
||||
OnePassCLIv2,
|
||||
)
|
||||
|
||||
|
||||
def load_file(file):
    with open(os.path.join(os.path.dirname(__file__), "fixtures", file), "r") as f:
        return json.load(f)
|
||||
|
||||
|
||||
# Intentionally excludes metadata leaf nodes that would exist in real CLI output but are not relevant to these tests.
|
||||
MOCK_ENTRIES = {
|
||||
OnePassCLIv1: [
|
||||
{
|
||||
'vault_name': 'Acme "Quot\'d" Servers',
|
||||
'queries': [
|
||||
'0123456789',
|
||||
'Mock "Quot\'d" Server'
|
||||
],
|
||||
'expected': ['t0pS3cret', 't0pS3cret'],
|
||||
'output': load_file("v1_out_01.json"),
|
||||
},
|
||||
{
|
||||
'vault_name': 'Acme Logins',
|
||||
'queries': [
|
||||
'9876543210',
|
||||
'Mock Website',
|
||||
'acme.com'
|
||||
],
|
||||
'expected': ['t0pS3cret', 't0pS3cret', 't0pS3cret'],
|
||||
'output': load_file("v1_out_02.json"),
|
||||
},
|
||||
{
|
||||
'vault_name': 'Acme Logins',
|
||||
'queries': [
|
||||
'864201357'
|
||||
],
|
||||
'expected': ['vauxhall'],
|
||||
'output': load_file("v1_out_03.json"),
|
||||
},
|
||||
],
|
||||
OnePassCLIv2: [
|
||||
{
|
||||
"vault_name": "Test Vault",
|
||||
"queries": [
|
||||
"ywvdbojsguzgrgnokmcxtydgdv",
|
||||
"Authy Backup",
|
||||
],
|
||||
"expected": ["OctoberPoppyNuttyDraperySabbath", "OctoberPoppyNuttyDraperySabbath"],
|
||||
"output": load_file("v2_out_01.json"),
|
||||
},
|
||||
{
|
||||
# Request a custom field where ID and label are different
|
||||
"vault_name": "Test Vault",
|
||||
"queries": ["Dummy Login"],
|
||||
"kwargs": {
|
||||
"field": "password1",
|
||||
},
|
||||
"expected": ["data in custom field"],
|
||||
"output": load_file("v2_out_02.json")
|
||||
},
|
||||
{
|
||||
# Request data from a custom section
|
||||
"vault_name": "Test Vault",
|
||||
"queries": ["Duplicate Sections"],
|
||||
"kwargs": {
|
||||
"field": "s2 text",
|
||||
"section": "Section 2",
|
||||
},
|
||||
"expected": ["first value"],
|
||||
"output": load_file("v2_out_03.json")
|
||||
},
|
||||
],
|
||||
}
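# Each entry pairs recorded CLI output (loaded from the fixtures directory) with
# the queries that should resolve against it and the field values the lookup is
# expected to return; the lookup tests later in this commit import MOCK_ENTRIES
# and parametrize over it per CLI class.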
|
||||
@@ -0,0 +1,39 @@
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.general.plugins.lookup.onepassword import OnePass
|
||||
|
||||
|
||||
OP_VERSION_FIXTURES = [
|
||||
"opv1",
|
||||
"opv2"
|
||||
]
|
||||
|
||||
|
||||
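# The fixtures below fake an installed `op` binary: get_current_version is patched
# so OnePass selects the CLI class matching the requested version string, and the
# resulting client's _run() is stubbed so no real command is executed.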
@pytest.fixture
|
||||
def fake_op(mocker):
|
||||
def _fake_op(version):
|
||||
mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePassCLIBase.get_current_version", return_value=version)
|
||||
op = OnePass(None, None, None, None, None)
|
||||
op._config._config_file_path = "/home/jin/.op/config"
|
||||
mocker.patch.object(op._cli, "_run")
|
||||
|
||||
return op
|
||||
|
||||
return _fake_op
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def opv1(fake_op):
|
||||
return fake_op("1.17.2")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def opv2(fake_op):
|
||||
return fake_op("2.27.2")
|
||||
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"uuid": "0123456789",
|
||||
"vaultUuid": "2468",
|
||||
"overview": {
|
||||
"title": "Mock \"Quot'd\" Server"
|
||||
},
|
||||
"details": {
|
||||
"sections": [{
|
||||
"title": "",
|
||||
"fields": [
|
||||
{"t": "username", "v": "jamesbond"},
|
||||
{"t": "password", "v": "t0pS3cret"},
|
||||
{"t": "notes", "v": "Test note with\nmultiple lines and trailing space.\n\n"},
|
||||
{"t": "tricksy \"quot'd\" field\\", "v": "\"quot'd\" value"}
|
||||
]
|
||||
}]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: 2022, Ansible Project
|
||||
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"uuid": "9876543210",
|
||||
"vaultUuid": "1357",
|
||||
"overview": {
|
||||
"title": "Mock Website",
|
||||
"URLs": [
|
||||
{"l": "website", "u": "https://acme.com/login"}
|
||||
]
|
||||
},
|
||||
"details": {
|
||||
"sections": [{
|
||||
"title": "",
|
||||
"fields": [
|
||||
{"t": "password", "v": "t0pS3cret"}
|
||||
]
|
||||
}]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: 2022, Ansible Project
|
||||
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"uuid": "864201357",
|
||||
"vaultUuid": "1357",
|
||||
"overview": {
|
||||
"title": "Mock Something"
|
||||
},
|
||||
"details": {
|
||||
"fields": [
|
||||
{
|
||||
"value": "jbond@mi6.gov.uk",
|
||||
"name": "emailAddress"
|
||||
},
|
||||
{
|
||||
"name": "password",
|
||||
"value": "vauxhall"
|
||||
},
|
||||
{}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: 2022, Ansible Project
|
||||
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"id": "ywvdbojsguzgrgnokmcxtydgdv",
|
||||
"title": "Authy Backup",
|
||||
"version": 1,
|
||||
"vault": {
|
||||
"id": "bcqxysvcnejjrwzoqrwzcqjqxc",
|
||||
"name": "test vault"
|
||||
},
|
||||
"category": "PASSWORD",
|
||||
"last_edited_by": "7FUPZ8ZNE02KSHMAIMKHIVUE17",
|
||||
"created_at": "2015-01-18T13:13:38Z",
|
||||
"updated_at": "2016-02-20T16:23:54Z",
|
||||
"additional_information": "Jan 18, 2015, 08:13:38",
|
||||
"fields": [
|
||||
{
|
||||
"id": "password",
|
||||
"type": "CONCEALED",
|
||||
"purpose": "PASSWORD",
|
||||
"label": "password",
|
||||
"value": "OctoberPoppyNuttyDraperySabbath",
|
||||
"reference": "op://Test Vault/Authy Backup/password",
|
||||
"password_details": {
|
||||
"strength": "FANTASTIC"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "notesPlain",
|
||||
"type": "STRING",
|
||||
"purpose": "NOTES",
|
||||
"label": "notesPlain",
|
||||
"value": "Backup password to restore Authy",
|
||||
"reference": "op://Test Vault/Authy Backup/notesPlain"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: 2022, Ansible Project
|
||||
@@ -0,0 +1,85 @@
|
||||
{
|
||||
"id": "awk4s2u44fhnrgppszcsvc663i",
|
||||
"title": "Dummy Login",
|
||||
"version": 4,
|
||||
"vault": {
|
||||
"id": "stpebbaccrq72xulgouxsk4p7y",
|
||||
"name": "Personal"
|
||||
},
|
||||
"category": "LOGIN",
|
||||
"last_edited_by": "LSGPJERUYBH7BFPHMZ2KKGL6AU",
|
||||
"created_at": "2018-04-25T21:55:19Z",
|
||||
"updated_at": "2022-09-02T17:51:21Z",
|
||||
"additional_information": "agent.smith",
|
||||
"urls": [
|
||||
{
|
||||
"primary": true,
|
||||
"href": "https://acme.com"
|
||||
}
|
||||
],
|
||||
"sections": [
|
||||
{
|
||||
"id": "add more"
|
||||
},
|
||||
{
|
||||
"id": "gafaeg7vnqmgrklw5r6yrufyxy",
|
||||
"label": "COMMANDS"
|
||||
},
|
||||
{
|
||||
"id": "linked items",
|
||||
"label": "Related Items"
|
||||
}
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"id": "username",
|
||||
"type": "STRING",
|
||||
"purpose": "USERNAME",
|
||||
"label": "username",
|
||||
"value": "agent.smith",
|
||||
"reference": "op://Personal/Dummy Login/username"
|
||||
},
|
||||
{
|
||||
"id": "password",
|
||||
"type": "CONCEALED",
|
||||
"purpose": "PASSWORD",
|
||||
"label": "password",
|
||||
"value": "FootworkDegreeReverence",
|
||||
"entropy": 159.60836791992188,
|
||||
"reference": "op://Personal/Dummy Login/password",
|
||||
"password_details": {
|
||||
"entropy": 159,
|
||||
"generated": true,
|
||||
"strength": "FANTASTIC"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "notesPlain",
|
||||
"type": "STRING",
|
||||
"purpose": "NOTES",
|
||||
"label": "notesPlain",
|
||||
"reference": "op://Personal/Dummy Login/notesPlain"
|
||||
},
|
||||
{
|
||||
"id": "7gyjekelk24ghgd4rvafspjbli",
|
||||
"section": {
|
||||
"id": "add more"
|
||||
},
|
||||
"type": "STRING",
|
||||
"label": "title",
|
||||
"value": "value of the field",
|
||||
"reference": "op://Personal/Dummy Login/add more/title"
|
||||
},
|
||||
{
|
||||
"id": "fx4wpzokrxn7tlb3uwpdjfptgm",
|
||||
"section": {
|
||||
"id": "gafaeg7vnqmgrklw5r6yrufyxy",
|
||||
"label": "COMMANDS"
|
||||
},
|
||||
"type": "CONCEALED",
|
||||
"label": "password1",
|
||||
"value": "data in custom field",
|
||||
"reference": "op://Personal/Dummy Login/COMMANDS/password1"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: 2022, Ansible Project
|
||||
@@ -0,0 +1,103 @@
|
||||
{
|
||||
"id": "7t7qu2r35qyvqj3crujd4dqxmy",
|
||||
"title": "Duplicate Sections",
|
||||
"version": 3,
|
||||
"vault": {
|
||||
"id": "stpebbaccrq72xulgouxsk4p7y",
|
||||
"name": "Personal"
|
||||
},
|
||||
"category": "LOGIN",
|
||||
"last_edited_by": "LSGPJERUYBH7BFPHMZ2KKGL6AU",
|
||||
"created_at": "2022-11-04T17:09:18Z",
|
||||
"updated_at": "2022-11-04T17:22:19Z",
|
||||
"additional_information": "flora",
|
||||
"urls": [
|
||||
{
|
||||
"label": "website",
|
||||
"primary": true,
|
||||
"href": "https://acme.com/login"
|
||||
}
|
||||
],
|
||||
"sections": [
|
||||
{
|
||||
"id": "add more"
|
||||
},
|
||||
{
|
||||
"id": "7osqcvd43i75teocdzbb6d7mie",
|
||||
"label": "Section 2"
|
||||
}
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"id": "username",
|
||||
"type": "STRING",
|
||||
"purpose": "USERNAME",
|
||||
"label": "username",
|
||||
"value": "flora",
|
||||
"reference": "op://Personal/Duplicate Sections/username"
|
||||
},
|
||||
{
|
||||
"id": "password",
|
||||
"type": "CONCEALED",
|
||||
"purpose": "PASSWORD",
|
||||
"label": "password",
|
||||
"value": "PtZGFLAibx-erTo7ywywEvh-n4syas97n-tuF2D.b8DdqA2vCjrvRGkNQxj!Gi9R",
|
||||
"entropy": 379.564697265625,
|
||||
"reference": "op://Personal/Duplicate Sections/password",
|
||||
"password_details": {
|
||||
"entropy": 379,
|
||||
"generated": true,
|
||||
"strength": "FANTASTIC"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "notesPlain",
|
||||
"type": "STRING",
|
||||
"purpose": "NOTES",
|
||||
"label": "notesPlain",
|
||||
"reference": "op://Personal/Duplicate Sections/notesPlain"
|
||||
},
|
||||
{
|
||||
"id": "4saaazkb7arwisj6ysctb4jmm4",
|
||||
"section": {
|
||||
"id": "add more"
|
||||
},
|
||||
"type": "STRING",
|
||||
"label": "text",
|
||||
"value": "text field the first",
|
||||
"reference": "op://Personal/Duplicate Sections/add more/text"
|
||||
},
|
||||
{
|
||||
"id": "4vtfkj4bwcmg7d5uf62wnpkp3a",
|
||||
"section": {
|
||||
"id": "add more"
|
||||
},
|
||||
"type": "STRING",
|
||||
"label": "text",
|
||||
"value": "text field the second",
|
||||
"reference": "op://Personal/Duplicate Sections/add more/text"
|
||||
},
|
||||
{
|
||||
"id": "wbrjnowkrgavpooomtht36gjqu",
|
||||
"section": {
|
||||
"id": "7osqcvd43i75teocdzbb6d7mie",
|
||||
"label": "Section 2"
|
||||
},
|
||||
"type": "STRING",
|
||||
"label": "s2 text",
|
||||
"value": "first value",
|
||||
"reference": "op://Personal/Duplicate Sections/Section 2/s2 text"
|
||||
},
|
||||
{
|
||||
"id": "bddlz2fj2pebmtfhksbmcexy7m",
|
||||
"section": {
|
||||
"id": "7osqcvd43i75teocdzbb6d7mie",
|
||||
"label": "Section 2"
|
||||
},
|
||||
"type": "STRING",
|
||||
"label": "s2 text",
|
||||
"value": "second value",
|
||||
"reference": "op://Personal/Duplicate Sections/Section 2/s2 text"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: 2022, Ansible Project
|
||||
@@ -0,0 +1,183 @@
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import operator
|
||||
import itertools
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from .conftest import OP_VERSION_FIXTURES
|
||||
from .common import MOCK_ENTRIES
|
||||
|
||||
from ansible.errors import AnsibleLookupError
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
from ansible_collections.community.general.plugins.lookup.onepassword import (
|
||||
OnePassCLIv1,
|
||||
OnePassCLIv2,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("version", "version_class"),
|
||||
(
|
||||
("1.17.2", OnePassCLIv1),
|
||||
("2.27.4", OnePassCLIv2),
|
||||
)
|
||||
)
|
||||
def test_op_correct_cli_class(fake_op, version, version_class):
|
||||
op = fake_op(version)
|
||||
assert op._cli.version == version
|
||||
assert isinstance(op._cli, version_class)
|
||||
|
||||
|
||||
def test_op_unsupported_cli_version(fake_op):
|
||||
with pytest.raises(AnsibleLookupError, match="is unsupported"):
|
||||
fake_op("99.77.77")
|
||||
|
||||
|
||||
@pytest.mark.parametrize("op_fixture", OP_VERSION_FIXTURES)
|
||||
def test_op_set_token_with_config(op_fixture, mocker, request):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
token = "F5417F77529B41B595D7F9D6F76EC057"
|
||||
mocker.patch("os.path.isfile", return_value=True)
|
||||
mocker.patch.object(op._cli, "signin", return_value=(0, token + "\n", ""))
|
||||
|
||||
op.set_token()
|
||||
|
||||
assert op.token == token
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("op_fixture", "message"),
|
||||
[
|
||||
(op, value)
|
||||
for op in OP_VERSION_FIXTURES
|
||||
for value in
|
||||
(
|
||||
"Missing required parameters",
|
||||
"The operation is unauthorized",
|
||||
)
|
||||
]
|
||||
)
|
||||
def test_op_set_token_with_config_missing_args(op_fixture, message, request, mocker):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
mocker.patch("os.path.isfile", return_value=True)
|
||||
mocker.patch.object(op._cli, "signin", return_value=(99, "", ""), side_effect=AnsibleLookupError(message))
|
||||
mocker.patch.object(op._cli, "full_signin", return_value=(0, "", ""))
|
||||
|
||||
with pytest.raises(AnsibleLookupError, match=message):
|
||||
op.set_token()
|
||||
|
||||
op._cli.full_signin.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("op_fixture", OP_VERSION_FIXTURES)
|
||||
def test_op_set_token_with_config_full_signin(op_fixture, request, mocker):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
mocker.patch("os.path.isfile", return_value=True)
|
||||
mocker.patch.object(op._cli, "signin", return_value=(99, "", ""), side_effect=AnsibleLookupError("Raised intentionally"))
|
||||
mocker.patch.object(op._cli, "full_signin", return_value=(0, "", ""))
|
||||
|
||||
op.set_token()
|
||||
|
||||
op._cli.full_signin.assert_called()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("op_fixture", OP_VERSION_FIXTURES)
|
||||
def test_op_set_token_without_config(op_fixture, request, mocker):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
token = "B988E8A2680A4A348962751A96861FA1"
|
||||
mocker.patch("os.path.isfile", return_value=False)
|
||||
mocker.patch.object(op._cli, "signin", return_value=(99, "", ""))
|
||||
mocker.patch.object(op._cli, "full_signin", return_value=(0, token + "\n", ""))
|
||||
|
||||
op.set_token()
|
||||
|
||||
op._cli.signin.assert_not_called()
|
||||
assert op.token == token
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("op_fixture", "login_status"),
|
||||
[(op, value) for op in OP_VERSION_FIXTURES for value in [False, True]]
|
||||
)
|
||||
def test_op_assert_logged_in(mocker, login_status, op_fixture, request):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
mocker.patch.object(op._cli, "assert_logged_in", return_value=login_status)
|
||||
mocker.patch.object(op, "set_token")
|
||||
|
||||
op.assert_logged_in()
|
||||
|
||||
op._cli.assert_logged_in.assert_called_once()
|
||||
assert op.logged_in == login_status
|
||||
|
||||
if not login_status:
|
||||
op.set_token.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("op_fixture", OP_VERSION_FIXTURES)
|
||||
def test_op_get_raw_v1(mocker, op_fixture, request):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
mocker.patch.object(op._cli, "get_raw", return_value=[99, "RAW OUTPUT", ""])
|
||||
|
||||
result = op.get_raw("some item")
|
||||
|
||||
assert result == "RAW OUTPUT"
|
||||
op._cli.get_raw.assert_called_once()
|
||||
|
||||
|
||||
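# Each parametrize entry is built as (op_fixture_name, output, expected) by
# chaining the fixture name onto the (output, expected) pairs listed below.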
@pytest.mark.parametrize(
|
||||
("op_fixture", "output", "expected"),
|
||||
(
|
||||
list(itertools.chain([op], d))
|
||||
for op in OP_VERSION_FIXTURES
|
||||
for d in [
|
||||
("RAW OUTPUT", "RAW OUTPUT"),
|
||||
(None, ""),
|
||||
("", ""),
|
||||
]
|
||||
)
|
||||
)
|
||||
def test_op_get_field(mocker, op_fixture, output, expected, request):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
mocker.patch.object(op, "get_raw", return_value=output)
|
||||
mocker.patch.object(op._cli, "_parse_field", return_value=output)
|
||||
|
||||
result = op.get_field("some item", "some field")
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
# This test sometimes fails on older Python versions because the parametrized tests
# can be collected in a different order on each pytest-xdist worker.
# Sort the fixture data so collection is deterministic.
# https://github.com/pytest-dev/pytest-xdist/issues/432
|
||||
@pytest.mark.parametrize(
|
||||
("cli_class", "vault", "queries", "kwargs", "output", "expected"),
|
||||
(
|
||||
(_cli_class, item["vault_name"], item["queries"], item.get("kwargs", {}), item["output"], item["expected"])
|
||||
for _cli_class in sorted(MOCK_ENTRIES, key=operator.attrgetter("__name__"))
|
||||
for item in MOCK_ENTRIES[_cli_class]
|
||||
)
|
||||
)
|
||||
def test_op_lookup(mocker, cli_class, vault, queries, kwargs, output, expected):
|
||||
mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePass._get_cli_class", cli_class)
|
||||
mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePass.assert_logged_in", return_value=True)
|
||||
mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePassCLIBase._run", return_value=(0, json.dumps(output), ""))
|
||||
|
||||
op_lookup = lookup_loader.get("community.general.onepassword")
|
||||
result = op_lookup.run(queries, vault=vault, **kwargs)
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("op_fixture", OP_VERSION_FIXTURES)
|
||||
def test_signin(op_fixture, request):
|
||||
op = request.getfixturevalue(op_fixture)
|
||||
op._cli.master_password = "master_pass"
|
||||
op._cli.signin()
|
||||
print(op._cli.version)
|
||||
op._cli._run.assert_called_once_with(['signin', '--raw'], command_input=b"master_pass")
|
||||
@@ -0,0 +1,50 @@
|
||||
# Copyright (c) 2022 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
from ansible_collections.community.general.plugins.lookup.onepassword import OnePassCLIv1
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("args", "rc", "expected_call_args", "expected_call_kwargs", "expected"),
|
||||
(
|
||||
([], 0, ["get", "account"], {"ignore_errors": True}, True,),
|
||||
([], 1, ["get", "account"], {"ignore_errors": True}, False,),
|
||||
(["acme"], 1, ["get", "account", "--account", "acme.1password.com"], {"ignore_errors": True}, False,),
|
||||
)
|
||||
)
|
||||
def test_assert_logged_in(mocker, args, rc, expected_call_args, expected_call_kwargs, expected):
|
||||
mocker.patch.object(OnePassCLIv1, "_run", return_value=[rc, "", ""])
|
||||
|
||||
op_cli = OnePassCLIv1(*args)
|
||||
result = op_cli.assert_logged_in()
|
||||
|
||||
op_cli._run.assert_called_with(expected_call_args, **expected_call_kwargs)
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_full_signin(mocker):
|
||||
mocker.patch.object(OnePassCLIv1, "_run", return_value=[0, "", ""])
|
||||
|
||||
op_cli = OnePassCLIv1(
|
||||
subdomain="acme",
|
||||
username="bob@acme.com",
|
||||
secret_key="SECRET",
|
||||
master_password="ONEKEYTORULETHEMALL",
|
||||
)
|
||||
result = op_cli.full_signin()
|
||||
|
||||
op_cli._run.assert_called_with([
|
||||
"signin",
|
||||
"acme.1password.com",
|
||||
b"bob@acme.com",
|
||||
b"SECRET",
|
||||
"--raw",
|
||||
], command_input=b"ONEKEYTORULETHEMALL")
|
||||
assert result == [0, "", ""]
|
||||
@@ -0,0 +1,52 @@
|
||||
# Copyright (c) 2022 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
from ansible_collections.community.general.plugins.lookup.onepassword import OnePassCLIv2
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("args", "out", "expected_call_args", "expected"),
|
||||
(
|
||||
([], "list of accounts", ["account", "get"], True,),
|
||||
(["acme"], "list of accounts", ["account", "get", "--account", "acme.1password.com"], True,),
|
||||
([], "", ["account", "list"], False,),
|
||||
)
|
||||
)
|
||||
def test_assert_logged_in(mocker, args, out, expected_call_args, expected):
|
||||
mocker.patch.object(OnePassCLIv2, "_run", return_value=[0, out, ""])
|
||||
op_cli = OnePassCLIv2(*args)
|
||||
result = op_cli.assert_logged_in()
|
||||
|
||||
op_cli._run.assert_called_with(expected_call_args)
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_full_signin(mocker):
|
||||
mocker.patch.object(OnePassCLIv2, "_run", return_value=[0, "", ""])
|
||||
|
||||
op_cli = OnePassCLIv2(
|
||||
subdomain="acme",
|
||||
username="bob@acme.com",
|
||||
secret_key="SECRET",
|
||||
master_password="ONEKEYTORULETHEMALL",
|
||||
)
|
||||
result = op_cli.full_signin()
|
||||
|
||||
op_cli._run.assert_called_with(
|
||||
[
|
||||
"account", "add", "--raw",
|
||||
"--address", "acme.1password.com",
|
||||
"--email", b"bob@acme.com",
|
||||
"--signin",
|
||||
],
|
||||
command_input=b"ONEKEYTORULETHEMALL",
|
||||
environment_update={'OP_SECRET_KEY': 'SECRET'},
|
||||
)
|
||||
assert result == [0, "", ""]
|
||||
@@ -0,0 +1,162 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022, Jonathan Lung <lungj@heresjono.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils import six
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
from ansible_collections.community.general.plugins.lookup.bitwarden import LookupModule, Bitwarden, BitwardenException
|
||||
|
||||
|
||||
MOCK_RECORDS = [
|
||||
{
|
||||
"collectionIds": [],
|
||||
"deletedDate": None,
|
||||
"favorite": False,
|
||||
"fields": [
|
||||
{
|
||||
"linkedId": None,
|
||||
"name": "a_new_secret",
|
||||
"type": 1,
|
||||
"value": "this is a new secret"
|
||||
},
|
||||
{
|
||||
"linkedId": None,
|
||||
"name": "not so secret",
|
||||
"type": 0,
|
||||
"value": "not secret"
|
||||
}
|
||||
],
|
||||
"folderId": "3b12a9da-7c49-40b8-ad33-aede017a7ead",
|
||||
"id": "90992f63-ddb6-4e76-8bfc-aede016ca5eb",
|
||||
"login": {
|
||||
"password": "passwordA3",
|
||||
"passwordRevisionDate": "2022-07-26T23:03:23.399Z",
|
||||
"totp": None,
|
||||
"username": "userA"
|
||||
},
|
||||
"name": "a_test",
|
||||
"notes": None,
|
||||
"object": "item",
|
||||
"organizationId": None,
|
||||
"passwordHistory": [
|
||||
{
|
||||
"lastUsedDate": "2022-07-26T23:03:23.405Z",
|
||||
"password": "a_new_secret: this is secret"
|
||||
},
|
||||
{
|
||||
"lastUsedDate": "2022-07-26T23:03:23.399Z",
|
||||
"password": "passwordA2"
|
||||
},
|
||||
{
|
||||
"lastUsedDate": "2022-07-26T22:59:52.885Z",
|
||||
"password": "passwordA"
|
||||
}
|
||||
],
|
||||
"reprompt": 0,
|
||||
"revisionDate": "2022-07-26T23:03:23.743Z",
|
||||
"type": 1
|
||||
},
|
||||
{
|
||||
"collectionIds": [],
|
||||
"deletedDate": None,
|
||||
"favorite": False,
|
||||
"folderId": None,
|
||||
"id": "5ebd4d31-104c-49fc-a09c-aedf003d28ad",
|
||||
"login": {
|
||||
"password": "b",
|
||||
"passwordRevisionDate": None,
|
||||
"totp": None,
|
||||
"username": "a"
|
||||
},
|
||||
"name": "dupe_name",
|
||||
"notes": None,
|
||||
"object": "item",
|
||||
"organizationId": None,
|
||||
"reprompt": 0,
|
||||
"revisionDate": "2022-07-27T03:42:40.353Z",
|
||||
"type": 1
|
||||
},
|
||||
{
|
||||
"collectionIds": [],
|
||||
"deletedDate": None,
|
||||
"favorite": False,
|
||||
"folderId": None,
|
||||
"id": "90657653-6695-496d-9431-aedf003d3015",
|
||||
"login": {
|
||||
"password": "d",
|
||||
"passwordRevisionDate": None,
|
||||
"totp": None,
|
||||
"username": "c"
|
||||
},
|
||||
"name": "dupe_name",
|
||||
"notes": None,
|
||||
"object": "item",
|
||||
"organizationId": None,
|
||||
"reprompt": 0,
|
||||
"revisionDate": "2022-07-27T03:42:46.673Z",
|
||||
"type": 1
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
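# MockBitwarden bypasses the parts of Bitwarden that would normally invoke the
# `bw` CLI: it always reports itself as logged in and serves matches straight
# from MOCK_RECORDS, filtering on the requested field (name by default).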
class MockBitwarden(Bitwarden):
|
||||
|
||||
logged_in = True
|
||||
|
||||
def _get_matches(self, search_value, search_field="name"):
|
||||
return list(filter(lambda record: record[search_field] == search_value, MOCK_RECORDS))
|
||||
|
||||
|
||||
class LoggedOutMockBitwarden(MockBitwarden):
|
||||
|
||||
logged_in = False
|
||||
|
||||
|
||||
class TestLookupModule(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.lookup = lookup_loader.get('community.general.bitwarden')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden', new=MockBitwarden())
|
||||
def test_bitwarden_plugin_no_match(self):
|
||||
# Entry 0, "a_test" of the test input should have no duplicates.
|
||||
self.assertEqual([], self.lookup.run(['not_here'], field='password')[0])
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden', new=MockBitwarden())
|
||||
def test_bitwarden_plugin_fields(self):
|
||||
# Entry 0, "a_test" of the test input should have no duplicates.
|
||||
record = MOCK_RECORDS[0]
|
||||
record_name = record['name']
|
||||
for k, v in six.iteritems(record['login']):
|
||||
self.assertEqual([v],
|
||||
self.lookup.run([record_name], field=k)[0])
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden', new=MockBitwarden())
|
||||
def test_bitwarden_plugin_duplicates(self):
|
||||
# There are two records named dupe_name; the check has to be order-insensitive
# about which passwords were retrieved.
|
||||
self.assertEqual(set(['b', 'd']),
|
||||
set(self.lookup.run(['dupe_name'], field='password')[0]))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden', new=MockBitwarden())
|
||||
def test_bitwarden_plugin_full_item(self):
|
||||
# Try to retrieve the full record of the first entry where the name is "a_name".
|
||||
self.assertEqual([MOCK_RECORDS[0]],
|
||||
self.lookup.run(['a_test'])[0])
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden', LoggedOutMockBitwarden())
|
||||
def test_bitwarden_plugin_logged_out(self):
|
||||
record = MOCK_RECORDS[0]
|
||||
record_name = record['name']
|
||||
with self.assertRaises(AnsibleError):
|
||||
self.lookup.run([record_name], field='password')
|
||||
@@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2020-2021, Felix Fontein <felix@fontein.de>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.unittest import TestCase
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import (
|
||||
MagicMock,
|
||||
)
|
||||
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
|
||||
|
||||
class TestLookupModule(TestCase):
|
||||
def setUp(self):
|
||||
templar = MagicMock()
|
||||
templar._loader = None
|
||||
self.lookup = lookup_loader.get("community.general.dependent", templar=templar)
|
||||
|
||||
def test_empty(self):
|
||||
self.assertListEqual(self.lookup.run([], None), [])
|
||||
|
||||
def test_simple(self):
|
||||
self.assertListEqual(
|
||||
self.lookup.run(
|
||||
[
|
||||
{'a': '[1, 2]'},
|
||||
{'b': '[item.a + 3, item.a + 6]'},
|
||||
{'c': '[item.a + item.b * 10]'},
|
||||
],
|
||||
{},
|
||||
),
|
||||
[
|
||||
{'a': 1, 'b': 4, 'c': 41},
|
||||
{'a': 1, 'b': 7, 'c': 71},
|
||||
{'a': 2, 'b': 5, 'c': 52},
|
||||
{'a': 2, 'b': 8, 'c': 82},
|
||||
],
|
||||
)
|
||||
@@ -0,0 +1,44 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2020, Adam Migus <adam@migus.org>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat.unittest import TestCase
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import (
|
||||
patch,
|
||||
MagicMock,
|
||||
)
|
||||
from ansible_collections.community.general.plugins.lookup import dsv
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
|
||||
|
||||
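# Stand-in for the secrets-vault client used by the dsv lookup: get_secret_json()
# always returns the canned RESPONSE string, which the lookup should pass through
# unchanged.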
class MockSecretsVault(MagicMock):
|
||||
RESPONSE = '{"foo": "bar"}'
|
||||
|
||||
def get_secret_json(self, path):
|
||||
return self.RESPONSE
|
||||
|
||||
|
||||
class TestLookupModule(TestCase):
|
||||
def setUp(self):
|
||||
dsv.sdk_is_missing = False
|
||||
self.lookup = lookup_loader.get("community.general.dsv")
|
||||
|
||||
@patch(
|
||||
"ansible_collections.community.general.plugins.lookup.dsv.LookupModule.Client",
|
||||
MockSecretsVault(),
|
||||
)
|
||||
def test_get_secret_json(self):
|
||||
self.assertListEqual(
|
||||
[MockSecretsVault.RESPONSE],
|
||||
self.lookup.run(
|
||||
["/dummy"],
|
||||
[],
|
||||
**{"tenant": "dummy", "client_id": "dummy", "client_secret": "dummy", }
|
||||
),
|
||||
)
|
||||
@@ -0,0 +1,58 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2020, SCC France, Eric Belhomme <ebelhomme@fr.scc.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import pytest
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible_collections.community.general.plugins.lookup import etcd3
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
|
||||
|
||||
class FakeKVMetadata:
|
||||
|
||||
def __init__(self, keyvalue, header):
|
||||
self.key = keyvalue
|
||||
self.create_revision = ''
|
||||
self.mod_revision = ''
|
||||
self.version = ''
|
||||
self.lease_id = ''
|
||||
self.response_header = header
|
||||
|
||||
|
||||
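# Fake etcd3 client: get() returns a (value, metadata) pair like the real client,
# and get_prefix() yields three such pairs with the requested key suffixed _1.._3,
# which is what test_key_prefix asserts against.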
class FakeEtcd3Client(MagicMock):
|
||||
|
||||
def get_prefix(self, key):
|
||||
for i in range(1, 4):
|
||||
yield self.get('{0}_{1}'.format(key, i))
|
||||
|
||||
def get(self, key):
|
||||
return ("{0} value".format(key), FakeKVMetadata(key, None))
|
||||
|
||||
|
||||
class TestLookupModule(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
etcd3.HAS_ETCD = True
|
||||
self.lookup = lookup_loader.get('community.general.etcd3')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.etcd3.etcd3_client', FakeEtcd3Client())
|
||||
def test_key(self):
|
||||
expected_result = [{'key': 'a_key', 'value': 'a_key value'}]
|
||||
self.assertListEqual(expected_result, self.lookup.run(['a_key'], []))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.etcd3.etcd3_client', FakeEtcd3Client())
|
||||
def test_key_prefix(self):
|
||||
expected_result = [
|
||||
{'key': 'a_key_1', 'value': 'a_key_1 value'},
|
||||
{'key': 'a_key_2', 'value': 'a_key_2 value'},
|
||||
{'key': 'a_key_3', 'value': 'a_key_3 value'},
|
||||
]
|
||||
self.assertListEqual(expected_result, self.lookup.run(['a_key'], [], **{'prefix': True}))
|
||||
@@ -0,0 +1,175 @@
|
||||
# Copyright (c) 2016 Andrew Zenk <azenk@umn.edu>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils import six
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
from ansible_collections.community.general.plugins.lookup.lastpass import LookupModule, LPass, LPassException
|
||||
|
||||
|
||||
MOCK_ENTRIES = [{'username': 'user',
|
||||
'name': 'Mock Entry',
|
||||
'password': 't0pS3cret passphrase entry!',
|
||||
'url': 'https://localhost/login',
|
||||
'notes': 'Test\nnote with multiple lines.\n',
|
||||
'id': '0123456789'}]
|
||||
|
||||
|
||||
class MockLPass(LPass):
|
||||
|
||||
_mock_logged_out = False
|
||||
_mock_disconnected = False
|
||||
|
||||
def _lookup_mock_entry(self, key):
|
||||
for entry in MOCK_ENTRIES:
|
||||
if key == entry['id'] or key == entry['name']:
|
||||
return entry
|
||||
|
||||
def _run(self, args, stdin=None, expected_rc=0):
|
||||
# Mock behavior of lpass executable
|
||||
base_options = ArgumentParser(add_help=False)
|
||||
base_options.add_argument('--color', default="auto", choices=['auto', 'always', 'never'])
|
||||
|
||||
p = ArgumentParser()
|
||||
sp = p.add_subparsers(help='command', dest='subparser_name')
|
||||
|
||||
logout_p = sp.add_parser('logout', parents=[base_options], help='logout')
|
||||
show_p = sp.add_parser('show', parents=[base_options], help='show entry details')
|
||||
|
||||
field_group = show_p.add_mutually_exclusive_group(required=True)
|
||||
for field in MOCK_ENTRIES[0].keys():
|
||||
field_group.add_argument("--{0}".format(field), default=False, action='store_true')
|
||||
field_group.add_argument('--field', default=None)
|
||||
show_p.add_argument('selector', help='Unique Name or ID')
|
||||
|
||||
args = p.parse_args(args)
|
||||
|
||||
def mock_exit(output='', error='', rc=0):
|
||||
if rc != expected_rc:
|
||||
raise LPassException(error)
|
||||
return output, error
|
||||
|
||||
if args.color != 'never':
|
||||
return mock_exit(error='Error: Mock only supports --color=never', rc=1)
|
||||
|
||||
if args.subparser_name == 'logout':
|
||||
if self._mock_logged_out:
|
||||
return mock_exit(error='Error: Not currently logged in', rc=1)
|
||||
|
||||
logged_in_error = 'Are you sure you would like to log out? [Y/n]'
|
||||
if stdin and stdin.lower() == 'n\n':
|
||||
return mock_exit(output='Log out: aborted.', error=logged_in_error, rc=1)
|
||||
elif stdin and stdin.lower() == 'y\n':
|
||||
return mock_exit(output='Log out: complete.', error=logged_in_error, rc=0)
|
||||
else:
|
||||
return mock_exit(error='Error: aborted response', rc=1)
|
||||
|
||||
if args.subparser_name == 'show':
|
||||
if self._mock_logged_out:
|
||||
return mock_exit(error='Error: Could not find decryption key.' +
|
||||
' Perhaps you need to login with `lpass login`.', rc=1)
|
||||
|
||||
if self._mock_disconnected:
|
||||
return mock_exit(error='Error: Couldn\'t resolve host name.', rc=1)
|
||||
|
||||
mock_entry = self._lookup_mock_entry(args.selector)
|
||||
|
||||
if args.field:
|
||||
return mock_exit(output=mock_entry.get(args.field, ''))
|
||||
elif args.password:
|
||||
return mock_exit(output=mock_entry.get('password', ''))
|
||||
elif args.username:
|
||||
return mock_exit(output=mock_entry.get('username', ''))
|
||||
elif args.url:
|
||||
return mock_exit(output=mock_entry.get('url', ''))
|
||||
elif args.name:
|
||||
return mock_exit(output=mock_entry.get('name', ''))
|
||||
elif args.id:
|
||||
return mock_exit(output=mock_entry.get('id', ''))
|
||||
elif args.notes:
|
||||
return mock_exit(output=mock_entry.get('notes', ''))
|
||||
|
||||
raise LPassException('We should never get here')
|
||||
|
||||
|
||||
class DisconnectedMockLPass(MockLPass):
|
||||
|
||||
_mock_disconnected = True
|
||||
|
||||
|
||||
class LoggedOutMockLPass(MockLPass):
|
||||
|
||||
_mock_logged_out = True
|
||||
|
||||
|
||||
class TestLPass(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.lookup = lookup_loader.get('community.general.lastpass')
|
||||
|
||||
def test_lastpass_cli_path(self):
|
||||
lp = MockLPass(path='/dev/null')
|
||||
self.assertEqual('/dev/null', lp.cli_path)
|
||||
|
||||
def test_lastpass_build_args_logout(self):
|
||||
lp = MockLPass()
|
||||
self.assertEqual(['logout', '--color=never'], lp._build_args("logout"))
|
||||
|
||||
def test_lastpass_logged_in_true(self):
|
||||
lp = MockLPass()
|
||||
self.assertTrue(lp.logged_in)
|
||||
|
||||
def test_lastpass_logged_in_false(self):
|
||||
lp = LoggedOutMockLPass()
|
||||
self.assertFalse(lp.logged_in)
|
||||
|
||||
def test_lastpass_show_disconnected(self):
|
||||
lp = DisconnectedMockLPass()
|
||||
|
||||
with self.assertRaises(LPassException):
|
||||
lp.get_field('0123456789', 'username')
|
||||
|
||||
def test_lastpass_show(self):
|
||||
lp = MockLPass()
|
||||
for entry in MOCK_ENTRIES:
|
||||
entry_id = entry.get('id')
|
||||
for k, v in six.iteritems(entry):
|
||||
self.assertEqual(v.strip(), lp.get_field(entry_id, k))
|
||||
|
||||
|
||||
class TestLastpassPlugin(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.lookup = lookup_loader.get('community.general.lastpass')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.lastpass.LPass', new=MockLPass)
|
||||
def test_lastpass_plugin_normal(self):
|
||||
for entry in MOCK_ENTRIES:
|
||||
entry_id = entry.get('id')
|
||||
for k, v in six.iteritems(entry):
|
||||
self.assertEqual(v.strip(),
|
||||
self.lookup.run([entry_id], field=k)[0])
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.lastpass.LPass', LoggedOutMockLPass)
|
||||
def test_lastpass_plugin_logged_out(self):
|
||||
entry = MOCK_ENTRIES[0]
|
||||
entry_id = entry.get('id')
|
||||
with self.assertRaises(AnsibleError):
|
||||
self.lookup.run([entry_id], field='password')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.lastpass.LPass', DisconnectedMockLPass)
|
||||
def test_lastpass_plugin_disconnected(self):
|
||||
entry = MOCK_ENTRIES[0]
|
||||
entry_id = entry.get('id')
|
||||
with self.assertRaises(AnsibleError):
|
||||
self.lookup.run([entry_id], field='password')
|
||||
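MockLPass above emulates the lpass CLI with a real argparse parser, so the plugin's command construction is checked against the same flag rules the binary enforces: exactly one field flag, a positional selector, and --color=never. A small hedged example of driving the mock directly, using only entries defined in MOCK_ENTRIES:

    lp = MockLPass()
    output, error = lp._run(['show', '--color=never', '--password', '0123456789'])
    print(output)   # 't0pS3cret passphrase entry!'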
@@ -0,0 +1,537 @@
|
||||
# Copyright (c) 2018, Arigato Machine Inc.
|
||||
# Copyright (c) 2018, Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch, call
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.urls import ConnectionError, SSLValidationError
|
||||
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
|
||||
from ansible.module_utils import six
|
||||
from ansible.plugins.loader import lookup_loader
|
||||
from ansible_collections.community.general.plugins.lookup.manifold import ManifoldApiClient, LookupModule, ApiError
|
||||
import json
|
||||
import os
|
||||
|
||||
|
||||
API_FIXTURES = {
|
||||
'https://api.marketplace.manifold.co/v1/resources':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-1",
|
||||
"name": "Resource 1"
|
||||
},
|
||||
"id": "rid-1"
|
||||
},
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-2",
|
||||
"name": "Resource 2"
|
||||
},
|
||||
"id": "rid-2"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/resources?label=resource-1':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-1",
|
||||
"name": "Resource 1"
|
||||
},
|
||||
"id": "rid-1"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/resources?label=resource-2':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-2",
|
||||
"name": "Resource 2"
|
||||
},
|
||||
"id": "rid-2"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/resources?team_id=tid-1':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-1",
|
||||
"name": "Resource 1"
|
||||
},
|
||||
"id": "rid-1"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/resources?project_id=pid-1':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-2",
|
||||
"name": "Resource 2"
|
||||
},
|
||||
"id": "rid-2"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/resources?project_id=pid-2':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-1",
|
||||
"name": "Resource 1"
|
||||
},
|
||||
"id": "rid-1"
|
||||
},
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-3",
|
||||
"name": "Resource 3"
|
||||
},
|
||||
"id": "rid-3"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/resources?team_id=tid-1&project_id=pid-1':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "resource-1",
|
||||
"name": "Resource 1"
|
||||
},
|
||||
"id": "rid-1"
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/projects':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "project-1",
|
||||
"name": "Project 1",
|
||||
},
|
||||
"id": "pid-1",
|
||||
},
|
||||
{
|
||||
"body": {
|
||||
"label": "project-2",
|
||||
"name": "Project 2",
|
||||
},
|
||||
"id": "pid-2",
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/projects?label=project-2':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"label": "project-2",
|
||||
"name": "Project 2",
|
||||
},
|
||||
"id": "pid-2",
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-1':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"resource_id": "rid-1",
|
||||
"values": {
|
||||
"RESOURCE_TOKEN_1": "token-1",
|
||||
"RESOURCE_TOKEN_2": "token-2"
|
||||
}
|
||||
},
|
||||
"id": "cid-1",
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-2':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"resource_id": "rid-2",
|
||||
"values": {
|
||||
"RESOURCE_TOKEN_3": "token-3",
|
||||
"RESOURCE_TOKEN_4": "token-4"
|
||||
}
|
||||
},
|
||||
"id": "cid-2",
|
||||
}
|
||||
],
|
||||
'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-3':
|
||||
[
|
||||
{
|
||||
"body": {
|
||||
"resource_id": "rid-3",
|
||||
"values": {
|
||||
"RESOURCE_TOKEN_1": "token-5",
|
||||
"RESOURCE_TOKEN_2": "token-6"
|
||||
}
|
||||
},
|
||||
"id": "cid-3",
|
||||
}
|
||||
],
|
||||
'https://api.identity.manifold.co/v1/teams':
|
||||
[
|
||||
{
|
||||
"id": "tid-1",
|
||||
"body": {
|
||||
"name": "Team 1",
|
||||
"label": "team-1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "tid-2",
|
||||
"body": {
|
||||
"name": "Team 2",
|
||||
"label": "team-2"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def mock_fixture(open_url_mock, fixture=None, data=None, headers=None):
|
||||
if not headers:
|
||||
headers = {}
|
||||
if fixture:
|
||||
data = json.dumps(API_FIXTURES[fixture])
|
||||
if 'content-type' not in headers:
|
||||
headers['content-type'] = 'application/json'
|
||||
|
||||
open_url_mock.return_value.read.return_value = data
|
||||
open_url_mock.return_value.headers = headers
|
||||
|
||||
|
||||
class TestManifoldApiClient(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_sends_default_headers(self, open_url_mock):
|
||||
mock_fixture(open_url_mock, data='hello')
|
||||
client = ManifoldApiClient('token-123')
|
||||
client.request('test', 'endpoint')
|
||||
open_url_mock.assert_called_with('https://api.test.manifold.co/v1/endpoint',
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_decodes_json(self, open_url_mock):
|
||||
mock_fixture(open_url_mock, fixture='https://api.marketplace.manifold.co/v1/resources')
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertIsInstance(client.request('marketplace', 'resources'), list)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_streams_text(self, open_url_mock):
|
||||
mock_fixture(open_url_mock, data='hello', headers={'content-type': "text/plain"})
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertEqual('hello', client.request('test', 'endpoint'))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_processes_parameterized_headers(self, open_url_mock):
|
||||
mock_fixture(open_url_mock, data='hello')
|
||||
client = ManifoldApiClient('token-123')
|
||||
client.request('test', 'endpoint', headers={'X-HEADER': 'MANIFOLD'})
|
||||
open_url_mock.assert_called_with('https://api.test.manifold.co/v1/endpoint',
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123',
|
||||
'X-HEADER': 'MANIFOLD'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_passes_arbitrary_parameters(self, open_url_mock):
|
||||
mock_fixture(open_url_mock, data='hello')
|
||||
client = ManifoldApiClient('token-123')
|
||||
client.request('test', 'endpoint', use_proxy=False, timeout=5)
|
||||
open_url_mock.assert_called_with('https://api.test.manifold.co/v1/endpoint',
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0',
|
||||
use_proxy=False, timeout=5)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_raises_on_incorrect_json(self, open_url_mock):
|
||||
mock_fixture(open_url_mock, data='noJson', headers={'content-type': "application/json"})
|
||||
client = ManifoldApiClient('token-123')
|
||||
with self.assertRaises(ApiError) as context:
|
||||
client.request('test', 'endpoint')
|
||||
self.assertEqual('JSON response can\'t be parsed while requesting https://api.test.manifold.co/v1/endpoint:\n'
|
||||
'noJson',
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_raises_on_status_500(self, open_url_mock):
|
||||
open_url_mock.side_effect = HTTPError('https://api.test.manifold.co/v1/endpoint',
|
||||
500, 'Server error', {}, six.StringIO('ERROR'))
|
||||
client = ManifoldApiClient('token-123')
|
||||
with self.assertRaises(ApiError) as context:
|
||||
client.request('test', 'endpoint')
|
||||
self.assertEqual('Server returned: HTTP Error 500: Server error while requesting '
|
||||
'https://api.test.manifold.co/v1/endpoint:\nERROR',
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_raises_on_bad_url(self, open_url_mock):
|
||||
open_url_mock.side_effect = URLError('URL is invalid')
|
||||
client = ManifoldApiClient('token-123')
|
||||
with self.assertRaises(ApiError) as context:
|
||||
client.request('test', 'endpoint')
|
||||
self.assertEqual('Failed lookup url for https://api.test.manifold.co/v1/endpoint : <url'
|
||||
'open error URL is invalid>',
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_raises_on_ssl_error(self, open_url_mock):
|
||||
open_url_mock.side_effect = SSLValidationError('SSL Error')
|
||||
client = ManifoldApiClient('token-123')
|
||||
with self.assertRaises(ApiError) as context:
|
||||
client.request('test', 'endpoint')
|
||||
self.assertEqual('Error validating the server\'s certificate for https://api.test.manifold.co/v1/endpoint: '
|
||||
'SSL Error',
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_request_raises_on_connection_error(self, open_url_mock):
|
||||
open_url_mock.side_effect = ConnectionError('Unknown connection error')
|
||||
client = ManifoldApiClient('token-123')
|
||||
with self.assertRaises(ApiError) as context:
|
||||
client.request('test', 'endpoint')
|
||||
self.assertEqual('Error connecting to https://api.test.manifold.co/v1/endpoint: Unknown connection error',
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_resources_get_all(self, open_url_mock):
|
||||
url = 'https://api.marketplace.manifold.co/v1/resources'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_resources())
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_resources_filter_label(self, open_url_mock):
|
||||
url = 'https://api.marketplace.manifold.co/v1/resources?label=resource-1'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_resources(label='resource-1'))
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_resources_filter_team_and_project(self, open_url_mock):
|
||||
url = 'https://api.marketplace.manifold.co/v1/resources?team_id=tid-1&project_id=pid-1'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_resources(team_id='tid-1', project_id='pid-1'))
|
||||
args, kwargs = open_url_mock.call_args
|
||||
url_called = args[0]
|
||||
# Dict order is not guaranteed, so an url may have querystring parameters order randomized
|
||||
self.assertIn('team_id=tid-1', url_called)
|
||||
self.assertIn('project_id=pid-1', url_called)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_teams_get_all(self, open_url_mock):
|
||||
url = 'https://api.identity.manifold.co/v1/teams'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_teams())
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_teams_filter_label(self, open_url_mock):
|
||||
url = 'https://api.identity.manifold.co/v1/teams'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url][1:2], client.get_teams(label='team-2'))
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_projects_get_all(self, open_url_mock):
|
||||
url = 'https://api.marketplace.manifold.co/v1/projects'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_projects())
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_projects_filter_label(self, open_url_mock):
|
||||
url = 'https://api.marketplace.manifold.co/v1/projects?label=project-2'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_projects(label='project-2'))
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.open_url')
|
||||
def test_get_credentials(self, open_url_mock):
|
||||
url = 'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-1'
|
||||
mock_fixture(open_url_mock, fixture=url)
|
||||
client = ManifoldApiClient('token-123')
|
||||
self.assertListEqual(API_FIXTURES[url], client.get_credentials(resource_id='rid-1'))
|
||||
open_url_mock.assert_called_with(url,
|
||||
headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
|
||||
http_agent='python-manifold-ansible-1.0.0')
|
||||
|
||||
|
||||
class TestLookupModule(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.lookup = lookup_loader.get('community.general.manifold')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_get_all(self, client_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
|
||||
'RESOURCE_TOKEN_2': 'token-2',
|
||||
'RESOURCE_TOKEN_3': 'token-3',
|
||||
'RESOURCE_TOKEN_4': 'token-4'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources']
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123'))
|
||||
client_mock.assert_called_with('token-123')
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id=None)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_get_one_resource(self, client_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_3': 'token-3',
|
||||
'RESOURCE_TOKEN_4': 'token-4'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?label=resource-2']
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run(['resource-2'], api_token='token-123'))
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id=None, label='resource-2')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_get_two_resources(self, client_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
|
||||
'RESOURCE_TOKEN_2': 'token-2',
|
||||
'RESOURCE_TOKEN_3': 'token-3',
|
||||
'RESOURCE_TOKEN_4': 'token-4'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources']
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run(['resource-1', 'resource-2'], api_token='token-123'))
|
||||
client_mock.assert_called_with('token-123')
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id=None)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.display')
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_get_resources_with_same_credential_names(self, client_mock, display_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_1': 'token-5',
|
||||
'RESOURCE_TOKEN_2': 'token-6'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?project_id=pid-2']
|
||||
client_mock.return_value.get_projects.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/projects?label=project-2']
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', project='project-2'))
|
||||
client_mock.assert_called_with('token-123')
|
||||
display_mock.warning.assert_has_calls([
|
||||
call("'RESOURCE_TOKEN_1' with label 'resource-1' was replaced by resource data with label 'resource-3'"),
|
||||
call("'RESOURCE_TOKEN_2' with label 'resource-1' was replaced by resource data with label 'resource-3'")],
|
||||
any_order=True
|
||||
)
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id='pid-2')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_filter_by_team(self, client_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
|
||||
'RESOURCE_TOKEN_2': 'token-2'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?team_id=tid-1']
|
||||
client_mock.return_value.get_teams.return_value = API_FIXTURES['https://api.identity.manifold.co/v1/teams'][0:1]
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', team='team-1'))
|
||||
client_mock.assert_called_with('token-123')
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id='tid-1', project_id=None)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_filter_by_project(self, client_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_3': 'token-3',
|
||||
'RESOURCE_TOKEN_4': 'token-4'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?project_id=pid-1']
|
||||
client_mock.return_value.get_projects.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/projects'][0:1]
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', project='project-1'))
|
||||
client_mock.assert_called_with('token-123')
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id='pid-1')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_filter_by_team_and_project(self, client_mock):
|
||||
expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
|
||||
'RESOURCE_TOKEN_2': 'token-2'
|
||||
}]
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?team_id=tid-1&project_id=pid-1']
|
||||
client_mock.return_value.get_teams.return_value = API_FIXTURES['https://api.identity.manifold.co/v1/teams'][0:1]
|
||||
client_mock.return_value.get_projects.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/projects'][0:1]
|
||||
client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
|
||||
'credentials?resource_id={0}'.format(x)]
|
||||
self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', project='project-1'))
|
||||
client_mock.assert_called_with('token-123')
|
||||
client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id='pid-1')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_raise_team_doesnt_exist(self, client_mock):
|
||||
client_mock.return_value.get_teams.return_value = []
|
||||
with self.assertRaises(AnsibleError) as context:
|
||||
self.lookup.run([], api_token='token-123', team='no-team')
|
||||
self.assertEqual("Team 'no-team' does not exist",
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_raise_project_doesnt_exist(self, client_mock):
|
||||
client_mock.return_value.get_projects.return_value = []
|
||||
with self.assertRaises(AnsibleError) as context:
|
||||
self.lookup.run([], api_token='token-123', project='no-project')
|
||||
self.assertEqual("Project 'no-project' does not exist",
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_raise_resource_doesnt_exist(self, client_mock):
|
||||
client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources']
|
||||
with self.assertRaises(AnsibleError) as context:
|
||||
self.lookup.run(['resource-1', 'no-resource-1', 'no-resource-2'], api_token='token-123')
|
||||
self.assertEqual("Resource(s) no-resource-1, no-resource-2 do not exist",
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_catch_api_error(self, client_mock):
|
||||
client_mock.side_effect = ApiError('Generic error')
|
||||
with self.assertRaises(AnsibleError) as context:
|
||||
self.lookup.run([], api_token='token-123')
|
||||
self.assertEqual("API Error: Generic error",
|
||||
str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_catch_unhandled_exception(self, client_mock):
|
||||
client_mock.side_effect = Exception('Unknown error')
|
||||
with self.assertRaises(AnsibleError) as context:
|
||||
self.lookup.run([], api_token='token-123')
|
||||
self.assertTrue('Exception: Unknown error' in str(context.exception))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_falls_back_to_env_var(self, client_mock):
|
||||
client_mock.return_value.get_resources.return_value = []
|
||||
client_mock.return_value.get_credentials.return_value = []
|
||||
try:
|
||||
os.environ['MANIFOLD_API_TOKEN'] = 'token-321'
|
||||
self.lookup.run([])
|
||||
finally:
|
||||
os.environ.pop('MANIFOLD_API_TOKEN', None)
|
||||
client_mock.assert_called_with('token-321')
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.lookup.manifold.ManifoldApiClient')
|
||||
def test_falls_raises_on_no_token(self, client_mock):
|
||||
client_mock.return_value.get_resources.return_value = []
|
||||
client_mock.return_value.get_credentials.return_value = []
|
||||
os.environ.pop('MANIFOLD_API_TOKEN', None)
|
||||
with self.assertRaises(AnsibleError) as context:
|
||||
self.lookup.run([])
|
||||
assert 'api_token' in str(context.exception)
|
||||
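Throughout the manifold tests, fixtures are keyed by the exact request URL and get_credentials is wired with a side_effect lambda so each resource id resolves to its own fixture. The warning assertions in test_get_resources_with_same_credential_names follow from credentials being folded into a single dict; a hedged, plain-Python illustration of that merge behaviour:

    merged = {}
    for creds in ({'RESOURCE_TOKEN_1': 'token-1'}, {'RESOURCE_TOKEN_1': 'token-5'}):
        merged.update(creds)
    print(merged)   # {'RESOURCE_TOKEN_1': 'token-5'} - the later resource wins, hence the warnings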
@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2021, RevBits <info@revbits.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function

__metaclass__ = type

from ansible_collections.community.general.tests.unit.compat.unittest import TestCase
from ansible_collections.community.general.tests.unit.compat.mock import (
    patch,
    MagicMock,
)
from ansible_collections.community.general.plugins.lookup import revbitspss
from ansible.plugins.loader import lookup_loader


class MockPamSecrets(MagicMock):
    RESPONSE = 'dummy value'

    def get_pam_secret(self, path):
        return self.RESPONSE


class TestLookupModule(TestCase):
    def setUp(self):
        revbitspss.ANOTHER_LIBRARY_IMPORT_ERROR = None
        self.lookup = lookup_loader.get("community.general.revbitspss")

    @patch(
        "ansible_collections.community.general.plugins.lookup.revbitspss.LookupModule.Client",
        MockPamSecrets(),
    )
    def test_get_pam_secret(self):
        terms = ['dummy secret']
        variables = []
        kwargs = {
            "base_url": 'https://dummy.url',
            "api_key": 'dummy'
        }
        self.assertListEqual(
            [{'dummy secret': 'dummy value'}],
            self.lookup.run(terms, variables, **kwargs)
        )
@@ -0,0 +1,120 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Adam Migus <adam@migus.org>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import absolute_import, division, print_function

__metaclass__ = type

from ansible_collections.community.general.tests.unit.compat.unittest import TestCase
from ansible_collections.community.general.tests.unit.compat.mock import (
    patch,
    DEFAULT,
    MagicMock,
)
from ansible_collections.community.general.plugins.lookup import tss
from ansible.plugins.loader import lookup_loader


TSS_IMPORT_PATH = 'ansible_collections.community.general.plugins.lookup.tss'


def make_absolute(name):
    return '.'.join([TSS_IMPORT_PATH, name])


class SecretServerError(Exception):
    def __init__(self):
        self.message = ''


class MockSecretServer(MagicMock):
    RESPONSE = '{"foo": "bar"}'

    def get_secret_json(self, path):
        return self.RESPONSE


class MockFaultySecretServer(MagicMock):
    def get_secret_json(self, path):
        raise SecretServerError


@patch(make_absolute('SecretServer'), MockSecretServer())
class TestTSSClient(TestCase):
    def setUp(self):
        self.server_params = {
            'base_url': '',
            'username': '',
            'domain': '',
            'password': '',
            'api_path_uri': '',
            'token_path_uri': '',
        }

    def test_from_params(self):
        with patch(make_absolute('HAS_TSS_AUTHORIZER'), False):
            self.assert_client_version('v0')

            with patch.dict(self.server_params, {'domain': 'foo'}):
                with self.assertRaises(tss.AnsibleError):
                    self._get_client()

        with patch.multiple(TSS_IMPORT_PATH,
                            HAS_TSS_AUTHORIZER=True,
                            PasswordGrantAuthorizer=DEFAULT,
                            DomainPasswordGrantAuthorizer=DEFAULT):

            self.assert_client_version('v1')

            with patch.dict(self.server_params, {'domain': 'foo'}):
                self.assert_client_version('v1')

    def assert_client_version(self, version):
        version_to_class = {
            'v0': tss.TSSClientV0,
            'v1': tss.TSSClientV1
        }

        client = self._get_client()
        self.assertIsInstance(client, version_to_class[version])

    def _get_client(self):
        return tss.TSSClient.from_params(**self.server_params)


class TestLookupModule(TestCase):
    VALID_TERMS = [1]
    INVALID_TERMS = ['foo']

    def setUp(self):
        self.lookup = lookup_loader.get("community.general.tss")

    @patch.multiple(TSS_IMPORT_PATH,
                    HAS_TSS_SDK=False,
                    SecretServer=MockSecretServer)
    def test_missing_sdk(self):
        with self.assertRaises(tss.AnsibleError):
            self._run_lookup(self.VALID_TERMS)

    @patch.multiple(TSS_IMPORT_PATH,
                    HAS_TSS_SDK=True,
                    SecretServerError=SecretServerError)
    def test_get_secret_json(self):
        with patch(make_absolute('SecretServer'), MockSecretServer):
            self.assertListEqual([MockSecretServer.RESPONSE], self._run_lookup(self.VALID_TERMS))

            with self.assertRaises(tss.AnsibleOptionsError):
                self._run_lookup(self.INVALID_TERMS)

        with patch(make_absolute('SecretServer'), MockFaultySecretServer):
            with self.assertRaises(tss.AnsibleError):
                self._run_lookup(self.VALID_TERMS)

    def _run_lookup(self, terms, variables=None, **kwargs):
        variables = variables or []
        kwargs = kwargs or {"base_url": "dummy", "username": "dummy", "password": "dummy"}

        return self.lookup.run(terms, variables, **kwargs)
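The TSS tests lean on patch.multiple(), which swaps several attributes of the target module in one decorator or context manager; DEFAULT asks mock to substitute a MagicMock for that name. A hedged sketch of the idiom, reusing only names defined above:

    # DEFAULT hands back a MagicMock for PasswordGrantAuthorizer; the other two
    # names are replaced with the explicit values, exactly as in the decorators above.
    with patch.multiple(TSS_IMPORT_PATH,
                        HAS_TSS_SDK=True,
                        SecretServerError=SecretServerError,
                        PasswordGrantAuthorizer=DEFAULT):
        pass  # inside this block the tss module sees the patched attributes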
@@ -0,0 +1,54 @@
# Copyright (c) Ansible project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import random

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.cloud import _exponential_backoff, \
    _full_jitter_backoff


class ExponentialBackoffStrategyTestCase(unittest.TestCase):
    def test_no_retries(self):
        strategy = _exponential_backoff(retries=0)
        result = list(strategy())
        self.assertEqual(result, [], 'list should be empty')

    def test_exponential_backoff(self):
        strategy = _exponential_backoff(retries=5, delay=1, backoff=2)
        result = list(strategy())
        self.assertEqual(result, [1, 2, 4, 8, 16])

    def test_max_delay(self):
        strategy = _exponential_backoff(retries=7, delay=1, backoff=2, max_delay=60)
        result = list(strategy())
        self.assertEqual(result, [1, 2, 4, 8, 16, 32, 60])

    def test_max_delay_none(self):
        strategy = _exponential_backoff(retries=7, delay=1, backoff=2, max_delay=None)
        result = list(strategy())
        self.assertEqual(result, [1, 2, 4, 8, 16, 32, 64])


class FullJitterBackoffStrategyTestCase(unittest.TestCase):
    def test_no_retries(self):
        strategy = _full_jitter_backoff(retries=0)
        result = list(strategy())
        self.assertEqual(result, [], 'list should be empty')

    def test_full_jitter(self):
        retries = 5
        seed = 1

        r = random.Random(seed)
        expected = [r.randint(0, 2**i) for i in range(0, retries)]

        strategy = _full_jitter_backoff(
            retries=retries, delay=1, _random=random.Random(seed))
        result = list(strategy())

        self.assertEqual(result, expected)
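The expected sequences above pin down the backoff contract: each retry waits delay * backoff**i, optionally capped by max_delay, and the strategy is a callable that returns a fresh generator. A minimal sketch that satisfies those expectations (an illustration only, not the module's actual implementation):

    def sketch_exponential_backoff(retries=10, delay=3, backoff=2, max_delay=60):
        def strategy():
            for i in range(retries):
                sleep = delay * backoff ** i
                yield sleep if max_delay is None else min(sleep, max_delay)
        return strategy

    list(sketch_exponential_backoff(retries=5, delay=1, backoff=2)())   # [1, 2, 4, 8, 16]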
@@ -0,0 +1,128 @@
# Copyright (c) Ansible project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import random

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.scaleway import SecretVariables, argon2


class SecretVariablesTestCase(unittest.TestCase):
    def test_dict_to_list(self):
        source = dict(
            attribute1="value1",
            attribute2="value2"
        )
        expect = [
            dict(key="attribute1", value="value1"),
            dict(key="attribute2", value="value2")
        ]

        result = SecretVariables.dict_to_list(source)
        result = sorted(result, key=lambda el: el['key'])
        self.assertEqual(result, expect)

    def test_list_to_dict_hashed(self):
        source = [
            dict(key="secret1", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$NuZk+6UATHNFV78nFRXFvA$3kivcXfzNHI1c/4ZBpP8BeBSGhhI82NfOh4Dd48JJgc"),
            dict(key="secret2", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI")
        ]
        expect = dict(
            secret1="$argon2id$v=19$m=65536,t=1,p=2$NuZk+6UATHNFV78nFRXFvA$3kivcXfzNHI1c/4ZBpP8BeBSGhhI82NfOh4Dd48JJgc",
            secret2="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI"
        )

        self.assertEqual(SecretVariables.list_to_dict(source, hashed=True), expect)

    def test_list_to_dict(self):
        source = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="value2")
        ]
        expect = dict(
            secret1="value1",
            secret2="value2"
        )

        self.assertEqual(SecretVariables.list_to_dict(source, hashed=False), expect)

    @unittest.skipIf(argon2 is None, "Missing required 'argon2' library")
    def test_decode_full(self):
        source_secret = [
            dict(key="secret1", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$NuZk+6UATHNFV78nFRXFvA$3kivcXfzNHI1c/4ZBpP8BeBSGhhI82NfOh4Dd48JJgc"),
            dict(key="secret2", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI"),
        ]
        source_value = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="value2"),
        ]

        expect = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="value2"),
        ]

        result = SecretVariables.decode(source_secret, source_value)
        result = sorted(result, key=lambda el: el['key'])
        self.assertEqual(result, expect)

    @unittest.skipIf(argon2 is None, "Missing required 'argon2' library")
    def test_decode_dict_divergent_values(self):
        source_secret = [
            dict(key="secret1", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$NuZk+6UATHNFV78nFRXFvA$3kivcXfzNHI1c/4ZBpP8BeBSGhhI82NfOh4Dd48JJgc"),
            dict(key="secret2", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI"),
        ]
        source_value = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="diverged_value2"),
        ]

        expect = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI"),
        ]

        result = SecretVariables.decode(source_secret, source_value)
        result = sorted(result, key=lambda el: el['key'])
        self.assertEqual(result, expect)

    @unittest.skipIf(argon2 is None, "Missing required 'argon2' library")
    def test_decode_dict_missing_values_left(self):
        source_secret = [
            dict(key="secret1", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$NuZk+6UATHNFV78nFRXFvA$3kivcXfzNHI1c/4ZBpP8BeBSGhhI82NfOh4Dd48JJgc"),
        ]
        source_value = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="value2"),
        ]

        expect = [
            dict(key="secret1", value="value1"),
        ]

        result = SecretVariables.decode(source_secret, source_value)
        result = sorted(result, key=lambda el: el['key'])
        self.assertEqual(result, expect)

    @unittest.skipIf(argon2 is None, "Missing required 'argon2' library")
    def test_decode_dict_missing_values_right(self):
        source_secret = [
            dict(key="secret1", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$NuZk+6UATHNFV78nFRXFvA$3kivcXfzNHI1c/4ZBpP8BeBSGhhI82NfOh4Dd48JJgc"),
            dict(key="secret2", hashed_value="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI"),
        ]
        source_value = [
            dict(key="secret1", value="value1"),
        ]

        expect = [
            dict(key="secret1", value="value1"),
            dict(key="secret2", value="$argon2id$v=19$m=65536,t=1,p=2$etGO/Z8ImYDeKr6uFsyPAQ$FbL5+hG/duDEpa8UCYqXpEUQ5EacKg6i2iAs+Dq4dAI"),
        ]

        result = SecretVariables.decode(source_secret, source_value)
        result = sorted(result, key=lambda el: el['key'])
        self.assertEqual(result, expect)
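The decode() expectations above read as: a candidate plaintext is kept only when it verifies against the stored argon2 hash, otherwise the hash itself is returned. A hedged one-secret sketch of that rule using the argon2-cffi API (sketch_decode_one is an illustrative helper, not part of the module):

    from argon2 import PasswordHasher
    from argon2.exceptions import VerifyMismatchError

    def sketch_decode_one(hashed_value, candidate):
        try:
            PasswordHasher().verify(hashed_value, candidate)
            return candidate       # candidate matches the hash, expose the plain value
        except VerifyMismatchError:
            return hashed_value    # diverged or missing, keep the hashed value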
@@ -0,0 +1,73 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys
from io import BytesIO

import pytest

import ansible.module_utils.basic
from ansible.module_utils.six import PY3, string_types
from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping


@pytest.fixture
def stdin(mocker, request):
    old_args = ansible.module_utils.basic._ANSIBLE_ARGS
    ansible.module_utils.basic._ANSIBLE_ARGS = None
    old_argv = sys.argv
    sys.argv = ['ansible_unittest']

    if isinstance(request.param, string_types):
        args = request.param
    elif isinstance(request.param, MutableMapping):
        if 'ANSIBLE_MODULE_ARGS' not in request.param:
            request.param = {'ANSIBLE_MODULE_ARGS': request.param}
        if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
            request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
        if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
            request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
        args = json.dumps(request.param)
    else:
        raise Exception('Malformed data to the stdin pytest fixture')

    fake_stdin = BytesIO(to_bytes(args, errors='surrogate_or_strict'))
    if PY3:
        mocker.patch('ansible.module_utils.basic.sys.stdin', mocker.MagicMock())
        mocker.patch('ansible.module_utils.basic.sys.stdin.buffer', fake_stdin)
    else:
        mocker.patch('ansible.module_utils.basic.sys.stdin', fake_stdin)

    yield fake_stdin

    ansible.module_utils.basic._ANSIBLE_ARGS = old_args
    sys.argv = old_argv


@pytest.fixture
def am(stdin, request):
    old_args = ansible.module_utils.basic._ANSIBLE_ARGS
    ansible.module_utils.basic._ANSIBLE_ARGS = None
    old_argv = sys.argv
    sys.argv = ['ansible_unittest']

    argspec = {}
    if hasattr(request, 'param'):
        if isinstance(request.param, dict):
            argspec = request.param

    am = ansible.module_utils.basic.AnsibleModule(
        argument_spec=argspec,
    )
    am._name = 'ansible_unittest'

    yield am

    ansible.module_utils.basic._ANSIBLE_ARGS = old_args
    sys.argv = old_argv
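These fixtures are normally driven through pytest's indirect parametrization: the parameter is routed to the stdin fixture, which serializes it as the module's ANSIBLE_MODULE_ARGS, and the am fixture then builds an AnsibleModule from that fake stdin. A hedged usage sketch (the test name and empty argument dict are illustrative only):

    import pytest

    @pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
    def test_builds_module(am):
        assert am._name == 'ansible_unittest'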
@@ -0,0 +1,167 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# 2018.07.26 --- use DictComparison instead of GcpRequest
|
||||
#
|
||||
# Copyright (c) 2016, Tom Melendez <tom@supertom.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.plugins.module_utils.hwc_utils import are_different_dicts
|
||||
|
||||
|
||||
class HwcDictComparisonTestCase(unittest.TestCase):
|
||||
def test_simple_no_difference(self):
|
||||
value1 = {
|
||||
'foo': 'bar',
|
||||
'test': 'original'
|
||||
}
|
||||
|
||||
self.assertFalse(are_different_dicts(value1, value1))
|
||||
|
||||
def test_simple_different(self):
|
||||
value1 = {
|
||||
'foo': 'bar',
|
||||
'test': 'original'
|
||||
}
|
||||
value2 = {
|
||||
'foo': 'bar',
|
||||
'test': 'different'
|
||||
}
|
||||
value3 = {
|
||||
'test': 'original'
|
||||
}
|
||||
|
||||
self.assertTrue(are_different_dicts(value1, value2))
|
||||
self.assertTrue(are_different_dicts(value1, value3))
|
||||
self.assertTrue(are_different_dicts(value2, value3))
|
||||
|
||||
def test_nested_dictionaries_no_difference(self):
|
||||
value1 = {
|
||||
'foo': {
|
||||
'quiet': {
|
||||
'tree': 'test'
|
||||
},
|
||||
'bar': 'baz'
|
||||
},
|
||||
'test': 'original'
|
||||
}
|
||||
|
||||
self.assertFalse(are_different_dicts(value1, value1))
|
||||
|
||||
def test_nested_dictionaries_with_difference(self):
|
||||
value1 = {
|
||||
'foo': {
|
||||
'quiet': {
|
||||
'tree': 'test'
|
||||
},
|
||||
'bar': 'baz'
|
||||
},
|
||||
'test': 'original'
|
||||
}
|
||||
value2 = {
|
||||
'foo': {
|
||||
'quiet': {
|
||||
'tree': 'baz'
|
||||
},
|
||||
'bar': 'hello'
|
||||
},
|
||||
'test': 'original'
|
||||
}
|
||||
value3 = {
|
||||
'foo': {
|
||||
'quiet': {
|
||||
'tree': 'test'
|
||||
},
|
||||
'bar': 'baz'
|
||||
}
|
||||
}
|
||||
|
||||
self.assertTrue(are_different_dicts(value1, value2))
|
||||
self.assertTrue(are_different_dicts(value1, value3))
|
||||
self.assertTrue(are_different_dicts(value2, value3))
|
||||
|
||||
def test_arrays_strings_no_difference(self):
|
||||
value1 = {
|
||||
'foo': [
|
||||
'baz',
|
||||
'bar'
|
||||
]
|
||||
}
|
||||
|
||||
self.assertFalse(are_different_dicts(value1, value1))
|
||||
|
||||
def test_arrays_strings_with_difference(self):
|
||||
value1 = {
|
||||
'foo': [
|
||||
'baz',
|
||||
'bar',
|
||||
]
|
||||
}
|
||||
|
||||
value2 = {
|
||||
'foo': [
|
||||
'baz',
|
||||
'hello'
|
||||
]
|
||||
}
|
||||
value3 = {
|
||||
'foo': [
|
||||
'bar',
|
||||
]
|
||||
}
|
||||
|
||||
self.assertTrue(are_different_dicts(value1, value2))
|
||||
self.assertTrue(are_different_dicts(value1, value3))
|
||||
self.assertTrue(are_different_dicts(value2, value3))
|
||||
|
||||
def test_arrays_dicts_with_no_difference(self):
|
||||
value1 = {
|
||||
'foo': [
|
||||
{
|
||||
'test': 'value',
|
||||
'foo': 'bar'
|
||||
},
|
||||
{
|
||||
'different': 'dict'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
self.assertFalse(are_different_dicts(value1, value1))
|
||||
|
||||
def test_arrays_dicts_with_difference(self):
|
||||
value1 = {
|
||||
'foo': [
|
||||
{
|
||||
'test': 'value',
|
||||
'foo': 'bar'
|
||||
},
|
||||
{
|
||||
'different': 'dict'
|
||||
}
|
||||
]
|
||||
}
|
||||
value2 = {
|
||||
'foo': [
|
||||
{
|
||||
'test': 'value2',
|
||||
'foo': 'bar2'
|
||||
},
|
||||
]
|
||||
}
|
||||
value3 = {
|
||||
'foo': [
|
||||
{
|
||||
'test': 'value',
|
||||
'foo': 'bar'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
self.assertTrue(are_different_dicts(value1, value2))
|
||||
self.assertTrue(are_different_dicts(value1, value3))
|
||||
self.assertTrue(are_different_dicts(value2, value3))
|
||||
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
# Copyright (c) Ansible project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.hwc_utils import (HwcModuleException, navigate_value)


class HwcUtilsTestCase(unittest.TestCase):
    def test_navigate_value(self):
        value = {
            'foo': {
                'quiet': {
                    'tree': 'test',
                    "trees": [0, 1]
                },
            }
        }

        self.assertEqual(navigate_value(value, ["foo", "quiet", "tree"]),
                         "test")

        self.assertEqual(
            navigate_value(value, ["foo", "quiet", "trees"],
                           {"foo.quiet.trees": 1}),
            1)

        self.assertRaisesRegexp(HwcModuleException,
                                r".* key\(q\) is not exist in dict",
                                navigate_value, value, ["foo", "q", "tree"])

        self.assertRaisesRegexp(HwcModuleException,
                                r".* the index is out of list",
                                navigate_value, value,
                                ["foo", "quiet", "trees"],
                                {"foo.quiet.trees": 2})
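As the assertions above show, navigate_value() walks a list of keys through nested dicts, and the optional mapping of dotted paths to indexes picks an element whenever the path lands on a list; a missing key or an out-of-range index raises HwcModuleException. Restating the two passing calls:

    value = {'foo': {'quiet': {'tree': 'test', 'trees': [0, 1]}}}
    navigate_value(value, ['foo', 'quiet', 'tree'])                           # -> 'test'
    navigate_value(value, ['foo', 'quiet', 'trees'], {'foo.quiet.trees': 1})  # -> 1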
@@ -0,0 +1,165 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest
from itertools import count

from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import (
    get_token,
    KeycloakError,
)
from ansible.module_utils.six import StringIO
from ansible.module_utils.six.moves.urllib.error import HTTPError

module_params_creds = {
    'auth_keycloak_url': 'http://keycloak.url/auth',
    'validate_certs': True,
    'auth_realm': 'master',
    'client_id': 'admin-cli',
    'auth_username': 'admin',
    'auth_password': 'admin',
    'client_secret': None,
}


def build_mocked_request(get_id_user_count, response_dict):
    def _mocked_requests(*args, **kwargs):
        url = args[0]
        method = kwargs['method']
        future_response = response_dict.get(url, None)
        return get_response(future_response, method, get_id_user_count)
    return _mocked_requests


def get_response(object_with_future_response, method, get_id_call_count):
    if callable(object_with_future_response):
        return object_with_future_response()
    if isinstance(object_with_future_response, dict):
        return get_response(
            object_with_future_response[method], method, get_id_call_count)
    if isinstance(object_with_future_response, list):
        try:
            call_number = get_id_call_count.__next__()
        except AttributeError:
            # Handle Python 2, where generators have no __next__() method.
            call_number = get_id_call_count.next()
        return get_response(
            object_with_future_response[call_number], method, get_id_call_count)
    return object_with_future_response


def create_wrapper(text_as_string):
    """Allow a call to one address to be mocked several times.

    Without this wrapper, the StringIO would already be exhausted by the second call.
    """
    def _create_wrapper():
        return StringIO(text_as_string)
    return _create_wrapper


@pytest.fixture()
def mock_good_connection(mocker):
    token_response = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token': create_wrapper('{"access_token": "alongtoken"}'), }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), token_response),
        autospec=True
    )


def test_connect_to_keycloak_with_creds(mock_good_connection):
    keycloak_header = get_token(module_params_creds)
    assert keycloak_header == {
        'Authorization': 'Bearer alongtoken',
        'Content-Type': 'application/json'
    }


def test_connect_to_keycloak_with_token(mock_good_connection):
    module_params_token = {
        'auth_keycloak_url': 'http://keycloak.url/auth',
        'validate_certs': True,
        'client_id': 'admin-cli',
        'token': "alongtoken"
    }
    keycloak_header = get_token(module_params_token)
    assert keycloak_header == {
        'Authorization': 'Bearer alongtoken',
        'Content-Type': 'application/json'
    }


@pytest.fixture()
def mock_bad_json_returned(mocker):
    token_response = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token': create_wrapper('{"access_token":'), }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), token_response),
        autospec=True
    )


def test_bad_json_returned(mock_bad_json_returned):
    with pytest.raises(KeycloakError) as raised_error:
        get_token(module_params_creds)
    # We cannot check the full message: the error text differs between
    # Python 2.6, 2.7 and 3.x.
    assert (
        'API returned invalid JSON when trying to obtain access token from '
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token: '
    ) in str(raised_error.value)


def raise_401(url):
    def _raise_401():
        raise HTTPError(url=url, code=401, msg='Unauthorized', hdrs='', fp=StringIO(''))
    return _raise_401


@pytest.fixture()
def mock_401_returned(mocker):
    token_response = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token': raise_401(
            'http://keycloak.url/auth/realms/master/protocol/openid-connect/token'),
    }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), token_response),
        autospec=True
    )


def test_error_returned(mock_401_returned):
    with pytest.raises(KeycloakError) as raised_error:
        get_token(module_params_creds)
    assert str(raised_error.value) == (
        'Could not obtain access token from http://keycloak.url'
        '/auth/realms/master/protocol/openid-connect/token: '
        'HTTP Error 401: Unauthorized'
    )


@pytest.fixture()
def mock_json_without_token_returned(mocker):
    token_response = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token': create_wrapper('{"not_token": "It is not a token"}'), }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), token_response),
        autospec=True
    )


def test_json_without_token_returned(mock_json_without_token_returned):
    with pytest.raises(KeycloakError) as raised_error:
        get_token(module_params_creds)
    assert str(raised_error.value) == (
        'Could not obtain access token from http://keycloak.url'
        '/auth/realms/master/protocol/openid-connect/token'
    )
@@ -0,0 +1,632 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Florian Dambrine <android.florian@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function

import json

import pytest
from ansible.module_utils.common.dict_transformations import dict_merge
from ansible.module_utils.six import iteritems
from ansible_collections.community.general.plugins.module_utils.net_tools.pritunl import (
    api,
)
from mock import MagicMock

__metaclass__ = type


# Pritunl Mocks

PRITUNL_ORGS = [
    {
        "auth_api": False,
        "name": "Foo",
        "auth_token": None,
        "user_count": 0,
        "auth_secret": None,
        "id": "csftwlu6uhralzi2dpmhekz3",
    },
    {
        "auth_api": False,
        "name": "GumGum",
        "auth_token": None,
        "user_count": 3,
        "auth_secret": None,
        "id": "58070daee63f3b2e6e472c36",
    },
    {
        "auth_api": False,
        "name": "Bar",
        "auth_token": None,
        "user_count": 0,
        "auth_secret": None,
        "id": "v1sncsxxybnsylc8gpqg85pg",
    },
]

NEW_PRITUNL_ORG = {
    "auth_api": False,
    "name": "NewOrg",
    "auth_token": None,
    "user_count": 0,
    "auth_secret": None,
    "id": "604a140ae63f3b36bc34c7bd",
}

PRITUNL_USERS = [
    {
        "auth_type": "google",
        "dns_servers": None,
        "pin": True,
        "dns_suffix": None,
        "servers": [
            {
                "status": False,
                "platform": None,
                "server_id": "580711322bb66c1d59b9568f",
                "virt_address6": "fd00:c0a8: 9700: 0: 192: 168: 101: 27",
                "virt_address": "192.168.101.27",
                "name": "vpn-A",
                "real_address": None,
                "connected_since": None,
                "id": "580711322bb66c1d59b9568f",
                "device_name": None,
            },
            {
                "status": False,
                "platform": None,
                "server_id": "5dad2cc6e63f3b3f4a6dfea5",
                "virt_address6": "fd00:c0a8:f200: 0: 192: 168: 201: 37",
                "virt_address": "192.168.201.37",
                "name": "vpn-B",
                "real_address": None,
                "connected_since": None,
                "id": "5dad2cc6e63f3b3f4a6dfea5",
                "device_name": None,
            },
        ],
        "disabled": False,
        "network_links": [],
        "port_forwarding": [],
        "id": "58070dafe63f3b2e6e472c3b",
        "organization_name": "GumGum",
        "type": "server",
        "email": "bot@company.com",
        "status": True,
        "dns_mapping": None,
        "otp_secret": "123456789ABCDEFG",
        "client_to_client": False,
        "sso": "google",
        "bypass_secondary": False,
        "groups": ["admin", "multiregion"],
        "audit": False,
        "name": "bot",
        "gravatar": True,
        "otp_auth": True,
        "organization": "58070daee63f3b2e6e472c36",
    },
    {
        "auth_type": "google",
        "dns_servers": None,
        "pin": True,
        "dns_suffix": None,
        "servers": [
            {
                "status": False,
                "platform": None,
                "server_id": "580711322bb66c1d59b9568f",
                "virt_address6": "fd00:c0a8: 9700: 0: 192: 168: 101: 27",
                "virt_address": "192.168.101.27",
                "name": "vpn-A",
                "real_address": None,
                "connected_since": None,
                "id": "580711322bb66c1d59b9568f",
                "device_name": None,
            },
            {
                "status": False,
                "platform": None,
                "server_id": "5dad2cc6e63f3b3f4a6dfea5",
                "virt_address6": "fd00:c0a8:f200: 0: 192: 168: 201: 37",
                "virt_address": "192.168.201.37",
                "name": "vpn-B",
                "real_address": None,
                "connected_since": None,
                "id": "5dad2cc6e63f3b3f4a6dfea5",
                "device_name": None,
            },
        ],
        "disabled": False,
        "network_links": [],
        "port_forwarding": [],
        "id": "58070dafe63f3b2e6e472c3b",
        "organization_name": "GumGum",
        "type": "client",
        "email": "florian@company.com",
        "status": True,
        "dns_mapping": None,
        "otp_secret": "123456789ABCDEFG",
        "client_to_client": False,
        "sso": "google",
        "bypass_secondary": False,
        "groups": ["web", "database"],
        "audit": False,
        "name": "florian",
        "gravatar": True,
        "otp_auth": True,
        "organization": "58070daee63f3b2e6e472c36",
    },
    {
        "auth_type": "google",
        "dns_servers": None,
        "pin": True,
        "dns_suffix": None,
        "servers": [
            {
                "status": False,
                "platform": None,
                "server_id": "580711322bb66c1d59b9568f",
                "virt_address6": "fd00:c0a8: 9700: 0: 192: 168: 101: 27",
                "virt_address": "192.168.101.27",
                "name": "vpn-A",
                "real_address": None,
                "connected_since": None,
                "id": "580711322bb66c1d59b9568f",
                "device_name": None,
            },
            {
                "status": False,
                "platform": None,
                "server_id": "5dad2cc6e63f3b3f4a6dfea5",
                "virt_address6": "fd00:c0a8:f200: 0: 192: 168: 201: 37",
                "virt_address": "192.168.201.37",
                "name": "vpn-B",
                "real_address": None,
                "connected_since": None,
                "id": "5dad2cc6e63f3b3f4a6dfea5",
                "device_name": None,
            },
        ],
        "disabled": False,
        "network_links": [],
        "port_forwarding": [],
        "id": "58070dafe63f3b2e6e472c3b",
        "organization_name": "GumGum",
        "type": "server",
        "email": "ops@company.com",
        "status": True,
        "dns_mapping": None,
        "otp_secret": "123456789ABCDEFG",
        "client_to_client": False,
        "sso": "google",
        "bypass_secondary": False,
        "groups": ["web", "database"],
        "audit": False,
        "name": "ops",
        "gravatar": True,
        "otp_auth": True,
        "organization": "58070daee63f3b2e6e472c36",
    },
]

NEW_PRITUNL_USER = {
    "auth_type": "local",
    "disabled": False,
    "dns_servers": None,
    "otp_secret": "6M4UWP2BCJBSYZAT",
    "name": "alice",
    "pin": False,
    "dns_suffix": None,
    "client_to_client": False,
    "email": "alice@company.com",
    "organization_name": "GumGum",
    "bypass_secondary": False,
    "groups": ["a", "b"],
    "organization": "58070daee63f3b2e6e472c36",
    "port_forwarding": [],
    "type": "client",
    "id": "590add71e63f3b72d8bb951a",
}

NEW_PRITUNL_USER_UPDATED = dict_merge(
    NEW_PRITUNL_USER,
    {
        "disabled": True,
        "name": "bob",
        "email": "bob@company.com",
        "groups": ["c", "d"],
    },
)


class PritunlEmptyOrganizationMock(MagicMock):
    """Pritunl API Mock for organization GET API calls."""

    def getcode(self):
        return 200

    def read(self):
        return json.dumps([])


class PritunlListOrganizationMock(MagicMock):
    """Pritunl API Mock for organization GET API calls."""

    def getcode(self):
        return 200

    def read(self):
        return json.dumps(PRITUNL_ORGS)


class PritunlListUserMock(MagicMock):
    """Pritunl API Mock for user GET API calls."""

    def getcode(self):
        return 200

    def read(self):
        return json.dumps(PRITUNL_USERS)


class PritunlErrorMock(MagicMock):
    """Pritunl API Mock for API call failures."""

    def getcode(self):
        return 500

    def read(self):
        return "{}"


class PritunlPostOrganizationMock(MagicMock):
    def getcode(self):
        return 200

    def read(self):
        return json.dumps(NEW_PRITUNL_ORG)


class PritunlListOrganizationAfterPostMock(MagicMock):
    def getcode(self):
        return 200

    def read(self):
        return json.dumps(PRITUNL_ORGS + [NEW_PRITUNL_ORG])


class PritunlPostUserMock(MagicMock):
    """Pritunl API Mock for POST API calls."""

    def getcode(self):
        return 200

    def read(self):
        return json.dumps([NEW_PRITUNL_USER])


class PritunlPutUserMock(MagicMock):
    """Pritunl API Mock for PUT API calls."""

    def getcode(self):
        return 200

    def read(self):
        return json.dumps(NEW_PRITUNL_USER_UPDATED)


class PritunlDeleteOrganizationMock(MagicMock):
    """Pritunl API Mock for DELETE API calls."""

    def getcode(self):
        return 200

    def read(self):
        return "{}"


class PritunlDeleteUserMock(MagicMock):
    """Pritunl API Mock for DELETE API calls."""

    def getcode(self):
        return 200

    def read(self):
        return "{}"


# Ansible Module Mock and Pytest mock fixtures


class ModuleFailException(Exception):
    def __init__(self, msg, **kwargs):
        super(ModuleFailException, self).__init__(msg)
        self.fail_msg = msg
        self.fail_kwargs = kwargs


@pytest.fixture
def pritunl_settings():
    return {
        "api_token": "token",
        "api_secret": "secret",
        "base_url": "https://pritunl.domain.com",
        "validate_certs": True,
    }


@pytest.fixture
def pritunl_organization_data():
    return {
        "name": NEW_PRITUNL_ORG["name"],
    }


@pytest.fixture
def pritunl_user_data():
    return {
        "name": NEW_PRITUNL_USER["name"],
        "email": NEW_PRITUNL_USER["email"],
        "groups": NEW_PRITUNL_USER["groups"],
        "disabled": NEW_PRITUNL_USER["disabled"],
        "type": NEW_PRITUNL_USER["type"],
    }


@pytest.fixture
def get_pritunl_organization_mock():
    return PritunlListOrganizationMock()


@pytest.fixture
def get_pritunl_user_mock():
    return PritunlListUserMock()


@pytest.fixture
def get_pritunl_error_mock():
    return PritunlErrorMock()


@pytest.fixture
def post_pritunl_organization_mock():
    return PritunlPostOrganizationMock()


@pytest.fixture
def post_pritunl_user_mock():
    return PritunlPostUserMock()


@pytest.fixture
def put_pritunl_user_mock():
    return PritunlPutUserMock()


@pytest.fixture
def delete_pritunl_organization_mock():
    return PritunlDeleteOrganizationMock()


@pytest.fixture
def delete_pritunl_user_mock():
    return PritunlDeleteUserMock()


class TestPritunlApi:
    """
    Test class to validate CRUD operations on Pritunl.
    """

    # Test for GET / list operation on Pritunl API
    @pytest.mark.parametrize(
        "org_id,org_user_count",
        [
            ("58070daee63f3b2e6e472c36", 3),
            ("v1sncsxxybnsylc8gpqg85pg", 0),
        ],
    )
    def test_list_all_pritunl_organization(
        self,
        pritunl_settings,
        get_pritunl_organization_mock,
        org_id,
        org_user_count,
    ):
        api._get_pritunl_organizations = get_pritunl_organization_mock()

        response = api.list_pritunl_organizations(**pritunl_settings)

        assert len(response) == 3

        for org in response:
            if org["id"] == org_id:
                assert org["user_count"] == org_user_count

    @pytest.mark.parametrize(
        "org_filters,org_expected",
        [
            ({"id": "58070daee63f3b2e6e472c36"}, "GumGum"),
            ({"name": "GumGum"}, "GumGum"),
        ],
    )
    def test_list_filtered_pritunl_organization(
        self,
        pritunl_settings,
        get_pritunl_organization_mock,
        org_filters,
        org_expected,
    ):
        api._get_pritunl_organizations = get_pritunl_organization_mock()

        response = api.list_pritunl_organizations(
            **dict_merge(pritunl_settings, {"filters": org_filters})
        )

        assert len(response) == 1
        assert response[0]["name"] == org_expected

    @pytest.mark.parametrize(
        "org_id,org_user_count",
        [("58070daee63f3b2e6e472c36", 3)],
    )
    def test_list_all_pritunl_user(
        self, pritunl_settings, get_pritunl_user_mock, org_id, org_user_count
    ):
        api._get_pritunl_users = get_pritunl_user_mock()

        response = api.list_pritunl_users(
            **dict_merge(pritunl_settings, {"organization_id": org_id})
        )

        assert len(response) == org_user_count

    @pytest.mark.parametrize(
        "org_id,user_filters,user_expected",
        [
            ("58070daee63f3b2e6e472c36", {"email": "bot@company.com"}, "bot"),
            ("58070daee63f3b2e6e472c36", {"name": "florian"}, "florian"),
        ],
    )
    def test_list_filtered_pritunl_user(
        self,
        pritunl_settings,
        get_pritunl_user_mock,
        org_id,
        user_filters,
        user_expected,
    ):
        api._get_pritunl_users = get_pritunl_user_mock()

        response = api.list_pritunl_users(
            **dict_merge(
                pritunl_settings, {"organization_id": org_id, "filters": user_filters}
            )
        )

        assert len(response) > 0

        for user in response:
            assert user["organization"] == org_id
            assert user["name"] == user_expected

    # Test for POST operation on Pritunl API
    def test_add_pritunl_organization(
        self,
        pritunl_settings,
        pritunl_organization_data,
        post_pritunl_organization_mock,
    ):
        api._post_pritunl_organization = post_pritunl_organization_mock()

        create_response = api.post_pritunl_organization(
            **dict_merge(
                pritunl_settings,
                {"organization_name": pritunl_organization_data["name"]},
            )
        )

        # Ensure provided settings match with the ones returned by Pritunl
        for k, v in iteritems(pritunl_organization_data):
            assert create_response[k] == v

    @pytest.mark.parametrize("org_id", [("58070daee63f3b2e6e472c36")])
    def test_add_and_update_pritunl_user(
        self,
        pritunl_settings,
        pritunl_user_data,
        post_pritunl_user_mock,
        put_pritunl_user_mock,
        org_id,
    ):
        api._post_pritunl_user = post_pritunl_user_mock()
        api._put_pritunl_user = put_pritunl_user_mock()

        create_response = api.post_pritunl_user(
            **dict_merge(
                pritunl_settings,
                {
                    "organization_id": org_id,
                    "user_data": pritunl_user_data,
                },
            )
        )

        # Ensure provided settings match with the ones returned by Pritunl
        for k, v in iteritems(pritunl_user_data):
            assert create_response[k] == v

        # Update the newly created user to ensure only certain settings are changed

        user_updates = {
            "name": "bob",
            "email": "bob@company.com",
            "disabled": True,
        }

        update_response = api.post_pritunl_user(
            **dict_merge(
                pritunl_settings,
                {
                    "organization_id": org_id,
                    "user_id": create_response["id"],
                    "user_data": dict_merge(pritunl_user_data, user_updates),
                },
            )
        )

        # Ensure only the updated settings changed and the rest remained untouched.
        for k, v in iteritems(update_response):
            if k in user_updates:
                assert update_response[k] == user_updates[k]
            else:
                assert update_response[k] == create_response[k]

    # Test for DELETE operation on Pritunl API

    @pytest.mark.parametrize("org_id", [("58070daee63f3b2e6e472c36")])
    def test_delete_pritunl_organization(
        self, pritunl_settings, org_id, delete_pritunl_organization_mock
    ):
        api._delete_pritunl_organization = delete_pritunl_organization_mock()

        response = api.delete_pritunl_organization(
            **dict_merge(
                pritunl_settings,
                {
                    "organization_id": org_id,
                },
            )
        )

        assert response == {}

    @pytest.mark.parametrize(
        "org_id,user_id", [("58070daee63f3b2e6e472c36", "590add71e63f3b72d8bb951a")]
    )
    def test_delete_pritunl_user(
        self, pritunl_settings, org_id, user_id, delete_pritunl_user_mock
    ):
        api._delete_pritunl_user = delete_pritunl_user_mock()

        response = api.delete_pritunl_user(
            **dict_merge(
                pritunl_settings,
                {
                    "organization_id": org_id,
                    "user_id": user_id,
                },
            )
        )

        assert response == {}

    # Test API call errors
    def test_pritunl_error(self, pritunl_settings, get_pritunl_error_mock):
        api.pritunl_auth_request = get_pritunl_error_mock()

        with pytest.raises(api.PritunlException):
            response = api.list_pritunl_organizations(**pritunl_settings)
@@ -0,0 +1,370 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from sys import version_info

import pytest

from ansible_collections.community.general.tests.unit.compat.mock import MagicMock, PropertyMock
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, fmt


TC_FORMATS = dict(
    simple_boolean__true=(fmt.as_bool, ("--superflag",), True, ["--superflag"]),
    simple_boolean__false=(fmt.as_bool, ("--superflag",), False, []),
    simple_boolean__none=(fmt.as_bool, ("--superflag",), None, []),
    simple_boolean_not__true=(fmt.as_bool_not, ("--superflag",), True, []),
    simple_boolean_not__false=(fmt.as_bool_not, ("--superflag",), False, ["--superflag"]),
    simple_boolean_not__none=(fmt.as_bool_not, ("--superflag",), None, ["--superflag"]),
    simple_optval__str=(fmt.as_optval, ("-t",), "potatoes", ["-tpotatoes"]),
    simple_optval__int=(fmt.as_optval, ("-t",), 42, ["-t42"]),
    simple_opt_val__str=(fmt.as_opt_val, ("-t",), "potatoes", ["-t", "potatoes"]),
    simple_opt_val__int=(fmt.as_opt_val, ("-t",), 42, ["-t", "42"]),
    simple_opt_eq_val__str=(fmt.as_opt_eq_val, ("--food",), "potatoes", ["--food=potatoes"]),
    simple_opt_eq_val__int=(fmt.as_opt_eq_val, ("--answer",), 42, ["--answer=42"]),
    simple_list_potato=(fmt.as_list, (), "literal_potato", ["literal_potato"]),
    simple_list_42=(fmt.as_list, (), 42, ["42"]),
    simple_map=(fmt.as_map, ({'a': 1, 'b': 2, 'c': 3},), 'b', ["2"]),
    simple_default_type__list=(fmt.as_default_type, ("list",), [1, 2, 3, 5, 8], ["--1", "--2", "--3", "--5", "--8"]),
    simple_default_type__bool_true=(fmt.as_default_type, ("bool", "what"), True, ["--what"]),
    simple_default_type__bool_false=(fmt.as_default_type, ("bool", "what"), False, []),
    simple_default_type__potato=(fmt.as_default_type, ("any-other-type", "potato"), "42", ["--potato", "42"]),
    simple_fixed_true=(fmt.as_fixed, [("--always-here", "--forever")], True, ["--always-here", "--forever"]),
    simple_fixed_false=(fmt.as_fixed, [("--always-here", "--forever")], False, ["--always-here", "--forever"]),
    simple_fixed_none=(fmt.as_fixed, [("--always-here", "--forever")], None, ["--always-here", "--forever"]),
    simple_fixed_str=(fmt.as_fixed, [("--always-here", "--forever")], "something", ["--always-here", "--forever"]),
)
if tuple(version_info) >= (3, 1):
    from collections import OrderedDict

    # needs OrderedDict to provide a consistent key order
    TC_FORMATS["simple_default_type__dict"] = (  # type: ignore
        fmt.as_default_type,
        ("dict",),
        OrderedDict((('a', 1), ('b', 2))),
        ["--a=1", "--b=2"]
    )
TC_FORMATS_IDS = sorted(TC_FORMATS.keys())


@pytest.mark.parametrize('func, fmt_opt, value, expected',
                         (TC_FORMATS[tc] for tc in TC_FORMATS_IDS),
                         ids=TC_FORMATS_IDS)
def test_arg_format(func, fmt_opt, value, expected):
    fmt_func = func(*fmt_opt)
    actual = fmt_func(value, ctx_ignore_none=True)
    print("formatted string = {0}".format(actual))
    assert actual == expected, "actual = {0}".format(actual)


TC_RUNNER = dict(
    # SAMPLE: This shows all possible elements of a test case. It does not actually run.
    #
    # testcase_name=(
    #     # input
    #     dict(
    #         args_bundle = dict(
    #             param1=dict(
    #                 type="int",
    #                 value=11,
    #                 fmt_func=fmt.as_opt_eq_val,
    #                 fmt_arg="--answer",
    #             ),
    #             param2=dict(
    #                 fmt_func=fmt.as_bool,
    #                 fmt_arg="--bb-here",
    #             )
    #         ),
    #         runner_init_args = dict(
    #             command="testing",
    #             default_args_order=(),
    #             check_rc=False,
    #             force_lang="C",
    #             path_prefix=None,
    #             environ_update=None,
    #         ),
    #         runner_ctx_args = dict(
    #             args_order=['aa', 'bb'],
    #             output_process=None,
    #             ignore_value_none=True,
    #         ),
    #     ),
    #     # command execution
    #     dict(
    #         runner_ctx_run_args = dict(bb=True),
    #         rc = 0,
    #         out = "",
    #         err = "",
    #     ),
    #     # expected
    #     dict(
    #         results=(),
    #         run_info=dict(
    #             cmd=['/mock/bin/testing', '--answer=11', '--bb-here'],
    #             environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'},
    #         ),
    #         exc=None,
    #     ),
    # ),
    #
    aa_bb=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(),
            runner_ctx_args=dict(args_order=['aa', 'bb']),
        ),
        dict(runner_ctx_run_args=dict(bb=True), rc=0, out="", err=""),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--answer=11', '--bb-here'],
                environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'},
                args_order=('aa', 'bb'),
            ),
        ),
    ),
    aa_bb_default_order=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(default_args_order=['bb', 'aa']),
            runner_ctx_args=dict(),
        ),
        dict(runner_ctx_run_args=dict(bb=True), rc=0, out="", err=""),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--bb-here', '--answer=11'],
                environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'},
                args_order=('bb', 'aa'),
            ),
        ),
    ),
    aa_bb_default_order_args_order=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(default_args_order=['bb', 'aa']),
            runner_ctx_args=dict(args_order=['aa', 'bb']),
        ),
        dict(runner_ctx_run_args=dict(bb=True), rc=0, out="", err=""),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--answer=11', '--bb-here'],
                environ_update={'LANGUAGE': 'C', 'LC_ALL': 'C'},
                args_order=('aa', 'bb'),
            ),
        ),
    ),
    aa_bb_dup_in_args_order=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(),
            runner_ctx_args=dict(args_order=['aa', 'bb', 'aa']),
        ),
        dict(runner_ctx_run_args=dict(bb=True), rc=0, out="", err=""),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--answer=11', '--bb-here', '--answer=11'],
            ),
        ),
    ),
    aa_bb_process_output=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=11, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(default_args_order=['bb', 'aa']),
            runner_ctx_args=dict(
                args_order=['aa', 'bb'],
                output_process=lambda rc, out, err: '-/-'.join([str(rc), out, err])
            ),
        ),
        dict(runner_ctx_run_args=dict(bb=True), rc=0, out="ni", err="nu"),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--answer=11', '--bb-here'],
            ),
            results="0-/-ni-/-nu"
        ),
    ),
    aa_bb_ignore_none_with_none=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=49, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(default_args_order=['bb', 'aa']),
            runner_ctx_args=dict(
                args_order=['aa', 'bb'],
                ignore_value_none=True,  # default
            ),
        ),
        dict(runner_ctx_run_args=dict(bb=None), rc=0, out="ni", err="nu"),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--answer=49'],
            ),
        ),
    ),
    aa_bb_ignore_not_none_with_none=(
        dict(
            args_bundle=dict(
                aa=dict(type="int", value=49, fmt_func=fmt.as_opt_eq_val, fmt_arg="--answer"),
                bb=dict(fmt_func=fmt.as_bool, fmt_arg="--bb-here"),
            ),
            runner_init_args=dict(default_args_order=['bb', 'aa']),
            runner_ctx_args=dict(
                args_order=['aa', 'bb'],
                ignore_value_none=False,
            ),
        ),
        dict(runner_ctx_run_args=dict(aa=None, bb=True), rc=0, out="ni", err="nu"),
        dict(
            run_info=dict(
                cmd=['/mock/bin/testing', '--answer=None', '--bb-here'],
            ),
        ),
    ),
)
TC_RUNNER_IDS = sorted(TC_RUNNER.keys())


@pytest.mark.parametrize('runner_input, cmd_execution, expected',
                         (TC_RUNNER[tc] for tc in TC_RUNNER_IDS),
                         ids=TC_RUNNER_IDS)
def test_runner_context(runner_input, cmd_execution, expected):
    arg_spec = {}
    params = {}
    arg_formats = {}
    for k, v in runner_input['args_bundle'].items():
        try:
            arg_spec[k] = {'type': v['type']}
        except KeyError:
            pass
        try:
            params[k] = v['value']
        except KeyError:
            pass
        try:
            arg_formats[k] = v['fmt_func'](v['fmt_arg'])
        except KeyError:
            pass

    orig_results = tuple(cmd_execution[x] for x in ('rc', 'out', 'err'))

    print("arg_spec={0}\nparams={1}\narg_formats={2}\n".format(
        arg_spec,
        params,
        arg_formats,
    ))

    module = MagicMock()
    type(module).argument_spec = PropertyMock(return_value=arg_spec)
    type(module).params = PropertyMock(return_value=params)
    module.get_bin_path.return_value = '/mock/bin/testing'
    module.run_command.return_value = orig_results

    runner = CmdRunner(
        module=module,
        command="testing",
        arg_formats=arg_formats,
        **runner_input['runner_init_args']
    )

    def _assert_run_info(actual, expected):
        reduced = dict((k, actual[k]) for k in expected.keys())
        assert reduced == expected, "{0}".format(reduced)

    def _assert_run(runner_input, cmd_execution, expected, ctx, results):
        _assert_run_info(ctx.run_info, expected['run_info'])
        assert results == expected.get('results', orig_results)

    exc = expected.get("exc")
    if exc:
        with pytest.raises(exc):
            with runner.context(**runner_input['runner_ctx_args']) as ctx:
                results = ctx.run(**cmd_execution['runner_ctx_run_args'])
                _assert_run(runner_input, cmd_execution, expected, ctx, results)

    else:
        with runner.context(**runner_input['runner_ctx_args']) as ctx:
            results = ctx.run(**cmd_execution['runner_ctx_run_args'])
            _assert_run(runner_input, cmd_execution, expected, ctx, results)


@pytest.mark.parametrize('runner_input, cmd_execution, expected',
                         (TC_RUNNER[tc] for tc in TC_RUNNER_IDS),
                         ids=TC_RUNNER_IDS)
def test_runner_callable(runner_input, cmd_execution, expected):
    arg_spec = {}
    params = {}
    arg_formats = {}
    for k, v in runner_input['args_bundle'].items():
        try:
            arg_spec[k] = {'type': v['type']}
        except KeyError:
            pass
        try:
            params[k] = v['value']
        except KeyError:
            pass
        try:
            arg_formats[k] = v['fmt_func'](v['fmt_arg'])
        except KeyError:
            pass

    orig_results = tuple(cmd_execution[x] for x in ('rc', 'out', 'err'))

    print("arg_spec={0}\nparams={1}\narg_formats={2}\n".format(
        arg_spec,
        params,
        arg_formats,
    ))

    module = MagicMock()
    type(module).argument_spec = PropertyMock(return_value=arg_spec)
    type(module).params = PropertyMock(return_value=params)
    module.get_bin_path.return_value = '/mock/bin/testing'
    module.run_command.return_value = orig_results

    runner = CmdRunner(
        module=module,
        command="testing",
        arg_formats=arg_formats,
        **runner_input['runner_init_args']
    )

    def _assert_run_info(actual, expected):
        reduced = dict((k, actual[k]) for k in expected.keys())
        assert reduced == expected, "{0}".format(reduced)

    def _assert_run(runner_input, cmd_execution, expected, ctx, results):
        _assert_run_info(ctx.run_info, expected['run_info'])
        assert results == expected.get('results', orig_results)

    exc = expected.get("exc")
    if exc:
        with pytest.raises(exc):
            with runner(**runner_input['runner_ctx_args']) as ctx:
                results = ctx.run(**cmd_execution['runner_ctx_run_args'])
                _assert_run(runner_input, cmd_execution, expected, ctx, results)

    else:
        with runner(**runner_input['runner_ctx_args']) as ctx:
            results = ctx.run(**cmd_execution['runner_ctx_run_args'])
            _assert_run(runner_input, cmd_execution, expected, ctx, results)
@@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-

# Copyright (c) Ansible project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest

from ansible_collections.community.general.plugins.module_utils import csv


VALID_CSV = [
    (
        'excel',
        {},
        None,
        "id,name,role\n1,foo,bar\n2,bar,baz",
        [
            {
                "id": "1",
                "name": "foo",
                "role": "bar",
            },
            {
                "id": "2",
                "name": "bar",
                "role": "baz",
            },
        ]
    ),
    (
        'excel',
        {"skipinitialspace": True},
        None,
        "id,name,role\n1, foo, bar\n2, bar, baz",
        [
            {
                "id": "1",
                "name": "foo",
                "role": "bar",
            },
            {
                "id": "2",
                "name": "bar",
                "role": "baz",
            },
        ]
    ),
    (
        'excel',
        {"delimiter": '|'},
        None,
        "id|name|role\n1|foo|bar\n2|bar|baz",
        [
            {
                "id": "1",
                "name": "foo",
                "role": "bar",
            },
            {
                "id": "2",
                "name": "bar",
                "role": "baz",
            },
        ]
    ),
    (
        'unix',
        {},
        None,
        "id,name,role\n1,foo,bar\n2,bar,baz",
        [
            {
                "id": "1",
                "name": "foo",
                "role": "bar",
            },
            {
                "id": "2",
                "name": "bar",
                "role": "baz",
            },
        ]
    ),
    (
        'excel',
        {},
        ['id', 'name', 'role'],
        "1,foo,bar\n2,bar,baz",
        [
            {
                "id": "1",
                "name": "foo",
                "role": "bar",
            },
            {
                "id": "2",
                "name": "bar",
                "role": "baz",
            },
        ]
    ),
]

INVALID_CSV = [
    (
        'excel',
        {'strict': True},
        None,
        'id,name,role\n1,"f"oo",bar\n2,bar,baz',
    ),
]

INVALID_DIALECT = [
    (
        'invalid',
        {},
        None,
        "id,name,role\n1,foo,bar\n2,bar,baz",
    ),
]


@pytest.mark.parametrize("dialect,dialect_params,fieldnames,data,expected", VALID_CSV)
def test_valid_csv(data, dialect, dialect_params, fieldnames, expected):
    dialect = csv.initialize_dialect(dialect, **dialect_params)
    reader = csv.read_csv(data, dialect, fieldnames)
    result = True

    for idx, row in enumerate(reader):
        for k, v in row.items():
            if expected[idx][k] != v:
                result = False
                break

    assert result


@pytest.mark.parametrize("dialect,dialect_params,fieldnames,data", INVALID_CSV)
def test_invalid_csv(data, dialect, dialect_params, fieldnames):
    dialect = csv.initialize_dialect(dialect, **dialect_params)
    reader = csv.read_csv(data, dialect, fieldnames)
    result = False

    try:
        for row in reader:
            continue
    except csv.CSVError:
        result = True

    assert result


@pytest.mark.parametrize("dialect,dialect_params,fieldnames,data", INVALID_DIALECT)
def test_invalid_dialect(data, dialect, dialect_params, fieldnames):
    result = False

    try:
        dialect = csv.initialize_dialect(dialect, **dialect_params)
    except csv.DialectNotAvailableError:
        result = True

    assert result
@@ -0,0 +1,143 @@
# Copyright (c) Ansible project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest

from ansible_collections.community.general.plugins.module_utils.database import (
    is_input_dangerous,
    pg_quote_identifier,
    SQLParseError,
)

# These are all valid strings
# The results are based on interpreting the identifier as a table name
VALID = {
    # User quoted
    '"public.table"': '"public.table"',
    '"public"."table"': '"public"."table"',
    '"schema test"."table test"': '"schema test"."table test"',

    # We quote part
    'public.table': '"public"."table"',
    '"public".table': '"public"."table"',
    'public."table"': '"public"."table"',
    'schema test.table test': '"schema test"."table test"',
    '"schema test".table test': '"schema test"."table test"',
    'schema test."table test"': '"schema test"."table test"',

    # Embedded double quotes
    'table "test"': '"table ""test"""',
    'public."table ""test"""': '"public"."table ""test"""',
    'public.table "test"': '"public"."table ""test"""',
    'schema "test".table': '"schema ""test"""."table"',
    '"schema ""test""".table': '"schema ""test"""."table"',
    '"""wat"""."""test"""': '"""wat"""."""test"""',
    # Sigh, handle these as well:
    '"no end quote': '"""no end quote"',
    'schema."table': '"schema"."""table"',
    '"schema.table': '"""schema"."table"',
    'schema."table.something': '"schema"."""table"."something"',

    # Embedded dots
    '"schema.test"."table.test"': '"schema.test"."table.test"',
    '"schema.".table': '"schema."."table"',
    '"schema."."table"': '"schema."."table"',
    'schema.".table"': '"schema".".table"',
    '"schema".".table"': '"schema".".table"',
    '"schema.".".table"': '"schema.".".table"',
    # These are valid but maybe not what the user intended
    '."table"': '".""table"""',
    'table.': '"table."',
}

INVALID = {
    ('test.too.many.dots', 'table'): 'PostgreSQL does not support table with more than 3 dots',
    ('"test.too".many.dots', 'database'): 'PostgreSQL does not support database with more than 1 dots',
    ('test.too."many.dots"', 'database'): 'PostgreSQL does not support database with more than 1 dots',
    ('"test"."too"."many"."dots"', 'database'): "PostgreSQL does not support database with more than 1 dots",
    ('"test"."too"."many"."dots"', 'schema'): "PostgreSQL does not support schema with more than 2 dots",
    ('"test"."too"."many"."dots"', 'table'): "PostgreSQL does not support table with more than 3 dots",
    ('"test"."too"."many"."dots"."for"."column"', 'column'): "PostgreSQL does not support column with more than 4 dots",
    ('"table "invalid" double quote"', 'table'): 'User escaped identifiers must escape extra quotes',
    ('"schema "invalid"""."table "invalid"', 'table'): 'User escaped identifiers must escape extra quotes',
    ('"schema."table"', 'table'): 'User escaped identifiers must escape extra quotes',
    ('"schema".', 'table'): 'Identifier name unspecified or unquoted trailing dot',
}

HOW_MANY_DOTS = (
    ('role', 'role', '"role"',
     'PostgreSQL does not support role with more than 1 dots'),
    ('db', 'database', '"db"',
     'PostgreSQL does not support database with more than 1 dots'),
    ('db.schema', 'schema', '"db"."schema"',
     'PostgreSQL does not support schema with more than 2 dots'),
    ('db.schema.table', 'table', '"db"."schema"."table"',
     'PostgreSQL does not support table with more than 3 dots'),
    ('db.schema.table.column', 'column', '"db"."schema"."table"."column"',
     'PostgreSQL does not support column with more than 4 dots'),
)

VALID_QUOTES = ((test, VALID[test]) for test in sorted(VALID))
INVALID_QUOTES = ((test[0], test[1], INVALID[test]) for test in sorted(INVALID))

IS_STRINGS_DANGEROUS = (
    (u'', False),
    (u' ', False),
    (u'alternative database', False),
    (u'backup of TRUNCATED table', False),
    (u'bob.dropper', False),
    (u'd\'artagnan', False),
    (u'user_with_select_update_truncate_right', False),
    (u';DROP DATABASE fluffy_pets_photos', True),
    (u';drop DATABASE fluffy_pets_photos', True),
    (u'; TRUNCATE TABLE his_valuable_table', True),
    (u'; truncate TABLE his_valuable_table', True),
    (u'\'--', True),
    (u'"--', True),
    (u'\' union select username, password from admin_credentials', True),
    (u'\' UNION SELECT username, password from admin_credentials', True),
    (u'\' intersect select', True),
    (u'\' INTERSECT select', True),
    (u'\' except select', True),
    (u'\' EXCEPT select', True),
    (u';ALTER TABLE prices', True),
    (u';alter table prices', True),
    (u"; UPDATE products SET price = '0'", True),
    (u";update products SET price = '0'", True),
    (u"; DELETE FROM products", True),
    (u"; delete FROM products", True),
    (u"; SELECT * FROM products", True),
    (u" ; select * from products", True),
)


@pytest.mark.parametrize("identifier, quoted_identifier", VALID_QUOTES)
def test_valid_quotes(identifier, quoted_identifier):
    assert pg_quote_identifier(identifier, 'table') == quoted_identifier


@pytest.mark.parametrize("identifier, id_type, msg", INVALID_QUOTES)
def test_invalid_quotes(identifier, id_type, msg):
    with pytest.raises(SQLParseError) as ex:
        pg_quote_identifier(identifier, id_type)

    ex.match(msg)


@pytest.mark.parametrize("identifier, id_type, quoted_identifier, msg", HOW_MANY_DOTS)
def test_how_many_dots(identifier, id_type, quoted_identifier, msg):
    assert pg_quote_identifier(identifier, id_type) == quoted_identifier

    with pytest.raises(SQLParseError) as ex:
        pg_quote_identifier('%s.more' % identifier, id_type)

    ex.match(msg)


@pytest.mark.parametrize("string, result", IS_STRINGS_DANGEROUS)
def test_is_input_dangerous(string, result):
    assert is_input_dangerous(string) == result
@@ -0,0 +1,117 @@
# -*- coding: utf-8 -*-
# (c) 2015, Michael Scherer <mscherer@redhat.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest

from ansible_collections.community.general.plugins.module_utils import known_hosts


URLS = {
    'ssh://one.example.org/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'one.example.org',
        'add_host_key_cmd': " -t rsa one.example.org",
        'port': None,
    },
    'ssh+git://two.example.org/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'two.example.org',
        'add_host_key_cmd': " -t rsa two.example.org",
        'port': None,
    },
    'rsync://three.example.org/user/example.git': {
        'is_ssh_url': False,
        'get_fqdn': 'three.example.org',
        'add_host_key_cmd': None,  # not called for non-ssh urls
        'port': None,
    },
    'git@four.example.org:user/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'four.example.org',
        'add_host_key_cmd': " -t rsa four.example.org",
        'port': None,
    },
    'git+ssh://five.example.org/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'five.example.org',
        'add_host_key_cmd': " -t rsa five.example.org",
        'port': None,
    },
    'ssh://six.example.org:21/example.org': {
        # ssh on FTP Port?
        'is_ssh_url': True,
        'get_fqdn': 'six.example.org',
        'add_host_key_cmd': " -t rsa -p 21 six.example.org",
        'port': '21',
    },
    'ssh://[2001:DB8::abcd:abcd]/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
        'port': None,
    },
    'ssh://[2001:DB8::abcd:abcd]:22/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa -p 22 [2001:DB8::abcd:abcd]",
        'port': '22',
    },
    'username@[2001:DB8::abcd:abcd]/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
        'port': None,
    },
    'username@[2001:DB8::abcd:abcd]:path/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
        'port': None,
    },
    'ssh://internal.git.server:7999/repos/repo.git': {
        'is_ssh_url': True,
        'get_fqdn': 'internal.git.server',
        'add_host_key_cmd': " -t rsa -p 7999 internal.git.server",
        'port': '7999',
    },
}


@pytest.mark.parametrize('url, is_ssh_url', ((k, URLS[k]['is_ssh_url']) for k in sorted(URLS)))
def test_is_ssh_url(url, is_ssh_url):
    assert known_hosts.is_ssh_url(url) == is_ssh_url


@pytest.mark.parametrize('url, fqdn, port', ((k, URLS[k]['get_fqdn'], URLS[k]['port']) for k in sorted(URLS)))
def test_get_fqdn_and_port(url, fqdn, port):
    assert known_hosts.get_fqdn_and_port(url) == (fqdn, port)


@pytest.mark.parametrize('fqdn, port, add_host_key_cmd, stdin',
                         ((URLS[k]['get_fqdn'], URLS[k]['port'], URLS[k]['add_host_key_cmd'], {})
                          for k in sorted(URLS) if URLS[k]['is_ssh_url']),
                         indirect=['stdin'])
def test_add_host_key(am, mocker, fqdn, port, add_host_key_cmd):
    get_bin_path = mocker.MagicMock()
    get_bin_path.return_value = keyscan_cmd = "/custom/path/ssh-keyscan"
    am.get_bin_path = get_bin_path

    run_command = mocker.MagicMock()
    run_command.return_value = (0, "Needs output, otherwise thinks ssh-keyscan timed out'", "")
    am.run_command = run_command

    append_to_file = mocker.MagicMock()
    append_to_file.return_value = (None,)
    am.append_to_file = append_to_file

    mocker.patch('os.path.isdir', return_value=True)
    mocker.patch('os.path.exists', return_value=True)

    known_hosts.add_host_key(am, fqdn, port=port)
    run_command.assert_called_with(keyscan_cmd + add_host_key_cmd)
@@ -0,0 +1,239 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# (c) 2020, Alexei Znamensky <russoz@gmail.com>
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.module_helper import (
|
||||
ArgFormat, DependencyCtxMgr, VarMeta, VarDict, cause_changes
|
||||
)
|
||||
|
||||
|
||||
def single_lambda_2star(x, y, z):
|
||||
return ["piggies=[{0},{1},{2}]".format(x, y, z)]
|
||||
|
||||
|
||||
ARG_FORMATS = dict(
|
||||
simple_boolean_true=("--superflag", ArgFormat.BOOLEAN, 0,
|
||||
True, ["--superflag"]),
|
||||
simple_boolean_false=("--superflag", ArgFormat.BOOLEAN, 0,
|
||||
False, []),
|
||||
simple_boolean_none=("--superflag", ArgFormat.BOOLEAN, 0,
|
||||
None, []),
|
||||
simple_boolean_not_true=("--superflag", ArgFormat.BOOLEAN_NOT, 0,
|
||||
True, []),
|
||||
simple_boolean_not_false=("--superflag", ArgFormat.BOOLEAN_NOT, 0,
|
||||
False, ["--superflag"]),
|
||||
simple_boolean_not_none=("--superflag", ArgFormat.BOOLEAN_NOT, 0,
|
||||
None, ["--superflag"]),
|
||||
single_printf=("--param=%s", ArgFormat.PRINTF, 0,
|
||||
"potatoes", ["--param=potatoes"]),
|
||||
single_printf_no_substitution=("--param", ArgFormat.PRINTF, 0,
|
||||
"potatoes", ["--param"]),
|
||||
single_printf_none=("--param=%s", ArgFormat.PRINTF, 0,
|
||||
None, []),
|
||||
multiple_printf=(["--param", "free-%s"], ArgFormat.PRINTF, 0,
|
||||
"potatoes", ["--param", "free-potatoes"]),
|
||||
single_format=("--param={0}", ArgFormat.FORMAT, 0,
|
||||
"potatoes", ["--param=potatoes"]),
|
||||
single_format_none=("--param={0}", ArgFormat.FORMAT, 0,
|
||||
None, []),
|
||||
single_format_no_substitution=("--param", ArgFormat.FORMAT, 0,
|
||||
"potatoes", ["--param"]),
|
||||
multiple_format=(["--param", "free-{0}"], ArgFormat.FORMAT, 0,
|
||||
"potatoes", ["--param", "free-potatoes"]),
|
||||
multiple_format_none=(["--param", "free-{0}"], ArgFormat.FORMAT, 0,
|
||||
None, []),
|
||||
single_lambda_0star=((lambda v: ["piggies=[{0},{1},{2}]".format(v[0], v[1], v[2])]), None, 0,
|
||||
['a', 'b', 'c'], ["piggies=[a,b,c]"]),
|
||||
single_lambda_0star_none=((lambda v: ["piggies=[{0},{1},{2}]".format(v[0], v[1], v[2])]), None, 0,
|
||||
None, []),
|
||||
single_lambda_1star=((lambda a, b, c: ["piggies=[{0},{1},{2}]".format(a, b, c)]), None, 1,
|
||||
['a', 'b', 'c'], ["piggies=[a,b,c]"]),
|
||||
single_lambda_1star_none=((lambda a, b, c: ["piggies=[{0},{1},{2}]".format(a, b, c)]), None, 1,
|
||||
None, []),
|
||||
single_lambda_2star=(single_lambda_2star, None, 2,
|
||||
dict(z='c', x='a', y='b'), ["piggies=[a,b,c]"]),
|
||||
single_lambda_2star_none=(single_lambda_2star, None, 2,
|
||||
None, []),
|
||||
)
|
||||
ARG_FORMATS_IDS = sorted(ARG_FORMATS.keys())
|
||||
|
||||
|
||||
@pytest.mark.parametrize('fmt, style, stars, value, expected',
|
||||
(ARG_FORMATS[tc] for tc in ARG_FORMATS_IDS),
|
||||
ids=ARG_FORMATS_IDS)
|
||||
def test_arg_format(fmt, style, stars, value, expected):
|
||||
af = ArgFormat('name', fmt, style, stars)
|
||||
actual = af.to_text(value)
|
||||
print("formatted string = {0}".format(actual))
|
||||
assert actual == expected, "actual = {0}".format(actual)
|
||||
|
||||
|
||||
ARG_FORMATS_FAIL = dict(
|
||||
int_fmt=(3, None, 0, "", [""]),
|
||||
bool_fmt=(True, None, 0, "", [""]),
|
||||
)
|
||||
ARG_FORMATS_FAIL_IDS = sorted(ARG_FORMATS_FAIL.keys())
|
||||
|
||||
|
||||
@pytest.mark.parametrize('fmt, style, stars, value, expected',
|
||||
(ARG_FORMATS_FAIL[tc] for tc in ARG_FORMATS_FAIL_IDS),
|
||||
ids=ARG_FORMATS_FAIL_IDS)
|
||||
def test_arg_format_fail(fmt, style, stars, value, expected):
|
||||
with pytest.raises(TypeError):
|
||||
af = ArgFormat('name', fmt, style, stars)
|
||||
actual = af.to_text(value)
|
||||
print("formatted string = {0}".format(actual))
|
||||
|
||||
|
||||
def test_dependency_ctxmgr():
|
||||
ctx = DependencyCtxMgr("POTATOES", "Potatoes must be installed")
|
||||
with ctx:
|
||||
import potatoes_that_will_never_be_there
|
||||
print("POTATOES: ctx.text={0}".format(ctx.text))
|
||||
assert ctx.text == "Potatoes must be installed"
|
||||
assert not ctx.has_it
|
||||
|
||||
ctx = DependencyCtxMgr("POTATOES2")
|
||||
with ctx:
|
||||
import potatoes_that_will_never_be_there_again
|
||||
assert not ctx.has_it
|
||||
print("POTATOES2: ctx.text={0}".format(ctx.text))
|
||||
assert ctx.text.startswith("No module named")
|
||||
assert "potatoes_that_will_never_be_there_again" in ctx.text
|
||||
|
||||
ctx = DependencyCtxMgr("TYPING")
|
||||
with ctx:
|
||||
import sys
|
||||
assert ctx.has_it
|
||||
|
||||
|
||||
def test_variable_meta():
|
||||
meta = VarMeta()
|
||||
assert meta.output is True
|
||||
assert meta.diff is False
|
||||
assert meta.value is None
|
||||
meta.set_value("abc")
|
||||
assert meta.initial_value == "abc"
|
||||
assert meta.value == "abc"
|
||||
assert meta.diff_result is None
|
||||
meta.set_value("def")
|
||||
assert meta.initial_value == "abc"
|
||||
assert meta.value == "def"
|
||||
assert meta.diff_result is None
|
||||
|
||||
|
||||
def test_variable_meta_diff():
|
||||
meta = VarMeta(diff=True)
|
||||
assert meta.output is True
|
||||
assert meta.diff is True
|
||||
assert meta.value is None
|
||||
meta.set_value("abc")
|
||||
assert meta.initial_value == "abc"
|
||||
assert meta.value == "abc"
|
||||
assert meta.diff_result is None
|
||||
meta.set_value("def")
|
||||
assert meta.initial_value == "abc"
|
||||
assert meta.value == "def"
|
||||
assert meta.diff_result == {"before": "abc", "after": "def"}
|
||||
meta.set_value("ghi")
|
||||
assert meta.initial_value == "abc"
|
||||
assert meta.value == "ghi"
|
||||
assert meta.diff_result == {"before": "abc", "after": "ghi"}
|
||||
|
||||
|
||||
def test_vardict():
|
||||
vd = VarDict()
|
||||
vd.set('a', 123)
|
||||
assert vd['a'] == 123
|
||||
assert vd.a == 123
|
||||
assert 'a' in vd._meta
|
||||
assert vd.meta('a').output is True
|
||||
assert vd.meta('a').diff is False
|
||||
assert vd.meta('a').change is False
|
||||
vd['b'] = 456
|
||||
assert vd.meta('b').output is True
|
||||
assert vd.meta('b').diff is False
|
||||
assert vd.meta('b').change is False
|
||||
vd.set_meta('a', diff=True, change=True)
|
||||
vd.set_meta('b', diff=True, output=False)
|
||||
vd['c'] = 789
|
||||
assert vd.has_changed('c') is False
|
||||
vd['a'] = 'new_a'
|
||||
assert vd.has_changed('a') is True
|
||||
vd['c'] = 'new_c'
|
||||
assert vd.has_changed('c') is False
|
||||
vd['b'] = 'new_b'
|
||||
assert vd.has_changed('b') is False
|
||||
assert vd.a == 'new_a'
|
||||
assert vd.c == 'new_c'
|
||||
assert vd.output() == {'a': 'new_a', 'c': 'new_c'}
|
||||
assert vd.diff() == {'before': {'a': 123}, 'after': {'a': 'new_a'}}, "diff={0}".format(vd.diff())
|
||||
|
||||
|
||||
def test_variable_meta_change():
|
||||
vd = VarDict()
|
||||
vd.set('a', 123, change=True)
|
||||
vd.set('b', [4, 5, 6], change=True)
|
||||
vd.set('c', {'m': 7, 'n': 8, 'o': 9}, change=True)
|
||||
vd.set('d', {'a1': {'a11': 33, 'a12': 34}}, change=True)
|
||||
|
||||
vd.a = 1234
|
||||
assert vd.has_changed('a') is True
|
||||
vd.b.append(7)
|
||||
assert vd.b == [4, 5, 6, 7]
|
||||
assert vd.has_changed('b')
|
||||
vd.c.update({'p': 10})
|
||||
assert vd.c == {'m': 7, 'n': 8, 'o': 9, 'p': 10}
|
||||
assert vd.has_changed('c')
|
||||
vd.d['a1'].update({'a13': 35})
|
||||
assert vd.d == {'a1': {'a11': 33, 'a12': 34, 'a13': 35}}
|
||||
assert vd.has_changed('d')
|
||||
|
||||
|
||||
class MockMH(object):
|
||||
changed = None
|
||||
|
||||
def _div(self, x, y):
|
||||
return x / y
|
||||
|
||||
func_none = cause_changes()(_div)
|
||||
func_onsucc = cause_changes(on_success=True)(_div)
|
||||
func_onfail = cause_changes(on_failure=True)(_div)
|
||||
func_onboth = cause_changes(on_success=True, on_failure=True)(_div)
|
||||
|
||||
|
||||
CAUSE_CHG_DECO_PARAMS = ['method', 'expect_exception', 'expect_changed']
|
||||
CAUSE_CHG_DECO = dict(
|
||||
none_succ=dict(method='func_none', expect_exception=False, expect_changed=None),
|
||||
none_fail=dict(method='func_none', expect_exception=True, expect_changed=None),
|
||||
onsucc_succ=dict(method='func_onsucc', expect_exception=False, expect_changed=True),
|
||||
onsucc_fail=dict(method='func_onsucc', expect_exception=True, expect_changed=None),
|
||||
onfail_succ=dict(method='func_onfail', expect_exception=False, expect_changed=None),
|
||||
onfail_fail=dict(method='func_onfail', expect_exception=True, expect_changed=True),
|
||||
onboth_succ=dict(method='func_onboth', expect_exception=False, expect_changed=True),
|
||||
onboth_fail=dict(method='func_onboth', expect_exception=True, expect_changed=True),
|
||||
)
|
||||
CAUSE_CHG_DECO_IDS = sorted(CAUSE_CHG_DECO.keys())
|
||||
|
||||
|
||||
@pytest.mark.parametrize(CAUSE_CHG_DECO_PARAMS,
|
||||
[[CAUSE_CHG_DECO[tc][param]
|
||||
for param in CAUSE_CHG_DECO_PARAMS]
|
||||
for tc in CAUSE_CHG_DECO_IDS],
|
||||
ids=CAUSE_CHG_DECO_IDS)
|
||||
def test_cause_changes_deco(method, expect_exception, expect_changed):
|
||||
mh = MockMH()
|
||||
if expect_exception:
|
||||
with pytest.raises(Exception):
|
||||
getattr(mh, method)(1, 0)
|
||||
else:
|
||||
getattr(mh, method)(9, 3)
|
||||
|
||||
assert mh.changed == expect_changed
|
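In plain terms, the table above encodes: on_success marks the helper changed when the wrapped method returns normally, on_failure marks it changed when the method raises, and the exception still propagates either way. A hedged usage sketch on a helper class, mirroring MockMH and assuming cause_changes is imported as in this test module:

class ServiceHelper(object):
    changed = None

    @cause_changes(on_success=True)
    def start(self):
        return 'started'             # normal return -> self.changed is set to True

    @cause_changes(on_failure=True)
    def stop(self):
        raise RuntimeError('boom')   # exception -> self.changed is set to True, then re-raised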
||||
@@ -0,0 +1,44 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022 Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.onepassword import OnePasswordConfig
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def os_expanduser(mocker):
|
||||
def _os_expanduser(path):
|
||||
return path.replace("~", "/home/testuser")
|
||||
|
||||
mocker.patch("os.path.expanduser", side_effect=_os_expanduser)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def exists(mocker):
|
||||
def _exists(path):
|
||||
if "op/" in path:
|
||||
return True
|
||||
|
||||
return os.path.exists(path)
return os.path.exists(path)

mocker.patch("os.path.exists", side_effect=_exists)  # apply the stub, mirroring the os_expanduser fixture above
|
||||
|
||||
|
||||
def test_op_config(mocker, os_expanduser):
|
||||
mocker.patch("os.path.exists", side_effect=[False, True])
|
||||
op_config = OnePasswordConfig()
|
||||
|
||||
assert "/home/testuser/.config/op/config" == op_config.config_file_path
|
||||
|
||||
|
||||
def test_op_no_config(mocker, os_expanduser):
|
||||
mocker.patch("os.path.exists", return_value=False)
|
||||
op_config = OnePasswordConfig()
|
||||
|
||||
assert op_config.config_file_path is None
|
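Taken together, these two tests imply that OnePasswordConfig probes a list of candidate config locations in order and keeps the first one that exists (the [False, True] side effect makes the second candidate win), returning None when nothing matches. An illustrative equivalent with hypothetical names, not the actual implementation:

def first_existing_config(candidates):
    for candidate in candidates:
        path = os.path.expanduser(candidate)
        if os.path.exists(path):
            return path
    return None

# With os.path.exists answering False then True, a candidate list whose second
# entry is "~/.config/op/config" resolves to exactly the path asserted above.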
||||
@@ -0,0 +1,57 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2019, Andrey Tuzhilin <andrei.tuzhilin@gmail.com>
|
||||
# Copyright (c) 2020, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.saslprep import saslprep
|
||||
|
||||
|
||||
VALID = [
|
||||
(u'', u''),
|
||||
(u'\u00A0', u' '),
|
||||
(u'a', u'a'),
|
||||
(u'й', u'й'),
|
||||
(u'\u30DE\u30C8\u30EA\u30C3\u30AF\u30B9', u'\u30DE\u30C8\u30EA\u30C3\u30AF\u30B9'),
|
||||
(u'The\u00ADM\u00AAtr\u2168', u'TheMatrIX'),
|
||||
(u'I\u00ADX', u'IX'),
|
||||
(u'user', u'user'),
|
||||
(u'USER', u'USER'),
|
||||
(u'\u00AA', u'a'),
|
||||
(u'\u2168', u'IX'),
|
||||
(u'\u05BE\u00A0\u05BE', u'\u05BE\u0020\u05BE'),
|
||||
]
|
||||
|
||||
INVALID = [
|
||||
(None, TypeError),
|
||||
(b'', TypeError),
|
||||
(u'\u0221', ValueError),
|
||||
(u'\u0007', ValueError),
|
||||
(u'\u0627\u0031', ValueError),
|
||||
(u'\uE0001', ValueError),
|
||||
(u'\uE0020', ValueError),
|
||||
(u'\uFFF9', ValueError),
|
||||
(u'\uFDD0', ValueError),
|
||||
(u'\u0000', ValueError),
|
||||
(u'\u06DD', ValueError),
|
||||
(u'\uFFFFD', ValueError),
|
||||
(u'\uD800', ValueError),
|
||||
(u'\u200E', ValueError),
|
||||
(u'\u05BE\u00AA\u05BE', ValueError),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('source,target', VALID)
|
||||
def test_saslprep_conversions(source, target):
|
||||
assert saslprep(source) == target
|
||||
|
||||
|
||||
@pytest.mark.parametrize('source,exception', INVALID)
|
||||
def test_saslprep_exceptions(source, exception):
|
||||
with pytest.raises(exception) as ex:
|
||||
saslprep(source)
|
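A quick usage sketch (not part of the commit) of the behaviour the VALID and INVALID tables encode: compatibility characters are mapped or dropped, and prohibited code points raise ValueError:

assert saslprep(u'I\u00ADX') == u'IX'   # SOFT HYPHEN is removed
assert saslprep(u'\u00A0') == u' '      # NO-BREAK SPACE maps to SPACE
assert saslprep(u'\u2168') == u'IX'     # ROMAN NUMERAL NINE decomposes to 'IX'

try:
    saslprep(u'pass\u0000word')         # NUL is prohibited
except ValueError:
    pass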
||||
@@ -0,0 +1,48 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
#
|
||||
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||
# SPDX-License-Identifier: BSD-2-Clause
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM
|
||||
|
||||
|
||||
class FakeModule:
|
||||
def __init__(self, params):
|
||||
self.params = params
|
||||
|
||||
|
||||
def test_combine_headers_returns_only_default():
|
||||
expected = {"Accept": "application/json", "Content-type": "application/json"}
|
||||
module = FakeModule(
|
||||
params={'utm_protocol': 'utm_protocol', 'utm_host': 'utm_host', 'utm_port': 1234, 'utm_token': 'utm_token',
|
||||
'name': 'FakeName', 'headers': {}})
|
||||
result = UTM(module, "endpoint", [])._combine_headers()
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_combine_headers_returns_only_default2():
|
||||
expected = {"Accept": "application/json", "Content-type": "application/json"}
|
||||
module = FakeModule(
|
||||
params={'utm_protocol': 'utm_protocol', 'utm_host': 'utm_host', 'utm_port': 1234, 'utm_token': 'utm_token',
|
||||
'name': 'FakeName'})
|
||||
result = UTM(module, "endpoint", [])._combine_headers()
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_combine_headers_returns_combined():
|
||||
expected = {"Accept": "application/json", "Content-type": "application/json",
|
||||
"extraHeader": "extraHeaderValue"}
|
||||
module = FakeModule(params={'utm_protocol': 'utm_protocol', 'utm_host': 'utm_host', 'utm_port': 1234,
|
||||
'utm_token': 'utm_token', 'name': 'FakeName',
|
||||
"headers": {"extraHeader": "extraHeaderValue"}})
|
||||
result = UTM(module, "endpoint", [])._combine_headers()
|
||||
assert result == expected
|
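An illustrative equivalent (not the actual utm_utils code) of the behaviour these three tests pin down: the JSON defaults are always present, and any entries in module.params['headers'] are merged on top:

def combine_headers(params):
    headers = {"Accept": "application/json", "Content-type": "application/json"}
    headers.update(params.get("headers") or {})
    return headers

assert combine_headers({"headers": {"extraHeader": "extraHeaderValue"}}) == {
    "Accept": "application/json",
    "Content-type": "application/json",
    "extraHeader": "extraHeaderValue",
}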
||||
@@ -0,0 +1,34 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
class AnsibleModuleException(Exception):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
|
||||
class ExitJsonException(AnsibleModuleException):
|
||||
pass
|
||||
|
||||
|
||||
class FailJsonException(AnsibleModuleException):
|
||||
pass
|
||||
|
||||
|
||||
class FakeAnsibleModule:
|
||||
def __init__(self, params=None, check_mode=False):
|
||||
self.params = params
|
||||
self.check_mode = check_mode
|
||||
|
||||
def exit_json(self, *args, **kwargs):
|
||||
raise ExitJsonException(*args, **kwargs)
|
||||
|
||||
def fail_json(self, *args, **kwargs):
|
||||
raise FailJsonException(*args, **kwargs)
|
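Typical use of this fake in a test (sketch only, assuming pytest is imported in the consuming test module): because exit_json() and fail_json() raise instead of terminating the process, a test can observe them with pytest.raises:

def test_exit_path():
    module = FakeAnsibleModule(params={"state": "present"})
    with pytest.raises(ExitJsonException):
        module.exit_json(changed=False)

def test_fail_path():
    module = FakeAnsibleModule(params={"state": "present"})
    with pytest.raises(FailJsonException):
        module.fail_json(msg="something went wrong")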
||||
@@ -0,0 +1,70 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
FAKE_API_VERSION = "1.1"
|
||||
|
||||
|
||||
class Failure(Exception):
|
||||
def __init__(self, details):
|
||||
self.details = details
|
||||
|
||||
def __str__(self):
|
||||
return str(self.details)
|
||||
|
||||
|
||||
class Session(object):
|
||||
def __init__(self, uri, transport=None, encoding=None, verbose=0,
|
||||
allow_none=1, ignore_ssl=False):
|
||||
|
||||
self.transport = transport
|
||||
self._session = None
|
||||
self.last_login_method = None
|
||||
self.last_login_params = None
|
||||
self.API_version = FAKE_API_VERSION
|
||||
|
||||
def _get_api_version(self):
|
||||
return FAKE_API_VERSION
|
||||
|
||||
def _login(self, method, params):
|
||||
self._session = "OpaqueRef:fake-xenapi-session-ref"
|
||||
self.last_login_method = method
|
||||
self.last_login_params = params
|
||||
self.API_version = self._get_api_version()
|
||||
|
||||
def _logout(self):
|
||||
self._session = None
|
||||
self.last_login_method = None
|
||||
self.last_login_params = None
|
||||
self.API_version = FAKE_API_VERSION
|
||||
|
||||
def xenapi_request(self, methodname, params):
|
||||
if methodname.startswith('login'):
|
||||
self._login(methodname, params)
|
||||
return None
|
||||
elif methodname == 'logout' or methodname == 'session.logout':
|
||||
self._logout()
|
||||
return None
|
||||
else:
|
||||
# Should be patched with mocker.patch().
|
||||
return None
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == 'handle':
|
||||
return self._session
|
||||
elif name == 'xenapi':
|
||||
# Should be patched with mocker.patch().
|
||||
return None
|
||||
elif name.startswith('login') or name.startswith('slave_local'):
|
||||
return lambda *params: self._login(name, params)
|
||||
elif name == 'logout':
|
||||
return self._logout
|
||||
|
||||
|
||||
def xapi_local():
|
||||
return Session("http://_var_lib_xcp_xapi/")
|
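A short sketch of how this fake behaves (not part of the commit): login-style attribute access is routed through __getattr__ to _login(), which only records what was called, so tests can assert on it without a real XAPI endpoint:

session = xapi_local()
session.login_with_password('root', 'secret')   # __getattr__ -> _login('login_with_password', ('root', 'secret'))

assert session.handle == 'OpaqueRef:fake-xenapi-session-ref'
assert session.last_login_method == 'login_with_password'
assert session.last_login_params == ('root', 'secret')

session.logout()
assert session.handle is None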
||||
@@ -0,0 +1,26 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
def fake_xenapi_ref(xenapi_class):
|
||||
return "OpaqueRef:fake-xenapi-%s-ref" % xenapi_class
|
||||
|
||||
|
||||
testcase_bad_xenapi_refs = {
|
||||
"params": [
|
||||
None,
|
||||
'',
|
||||
'OpaqueRef:NULL',
|
||||
],
|
||||
"ids": [
|
||||
'none',
|
||||
'empty',
|
||||
'ref-null',
|
||||
],
|
||||
}
|
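The dict above is shaped for pytest parametrization; a sketch of the intended consumption (not part of the commit):

import pytest

@pytest.mark.parametrize('bad_ref',
                         testcase_bad_xenapi_refs['params'],
                         ids=testcase_bad_xenapi_refs['ids'])
def test_handles_bad_ref(bad_ref):
    # None, '' and 'OpaqueRef:NULL' should all be treated as "no such object"
    assert bad_ref in (None, '', 'OpaqueRef:NULL')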
||||
@@ -0,0 +1,119 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import sys
|
||||
import importlib
|
||||
import os
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from .FakeAnsibleModule import FakeAnsibleModule
|
||||
from ansible.module_utils import six
|
||||
from mock import MagicMock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fake_ansible_module(request):
|
||||
"""Returns fake AnsibleModule with fake module params."""
|
||||
if hasattr(request, 'param'):
|
||||
return FakeAnsibleModule(request.param)
|
||||
else:
|
||||
params = {
|
||||
"hostname": "somehost",
|
||||
"username": "someuser",
|
||||
"password": "somepwd",
|
||||
"validate_certs": True,
|
||||
}
|
||||
|
||||
return FakeAnsibleModule(params)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def XenAPI():
|
||||
"""Imports and returns fake XenAPI module."""
|
||||
|
||||
# Import of fake XenAPI module is wrapped by fixture so that it does not
|
||||
# affect other unit tests which could potentially also use XenAPI module.
|
||||
|
||||
# First we use importlib.import_module() to import the module and assign
|
||||
# it to a local symbol.
|
||||
fake_xenapi = importlib.import_module('ansible_collections.community.general.tests.unit.plugins.module_utils.xenserver.FakeXenAPI')
|
||||
|
||||
# Now we populate Python module cache with imported fake module using the
|
||||
# original module name (XenAPI). That way, any 'import XenAPI' statement
|
||||
# will just load already imported fake module from the cache.
|
||||
sys.modules['XenAPI'] = fake_xenapi
|
||||
|
||||
return fake_xenapi
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def xenserver(XenAPI):
|
||||
"""Imports and returns xenserver module util."""
|
||||
|
||||
# Since we are wrapping fake XenAPI module inside a fixture, all modules
|
||||
# that depend on it have to be imported inside a test function. To make
|
||||
# this easier to handle and remove some code repetition, we wrap the import
|
||||
# of xenserver module util with a fixture.
|
||||
from ansible_collections.community.general.plugins.module_utils import xenserver
|
||||
|
||||
return xenserver
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_xenapi_failure(XenAPI, mocker):
|
||||
"""
|
||||
Returns mock object that raises XenAPI.Failure on any XenAPI
|
||||
method call.
|
||||
"""
|
||||
fake_error_msg = "Fake XAPI method call error!"
|
||||
|
||||
# We need to use our MagicMock based class that passes side_effect to its
|
||||
# children because calls to xenapi methods can generate an arbitrary
|
||||
# hierarchy of mock objects. Any such object when called should use the
|
||||
# same side_effect as its parent mock object.
|
||||
class MagicMockSideEffect(MagicMock):
|
||||
def _get_child_mock(self, **kw):
|
||||
child_mock = super(MagicMockSideEffect, self)._get_child_mock(**kw)
|
||||
child_mock.side_effect = self.side_effect
|
||||
return child_mock
|
||||
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', new=MagicMockSideEffect(), create=True)
|
||||
mocked_xenapi.side_effect = XenAPI.Failure(fake_error_msg)
|
||||
|
||||
return mocked_xenapi, fake_error_msg
|
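Usage sketch (not part of the commit): with this fixture in place, any xenapi call on the patched session raises XenAPI.Failure carrying fake_error_msg, no matter how deep the attribute chain is:

def test_error_propagation(mock_xenapi_failure, XenAPI):
    mocked_xenapi, fake_error_msg = mock_xenapi_failure

    with pytest.raises(XenAPI.Failure) as exc_info:
        mocked_xenapi.VM.get_record('OpaqueRef:fake-xenapi-vm-ref')

    assert str(exc_info.value) == fake_error_msg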
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fixture_data_from_file(request):
|
||||
"""Loads fixture data from files."""
|
||||
if not hasattr(request, 'param'):
|
||||
return {}
|
||||
|
||||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
fixture_data = {}
|
||||
|
||||
if isinstance(request.param, six.string_types):
|
||||
request.param = [request.param]
|
||||
|
||||
for fixture_name in request.param:
|
||||
path = os.path.join(fixture_path, fixture_name)
|
||||
|
||||
with open(path) as f:
|
||||
data = f.read()
|
||||
|
||||
try:
|
||||
data = json.loads(data)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
fixture_data[fixture_name] = data
|
||||
|
||||
return fixture_data
|
||||
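This fixture is designed for indirect parametrization with fixture file names; a sketch with a hypothetical file name (not part of the commit):

@pytest.mark.parametrize('fixture_data_from_file',
                         [['some-vm-facts.json']],     # hypothetical fixture file name
                         indirect=True)
def test_uses_fixture_file(fixture_data_from_file):
    facts = fixture_data_from_file['some-vm-facts.json']
    assert isinstance(facts, dict)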
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"cdrom": {
|
||||
"type": "none"
|
||||
},
|
||||
"customization_agent": "native",
|
||||
"disks": [
|
||||
{
|
||||
"name": "ansible-test-vm-1-C",
|
||||
"name_desc": "C:\\",
|
||||
"os_device": "xvda",
|
||||
"size": 42949672960,
|
||||
"sr": "Ansible Test Storage 1",
|
||||
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"vbd_userdevice": "0"
|
||||
}
|
||||
],
|
||||
"domid": "143",
|
||||
"folder": "/Ansible/Test",
|
||||
"hardware": {
|
||||
"memory_mb": 2048,
|
||||
"num_cpu_cores_per_socket": 2,
|
||||
"num_cpus": 2
|
||||
},
|
||||
"home_server": "",
|
||||
"is_template": false,
|
||||
"name": "ansible-test-vm-1",
|
||||
"name_desc": "Created by Ansible",
|
||||
"networks": [
|
||||
{
|
||||
"gateway": "10.0.0.1",
|
||||
"gateway6": "",
|
||||
"ip": "10.0.0.2",
|
||||
"ip6": [
|
||||
"fe80:0000:0000:0000:11e1:12c9:ef3b:75a0"
|
||||
],
|
||||
"mac": "7a:a6:48:1e:31:46",
|
||||
"mtu": "1500",
|
||||
"name": "Host internal management network",
|
||||
"netmask": "255.255.255.0",
|
||||
"prefix": "24",
|
||||
"prefix6": "",
|
||||
"vif_device": "0"
|
||||
}
|
||||
],
|
||||
"other_config": {
|
||||
"base_template_name": "Windows Server 2016 (64-bit)",
|
||||
"folder": "/Ansible/Test",
|
||||
"import_task": "OpaqueRef:e43eb71c-45d6-5351-09ff-96e4fb7d0fa5",
|
||||
"install-methods": "cdrom",
|
||||
"instant": "true",
|
||||
"mac_seed": "366fe8e0-878b-4320-8731-90d1ed3c0b93"
|
||||
},
|
||||
"platform": {
|
||||
"acpi": "1",
|
||||
"apic": "true",
|
||||
"cores-per-socket": "2",
|
||||
"device_id": "0002",
|
||||
"hpet": "true",
|
||||
"nx": "true",
|
||||
"pae": "true",
|
||||
"timeoffset": "-28800",
|
||||
"vga": "std",
|
||||
"videoram": "8",
|
||||
"viridian": "true",
|
||||
"viridian_reference_tsc": "true",
|
||||
"viridian_time_ref_count": "true"
|
||||
},
|
||||
"state": "poweredon",
|
||||
"uuid": "81c373d7-a407-322f-911b-31386eb5215d",
|
||||
"xenstore_data": {
|
||||
"vm-data": ""
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,707 @@
|
||||
{
|
||||
"SR": {
|
||||
"OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f": {
|
||||
"PBDs": [],
|
||||
"VDIs": [],
|
||||
"allowed_operations": [
|
||||
"unplug",
|
||||
"plug",
|
||||
"pbd_create",
|
||||
"update",
|
||||
"pbd_destroy",
|
||||
"vdi_resize",
|
||||
"vdi_clone",
|
||||
"scan",
|
||||
"vdi_snapshot",
|
||||
"vdi_mirror",
|
||||
"vdi_create",
|
||||
"vdi_destroy"
|
||||
],
|
||||
"blobs": {},
|
||||
"clustered": false,
|
||||
"content_type": "",
|
||||
"current_operations": {},
|
||||
"introduced_by": "OpaqueRef:NULL",
|
||||
"is_tools_sr": false,
|
||||
"local_cache_enabled": false,
|
||||
"name_description": "",
|
||||
"name_label": "Ansible Test Storage 1",
|
||||
"other_config": {
|
||||
"auto-scan": "false"
|
||||
},
|
||||
"physical_size": "2521133219840",
|
||||
"physical_utilisation": "1551485632512",
|
||||
"shared": true,
|
||||
"sm_config": {
|
||||
"allocation": "thick",
|
||||
"devserial": "scsi-3600a098038302d353624495242443848",
|
||||
"multipathable": "true",
|
||||
"use_vhd": "true"
|
||||
},
|
||||
"tags": [],
|
||||
"type": "lvmohba",
|
||||
"uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"virtual_allocation": "1556925644800"
|
||||
}
|
||||
},
|
||||
"VBD": {
|
||||
"OpaqueRef:1c0a7c6d-09e5-9b2c-bbe3-9a73aadcff9f": {
|
||||
"VDI": "OpaqueRef:NULL",
|
||||
"VM": "OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unpause",
|
||||
"insert",
|
||||
"pause"
|
||||
],
|
||||
"bootable": false,
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "xvdd",
|
||||
"empty": true,
|
||||
"metrics": "OpaqueRef:1a36eae4-87c8-0945-cee9-c85a71fd843f",
|
||||
"mode": "RO",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "CD",
|
||||
"unpluggable": true,
|
||||
"userdevice": "3",
|
||||
"uuid": "e6aacd53-a2c8-649f-b405-93fcb811411a"
|
||||
},
|
||||
"OpaqueRef:ea4a4088-19c3-6db6-ebdf-c3c0ee4405a3": {
|
||||
"VDI": "OpaqueRef:fd20510d-e9ca-b966-3b98-4ae547dacf9a",
|
||||
"VM": "OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unpause",
|
||||
"unplug",
|
||||
"unplug_force",
|
||||
"pause"
|
||||
],
|
||||
"bootable": true,
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "xvda",
|
||||
"empty": false,
|
||||
"metrics": "OpaqueRef:ddbd70d4-7dde-b51e-6208-eb434b300009",
|
||||
"mode": "RW",
|
||||
"other_config": {
|
||||
"owner": "true"
|
||||
},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "Disk",
|
||||
"unpluggable": true,
|
||||
"userdevice": "0",
|
||||
"uuid": "ffd6de9c-c416-1d52-3e9d-3bcbf567245e"
|
||||
}
|
||||
},
|
||||
"VDI": {
|
||||
"OpaqueRef:fd20510d-e9ca-b966-3b98-4ae547dacf9a": {
|
||||
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
|
||||
"VBDs": [
|
||||
"OpaqueRef:ea4a4088-19c3-6db6-ebdf-c3c0ee4405a3"
|
||||
],
|
||||
"allow_caching": false,
|
||||
"allowed_operations": [
|
||||
"clone",
|
||||
"snapshot"
|
||||
],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"is_a_snapshot": false,
|
||||
"is_tools_iso": false,
|
||||
"location": "b807f67b-3f37-4a6e-ad6c-033f812ab093",
|
||||
"managed": true,
|
||||
"metadata_latest": false,
|
||||
"metadata_of_pool": "",
|
||||
"missing": false,
|
||||
"name_description": "C:\\",
|
||||
"name_label": "ansible-test-vm-1-C",
|
||||
"on_boot": "persist",
|
||||
"other_config": {},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"physical_utilisation": "43041947648",
|
||||
"read_only": false,
|
||||
"sharable": false,
|
||||
"sm_config": {
|
||||
"host_OpaqueRef:07a8da76-f1cf-f3b5-a531-6b751384f770": "RW",
|
||||
"read-caching-enabled-on-92ac8132-276b-4d0f-9d3a-54db51e4a438": "false",
|
||||
"read-caching-reason-92ac8132-276b-4d0f-9d3a-54db51e4a438": "LICENSE_RESTRICTION",
|
||||
"vdi_type": "vhd"
|
||||
},
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"storage_lock": false,
|
||||
"tags": [],
|
||||
"type": "system",
|
||||
"uuid": "b807f67b-3f37-4a6e-ad6c-033f812ab093",
|
||||
"virtual_size": "42949672960",
|
||||
"xenstore_data": {}
|
||||
}
|
||||
},
|
||||
"VIF": {
|
||||
"OpaqueRef:38da2120-6086-5043-8383-ab0a53ede42a": {
|
||||
"MAC": "7a:a6:48:1e:31:46",
|
||||
"MAC_autogenerated": false,
|
||||
"MTU": "1500",
|
||||
"VM": "OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unplug"
|
||||
],
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "0",
|
||||
"ipv4_addresses": [
|
||||
"10.0.0.2/24"
|
||||
],
|
||||
"ipv4_allowed": [],
|
||||
"ipv4_configuration_mode": "Static",
|
||||
"ipv4_gateway": "10.0.0.1",
|
||||
"ipv6_addresses": [
|
||||
""
|
||||
],
|
||||
"ipv6_allowed": [],
|
||||
"ipv6_configuration_mode": "None",
|
||||
"ipv6_gateway": "",
|
||||
"locking_mode": "network_default",
|
||||
"metrics": "OpaqueRef:15502939-df0f-0095-1ce3-e51367199d27",
|
||||
"network": "OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"uuid": "bd108d25-488a-f9b5-4c7b-02d40f1e38a8"
|
||||
}
|
||||
},
|
||||
"VM": {
|
||||
"OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c": {
|
||||
"HVM_boot_params": {
|
||||
"order": "dc"
|
||||
},
|
||||
"HVM_boot_policy": "BIOS order",
|
||||
"HVM_shadow_multiplier": 1.0,
|
||||
"PCI_bus": "",
|
||||
"PV_args": "",
|
||||
"PV_bootloader": "",
|
||||
"PV_bootloader_args": "",
|
||||
"PV_kernel": "",
|
||||
"PV_legacy_args": "",
|
||||
"PV_ramdisk": "",
|
||||
"VBDs": [
|
||||
"OpaqueRef:1c0a7c6d-09e5-9b2c-bbe3-9a73aadcff9f",
|
||||
"OpaqueRef:ea4a4088-19c3-6db6-ebdf-c3c0ee4405a3"
|
||||
],
|
||||
"VCPUs_at_startup": "2",
|
||||
"VCPUs_max": "2",
|
||||
"VCPUs_params": {},
|
||||
"VGPUs": [],
|
||||
"VIFs": [
|
||||
"OpaqueRef:38da2120-6086-5043-8383-ab0a53ede42a"
|
||||
],
|
||||
"VTPMs": [],
|
||||
"actions_after_crash": "restart",
|
||||
"actions_after_reboot": "restart",
|
||||
"actions_after_shutdown": "destroy",
|
||||
"affinity": "OpaqueRef:NULL",
|
||||
"allowed_operations": [
|
||||
"changing_dynamic_range",
|
||||
"migrate_send",
|
||||
"pool_migrate",
|
||||
"changing_VCPUs_live",
|
||||
"suspend",
|
||||
"hard_reboot",
|
||||
"hard_shutdown",
|
||||
"clean_reboot",
|
||||
"clean_shutdown",
|
||||
"pause",
|
||||
"checkpoint",
|
||||
"snapshot"
|
||||
],
|
||||
"appliance": "OpaqueRef:NULL",
|
||||
"attached_PCIs": [],
|
||||
"bios_strings": {
|
||||
"bios-vendor": "Xen",
|
||||
"bios-version": "",
|
||||
"hp-rombios": "",
|
||||
"oem-1": "Xen",
|
||||
"oem-2": "MS_VM_CERT/SHA1/bdbeb6e0a816d43fa6d3fe8aaef04c2bad9d3e3d",
|
||||
"system-manufacturer": "Xen",
|
||||
"system-product-name": "HVM domU",
|
||||
"system-serial-number": "",
|
||||
"system-version": ""
|
||||
},
|
||||
"blobs": {},
|
||||
"blocked_operations": {},
|
||||
"children": [],
|
||||
"consoles": [
|
||||
"OpaqueRef:4fa7d34e-1fb6-9e88-1b21-41a3c6550d8b"
|
||||
],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"domarch": "",
|
||||
"domid": "143",
|
||||
"generation_id": "3274224479562869847:6952848762503845513",
|
||||
"guest_metrics": "OpaqueRef:453f21be-954d-2ca8-e38e-09741e91350c",
|
||||
"ha_always_run": false,
|
||||
"ha_restart_priority": "",
|
||||
"hardware_platform_version": "0",
|
||||
"has_vendor_device": false,
|
||||
"is_a_snapshot": false,
|
||||
"is_a_template": false,
|
||||
"is_control_domain": false,
|
||||
"is_default_template": false,
|
||||
"is_snapshot_from_vmpp": false,
|
||||
"is_vmss_snapshot": false,
|
||||
"last_boot_CPU_flags": {
|
||||
"features": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
|
||||
"vendor": "GenuineIntel"
|
||||
},
|
||||
"last_booted_record": "",
|
||||
"memory_dynamic_max": "2147483648",
|
||||
"memory_dynamic_min": "2147483648",
|
||||
"memory_overhead": "20971520",
|
||||
"memory_static_max": "2147483648",
|
||||
"memory_static_min": "1073741824",
|
||||
"memory_target": "2147483648",
|
||||
"metrics": "OpaqueRef:6eede779-4e55-7cfb-8b8a-e4b9becf770b",
|
||||
"name_description": "Created by Ansible",
|
||||
"name_label": "ansible-test-vm-1",
|
||||
"order": "0",
|
||||
"other_config": {
|
||||
"base_template_name": "Windows Server 2016 (64-bit)",
|
||||
"folder": "/Ansible/Test",
|
||||
"import_task": "OpaqueRef:e43eb71c-45d6-5351-09ff-96e4fb7d0fa5",
|
||||
"install-methods": "cdrom",
|
||||
"instant": "true",
|
||||
"mac_seed": "366fe8e0-878b-4320-8731-90d1ed3c0b93"
|
||||
},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"platform": {
|
||||
"acpi": "1",
|
||||
"apic": "true",
|
||||
"cores-per-socket": "2",
|
||||
"device_id": "0002",
|
||||
"hpet": "true",
|
||||
"nx": "true",
|
||||
"pae": "true",
|
||||
"timeoffset": "-28800",
|
||||
"vga": "std",
|
||||
"videoram": "8",
|
||||
"viridian": "true",
|
||||
"viridian_reference_tsc": "true",
|
||||
"viridian_time_ref_count": "true"
|
||||
},
|
||||
"power_state": "Running",
|
||||
"protection_policy": "OpaqueRef:NULL",
|
||||
"recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"1649267441664\"/><restriction field=\"vcpus-max\" max=\"32\"/><restriction field=\"has-vendor-device\" value=\"true\"/><restriction max=\"255\" property=\"number-of-vbds\"/><restriction max=\"7\" property=\"number-of-vifs\"/></restrictions>",
|
||||
"reference_label": "windows-server-2016-64bit",
|
||||
"requires_reboot": false,
|
||||
"resident_on": "OpaqueRef:07a8da76-f1cf-f3b5-a531-6b751384f770",
|
||||
"shutdown_delay": "0",
|
||||
"snapshot_info": {},
|
||||
"snapshot_metadata": "",
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_schedule": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"start_delay": "0",
|
||||
"suspend_SR": "OpaqueRef:NULL",
|
||||
"suspend_VDI": "OpaqueRef:NULL",
|
||||
"tags": [],
|
||||
"transportable_snapshot_id": "",
|
||||
"user_version": "1",
|
||||
"uuid": "81c373d7-a407-322f-911b-31386eb5215d",
|
||||
"version": "0",
|
||||
"xenstore_data": {
|
||||
"vm-data": ""
|
||||
}
|
||||
}
|
||||
},
|
||||
"VM_guest_metrics": {
|
||||
"OpaqueRef:453f21be-954d-2ca8-e38e-09741e91350c": {
|
||||
"PV_drivers_detected": true,
|
||||
"PV_drivers_up_to_date": true,
|
||||
"PV_drivers_version": {
|
||||
"build": "1020",
|
||||
"major": "7",
|
||||
"micro": "0",
|
||||
"minor": "1"
|
||||
},
|
||||
"can_use_hotplug_vbd": "yes",
|
||||
"can_use_hotplug_vif": "yes",
|
||||
"disks": {},
|
||||
"last_updated": "20190113T19:40:34Z",
|
||||
"live": true,
|
||||
"memory": {},
|
||||
"networks": {
|
||||
"0/ip": "10.0.0.2",
|
||||
"0/ipv6/0": "fe80:0000:0000:0000:11e1:12c9:ef3b:75a0"
|
||||
},
|
||||
"os_version": {
|
||||
"distro": "windows",
|
||||
"major": "6",
|
||||
"minor": "2",
|
||||
"name": "Microsoft Windows Server 2016 Standard|C:\\Windows|\\Device\\Harddisk0\\Partition2",
|
||||
"spmajor": "0",
|
||||
"spminor": "0"
|
||||
},
|
||||
"other": {
|
||||
"data-ts": "1",
|
||||
"error": "WTSQueryUserToken : 1008 failed.",
|
||||
"feature-balloon": "1",
|
||||
"feature-poweroff": "1",
|
||||
"feature-reboot": "1",
|
||||
"feature-s3": "1",
|
||||
"feature-s4": "1",
|
||||
"feature-setcomputername": "1",
|
||||
"feature-static-ip-setting": "1",
|
||||
"feature-suspend": "1",
|
||||
"feature-ts": "1",
|
||||
"feature-ts2": "1",
|
||||
"feature-xs-batcmd": "1",
|
||||
"has-vendor-device": "0",
|
||||
"platform-feature-multiprocessor-suspend": "1"
|
||||
},
|
||||
"other_config": {},
|
||||
"uuid": "9ea6803f-12ca-3d6a-47b7-c90a33b67b98"
|
||||
}
|
||||
},
|
||||
"VM_metrics": {
|
||||
"OpaqueRef:6eede779-4e55-7cfb-8b8a-e4b9becf770b": {
|
||||
"VCPUs_CPU": {},
|
||||
"VCPUs_flags": {},
|
||||
"VCPUs_number": "2",
|
||||
"VCPUs_params": {},
|
||||
"VCPUs_utilisation": {},
|
||||
"hvm": true,
|
||||
"install_time": "20190113T19:31:47Z",
|
||||
"last_updated": "19700101T00:00:00Z",
|
||||
"memory_actual": "2147475456",
|
||||
"nested_virt": false,
|
||||
"nomigrate": false,
|
||||
"other_config": {},
|
||||
"start_time": "20190113T19:38:59Z",
|
||||
"state": [],
|
||||
"uuid": "c67fadf7-8143-0c92-c772-cd3901c18e70"
|
||||
}
|
||||
},
|
||||
"host": {
|
||||
"OpaqueRef:07a8da76-f1cf-f3b5-a531-6b751384f770": {
|
||||
"API_version_major": "2",
|
||||
"API_version_minor": "7",
|
||||
"API_version_vendor": "XenSource",
|
||||
"API_version_vendor_implementation": {},
|
||||
"PBDs": [],
|
||||
"PCIs": [],
|
||||
"PGPUs": [],
|
||||
"PIFs": [],
|
||||
"address": "10.0.0.1",
|
||||
"allowed_operations": [
|
||||
"vm_migrate",
|
||||
"provision",
|
||||
"vm_resume",
|
||||
"evacuate",
|
||||
"vm_start"
|
||||
],
|
||||
"bios_strings": {},
|
||||
"blobs": {},
|
||||
"capabilities": [
|
||||
"xen-3.0-x86_64",
|
||||
"xen-3.0-x86_32p",
|
||||
"hvm-3.0-x86_32",
|
||||
"hvm-3.0-x86_32p",
|
||||
"hvm-3.0-x86_64",
|
||||
""
|
||||
],
|
||||
"chipset_info": {
|
||||
"iommu": "true"
|
||||
},
|
||||
"control_domain": "OpaqueRef:a2a31555-f232-822b-8f36-10d75d44b79c",
|
||||
"cpu_configuration": {},
|
||||
"cpu_info": {
|
||||
"cpu_count": "40",
|
||||
"family": "6",
|
||||
"features": "7ffefbff-bfebfbff-00000021-2c100800",
|
||||
"features_hvm": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
|
||||
"features_pv": "17c9cbf5-f6f83203-2191cbf5-00000023-00000001-00000329-00000000-00000000-00001000-0c000000",
|
||||
"flags": "fpu de tsc msr pae mce cx8 apic sep mca cmov pat clflush acpi mmx fxsr sse sse2 ht syscall nx lm constant_tsc arch_perfmon rep_good nopl nonstop_tsc eagerfpu pni pclmulqdq monitor est ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm ida arat epb pln pts dtherm fsgsbase bmi1 avx2 bmi2 erms xsaveopt cqm_llc cqm_occup_llc",
|
||||
"model": "63",
|
||||
"modelname": "Intel(R) Xeon(R) CPU E5-2660 v3 @ 2.60GHz",
|
||||
"socket_count": "2",
|
||||
"speed": "2597.064",
|
||||
"stepping": "2",
|
||||
"vendor": "GenuineIntel"
|
||||
},
|
||||
"crash_dump_sr": "OpaqueRef:ed72d7bf-4e53-67fc-17f5-e27b203042ba",
|
||||
"crashdumps": [],
|
||||
"current_operations": {},
|
||||
"display": "enabled",
|
||||
"edition": "free",
|
||||
"enabled": true,
|
||||
"external_auth_configuration": {},
|
||||
"external_auth_service_name": "",
|
||||
"external_auth_type": "",
|
||||
"features": [],
|
||||
"guest_VCPUs_params": {},
|
||||
"ha_network_peers": [],
|
||||
"ha_statefiles": [],
|
||||
"host_CPUs": [
|
||||
"OpaqueRef:f7e744f6-a6f9-c460-999a-c27e1395e2e0",
|
||||
"OpaqueRef:f6e5dcf0-0453-8f3f-88c1-7ad6e2ef3dd1",
|
||||
"OpaqueRef:f27a52fb-5feb-173d-1a07-d1735a83c2cc",
|
||||
"OpaqueRef:ed65327a-508a-ccfc-dba6-2a0175cb2432",
|
||||
"OpaqueRef:e41d2f2a-fe9e-72cb-8104-b22d6d314b13",
|
||||
"OpaqueRef:e1988469-b814-5d10-17a6-bfd7c62d2b5f",
|
||||
"OpaqueRef:d73967dc-b8d8-b47b-39f4-d599fdcabf55",
|
||||
"OpaqueRef:cba9ebd9-40dc-0611-d1bb-aa661bd0bf70",
|
||||
"OpaqueRef:c53d3110-4085-60af-8300-d879818789f7",
|
||||
"OpaqueRef:bee0cf87-7df6-79a6-94e8-36f98e69ad20",
|
||||
"OpaqueRef:bde28e83-213f-0e65-b6ad-0ae1ecebb98d",
|
||||
"OpaqueRef:bbfefe67-f65f-98cb-c3fc-cb8ea0588006",
|
||||
"OpaqueRef:b38ac595-afea-0ca0-49a0-9f5ef2368e3b",
|
||||
"OpaqueRef:b14ef333-78b1-193d-02da-dc9bfed36912",
|
||||
"OpaqueRef:afd478bf-57b9-0c79-f257-50aeb81504f1",
|
||||
"OpaqueRef:a307cd3a-2132-2e42-4ebc-cc1c7780736d",
|
||||
"OpaqueRef:a1a9df7d-88ba-64fd-a55c-0f6472e1753f",
|
||||
"OpaqueRef:a0e39c9c-3e0b-fa03-e5d0-93a09aa77393",
|
||||
"OpaqueRef:9fd5719b-36ab-8e25-7756-20a496ccb331",
|
||||
"OpaqueRef:9ac4195d-ac07-cfe2-bc19-27ee54cf91fb",
|
||||
"OpaqueRef:98c5c00c-1e2d-e22b-842e-79e85ce07873",
|
||||
"OpaqueRef:961129bf-e695-f206-7297-64f9007a64f3",
|
||||
"OpaqueRef:64368b4c-3488-2808-f0b3-42f2a656df2b",
|
||||
"OpaqueRef:620dabc0-d7c5-0dc8-52df-3be25194c2fb",
|
||||
"OpaqueRef:5cee2759-dd8e-7e1a-0727-21e196584030",
|
||||
"OpaqueRef:58f70163-863d-5787-ffbb-2416cb16ca1e",
|
||||
"OpaqueRef:4462f848-f396-653d-67f9-2bed13be2c58",
|
||||
"OpaqueRef:40e800c2-19db-7cd8-c045-5ae93f908cae",
|
||||
"OpaqueRef:3f84278b-dec6-ded0-1a33-4daa0ce75a2f",
|
||||
"OpaqueRef:3ef14992-62f6-e1f0-5715-0ee02a834a9c",
|
||||
"OpaqueRef:3e274c24-c55b-06f5-2c8f-415421043ab2",
|
||||
"OpaqueRef:35ff27da-f286-7b70-adc1-a200880bb79f",
|
||||
"OpaqueRef:2511aa53-8660-e442-3cd2-305982d1f751",
|
||||
"OpaqueRef:21d234e3-138c-81ca-9ed8-febc81b874e9",
|
||||
"OpaqueRef:1f9b4ee3-dcc7-114e-b401-dc3e94c07efa",
|
||||
"OpaqueRef:1b94a981-d340-dd07-41c2-b3ff3c545fed",
|
||||
"OpaqueRef:197ad104-64a8-5af3-8c7a-95f3d301aadd",
|
||||
"OpaqueRef:1672e747-dc4b-737b-ddcf-0a373f966012",
|
||||
"OpaqueRef:12ced494-a225-7584-456b-739331bb5114",
|
||||
"OpaqueRef:0139ff72-62ac-1a6a-8f6f-cb01d8a4ee92"
|
||||
],
|
||||
"hostname": "ansible-test-host-1",
|
||||
"license_params": {
|
||||
"address1": "",
|
||||
"address2": "",
|
||||
"city": "",
|
||||
"company": "",
|
||||
"country": "",
|
||||
"enable_xha": "true",
|
||||
"expiry": "20291231T23:00:00Z",
|
||||
"grace": "no",
|
||||
"license_type": "",
|
||||
"name": "",
|
||||
"platform_filter": "false",
|
||||
"postalcode": "",
|
||||
"productcode": "",
|
||||
"regular_nag_dialog": "false",
|
||||
"restrict_ad": "false",
|
||||
"restrict_batch_hotfix_apply": "true",
|
||||
"restrict_checkpoint": "false",
|
||||
"restrict_cifs": "true",
|
||||
"restrict_connection": "false",
|
||||
"restrict_cpu_masking": "false",
|
||||
"restrict_dmc": "false",
|
||||
"restrict_dr": "false",
|
||||
"restrict_email_alerting": "false",
|
||||
"restrict_equalogic": "false",
|
||||
"restrict_export_resource_data": "true",
|
||||
"restrict_gpu": "false",
|
||||
"restrict_guest_agent_auto_update": "true",
|
||||
"restrict_guest_ip_setting": "false",
|
||||
"restrict_health_check": "false",
|
||||
"restrict_historical_performance": "false",
|
||||
"restrict_hotfix_apply": "false",
|
||||
"restrict_integrated_gpu_passthrough": "false",
|
||||
"restrict_intellicache": "false",
|
||||
"restrict_lab": "false",
|
||||
"restrict_live_patching": "true",
|
||||
"restrict_marathon": "false",
|
||||
"restrict_nested_virt": "true",
|
||||
"restrict_netapp": "false",
|
||||
"restrict_pci_device_for_auto_update": "true",
|
||||
"restrict_pool_attached_storage": "false",
|
||||
"restrict_pooling": "false",
|
||||
"restrict_pvs_proxy": "true",
|
||||
"restrict_qos": "false",
|
||||
"restrict_rbac": "false",
|
||||
"restrict_read_caching": "true",
|
||||
"restrict_set_vcpus_number_live": "true",
|
||||
"restrict_ssl_legacy_switch": "false",
|
||||
"restrict_stage": "false",
|
||||
"restrict_storage_xen_motion": "false",
|
||||
"restrict_storagelink": "false",
|
||||
"restrict_storagelink_site_recovery": "false",
|
||||
"restrict_vgpu": "true",
|
||||
"restrict_vif_locking": "false",
|
||||
"restrict_vlan": "false",
|
||||
"restrict_vm_memory_introspection": "true",
|
||||
"restrict_vmpr": "false",
|
||||
"restrict_vmss": "false",
|
||||
"restrict_vss": "false",
|
||||
"restrict_vswitch_controller": "false",
|
||||
"restrict_web_selfservice": "true",
|
||||
"restrict_web_selfservice_manager": "true",
|
||||
"restrict_wlb": "true",
|
||||
"restrict_xcm": "true",
|
||||
"restrict_xen_motion": "false",
|
||||
"serialnumber": "",
|
||||
"sku_marketing_name": "Citrix XenServer",
|
||||
"sku_type": "free",
|
||||
"sockets": "2",
|
||||
"state": "",
|
||||
"version": ""
|
||||
},
|
||||
"license_server": {
|
||||
"address": "localhost",
|
||||
"port": "27000"
|
||||
},
|
||||
"local_cache_sr": "OpaqueRef:ed72d7bf-4e53-67fc-17f5-e27b203042ba",
|
||||
"logging": {},
|
||||
"memory_overhead": "4606619648",
|
||||
"metrics": "OpaqueRef:82b6937a-60c2-96d8-4e78-9f9a1143033f",
|
||||
"name_description": "",
|
||||
"name_label": "ansible-test-host-1",
|
||||
"other_config": {
|
||||
"agent_start_time": "1532019557.",
|
||||
"boot_time": "1530023264.",
|
||||
"iscsi_iqn": "iqn.2018-06.com.example:c8bac750",
|
||||
"last_blob_sync_time": "1547394076.36",
|
||||
"multipathhandle": "dmp",
|
||||
"multipathing": "true"
|
||||
},
|
||||
"patches": [
|
||||
"OpaqueRef:f74ca18d-cfb7-e4fe-e5c4-819843de11e2",
|
||||
"OpaqueRef:f53ff05e-8dd8-3a15-d3b0-8dcf6004fbe2",
|
||||
"OpaqueRef:ed7f38da-1a50-a48b-60bf-933cabe8d7bc",
|
||||
"OpaqueRef:e7bb1462-51a5-1aaf-3b56-11b8ebd83a94",
|
||||
"OpaqueRef:d87b343b-6ba3-db8b-b80e-e02319ba5924",
|
||||
"OpaqueRef:ccb00450-ed04-4eaa-e6d7-130ef3722374",
|
||||
"OpaqueRef:b79b8864-11d9-1d5f-09e5-a66d7b64b9e2",
|
||||
"OpaqueRef:9bebcc7d-61ae-126b-3be0-9156026e586f",
|
||||
"OpaqueRef:740a1156-b991-00b8-ef50-fdbb22a4d911",
|
||||
"OpaqueRef:71def430-754b-2bfb-6c93-ec3b67b754e4",
|
||||
"OpaqueRef:6c73b00d-df66-1740-9578-2b14e46297ba",
|
||||
"OpaqueRef:6a53d2ae-3d6b-32ed-705f-fd53f1304470",
|
||||
"OpaqueRef:35a67684-b094-1c77-beff-8237d87c7a27",
|
||||
"OpaqueRef:33da42c2-c421-9859-79b7-ce9b6c394a1b",
|
||||
"OpaqueRef:2baa6b4b-9bbe-c1b2-23ce-c8c831ac581d",
|
||||
"OpaqueRef:2ac3beea-dee2-44e7-9f67-5fd216e593a0",
|
||||
"OpaqueRef:1bd8f24b-3190-6e7a-b36e-e2998197d062",
|
||||
"OpaqueRef:1694ea26-4930-6ca1-036e-273438375de9",
|
||||
"OpaqueRef:09813f03-0c6f-a6af-768f-ef4cdde2c641"
|
||||
],
|
||||
"power_on_config": {},
|
||||
"power_on_mode": "",
|
||||
"resident_VMs": [],
|
||||
"sched_policy": "credit",
|
||||
"software_version": {
|
||||
"build_number": "release/falcon/master/8",
|
||||
"date": "2017-05-11",
|
||||
"db_schema": "5.120",
|
||||
"dbv": "2017.0517",
|
||||
"hostname": "f7d02093adae",
|
||||
"linux": "4.4.0+10",
|
||||
"network_backend": "openvswitch",
|
||||
"platform_name": "XCP",
|
||||
"platform_version": "2.3.0",
|
||||
"product_brand": "XenServer",
|
||||
"product_version": "7.2.0",
|
||||
"product_version_text": "7.2",
|
||||
"product_version_text_short": "7.2",
|
||||
"xapi": "1.9",
|
||||
"xen": "4.7.5-2.12",
|
||||
"xencenter_max": "2.7",
|
||||
"xencenter_min": "2.7"
|
||||
},
|
||||
"ssl_legacy": true,
|
||||
"supported_bootloaders": [
|
||||
"pygrub",
|
||||
"eliloader"
|
||||
],
|
||||
"suspend_image_sr": "OpaqueRef:ed72d7bf-4e53-67fc-17f5-e27b203042ba",
|
||||
"tags": [],
|
||||
"updates": [
|
||||
"OpaqueRef:b71938bf-4c4f-eb17-7e78-588e71297a74",
|
||||
"OpaqueRef:91cfa47b-52f9-a4e3-4e78-52e3eb3e5141",
|
||||
"OpaqueRef:e2209ae9-5362-3a20-f691-9294144e49f2",
|
||||
"OpaqueRef:6ac77a0f-f079-8067-85cc-c9ae2f8dcca9",
|
||||
"OpaqueRef:a17e721d-faf4-6ad1-c617-dd4899279534",
|
||||
"OpaqueRef:6c9b814c-e1c2-b8be-198f-de358686b10a",
|
||||
"OpaqueRef:fbaabbfe-88d5-d89b-5b3f-d6374601ca71",
|
||||
"OpaqueRef:9eccc765-9726-d220-96b1-2e85adf77ecc",
|
||||
"OpaqueRef:204558d7-dce0-2304-bdc5-80ec5fd7e3c3",
|
||||
"OpaqueRef:65b14ae7-f440-0c4d-4af9-c7946b90fd2f",
|
||||
"OpaqueRef:0760c608-b02e-743a-18a1-fa8f205374d6",
|
||||
"OpaqueRef:1ced32ca-fec4-8b44-0e8f-753c97f2d93f",
|
||||
"OpaqueRef:3fffd7c7-f4d1-6b03-a5b8-d75211bb7b8f",
|
||||
"OpaqueRef:01befb95-412e-e9dd-5b5d-edd50df61cb1",
|
||||
"OpaqueRef:a3f9481e-fe3d-1f00-235f-44d404f51128",
|
||||
"OpaqueRef:507ee5fc-59d3-e635-21d5-98a5cace4bf2",
|
||||
"OpaqueRef:7b4b5da1-54af-d0c4-3fea-394b4257bffe",
|
||||
"OpaqueRef:f61edc83-91d9-a161-113f-00c110196238",
|
||||
"OpaqueRef:7efce157-9b93-d116-f3f8-7eb0c6fb1a79"
|
||||
],
|
||||
"updates_requiring_reboot": [],
|
||||
"uuid": "92ac8132-276b-4d0f-9d3a-54db51e4a438",
|
||||
"virtual_hardware_platform_versions": [
|
||||
"0",
|
||||
"1",
|
||||
"2"
|
||||
]
|
||||
}
|
||||
},
|
||||
"network": {
|
||||
"OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724": {
|
||||
"MTU": "1500",
|
||||
"PIFs": [],
|
||||
"VIFs": [],
|
||||
"allowed_operations": [],
|
||||
"assigned_ips": {
|
||||
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": "169.254.0.3",
|
||||
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": "169.254.0.2"
|
||||
},
|
||||
"blobs": {},
|
||||
"bridge": "xenapi",
|
||||
"current_operations": {},
|
||||
"default_locking_mode": "unlocked",
|
||||
"managed": true,
|
||||
"name_description": "Network on which guests will be assigned a private link-local IP address which can be used to talk XenAPI",
|
||||
"name_label": "Host internal management network",
|
||||
"other_config": {
|
||||
"ip_begin": "169.254.0.1",
|
||||
"ip_end": "169.254.255.254",
|
||||
"is_guest_installer_network": "true",
|
||||
"is_host_internal_management_network": "true",
|
||||
"netmask": "255.255.0.0"
|
||||
},
|
||||
"tags": [],
|
||||
"uuid": "dbb96525-944f-0d1a-54ed-e65cb6d07450"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,87 @@
|
||||
{
|
||||
"cdrom": {
|
||||
"type": "none"
|
||||
},
|
||||
"customization_agent": "custom",
|
||||
"disks": [
|
||||
{
|
||||
"name": "ansible-test-vm-2-root",
|
||||
"name_desc": "/",
|
||||
"os_device": "xvda",
|
||||
"size": 10737418240,
|
||||
"sr": "Ansible Test Storage 1",
|
||||
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"vbd_userdevice": "0"
|
||||
},
|
||||
{
|
||||
"name": "ansible-test-vm-2-mysql",
|
||||
"name_desc": "/var/lib/mysql",
|
||||
"os_device": "xvdb",
|
||||
"size": 1073741824,
|
||||
"sr": "Ansible Test Storage 1",
|
||||
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"vbd_userdevice": "1"
|
||||
}
|
||||
],
|
||||
"domid": "140",
|
||||
"folder": "/Ansible/Test",
|
||||
"hardware": {
|
||||
"memory_mb": 1024,
|
||||
"num_cpu_cores_per_socket": 1,
|
||||
"num_cpus": 1
|
||||
},
|
||||
"home_server": "ansible-test-host-2",
|
||||
"is_template": false,
|
||||
"name": "ansible-test-vm-2",
|
||||
"name_desc": "Created by Ansible",
|
||||
"networks": [
|
||||
{
|
||||
"gateway": "10.0.0.1",
|
||||
"gateway6": "",
|
||||
"ip": "169.254.0.2",
|
||||
"ip6": [],
|
||||
"mac": "16:87:31:70:d6:31",
|
||||
"mtu": "1500",
|
||||
"name": "Host internal management network",
|
||||
"netmask": "255.255.255.0",
|
||||
"prefix": "24",
|
||||
"prefix6": "",
|
||||
"vif_device": "0"
|
||||
}
|
||||
],
|
||||
"other_config": {
|
||||
"base_template_name": "CentOS 7",
|
||||
"folder": "/Ansible/Test",
|
||||
"import_task": "OpaqueRef:cf1402d3-b6c1-d908-fe62-06502e3b311a",
|
||||
"install-methods": "cdrom,nfs,http,ftp",
|
||||
"instant": "true",
|
||||
"linux_template": "true",
|
||||
"mac_seed": "0ab46664-f519-5383-166e-e4ea485ede7d"
|
||||
},
|
||||
"platform": {
|
||||
"acpi": "1",
|
||||
"apic": "true",
|
||||
"cores-per-socket": "1",
|
||||
"device_id": "0001",
|
||||
"nx": "true",
|
||||
"pae": "true",
|
||||
"timeoffset": "0",
|
||||
"vga": "std",
|
||||
"videoram": "8",
|
||||
"viridian": "false"
|
||||
},
|
||||
"state": "poweredon",
|
||||
"uuid": "0a05d5ad-3e4b-f0dc-6101-8c56623958bc",
|
||||
"xenstore_data": {
|
||||
"vm-data": "",
|
||||
"vm-data/networks": "",
|
||||
"vm-data/networks/0": "",
|
||||
"vm-data/networks/0/gateway": "10.0.0.1",
|
||||
"vm-data/networks/0/ip": "10.0.0.3",
|
||||
"vm-data/networks/0/mac": "16:87:31:70:d6:31",
|
||||
"vm-data/networks/0/name": "Host internal management network",
|
||||
"vm-data/networks/0/netmask": "255.255.255.0",
|
||||
"vm-data/networks/0/prefix": "24",
|
||||
"vm-data/networks/0/type": "static"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,771 @@
|
||||
{
|
||||
"SR": {
|
||||
"OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f": {
|
||||
"PBDs": [],
|
||||
"VDIs": [],
|
||||
"allowed_operations": [
|
||||
"unplug",
|
||||
"plug",
|
||||
"pbd_create",
|
||||
"update",
|
||||
"pbd_destroy",
|
||||
"vdi_resize",
|
||||
"vdi_clone",
|
||||
"scan",
|
||||
"vdi_snapshot",
|
||||
"vdi_mirror",
|
||||
"vdi_create",
|
||||
"vdi_destroy"
|
||||
],
|
||||
"blobs": {},
|
||||
"clustered": false,
|
||||
"content_type": "",
|
||||
"current_operations": {},
|
||||
"introduced_by": "OpaqueRef:NULL",
|
||||
"is_tools_sr": false,
|
||||
"local_cache_enabled": false,
|
||||
"name_description": "",
|
||||
"name_label": "Ansible Test Storage 1",
|
||||
"other_config": {
|
||||
"auto-scan": "false"
|
||||
},
|
||||
"physical_size": "2521133219840",
|
||||
"physical_utilisation": "1551485632512",
|
||||
"shared": true,
|
||||
"sm_config": {
|
||||
"allocation": "thick",
|
||||
"devserial": "scsi-3600a098038302d353624495242443848",
|
||||
"multipathable": "true",
|
||||
"use_vhd": "true"
|
||||
},
|
||||
"tags": [],
|
||||
"type": "lvmohba",
|
||||
"uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"virtual_allocation": "1556925644800"
|
||||
}
|
||||
},
|
||||
"VBD": {
|
||||
"OpaqueRef:510e214e-f0ba-3bc9-7834-a4f4d3fa33ef": {
|
||||
"VDI": "OpaqueRef:NULL",
|
||||
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unpause",
|
||||
"insert",
|
||||
"pause"
|
||||
],
|
||||
"bootable": false,
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "xvdd",
|
||||
"empty": true,
|
||||
"metrics": "OpaqueRef:1075bebe-ba71-66ef-ba30-8afbc83bc6b5",
|
||||
"mode": "RO",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "CD",
|
||||
"unpluggable": true,
|
||||
"userdevice": "3",
|
||||
"uuid": "79ee1d8e-944b-3bfd-ba4c-a0c165d84f3d"
|
||||
},
|
||||
"OpaqueRef:6bc2c353-f132-926d-6e9b-e4d1d55a3760": {
|
||||
"VDI": "OpaqueRef:102bef39-b134-d23a-9a50-490e1dbca8f7",
|
||||
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unpause",
|
||||
"pause"
|
||||
],
|
||||
"bootable": true,
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "xvda",
|
||||
"empty": false,
|
||||
"metrics": "OpaqueRef:1c71ccde-d7e9-10fb-569c-993b880fa790",
|
||||
"mode": "RW",
|
||||
"other_config": {
|
||||
"owner": ""
|
||||
},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "Disk",
|
||||
"unpluggable": false,
|
||||
"userdevice": "0",
|
||||
"uuid": "932fdf6d-7ac5-45e8-a48e-694af75726f1"
|
||||
},
|
||||
"OpaqueRef:9bd6decd-2e55-b55e-387d-c40aa67ff151": {
|
||||
"VDI": "OpaqueRef:87b45ac6-af36-f4fd-6ebd-a08bed9001e4",
|
||||
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unpause",
|
||||
"unplug",
|
||||
"unplug_force",
|
||||
"pause"
|
||||
],
|
||||
"bootable": false,
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "xvdb",
|
||||
"empty": false,
|
||||
"metrics": "OpaqueRef:b8424146-d3ea-4850-db9a-47f0059c10ac",
|
||||
"mode": "RW",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "Disk",
|
||||
"unpluggable": true,
|
||||
"userdevice": "1",
|
||||
"uuid": "c0c1e648-3690-e1fb-9f47-24b4df0cb458"
|
||||
}
|
||||
},
|
||||
"VDI": {
|
||||
"OpaqueRef:102bef39-b134-d23a-9a50-490e1dbca8f7": {
|
||||
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
|
||||
"VBDs": [
|
||||
"OpaqueRef:6bc2c353-f132-926d-6e9b-e4d1d55a3760"
|
||||
],
|
||||
"allow_caching": false,
|
||||
"allowed_operations": [
|
||||
"clone",
|
||||
"snapshot"
|
||||
],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"is_a_snapshot": false,
|
||||
"is_tools_iso": false,
|
||||
"location": "fa1202b8-326f-4235-802e-fafbed66b26b",
|
||||
"managed": true,
|
||||
"metadata_latest": false,
|
||||
"metadata_of_pool": "",
|
||||
"missing": false,
|
||||
"name_description": "/",
|
||||
"name_label": "ansible-test-vm-2-root",
|
||||
"on_boot": "persist",
|
||||
"other_config": {},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"physical_utilisation": "10766778368",
|
||||
"read_only": false,
|
||||
"sharable": false,
|
||||
"sm_config": {
|
||||
"host_OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0": "RW",
|
||||
"read-caching-enabled-on-dff6702e-bcb6-4704-8dd4-952e8c883365": "false",
|
||||
"read-caching-reason-dff6702e-bcb6-4704-8dd4-952e8c883365": "LICENSE_RESTRICTION",
|
||||
"vdi_type": "vhd"
|
||||
},
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"storage_lock": false,
|
||||
"tags": [],
|
||||
"type": "system",
|
||||
"uuid": "fa1202b8-326f-4235-802e-fafbed66b26b",
|
||||
"virtual_size": "10737418240",
|
||||
"xenstore_data": {}
|
||||
},
|
||||
"OpaqueRef:87b45ac6-af36-f4fd-6ebd-a08bed9001e4": {
|
||||
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
|
||||
"VBDs": [
|
||||
"OpaqueRef:9bd6decd-2e55-b55e-387d-c40aa67ff151"
|
||||
],
|
||||
"allow_caching": false,
|
||||
"allowed_operations": [
|
||||
"clone",
|
||||
"snapshot"
|
||||
],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"is_a_snapshot": false,
|
||||
"is_tools_iso": false,
|
||||
"location": "ab3a4d72-f498-4687-86ce-ca937046db76",
|
||||
"managed": true,
|
||||
"metadata_latest": false,
|
||||
"metadata_of_pool": "",
|
||||
"missing": false,
|
||||
"name_description": "/var/lib/mysql",
|
||||
"name_label": "ansible-test-vm-2-mysql",
|
||||
"on_boot": "persist",
|
||||
"other_config": {},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"physical_utilisation": "1082130432",
|
||||
"read_only": false,
|
||||
"sharable": false,
|
||||
"sm_config": {
|
||||
"host_OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0": "RW",
|
||||
"read-caching-enabled-on-dff6702e-bcb6-4704-8dd4-952e8c883365": "false",
|
||||
"read-caching-reason-dff6702e-bcb6-4704-8dd4-952e8c883365": "LICENSE_RESTRICTION",
|
||||
"vdi_type": "vhd"
|
||||
},
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"storage_lock": false,
|
||||
"tags": [],
|
||||
"type": "user",
|
||||
"uuid": "ab3a4d72-f498-4687-86ce-ca937046db76",
|
||||
"virtual_size": "1073741824",
|
||||
"xenstore_data": {}
|
||||
}
|
||||
},
|
||||
"VIF": {
|
||||
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": {
|
||||
"MAC": "16:87:31:70:d6:31",
|
||||
"MAC_autogenerated": false,
|
||||
"MTU": "1500",
|
||||
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"unplug"
|
||||
],
|
||||
"current_operations": {},
|
||||
"currently_attached": true,
|
||||
"device": "0",
|
||||
"ipv4_addresses": [],
|
||||
"ipv4_allowed": [],
|
||||
"ipv4_configuration_mode": "None",
|
||||
"ipv4_gateway": "",
|
||||
"ipv6_addresses": [],
|
||||
"ipv6_allowed": [],
|
||||
"ipv6_configuration_mode": "None",
|
||||
"ipv6_gateway": "",
|
||||
"locking_mode": "network_default",
|
||||
"metrics": "OpaqueRef:d74d5f20-f0ab-ee36-9a74-496ffb994232",
|
||||
"network": "OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"uuid": "07b70134-9396-94fc-5105-179b430ce4f8"
|
||||
}
|
||||
},
|
||||
"VM": {
|
||||
"OpaqueRef:08632af0-473e-5106-f400-7910229e49be": {
|
||||
"HVM_boot_params": {
|
||||
"order": "cdn"
|
||||
},
|
||||
"HVM_boot_policy": "BIOS order",
|
||||
"HVM_shadow_multiplier": 1.0,
|
||||
"PCI_bus": "",
|
||||
"PV_args": "",
|
||||
"PV_bootloader": "",
|
||||
"PV_bootloader_args": "",
|
||||
"PV_kernel": "",
|
||||
"PV_legacy_args": "",
|
||||
"PV_ramdisk": "",
|
||||
"VBDs": [
|
||||
"OpaqueRef:510e214e-f0ba-3bc9-7834-a4f4d3fa33ef",
|
||||
"OpaqueRef:9bd6decd-2e55-b55e-387d-c40aa67ff151",
|
||||
"OpaqueRef:6bc2c353-f132-926d-6e9b-e4d1d55a3760"
|
||||
],
|
||||
"VCPUs_at_startup": "1",
|
||||
"VCPUs_max": "1",
|
||||
"VCPUs_params": {},
|
||||
"VGPUs": [],
|
||||
"VIFs": [
|
||||
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9"
|
||||
],
|
||||
"VTPMs": [],
|
||||
"actions_after_crash": "restart",
|
||||
"actions_after_reboot": "restart",
|
||||
"actions_after_shutdown": "destroy",
|
||||
"affinity": "OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0",
|
||||
"allowed_operations": [
|
||||
"changing_dynamic_range",
|
||||
"migrate_send",
|
||||
"pool_migrate",
|
||||
"changing_VCPUs_live",
|
||||
"suspend",
|
||||
"hard_reboot",
|
||||
"hard_shutdown",
|
||||
"clean_reboot",
|
||||
"clean_shutdown",
|
||||
"pause",
|
||||
"checkpoint",
|
||||
"snapshot"
|
||||
],
|
||||
"appliance": "OpaqueRef:NULL",
|
||||
"attached_PCIs": [],
|
||||
"bios_strings": {
|
||||
"bios-vendor": "Xen",
|
||||
"bios-version": "",
|
||||
"hp-rombios": "",
|
||||
"oem-1": "Xen",
|
||||
"oem-2": "MS_VM_CERT/SHA1/bdbeb6e0a816d43fa6d3fe8aaef04c2bad9d3e3d",
|
||||
"system-manufacturer": "Xen",
|
||||
"system-product-name": "HVM domU",
|
||||
"system-serial-number": "",
|
||||
"system-version": ""
|
||||
},
|
||||
"blobs": {},
|
||||
"blocked_operations": {},
|
||||
"children": [],
|
||||
"consoles": [
|
||||
"OpaqueRef:2a24e023-a856-de30-aea3-2024bacdc71f"
|
||||
],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"domarch": "",
|
||||
"domid": "140",
|
||||
"generation_id": "",
|
||||
"guest_metrics": "OpaqueRef:150d2dfa-b634-7965-92ab-31fc26382683",
|
||||
"ha_always_run": false,
|
||||
"ha_restart_priority": "",
|
||||
"hardware_platform_version": "0",
|
||||
"has_vendor_device": false,
|
||||
"is_a_snapshot": false,
|
||||
"is_a_template": false,
|
||||
"is_control_domain": false,
|
||||
"is_default_template": false,
|
||||
"is_snapshot_from_vmpp": false,
|
||||
"is_vmss_snapshot": false,
|
||||
"last_boot_CPU_flags": {
|
||||
"features": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
|
||||
"vendor": "GenuineIntel"
|
||||
},
|
||||
"last_booted_record": "",
|
||||
"memory_dynamic_max": "1073741824",
|
||||
"memory_dynamic_min": "1073741824",
|
||||
"memory_overhead": "11534336",
|
||||
"memory_static_max": "1073741824",
|
||||
"memory_static_min": "1073741824",
|
||||
"memory_target": "1073741824",
|
||||
"metrics": "OpaqueRef:b56b460b-6476-304d-b143-ce543ffab828",
|
||||
"name_description": "Created by Ansible",
|
||||
"name_label": "ansible-test-vm-2",
|
||||
"order": "0",
|
||||
"other_config": {
|
||||
"base_template_name": "CentOS 7",
|
||||
"folder": "/Ansible/Test",
|
||||
"import_task": "OpaqueRef:cf1402d3-b6c1-d908-fe62-06502e3b311a",
|
||||
"install-methods": "cdrom,nfs,http,ftp",
|
||||
"instant": "true",
|
||||
"linux_template": "true",
|
||||
"mac_seed": "0ab46664-f519-5383-166e-e4ea485ede7d"
|
||||
},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"platform": {
|
||||
"acpi": "1",
|
||||
"apic": "true",
|
||||
"cores-per-socket": "1",
|
||||
"device_id": "0001",
|
||||
"nx": "true",
|
||||
"pae": "true",
|
||||
"timeoffset": "0",
|
||||
"vga": "std",
|
||||
"videoram": "8",
|
||||
"viridian": "false"
|
||||
},
|
||||
"power_state": "Running",
|
||||
"protection_policy": "OpaqueRef:NULL",
|
||||
"recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"549755813888\" /><restriction field=\"vcpus-max\" max=\"16\" /><restriction property=\"number-of-vbds\" max=\"16\" /><restriction property=\"number-of-vifs\" max=\"7\" /><restriction field=\"allow-gpu-passthrough\" value=\"0\" /></restrictions>",
|
||||
"reference_label": "",
|
||||
"requires_reboot": false,
|
||||
"resident_on": "OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0",
|
||||
"shutdown_delay": "0",
|
||||
"snapshot_info": {},
|
||||
"snapshot_metadata": "",
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_schedule": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"start_delay": "0",
|
||||
"suspend_SR": "OpaqueRef:NULL",
|
||||
"suspend_VDI": "OpaqueRef:NULL",
|
||||
"tags": [],
|
||||
"transportable_snapshot_id": "",
|
||||
"user_version": "1",
|
||||
"uuid": "0a05d5ad-3e4b-f0dc-6101-8c56623958bc",
|
||||
"version": "0",
|
||||
"xenstore_data": {
|
||||
"vm-data": "",
|
||||
"vm-data/networks": "",
|
||||
"vm-data/networks/0": "",
|
||||
"vm-data/networks/0/gateway": "10.0.0.1",
|
||||
"vm-data/networks/0/ip": "10.0.0.3",
|
||||
"vm-data/networks/0/mac": "16:87:31:70:d6:31",
|
||||
"vm-data/networks/0/name": "Host internal management network",
|
||||
"vm-data/networks/0/netmask": "255.255.255.0",
|
||||
"vm-data/networks/0/prefix": "24",
|
||||
"vm-data/networks/0/type": "static"
|
||||
}
|
||||
}
|
||||
},
|
||||
"VM_guest_metrics": {
|
||||
"OpaqueRef:150d2dfa-b634-7965-92ab-31fc26382683": {
|
||||
"PV_drivers_detected": true,
|
||||
"PV_drivers_up_to_date": true,
|
||||
"PV_drivers_version": {
|
||||
"build": "90977",
|
||||
"major": "6",
|
||||
"micro": "0",
|
||||
"minor": "5"
|
||||
},
|
||||
"can_use_hotplug_vbd": "unspecified",
|
||||
"can_use_hotplug_vif": "unspecified",
|
||||
"disks": {},
|
||||
"last_updated": "20190113T19:36:26Z",
|
||||
"live": true,
|
||||
"memory": {},
|
||||
"networks": {
|
||||
"0/ip": "169.254.0.2"
|
||||
},
|
||||
"os_version": {
|
||||
"distro": "centos",
|
||||
"major": "7",
|
||||
"minor": "2",
|
||||
"name": "CentOS Linux release 7.2.1511 (Core)",
|
||||
"uname": "3.10.0-327.22.2.el7.x86_64"
|
||||
},
|
||||
"other": {
|
||||
"feature-balloon": "1",
|
||||
"feature-shutdown": "1",
|
||||
"feature-suspend": "1",
|
||||
"feature-vcpu-hotplug": "1",
|
||||
"has-vendor-device": "0",
|
||||
"platform-feature-multiprocessor-suspend": "1"
|
||||
},
|
||||
"other_config": {},
|
||||
"uuid": "5c9d1be5-7eee-88f2-46c3-df1d44f9cdb5"
|
||||
}
|
||||
},
|
||||
"VM_metrics": {
|
||||
"OpaqueRef:b56b460b-6476-304d-b143-ce543ffab828": {
|
||||
"VCPUs_CPU": {},
|
||||
"VCPUs_flags": {},
|
||||
"VCPUs_number": "1",
|
||||
"VCPUs_params": {},
|
||||
"VCPUs_utilisation": {},
|
||||
"hvm": true,
|
||||
"install_time": "20190113T19:32:46Z",
|
||||
"last_updated": "19700101T00:00:00Z",
|
||||
"memory_actual": "1073729536",
|
||||
"nested_virt": false,
|
||||
"nomigrate": false,
|
||||
"other_config": {},
|
||||
"start_time": "20190113T19:35:15Z",
|
||||
"state": [],
|
||||
"uuid": "876dd44c-aad1-97bf-9ee5-4cd58eac7163"
|
||||
}
|
||||
},
|
||||
"host": {
|
||||
"OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0": {
|
||||
"API_version_major": "2",
|
||||
"API_version_minor": "7",
|
||||
"API_version_vendor": "XenSource",
|
||||
"API_version_vendor_implementation": {},
|
||||
"PBDs": [],
|
||||
"PCIs": [],
|
||||
"PGPUs": [],
|
||||
"PIFs": [],
|
||||
"address": "10.0.0.1",
|
||||
"allowed_operations": [
|
||||
"vm_migrate",
|
||||
"provision",
|
||||
"vm_resume",
|
||||
"evacuate",
|
||||
"vm_start"
|
||||
],
|
||||
"bios_strings": {},
|
||||
"blobs": {},
|
||||
"capabilities": [
|
||||
"xen-3.0-x86_64",
|
||||
"xen-3.0-x86_32p",
|
||||
"hvm-3.0-x86_32",
|
||||
"hvm-3.0-x86_32p",
|
||||
"hvm-3.0-x86_64",
|
||||
""
|
||||
],
|
||||
"chipset_info": {
|
||||
"iommu": "true"
|
||||
},
|
||||
"control_domain": "OpaqueRef:ffcc92a1-8fde-df6f-a501-44b37811286b",
|
||||
"cpu_configuration": {},
|
||||
"cpu_info": {
|
||||
"cpu_count": "40",
|
||||
"family": "6",
|
||||
"features": "7ffefbff-bfebfbff-00000021-2c100800",
|
||||
"features_hvm": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
|
||||
"features_pv": "17c9cbf5-f6f83203-2191cbf5-00000023-00000001-00000329-00000000-00000000-00001000-0c000000",
|
||||
"flags": "fpu de tsc msr pae mce cx8 apic sep mca cmov pat clflush acpi mmx fxsr sse sse2 ht syscall nx lm constant_tsc arch_perfmon rep_good nopl nonstop_tsc eagerfpu pni pclmulqdq monitor est ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm ida arat epb pln pts dtherm fsgsbase bmi1 avx2 bmi2 erms xsaveopt cqm_llc cqm_occup_llc",
|
||||
"model": "63",
|
||||
"modelname": "Intel(R) Xeon(R) CPU E5-2660 v3 @ 2.60GHz",
|
||||
"socket_count": "2",
|
||||
"speed": "2597.070",
|
||||
"stepping": "2",
|
||||
"vendor": "GenuineIntel"
|
||||
},
|
||||
"crash_dump_sr": "OpaqueRef:0b984cec-a36c-ce84-7b34-9f0088352d55",
|
||||
"crashdumps": [],
|
||||
"current_operations": {},
|
||||
"display": "enabled",
|
||||
"edition": "free",
|
||||
"enabled": true,
|
||||
"external_auth_configuration": {},
|
||||
"external_auth_service_name": "",
|
||||
"external_auth_type": "",
|
||||
"features": [],
|
||||
"guest_VCPUs_params": {},
|
||||
"ha_network_peers": [],
|
||||
"ha_statefiles": [],
|
||||
"host_CPUs": [
|
||||
"OpaqueRef:ec3ba9c4-9b57-236b-3eaa-b157affc1621",
|
||||
"OpaqueRef:e6de7ab3-f4ad-f271-e51b-e3d8c041d3fb",
|
||||
"OpaqueRef:e519ef88-bf41-86ac-16b3-c178cb4b78b1",
|
||||
"OpaqueRef:e48f1bc1-98ba-89e5-ab69-821c625f7f82",
|
||||
"OpaqueRef:e2659936-3de6-dbca-cc44-4af50960b2b7",
|
||||
"OpaqueRef:d0da1e31-20ac-4aff-8897-e80df8200648",
|
||||
"OpaqueRef:cec473ba-41a8-439d-b397-be0c60467b5d",
|
||||
"OpaqueRef:ce88014d-b06c-c959-0624-04d79b791885",
|
||||
"OpaqueRef:c656ca58-41fe-3689-d322-174aa5798beb",
|
||||
"OpaqueRef:c0a21f14-8f46-19de-1cf4-530a34c4aa17",
|
||||
"OpaqueRef:bf70c061-7b45-0497-7ef6-65a236e898e8",
|
||||
"OpaqueRef:b7a2ba0f-f11b-3633-ad47-4f5f76a600a8",
|
||||
"OpaqueRef:b4fef1fa-3aae-9790-f47e-6a17f645339c",
|
||||
"OpaqueRef:b4594721-f8f4-4475-61c5-4efeec1733f1",
|
||||
"OpaqueRef:9dcba36f-c29f-478f-f578-d1ea347410a6",
|
||||
"OpaqueRef:987897e8-1184-917e-6a5f-e205d0c739e5",
|
||||
"OpaqueRef:90f06d64-be18-7fdf-36ba-bbd696a26cf3",
|
||||
"OpaqueRef:90150bc1-e604-4cd4-35ad-9cfa8e985de3",
|
||||
"OpaqueRef:838f4ad4-8ad2-0d6c-a74e-26baa461de3d",
|
||||
"OpaqueRef:736fb523-d347-e8c0-089b-c9811d3c1195",
|
||||
"OpaqueRef:7137b479-87d4-9097-a684-e54cc4de5d09",
|
||||
"OpaqueRef:6e08fa1d-7d7b-d9be-1574-ffe95bd515fd",
|
||||
"OpaqueRef:6b9e6ecd-54e5-4248-5aea-ee5b99248818",
|
||||
"OpaqueRef:65d56b24-3445-b444-5125-c91e6966fd29",
|
||||
"OpaqueRef:60908eca-1e5c-c938-5b76-e8ff9d8899ab",
|
||||
"OpaqueRef:46e96878-c076-2164-2373-6cdd108c2436",
|
||||
"OpaqueRef:40ccdaf4-6008-2b83-92cb-ca197f73433f",
|
||||
"OpaqueRef:3bc8133a-ccb2-6790-152f-b3f577517751",
|
||||
"OpaqueRef:38c8edd8-0621-76de-53f6-86bef2a9e05c",
|
||||
"OpaqueRef:342c1bab-a211-a0eb-79a5-780bd5ad1f23",
|
||||
"OpaqueRef:1e20e6d0-5502-0dff-4f17-5d35eb833af1",
|
||||
"OpaqueRef:176baafa-0e63-7000-f754-25e2a6b74959",
|
||||
"OpaqueRef:16cab1a2-0111-b2af-6dfe-3724b79e6b6b",
|
||||
"OpaqueRef:0f213647-8362-9c5e-e99b-0ebaefc609ce",
|
||||
"OpaqueRef:0e019819-b41f-0bfb-d4ee-dd5484fea9b6",
|
||||
"OpaqueRef:0d39212f-82ba-190c-b304-19b3fa491fff",
|
||||
"OpaqueRef:087ce3ad-3b66-ae1e-3130-3ae640dcc638",
|
||||
"OpaqueRef:0730f24c-87ed-8296-8f14-3036e5ad2357",
|
||||
"OpaqueRef:04c27426-4895-39a7-9ade-ef33d3721c26",
|
||||
"OpaqueRef:017b27bf-0270-19e7-049a-5a9b3bb54898"
|
||||
],
|
||||
"hostname": "ansible-test-host-2",
|
||||
"license_params": {
|
||||
"address1": "",
|
||||
"address2": "",
|
||||
"city": "",
|
||||
"company": "",
|
||||
"country": "",
|
||||
"enable_xha": "true",
|
||||
"expiry": "20291231T23:00:00Z",
|
||||
"grace": "no",
|
||||
"license_type": "",
|
||||
"name": "",
|
||||
"platform_filter": "false",
|
||||
"postalcode": "",
|
||||
"productcode": "",
|
||||
"regular_nag_dialog": "false",
|
||||
"restrict_ad": "false",
|
||||
"restrict_batch_hotfix_apply": "true",
|
||||
"restrict_checkpoint": "false",
|
||||
"restrict_cifs": "true",
|
||||
"restrict_connection": "false",
|
||||
"restrict_cpu_masking": "false",
|
||||
"restrict_dmc": "false",
|
||||
"restrict_dr": "false",
|
||||
"restrict_email_alerting": "false",
|
||||
"restrict_equalogic": "false",
|
||||
"restrict_export_resource_data": "true",
|
||||
"restrict_gpu": "false",
|
||||
"restrict_guest_agent_auto_update": "true",
|
||||
"restrict_guest_ip_setting": "false",
|
||||
"restrict_health_check": "false",
|
||||
"restrict_historical_performance": "false",
|
||||
"restrict_hotfix_apply": "false",
|
||||
"restrict_integrated_gpu_passthrough": "false",
|
||||
"restrict_intellicache": "false",
|
||||
"restrict_lab": "false",
|
||||
"restrict_live_patching": "true",
|
||||
"restrict_marathon": "false",
|
||||
"restrict_nested_virt": "true",
|
||||
"restrict_netapp": "false",
|
||||
"restrict_pci_device_for_auto_update": "true",
|
||||
"restrict_pool_attached_storage": "false",
|
||||
"restrict_pooling": "false",
|
||||
"restrict_pvs_proxy": "true",
|
||||
"restrict_qos": "false",
|
||||
"restrict_rbac": "false",
|
||||
"restrict_read_caching": "true",
|
||||
"restrict_set_vcpus_number_live": "true",
|
||||
"restrict_ssl_legacy_switch": "false",
|
||||
"restrict_stage": "false",
|
||||
"restrict_storage_xen_motion": "false",
|
||||
"restrict_storagelink": "false",
|
||||
"restrict_storagelink_site_recovery": "false",
|
||||
"restrict_vgpu": "true",
|
||||
"restrict_vif_locking": "false",
|
||||
"restrict_vlan": "false",
|
||||
"restrict_vm_memory_introspection": "true",
|
||||
"restrict_vmpr": "false",
|
||||
"restrict_vmss": "false",
|
||||
"restrict_vss": "false",
|
||||
"restrict_vswitch_controller": "false",
|
||||
"restrict_web_selfservice": "true",
|
||||
"restrict_web_selfservice_manager": "true",
|
||||
"restrict_wlb": "true",
|
||||
"restrict_xcm": "true",
|
||||
"restrict_xen_motion": "false",
|
||||
"serialnumber": "",
|
||||
"sku_marketing_name": "Citrix XenServer",
|
||||
"sku_type": "free",
|
||||
"sockets": "2",
|
||||
"state": "",
|
||||
"version": ""
|
||||
},
|
||||
"license_server": {
|
||||
"address": "localhost",
|
||||
"port": "27000"
|
||||
},
|
||||
"local_cache_sr": "OpaqueRef:0b984cec-a36c-ce84-7b34-9f0088352d55",
|
||||
"logging": {},
|
||||
"memory_overhead": "4865126400",
|
||||
"metrics": "OpaqueRef:f55653cb-92eb-8257-f2ee-7a2d1c2d6aef",
|
||||
"name_description": "",
|
||||
"name_label": "ansible-test-host-2",
|
||||
"other_config": {
|
||||
"agent_start_time": "1532019582.",
|
||||
"boot_time": "1528986759.",
|
||||
"iscsi_iqn": "iqn.2018-06.com.example:87b7637d",
|
||||
"last_blob_sync_time": "1547394065.41",
|
||||
"multipathhandle": "dmp",
|
||||
"multipathing": "true"
|
||||
},
|
||||
"patches": [
|
||||
"OpaqueRef:f5bd18b6-1423-893a-5d7f-7095338e6a2d",
|
||||
"OpaqueRef:eecb0b95-87fb-a53e-651c-9741efd18bb6",
|
||||
"OpaqueRef:e92c9ef3-2e51-1a36-d400-9e237982b782",
|
||||
"OpaqueRef:cc98226c-2c08-799e-5f15-7761a398e4a0",
|
||||
"OpaqueRef:c4f35e66-d064-55a7-6946-7f4b145275a6",
|
||||
"OpaqueRef:c3794494-f894-6141-b811-f37a8fe60094",
|
||||
"OpaqueRef:bcf61af7-63a9-e430-5b7c-a740ba470596",
|
||||
"OpaqueRef:b58ac71e-797e-6f66-71ad-fe298c94fd10",
|
||||
"OpaqueRef:a2ea18fd-5343-f8db-718d-f059c2a8cce0",
|
||||
"OpaqueRef:929db459-6861-c588-158f-70f763331d6d",
|
||||
"OpaqueRef:92962d94-2205-f6e1-12f9-b55a99fd824d",
|
||||
"OpaqueRef:65dfb07a-f90d-dad9-9ab8-1cc2b1e79afb",
|
||||
"OpaqueRef:537a87c4-3bf4-969f-f06a-2dd8d3a018a2",
|
||||
"OpaqueRef:32dd1de3-c9c8-bcbb-27a0-83d4a930876d",
|
||||
"OpaqueRef:30a8ccc8-74a9-b31f-0403-66b117e281b6",
|
||||
"OpaqueRef:24545c44-ffd1-8a28-18c6-3d008bf4d63e",
|
||||
"OpaqueRef:1fcef81b-7c44-a4db-f59a-c4a147da9c49",
|
||||
"OpaqueRef:1e98a240-514b-1863-5518-c771d0ebf579",
|
||||
"OpaqueRef:1632cab2-b268-6ce8-4f7b-ce7fd4bfa1eb"
|
||||
],
|
||||
"power_on_config": {},
|
||||
"power_on_mode": "",
|
||||
"resident_VMs": [],
|
||||
"sched_policy": "credit",
|
||||
"software_version": {
|
||||
"build_number": "release/falcon/master/8",
|
||||
"date": "2017-05-11",
|
||||
"db_schema": "5.120",
|
||||
"dbv": "2017.0517",
|
||||
"hostname": "f7d02093adae",
|
||||
"linux": "4.4.0+10",
|
||||
"network_backend": "openvswitch",
|
||||
"platform_name": "XCP",
|
||||
"platform_version": "2.3.0",
|
||||
"product_brand": "XenServer",
|
||||
"product_version": "7.2.0",
|
||||
"product_version_text": "7.2",
|
||||
"product_version_text_short": "7.2",
|
||||
"xapi": "1.9",
|
||||
"xen": "4.7.5-2.12",
|
||||
"xencenter_max": "2.7",
|
||||
"xencenter_min": "2.7"
|
||||
},
|
||||
"ssl_legacy": true,
|
||||
"supported_bootloaders": [
|
||||
"pygrub",
|
||||
"eliloader"
|
||||
],
|
||||
"suspend_image_sr": "OpaqueRef:0b984cec-a36c-ce84-7b34-9f0088352d55",
|
||||
"tags": [],
|
||||
"updates": [
|
||||
"OpaqueRef:7b4b5da1-54af-d0c4-3fea-394b4257bffe",
|
||||
"OpaqueRef:fbaabbfe-88d5-d89b-5b3f-d6374601ca71",
|
||||
"OpaqueRef:507ee5fc-59d3-e635-21d5-98a5cace4bf2",
|
||||
"OpaqueRef:6c9b814c-e1c2-b8be-198f-de358686b10a",
|
||||
"OpaqueRef:a17e721d-faf4-6ad1-c617-dd4899279534",
|
||||
"OpaqueRef:6ac77a0f-f079-8067-85cc-c9ae2f8dcca9",
|
||||
"OpaqueRef:f61edc83-91d9-a161-113f-00c110196238",
|
||||
"OpaqueRef:b71938bf-4c4f-eb17-7e78-588e71297a74",
|
||||
"OpaqueRef:01befb95-412e-e9dd-5b5d-edd50df61cb1",
|
||||
"OpaqueRef:a3f9481e-fe3d-1f00-235f-44d404f51128",
|
||||
"OpaqueRef:0760c608-b02e-743a-18a1-fa8f205374d6",
|
||||
"OpaqueRef:204558d7-dce0-2304-bdc5-80ec5fd7e3c3",
|
||||
"OpaqueRef:9eccc765-9726-d220-96b1-2e85adf77ecc",
|
||||
"OpaqueRef:91cfa47b-52f9-a4e3-4e78-52e3eb3e5141",
|
||||
"OpaqueRef:3fffd7c7-f4d1-6b03-a5b8-d75211bb7b8f",
|
||||
"OpaqueRef:7efce157-9b93-d116-f3f8-7eb0c6fb1a79",
|
||||
"OpaqueRef:e2209ae9-5362-3a20-f691-9294144e49f2",
|
||||
"OpaqueRef:1ced32ca-fec4-8b44-0e8f-753c97f2d93f",
|
||||
"OpaqueRef:65b14ae7-f440-0c4d-4af9-c7946b90fd2f"
|
||||
],
|
||||
"updates_requiring_reboot": [],
|
||||
"uuid": "dff6702e-bcb6-4704-8dd4-952e8c883365",
|
||||
"virtual_hardware_platform_versions": [
|
||||
"0",
|
||||
"1",
|
||||
"2"
|
||||
]
|
||||
}
|
||||
},
|
||||
"network": {
|
||||
"OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724": {
|
||||
"MTU": "1500",
|
||||
"PIFs": [],
|
||||
"VIFs": [],
|
||||
"allowed_operations": [],
|
||||
"assigned_ips": {
|
||||
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": "169.254.0.3",
|
||||
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": "169.254.0.2"
|
||||
},
|
||||
"blobs": {},
|
||||
"bridge": "xenapi",
|
||||
"current_operations": {},
|
||||
"default_locking_mode": "unlocked",
|
||||
"managed": true,
|
||||
"name_description": "Network on which guests will be assigned a private link-local IP address which can be used to talk XenAPI",
|
||||
"name_label": "Host internal management network",
|
||||
"other_config": {
|
||||
"ip_begin": "169.254.0.1",
|
||||
"ip_end": "169.254.255.254",
|
||||
"is_guest_installer_network": "true",
|
||||
"is_host_internal_management_network": "true",
|
||||
"netmask": "255.255.0.0"
|
||||
},
|
||||
"tags": [],
|
||||
"uuid": "dbb96525-944f-0d1a-54ed-e65cb6d07450"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"cdrom": {
|
||||
"type": "none"
|
||||
},
|
||||
"customization_agent": "custom",
|
||||
"disks": [
|
||||
{
|
||||
"name": "ansible-test-vm-3-root",
|
||||
"name_desc": "/",
|
||||
"os_device": "xvda",
|
||||
"size": 8589934592,
|
||||
"sr": "Ansible Test Storage 1",
|
||||
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"vbd_userdevice": "0"
|
||||
}
|
||||
],
|
||||
"domid": "-1",
|
||||
"folder": "",
|
||||
"hardware": {
|
||||
"memory_mb": 1024,
|
||||
"num_cpu_cores_per_socket": 1,
|
||||
"num_cpus": 1
|
||||
},
|
||||
"home_server": "",
|
||||
"is_template": false,
|
||||
"name": "ansible-test-vm-3",
|
||||
"name_desc": "Created by Ansible",
|
||||
"networks": [
|
||||
{
|
||||
"gateway": "",
|
||||
"gateway6": "",
|
||||
"ip": "169.254.0.3",
|
||||
"ip6": [],
|
||||
"mac": "72:fb:c7:ac:b9:97",
|
||||
"mtu": "1500",
|
||||
"name": "Host internal management network",
|
||||
"netmask": "",
|
||||
"prefix": "",
|
||||
"prefix6": "",
|
||||
"vif_device": "0"
|
||||
}
|
||||
],
|
||||
"other_config": {
|
||||
"auto_poweron": "true",
|
||||
"base_template_name": "zatemplate",
|
||||
"import_task": "OpaqueRef:9948fd82-6d79-8882-2f01-4edc8795e361",
|
||||
"install-methods": "cdrom,nfs,http,ftp",
|
||||
"install-repository": "http://mirror.centos.org/centos-6/6.2/os/x86_64/",
|
||||
"instant": "true",
|
||||
"last_shutdown_action": "Destroy",
|
||||
"last_shutdown_initiator": "external",
|
||||
"last_shutdown_reason": "halted",
|
||||
"last_shutdown_time": "20140314T21:16:41Z",
|
||||
"linux_template": "true",
|
||||
"mac_seed": "06e27068-70c2-4c69-614b-7c54b5a4a781",
|
||||
"rhel6": "true"
|
||||
},
|
||||
"platform": {
|
||||
"acpi": "true",
|
||||
"apic": "true",
|
||||
"cores-per-socket": "1",
|
||||
"nx": "false",
|
||||
"pae": "true",
|
||||
"viridian": "true"
|
||||
},
|
||||
"state": "poweredoff",
|
||||
"uuid": "8f5bc97c-42fa-d619-aba4-d25eced735e0",
|
||||
"xenstore_data": {
|
||||
"vm-data": "",
|
||||
"vm-data/networks": "",
|
||||
"vm-data/networks/0": "",
|
||||
"vm-data/networks/0/mac": "72:fb:c7:ac:b9:97",
|
||||
"vm-data/networks/0/name": "Host internal management network"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,420 @@
|
||||
{
|
||||
"SR": {
|
||||
"OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f": {
|
||||
"PBDs": [],
|
||||
"VDIs": [],
|
||||
"allowed_operations": [
|
||||
"unplug",
|
||||
"plug",
|
||||
"pbd_create",
|
||||
"update",
|
||||
"pbd_destroy",
|
||||
"vdi_resize",
|
||||
"vdi_clone",
|
||||
"scan",
|
||||
"vdi_snapshot",
|
||||
"vdi_mirror",
|
||||
"vdi_create",
|
||||
"vdi_destroy"
|
||||
],
|
||||
"blobs": {},
|
||||
"clustered": false,
|
||||
"content_type": "",
|
||||
"current_operations": {},
|
||||
"introduced_by": "OpaqueRef:NULL",
|
||||
"is_tools_sr": false,
|
||||
"local_cache_enabled": false,
|
||||
"name_description": "",
|
||||
"name_label": "Ansible Test Storage 1",
|
||||
"other_config": {
|
||||
"auto-scan": "false"
|
||||
},
|
||||
"physical_size": "2521133219840",
|
||||
"physical_utilisation": "1551485632512",
|
||||
"shared": true,
|
||||
"sm_config": {
|
||||
"allocation": "thick",
|
||||
"devserial": "scsi-3600a098038302d353624495242443848",
|
||||
"multipathable": "true",
|
||||
"use_vhd": "true"
|
||||
},
|
||||
"tags": [],
|
||||
"type": "lvmohba",
|
||||
"uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
|
||||
"virtual_allocation": "1556925644800"
|
||||
}
|
||||
},
|
||||
"VBD": {
|
||||
"OpaqueRef:024b722e-8d0f-65e6-359e-f301a009b683": {
|
||||
"VDI": "OpaqueRef:NULL",
|
||||
"VM": "OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2",
|
||||
"allowed_operations": [
|
||||
"attach",
|
||||
"insert"
|
||||
],
|
||||
"bootable": false,
|
||||
"current_operations": {},
|
||||
"currently_attached": false,
|
||||
"device": "",
|
||||
"empty": true,
|
||||
"metrics": "OpaqueRef:81509584-b22f-bc71-3c4e-e6c3bdca71f0",
|
||||
"mode": "RO",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "CD",
|
||||
"unpluggable": true,
|
||||
"userdevice": "3",
|
||||
"uuid": "38d850d0-c402-490e-6b97-1d23558c4e0e"
|
||||
},
|
||||
"OpaqueRef:235f4f04-1dc9-9fa5-c229-a1df187ba48c": {
|
||||
"VDI": "OpaqueRef:4d3e9fc7-ae61-b312-e0a8-b53bee06282e",
|
||||
"VM": "OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2",
|
||||
"allowed_operations": [
|
||||
"attach"
|
||||
],
|
||||
"bootable": true,
|
||||
"current_operations": {},
|
||||
"currently_attached": false,
|
||||
"device": "xvda",
|
||||
"empty": false,
|
||||
"metrics": "OpaqueRef:529f6071-5627-28c5-1f41-ee8c0733f1da",
|
||||
"mode": "RW",
|
||||
"other_config": {
|
||||
"owner": ""
|
||||
},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"storage_lock": false,
|
||||
"type": "Disk",
|
||||
"unpluggable": false,
|
||||
"userdevice": "0",
|
||||
"uuid": "3fd7d35c-cb9d-f0c4-726b-e188ef0dc446"
|
||||
}
|
||||
},
|
||||
"VDI": {
|
||||
"OpaqueRef:4d3e9fc7-ae61-b312-e0a8-b53bee06282e": {
|
||||
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
|
||||
"VBDs": [
|
||||
"OpaqueRef:235f4f04-1dc9-9fa5-c229-a1df187ba48c"
|
||||
],
|
||||
"allow_caching": false,
|
||||
"allowed_operations": [
|
||||
"forget",
|
||||
"generate_config",
|
||||
"update",
|
||||
"resize",
|
||||
"destroy",
|
||||
"clone",
|
||||
"copy",
|
||||
"snapshot"
|
||||
],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"is_a_snapshot": false,
|
||||
"is_tools_iso": false,
|
||||
"location": "bdd0baeb-5447-4963-9e71-a5ff6e85fa59",
|
||||
"managed": true,
|
||||
"metadata_latest": false,
|
||||
"metadata_of_pool": "",
|
||||
"missing": false,
|
||||
"name_description": "/",
|
||||
"name_label": "ansible-test-vm-3-root",
|
||||
"on_boot": "persist",
|
||||
"other_config": {
|
||||
"content_id": "cd8e8b2b-f158-c519-02f0-81d130fe83c5"
|
||||
},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"physical_utilisation": "8615100416",
|
||||
"read_only": false,
|
||||
"sharable": false,
|
||||
"sm_config": {
|
||||
"vdi_type": "vhd"
|
||||
},
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"storage_lock": false,
|
||||
"tags": [],
|
||||
"type": "system",
|
||||
"uuid": "bdd0baeb-5447-4963-9e71-a5ff6e85fa59",
|
||||
"virtual_size": "8589934592",
|
||||
"xenstore_data": {}
|
||||
}
|
||||
},
|
||||
"VIF": {
|
||||
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": {
|
||||
"MAC": "72:fb:c7:ac:b9:97",
|
||||
"MAC_autogenerated": true,
|
||||
"MTU": "1500",
|
||||
"VM": "OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2",
|
||||
"allowed_operations": [
|
||||
"attach"
|
||||
],
|
||||
"current_operations": {},
|
||||
"currently_attached": false,
|
||||
"device": "0",
|
||||
"ipv4_addresses": [],
|
||||
"ipv4_allowed": [],
|
||||
"ipv4_configuration_mode": "None",
|
||||
"ipv4_gateway": "",
|
||||
"ipv6_addresses": [],
|
||||
"ipv6_allowed": [],
|
||||
"ipv6_configuration_mode": "None",
|
||||
"ipv6_gateway": "",
|
||||
"locking_mode": "network_default",
|
||||
"metrics": "OpaqueRef:e5b53fb1-3e99-4bf5-6b00-95fdba1f2610",
|
||||
"network": "OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724",
|
||||
"other_config": {},
|
||||
"qos_algorithm_params": {},
|
||||
"qos_algorithm_type": "",
|
||||
"qos_supported_algorithms": [],
|
||||
"runtime_properties": {},
|
||||
"status_code": "0",
|
||||
"status_detail": "",
|
||||
"uuid": "94bd4913-4940-437c-a1c3-50f7eb354c55"
|
||||
}
|
||||
},
|
||||
"VM": {
|
||||
"OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2": {
|
||||
"HVM_boot_params": {
|
||||
"order": ""
|
||||
},
|
||||
"HVM_boot_policy": "",
|
||||
"HVM_shadow_multiplier": 1.0,
|
||||
"PCI_bus": "",
|
||||
"PV_args": "graphical utf8",
|
||||
"PV_bootloader": "pygrub",
|
||||
"PV_bootloader_args": "",
|
||||
"PV_kernel": "",
|
||||
"PV_legacy_args": "",
|
||||
"PV_ramdisk": "",
|
||||
"VBDs": [
|
||||
"OpaqueRef:235f4f04-1dc9-9fa5-c229-a1df187ba48c",
|
||||
"OpaqueRef:024b722e-8d0f-65e6-359e-f301a009b683"
|
||||
],
|
||||
"VCPUs_at_startup": "1",
|
||||
"VCPUs_max": "1",
|
||||
"VCPUs_params": {},
|
||||
"VGPUs": [],
|
||||
"VIFs": [
|
||||
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab"
|
||||
],
|
||||
"VTPMs": [],
|
||||
"actions_after_crash": "restart",
|
||||
"actions_after_reboot": "restart",
|
||||
"actions_after_shutdown": "destroy",
|
||||
"affinity": "OpaqueRef:NULL",
|
||||
"allowed_operations": [
|
||||
"changing_dynamic_range",
|
||||
"changing_shadow_memory",
|
||||
"changing_static_range",
|
||||
"make_into_template",
|
||||
"migrate_send",
|
||||
"destroy",
|
||||
"export",
|
||||
"start_on",
|
||||
"start",
|
||||
"clone",
|
||||
"copy",
|
||||
"snapshot"
|
||||
],
|
||||
"appliance": "OpaqueRef:NULL",
|
||||
"attached_PCIs": [],
|
||||
"bios_strings": {
|
||||
"bios-vendor": "Xen",
|
||||
"bios-version": "",
|
||||
"hp-rombios": "",
|
||||
"oem-1": "Xen",
|
||||
"oem-2": "MS_VM_CERT/SHA1/bdbeb6e0a816d43fa6d3fe8aaef04c2bad9d3e3d",
|
||||
"system-manufacturer": "Xen",
|
||||
"system-product-name": "HVM domU",
|
||||
"system-serial-number": "",
|
||||
"system-version": ""
|
||||
},
|
||||
"blobs": {},
|
||||
"blocked_operations": {},
|
||||
"children": [],
|
||||
"consoles": [],
|
||||
"crash_dumps": [],
|
||||
"current_operations": {},
|
||||
"domarch": "",
|
||||
"domid": "-1",
|
||||
"generation_id": "",
|
||||
"guest_metrics": "OpaqueRef:6a8acd85-4cab-4e52-27d5-5f4a51c1bf69",
|
||||
"ha_always_run": false,
|
||||
"ha_restart_priority": "",
|
||||
"hardware_platform_version": "0",
|
||||
"has_vendor_device": false,
|
||||
"is_a_snapshot": false,
|
||||
"is_a_template": false,
|
||||
"is_control_domain": false,
|
||||
"is_default_template": false,
|
||||
"is_snapshot_from_vmpp": false,
|
||||
"is_vmss_snapshot": false,
|
||||
"last_boot_CPU_flags": {
|
||||
"features": "17c9cbf5-f6f83203-2191cbf5-00000023-00000001-00000329-00000000-00000000-00001000-0c000000",
|
||||
"vendor": "GenuineIntel"
|
||||
},
|
||||
"last_booted_record": "",
|
||||
"memory_dynamic_max": "1073741824",
|
||||
"memory_dynamic_min": "1073741824",
|
||||
"memory_overhead": "10485760",
|
||||
"memory_static_max": "1073741824",
|
||||
"memory_static_min": "536870912",
|
||||
"memory_target": "0",
|
||||
"metrics": "OpaqueRef:87fc5829-478b-1dcd-989f-50e8ba58a87d",
|
||||
"name_description": "Created by Ansible",
|
||||
"name_label": "ansible-test-vm-3",
|
||||
"order": "0",
|
||||
"other_config": {
|
||||
"auto_poweron": "true",
|
||||
"base_template_name": "zatemplate",
|
||||
"import_task": "OpaqueRef:9948fd82-6d79-8882-2f01-4edc8795e361",
|
||||
"install-methods": "cdrom,nfs,http,ftp",
|
||||
"install-repository": "http://mirror.centos.org/centos-6/6.2/os/x86_64/",
|
||||
"instant": "true",
|
||||
"last_shutdown_action": "Destroy",
|
||||
"last_shutdown_initiator": "external",
|
||||
"last_shutdown_reason": "halted",
|
||||
"last_shutdown_time": "20140314T21:16:41Z",
|
||||
"linux_template": "true",
|
||||
"mac_seed": "06e27068-70c2-4c69-614b-7c54b5a4a781",
|
||||
"rhel6": "true"
|
||||
},
|
||||
"parent": "OpaqueRef:NULL",
|
||||
"platform": {
|
||||
"acpi": "true",
|
||||
"apic": "true",
|
||||
"cores-per-socket": "1",
|
||||
"nx": "false",
|
||||
"pae": "true",
|
||||
"viridian": "true"
|
||||
},
|
||||
"power_state": "Halted",
|
||||
"protection_policy": "OpaqueRef:NULL",
|
||||
"recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"17179869184\" /><restriction field=\"vcpus-max\" max=\"8\" /><restriction property=\"number-of-vbds\" max=\"7\" /><restriction property=\"number-of-vifs\" max=\"7\" /></restrictions>",
|
||||
"reference_label": "",
|
||||
"requires_reboot": false,
|
||||
"resident_on": "OpaqueRef:NULL",
|
||||
"shutdown_delay": "0",
|
||||
"snapshot_info": {},
|
||||
"snapshot_metadata": "",
|
||||
"snapshot_of": "OpaqueRef:NULL",
|
||||
"snapshot_schedule": "OpaqueRef:NULL",
|
||||
"snapshot_time": "19700101T00:00:00Z",
|
||||
"snapshots": [],
|
||||
"start_delay": "0",
|
||||
"suspend_SR": "OpaqueRef:NULL",
|
||||
"suspend_VDI": "OpaqueRef:NULL",
|
||||
"tags": [
|
||||
"web-frontend"
|
||||
],
|
||||
"transportable_snapshot_id": "",
|
||||
"user_version": "1",
|
||||
"uuid": "8f5bc97c-42fa-d619-aba4-d25eced735e0",
|
||||
"version": "0",
|
||||
"xenstore_data": {
|
||||
"vm-data": "",
|
||||
"vm-data/networks": "",
|
||||
"vm-data/networks/0": "",
|
||||
"vm-data/networks/0/mac": "72:fb:c7:ac:b9:97",
|
||||
"vm-data/networks/0/name": "Host internal management network"
|
||||
}
|
||||
}
|
||||
},
|
||||
"VM_guest_metrics": {
|
||||
"OpaqueRef:6a8acd85-4cab-4e52-27d5-5f4a51c1bf69": {
|
||||
"PV_drivers_detected": true,
|
||||
"PV_drivers_up_to_date": true,
|
||||
"PV_drivers_version": {
|
||||
"build": "46676",
|
||||
"major": "5",
|
||||
"micro": "100",
|
||||
"minor": "6"
|
||||
},
|
||||
"can_use_hotplug_vbd": "unspecified",
|
||||
"can_use_hotplug_vif": "unspecified",
|
||||
"disks": {},
|
||||
"last_updated": "20190113T19:36:07Z",
|
||||
"live": true,
|
||||
"memory": {},
|
||||
"networks": {
|
||||
"0/ip": "169.254.0.3"
|
||||
},
|
||||
"os_version": {
|
||||
"distro": "centos",
|
||||
"major": "6",
|
||||
"minor": "10",
|
||||
"name": "CentOS release 6.10 (Final)",
|
||||
"uname": "2.6.32-754.6.3.el6.x86_64"
|
||||
},
|
||||
"other": {
|
||||
"feature-balloon": "1",
|
||||
"has-vendor-device": "0",
|
||||
"platform-feature-multiprocessor-suspend": "1"
|
||||
},
|
||||
"other_config": {},
|
||||
"uuid": "3928a6a4-1acd-c134-ed35-eb0ccfaed65c"
|
||||
}
|
||||
},
|
||||
"VM_metrics": {
|
||||
"OpaqueRef:87fc5829-478b-1dcd-989f-50e8ba58a87d": {
|
||||
"VCPUs_CPU": {},
|
||||
"VCPUs_flags": {},
|
||||
"VCPUs_number": "0",
|
||||
"VCPUs_params": {},
|
||||
"VCPUs_utilisation": {
|
||||
"0": 0.0
|
||||
},
|
||||
"hvm": false,
|
||||
"install_time": "20190113T19:35:05Z",
|
||||
"last_updated": "19700101T00:00:00Z",
|
||||
"memory_actual": "1073741824",
|
||||
"nested_virt": false,
|
||||
"nomigrate": false,
|
||||
"other_config": {},
|
||||
"start_time": "19700101T00:00:00Z",
|
||||
"state": [],
|
||||
"uuid": "6cb05fe9-b83e-34c8-29e0-3b793e1da661"
|
||||
}
|
||||
},
|
||||
"host": {},
|
||||
"network": {
|
||||
"OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724": {
|
||||
"MTU": "1500",
|
||||
"PIFs": [],
|
||||
"VIFs": [],
|
||||
"allowed_operations": [],
|
||||
"assigned_ips": {
|
||||
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": "169.254.0.3",
|
||||
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": "169.254.0.2"
|
||||
},
|
||||
"blobs": {},
|
||||
"bridge": "xenapi",
|
||||
"current_operations": {},
|
||||
"default_locking_mode": "unlocked",
|
||||
"managed": true,
|
||||
"name_description": "Network on which guests will be assigned a private link-local IP address which can be used to talk XenAPI",
|
||||
"name_label": "Host internal management network",
|
||||
"other_config": {
|
||||
"ip_begin": "169.254.0.1",
|
||||
"ip_end": "169.254.255.254",
|
||||
"is_guest_installer_network": "true",
|
||||
"is_host_internal_management_network": "true",
|
||||
"netmask": "255.255.0.0"
|
||||
},
|
||||
"tags": [],
|
||||
"uuid": "dbb96525-944f-0d1a-54ed-e65cb6d07450"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
||||
@@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
from .common import testcase_bad_xenapi_refs
|
||||
|
||||
|
||||
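# Pairs of fixture files ("params") and matching human-readable pytest IDs ("ids"),
# consumed by @pytest.mark.parametrize below.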
testcase_gather_vm_params_and_facts = {
|
||||
"params": [
|
||||
["ansible-test-vm-1-params.json", "ansible-test-vm-1-facts.json"],
|
||||
["ansible-test-vm-2-params.json", "ansible-test-vm-2-facts.json"],
|
||||
["ansible-test-vm-3-params.json", "ansible-test-vm-3-facts.json"],
|
||||
],
|
||||
"ids": [
|
||||
"ansible-test-vm-1",
|
||||
"ansible-test-vm-2",
|
||||
"ansible-test-vm-3",
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('vm_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
|
||||
def test_gather_vm_params_bad_vm_ref(fake_ansible_module, xenserver, vm_ref):
|
||||
"""Tests return of empty dict on bad vm_ref."""
|
||||
assert xenserver.gather_vm_params(fake_ansible_module, vm_ref) == {}
|
||||
|
||||
|
||||
def test_gather_vm_facts_no_vm_params(fake_ansible_module, xenserver):
|
||||
"""Tests return of empty facts dict when vm_params is not available"""
|
||||
assert xenserver.gather_vm_facts(fake_ansible_module, None) == {}
|
||||
assert xenserver.gather_vm_facts(fake_ansible_module, {}) == {}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('fixture_data_from_file',
|
||||
testcase_gather_vm_params_and_facts['params'],
|
||||
ids=testcase_gather_vm_params_and_facts['ids'],
|
||||
indirect=True)
|
||||
def test_gather_vm_params_and_facts(mocker, fake_ansible_module, XenAPI, xenserver, fixture_data_from_file):
|
||||
"""Tests proper parsing of VM parameters and facts."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)
|
||||
|
||||
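# fixture_data_from_file is keyed by the loaded fixture file names; work out which
# key holds the raw VM params and which holds the expected facts.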
if "params" in list(fixture_data_from_file.keys())[0]:
|
||||
params_file = list(fixture_data_from_file.keys())[0]
|
||||
facts_file = list(fixture_data_from_file.keys())[1]
|
||||
else:
|
||||
params_file = list(fixture_data_from_file.keys())[1]
|
||||
facts_file = list(fixture_data_from_file.keys())[0]
|
||||
|
||||
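# Route every XenAPI get_record() call to the matching record in the params fixture.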
mocked_returns = {
|
||||
"VM.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VM'][obj_ref],
|
||||
"VM_metrics.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VM_metrics'][obj_ref],
|
||||
"VM_guest_metrics.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VM_guest_metrics'][obj_ref],
|
||||
"VBD.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VBD'][obj_ref],
|
||||
"VDI.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VDI'][obj_ref],
|
||||
"SR.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['SR'][obj_ref],
|
||||
"VIF.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VIF'][obj_ref],
|
||||
"network.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['network'][obj_ref],
|
||||
"host.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['host'][obj_ref],
|
||||
}
|
||||
|
||||
mocked_xenapi.configure_mock(**mocked_returns)
|
||||
|
||||
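# Pin get_xenserver_version() so the code under test sees a fixed 7.2.0 host version.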
mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.get_xenserver_version', return_value=[7, 2, 0])
|
||||
|
||||
vm_ref = list(fixture_data_from_file[params_file]['VM'].keys())[0]
|
||||
|
||||
assert xenserver.gather_vm_facts(fake_ansible_module, xenserver.gather_vm_params(fake_ansible_module, vm_ref)) == fixture_data_from_file[facts_file]
|
||||
@@ -0,0 +1,74 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
from .FakeAnsibleModule import FailJsonException
|
||||
from .common import fake_xenapi_ref
|
||||
|
||||
|
||||
def test_get_object_ref_xenapi_failure(mocker, fake_ansible_module, XenAPI, xenserver):
|
||||
"""Tests catching of XenAPI failures."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', side_effect=XenAPI.Failure('Fake XAPI method call error!'))
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.get_object_ref(fake_ansible_module, "name")
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == "XAPI ERROR: Fake XAPI method call error!"
|
||||
|
||||
|
||||
def test_get_object_ref_bad_uuid_and_name(mocker, fake_ansible_module, XenAPI, xenserver):
|
||||
"""Tests failure on bad object uuid and/or name."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request')
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.get_object_ref(fake_ansible_module, None, msg_prefix="Test: ")
|
||||
|
||||
# mocked_xenapi is the patched xenapi_request mock itself, so assert on it directly.
mocked_xenapi.assert_not_called()
|
||||
assert exc_info.value.kwargs['msg'] == "Test: no valid name or UUID supplied for VM!"
|
||||
|
||||
|
||||
def test_get_object_ref_uuid_not_found(mocker, fake_ansible_module, XenAPI, xenserver):
|
||||
"""Tests when object is not found by uuid."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', side_effect=XenAPI.Failure('Fake XAPI not found error!'))
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.get_object_ref(fake_ansible_module, "name", uuid="fake-uuid", msg_prefix="Test: ")
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == "Test: VM with UUID 'fake-uuid' not found!"
|
||||
assert xenserver.get_object_ref(fake_ansible_module, "name", uuid="fake-uuid", fail=False, msg_prefix="Test: ") is None
|
||||
|
||||
|
||||
def test_get_object_ref_name_not_found(mocker, fake_ansible_module, XenAPI, xenserver):
|
||||
"""Tests when object is not found by name."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', return_value=[])
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.get_object_ref(fake_ansible_module, "name", msg_prefix="Test: ")
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == "Test: VM with name 'name' not found!"
|
||||
assert xenserver.get_object_ref(fake_ansible_module, "name", fail=False, msg_prefix="Test: ") is None
|
||||
|
||||
|
||||
def test_get_object_ref_name_multiple_found(mocker, fake_ansible_module, XenAPI, xenserver):
|
||||
"""Tests when multiple objects are found by name."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request', return_value=[fake_xenapi_ref('VM'), fake_xenapi_ref('VM')])
|
||||
|
||||
error_msg = "Test: multiple VMs with name 'name' found! Please use UUID."
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.get_object_ref(fake_ansible_module, "name", msg_prefix="Test: ")
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == error_msg
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.get_object_ref(fake_ansible_module, "name", fail=False, msg_prefix="Test: ")
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == error_msg
|
||||
@@ -0,0 +1,18 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
def test_xapi_to_module_vm_power_state_bad_power_state(xenserver):
|
||||
"""Tests that None is returned on bad power state."""
|
||||
assert xenserver.xapi_to_module_vm_power_state("bad") is None
|
||||
|
||||
|
||||
def test_module_to_xapi_vm_power_state_bad_power_state(xenserver):
|
||||
"""Tests that None is returned on bad power state."""
|
||||
assert xenserver.module_to_xapi_vm_power_state("bad") is None
|
||||
@@ -0,0 +1,183 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible.module_utils.common.network import is_mac
|
||||
|
||||
testcase_is_valid_mac_addr = [
|
||||
('A4-23-8D-F8-C9-E5', True),
|
||||
('35:71:F4:11:0B:D8', True),
|
||||
('b3-bd-20-59-0c-cf', True),
|
||||
('32:61:ca:65:f1:f4', True),
|
||||
('asdf', False),
|
||||
('A4-23-8D-G8-C9-E5', False),
|
||||
('A4-3-8D-F8-C9-E5', False),
|
||||
('A4-23-88D-F8-C9-E5', False),
|
||||
('A4-23-8D-F8-C9_E5', False),
|
||||
('A4-23--8D-F8-C9-E5', False),
|
||||
]
|
||||
|
||||
testcase_is_valid_ip_addr = [
|
||||
('0.0.0.0', True),
|
||||
('10.0.0.1', True),
|
||||
('192.168.0.1', True),
|
||||
('255.255.255.255', True),
|
||||
('asdf', False),
|
||||
('a.b.c.d', False),
|
||||
('345.345.345.345', False),
|
||||
('-10.0.0.1', False),
|
||||
]
|
||||
|
||||
testcase_is_valid_ip_netmask = [
|
||||
('240.0.0.0', True),
|
||||
('255.224.0.0', True),
|
||||
('255.255.248.0', True),
|
||||
('255.255.255.255', True),
|
||||
('asdf', False),
|
||||
('a.b.c.d', False),
|
||||
('192.168.0.1', False),
|
||||
('255.0.248.0', False),
|
||||
]
|
||||
|
||||
testcase_is_valid_ip_prefix = [
|
||||
('0', True),
|
||||
('16', True),
|
||||
('24', True),
|
||||
('32', True),
|
||||
('asdf', False),
|
||||
('-10', False),
|
||||
('60', False),
|
||||
('60s', False),
|
||||
]
|
||||
|
||||
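# Prefix-to-netmask conversion pairs; invalid prefixes ('a', '60') are expected to
# yield an empty string.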
testcase_ip_prefix_to_netmask = {
|
||||
"params": [
|
||||
('0', '0.0.0.0'),
|
||||
('8', '255.0.0.0'),
|
||||
('11', '255.224.0.0'),
|
||||
('16', '255.255.0.0'),
|
||||
('21', '255.255.248.0'),
|
||||
('24', '255.255.255.0'),
|
||||
('26', '255.255.255.192'),
|
||||
('32', '255.255.255.255'),
|
||||
('a', ''),
|
||||
('60', ''),
|
||||
],
|
||||
"ids": [
|
||||
'0',
|
||||
'8',
|
||||
'11',
|
||||
'16',
|
||||
'21',
|
||||
'24',
|
||||
'26',
|
||||
'32',
|
||||
'a',
|
||||
'60',
|
||||
],
|
||||
}
|
||||
|
||||
testcase_ip_netmask_to_prefix = {
|
||||
"params": [
|
||||
('0.0.0.0', '0'),
|
||||
('255.0.0.0', '8'),
|
||||
('255.224.0.0', '11'),
|
||||
('255.255.0.0', '16'),
|
||||
('255.255.248.0', '21'),
|
||||
('255.255.255.0', '24'),
|
||||
('255.255.255.192', '26'),
|
||||
('255.255.255.255', '32'),
|
||||
('a', ''),
|
||||
('60', ''),
|
||||
],
|
||||
"ids": [
|
||||
'0.0.0.0',
|
||||
'255.0.0.0',
|
||||
'255.224.0.0',
|
||||
'255.255.0.0',
|
||||
'255.255.248.0',
|
||||
'255.255.255.0',
|
||||
'255.255.255.192',
|
||||
'255.255.255.255',
|
||||
'a',
|
||||
'60',
|
||||
],
|
||||
}
|
||||
|
||||
testcase_is_valid_ip6_addr = [
|
||||
('::1', True),
|
||||
('2001:DB8:0:0:8:800:200C:417A', True),
|
||||
('2001:DB8::8:800:200C:417A', True),
|
||||
('FF01::101', True),
|
||||
('asdf', False),
|
||||
('2001:DB8:0:0:8:800:200C:417A:221', False),
|
||||
('FF01::101::2', False),
|
||||
('2001:db8:85a3::8a2e:370k:7334', False),
|
||||
]
|
||||
|
||||
testcase_is_valid_ip6_prefix = [
|
||||
('0', True),
|
||||
('56', True),
|
||||
('78', True),
|
||||
('128', True),
|
||||
('asdf', False),
|
||||
('-10', False),
|
||||
('345', False),
|
||||
('60s', False),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('mac_addr, result', testcase_is_valid_mac_addr)
|
||||
def test_is_valid_mac_addr(xenserver, mac_addr, result):
|
||||
"""Tests against examples of valid and invalid mac addresses."""
|
||||
assert is_mac(mac_addr) is result
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip_addr, result', testcase_is_valid_ip_addr)
|
||||
def test_is_valid_ip_addr(xenserver, ip_addr, result):
|
||||
"""Tests against examples of valid and invalid ip addresses."""
|
||||
assert xenserver.is_valid_ip_addr(ip_addr) is result
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip_netmask, result', testcase_is_valid_ip_netmask)
|
||||
def test_is_valid_ip_netmask(xenserver, ip_netmask, result):
|
||||
"""Tests against examples of valid and invalid ip netmasks."""
|
||||
assert xenserver.is_valid_ip_netmask(ip_netmask) is result
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip_prefix, result', testcase_is_valid_ip_prefix)
|
||||
def test_is_valid_ip_prefix(xenserver, ip_prefix, result):
|
||||
"""Tests against examples of valid and invalid ip prefixes."""
|
||||
assert xenserver.is_valid_ip_prefix(ip_prefix) is result
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip_prefix, ip_netmask', testcase_ip_prefix_to_netmask['params'], ids=testcase_ip_prefix_to_netmask['ids'])
|
||||
def test_ip_prefix_to_netmask(xenserver, ip_prefix, ip_netmask):
|
||||
"""Tests ip prefix to netmask conversion."""
|
||||
assert xenserver.ip_prefix_to_netmask(ip_prefix) == ip_netmask
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip_netmask, ip_prefix', testcase_ip_netmask_to_prefix['params'], ids=testcase_ip_netmask_to_prefix['ids'])
|
||||
def test_ip_netmask_to_prefix(xenserver, ip_netmask, ip_prefix):
|
||||
"""Tests ip netmask to prefix conversion."""
|
||||
assert xenserver.ip_netmask_to_prefix(ip_netmask) == ip_prefix
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip6_addr, result', testcase_is_valid_ip6_addr)
|
||||
def test_is_valid_ip6_addr(xenserver, ip6_addr, result):
|
||||
"""Tests against examples of valid and invalid ip6 addresses."""
|
||||
assert xenserver.is_valid_ip6_addr(ip6_addr) is result
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ip6_prefix, result', testcase_is_valid_ip6_prefix)
|
||||
def test_is_valid_ip6_prefix(xenserver, ip6_prefix, result):
|
||||
"""Tests against examples of valid and invalid ip6 prefixes."""
|
||||
assert xenserver.is_valid_ip6_prefix(ip6_prefix) is result
|
||||
@@ -0,0 +1,414 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
from .FakeAnsibleModule import FailJsonException
|
||||
from .common import fake_xenapi_ref, testcase_bad_xenapi_refs
|
||||
|
||||
|
||||
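# (desired state, current XAPI power state, expected error message) triples for
# transitions that set_vm_power_state() must reject.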
testcase_set_vm_power_state_bad_transitions = {
|
||||
"params": [
|
||||
('restarted', 'Halted', "Cannot restart VM in state 'poweredoff'!"),
|
||||
('restarted', 'Suspended', "Cannot restart VM in state 'suspended'!"),
|
||||
('suspended', 'Halted', "Cannot suspend VM in state 'poweredoff'!"),
|
||||
('suspended', 'Paused', "Cannot suspend VM in state 'paused'!"),
|
||||
('shutdownguest', 'Halted', "Cannot shutdown guest when VM is in state 'poweredoff'!"),
|
||||
('shutdownguest', 'Suspended', "Cannot shutdown guest when VM is in state 'suspended'!"),
|
||||
('shutdownguest', 'Paused', "Cannot shutdown guest when VM is in state 'paused'!"),
|
||||
('rebootguest', 'Halted', "Cannot reboot guest when VM is in state 'poweredoff'!"),
|
||||
('rebootguest', 'Suspended', "Cannot reboot guest when VM is in state 'suspended'!"),
|
||||
('rebootguest', 'Paused', "Cannot reboot guest when VM is in state 'paused'!"),
|
||||
],
|
||||
"ids": [
|
||||
"poweredoff->restarted",
|
||||
"suspended->restarted",
|
||||
"poweredoff->suspended",
|
||||
"paused->suspended",
|
||||
"poweredoff->shutdownguest",
|
||||
"suspended->shutdownguest",
|
||||
"paused->shutdownguest",
|
||||
"poweredoff->rebootguest",
|
||||
"suspended->rebootguest",
|
||||
"paused->rebootguest",
|
||||
],
|
||||
}
|
||||
|
||||
testcase_set_vm_power_state_task_timeout = {
|
||||
"params": [
|
||||
('shutdownguest', "Guest shutdown task failed: 'timeout'!"),
|
||||
('rebootguest', "Guest reboot task failed: 'timeout'!"),
|
||||
],
|
||||
"ids": [
|
||||
"shutdownguest-timeout",
|
||||
"rebootguest-timeout",
|
||||
],
|
||||
}
|
||||
|
||||
testcase_set_vm_power_state_no_transitions = {
|
||||
"params": [
|
||||
('poweredon', "Running"),
|
||||
('Poweredon', "Running"),
|
||||
('powered-on', "Running"),
|
||||
('Powered_on', "Running"),
|
||||
('poweredoff', "Halted"),
|
||||
('Poweredoff', "Halted"),
|
||||
('powered-off', "Halted"),
|
||||
('powered_off', "Halted"),
|
||||
('suspended', "Suspended"),
|
||||
('Suspended', "Suspended"),
|
||||
],
|
||||
"ids": [
|
||||
"poweredon",
|
||||
"poweredon-cap",
|
||||
"poweredon-dash",
|
||||
"poweredon-under",
|
||||
"poweredoff",
|
||||
"poweredoff-cap",
|
||||
"poweredoff-dash",
|
||||
"poweredoff-under",
|
||||
"suspended",
|
||||
"suspended-cap",
|
||||
],
|
||||
}
|
||||
|
||||
testcase_set_vm_power_state_transitions = {
|
||||
"params": [
|
||||
('poweredon', 'Halted', 'running', 'VM.start'),
|
||||
('Poweredon', 'Halted', 'running', 'VM.start'),
|
||||
('powered-on', 'Halted', 'running', 'VM.start'),
|
||||
('Powered_on', 'Halted', 'running', 'VM.start'),
|
||||
('poweredon', 'Suspended', 'running', 'VM.resume'),
|
||||
('Poweredon', 'Suspended', 'running', 'VM.resume'),
|
||||
('powered-on', 'Suspended', 'running', 'VM.resume'),
|
||||
('Powered_on', 'Suspended', 'running', 'VM.resume'),
|
||||
('poweredon', 'Paused', 'running', 'VM.unpause'),
|
||||
('Poweredon', 'Paused', 'running', 'VM.unpause'),
|
||||
('powered-on', 'Paused', 'running', 'VM.unpause'),
|
||||
('Powered_on', 'Paused', 'running', 'VM.unpause'),
|
||||
('poweredoff', 'Running', 'halted', 'VM.hard_shutdown'),
|
||||
('Poweredoff', 'Running', 'halted', 'VM.hard_shutdown'),
|
||||
('powered-off', 'Running', 'halted', 'VM.hard_shutdown'),
|
||||
('powered_off', 'Running', 'halted', 'VM.hard_shutdown'),
|
||||
('poweredoff', 'Suspended', 'halted', 'VM.hard_shutdown'),
|
||||
('Poweredoff', 'Suspended', 'halted', 'VM.hard_shutdown'),
|
||||
('powered-off', 'Suspended', 'halted', 'VM.hard_shutdown'),
|
||||
('powered_off', 'Suspended', 'halted', 'VM.hard_shutdown'),
|
||||
('poweredoff', 'Paused', 'halted', 'VM.hard_shutdown'),
|
||||
('Poweredoff', 'Paused', 'halted', 'VM.hard_shutdown'),
|
||||
('powered-off', 'Paused', 'halted', 'VM.hard_shutdown'),
|
||||
('powered_off', 'Paused', 'halted', 'VM.hard_shutdown'),
|
||||
('restarted', 'Running', 'running', 'VM.hard_reboot'),
|
||||
('Restarted', 'Running', 'running', 'VM.hard_reboot'),
|
||||
('restarted', 'Paused', 'running', 'VM.hard_reboot'),
|
||||
('Restarted', 'Paused', 'running', 'VM.hard_reboot'),
|
||||
('suspended', 'Running', 'suspended', 'VM.suspend'),
|
||||
('Suspended', 'Running', 'suspended', 'VM.suspend'),
|
||||
('shutdownguest', 'Running', 'halted', 'VM.clean_shutdown'),
|
||||
('Shutdownguest', 'Running', 'halted', 'VM.clean_shutdown'),
|
||||
('shutdown-guest', 'Running', 'halted', 'VM.clean_shutdown'),
|
||||
('shutdown_guest', 'Running', 'halted', 'VM.clean_shutdown'),
|
||||
('rebootguest', 'Running', 'running', 'VM.clean_reboot'),
|
||||
('Rebootguest', 'Running', 'running', 'VM.clean_reboot'),
|
||||
('reboot-guest', 'Running', 'running', 'VM.clean_reboot'),
|
||||
('reboot_guest', 'Running', 'running', 'VM.clean_reboot'),
|
||||
],
|
||||
"ids": [
|
||||
"poweredoff->poweredon",
|
||||
"poweredoff->poweredon-cap",
|
||||
"poweredoff->poweredon-dash",
|
||||
"poweredoff->poweredon-under",
|
||||
"suspended->poweredon",
|
||||
"suspended->poweredon-cap",
|
||||
"suspended->poweredon-dash",
|
||||
"suspended->poweredon-under",
|
||||
"paused->poweredon",
|
||||
"paused->poweredon-cap",
|
||||
"paused->poweredon-dash",
|
||||
"paused->poweredon-under",
|
||||
"poweredon->poweredoff",
|
||||
"poweredon->poweredoff-cap",
|
||||
"poweredon->poweredoff-dash",
|
||||
"poweredon->poweredoff-under",
|
||||
"suspended->poweredoff",
|
||||
"suspended->poweredoff-cap",
|
||||
"suspended->poweredoff-dash",
|
||||
"suspended->poweredoff-under",
|
||||
"paused->poweredoff",
|
||||
"paused->poweredoff-cap",
|
||||
"paused->poweredoff-dash",
|
||||
"paused->poweredoff-under",
|
||||
"poweredon->restarted",
|
||||
"poweredon->restarted-cap",
|
||||
"paused->restarted",
|
||||
"paused->restarted-cap",
|
||||
"poweredon->suspended",
|
||||
"poweredon->suspended-cap",
|
||||
"poweredon->shutdownguest",
|
||||
"poweredon->shutdownguest-cap",
|
||||
"poweredon->shutdownguest-dash",
|
||||
"poweredon->shutdownguest-under",
|
||||
"poweredon->rebootguest",
|
||||
"poweredon->rebootguest-cap",
|
||||
"poweredon->rebootguest-dash",
|
||||
"poweredon->rebootguest-under",
|
||||
],
|
||||
}
|
||||
|
||||
testcase_set_vm_power_state_transitions_async = {
|
||||
"params": [
|
||||
('shutdownguest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
|
||||
('Shutdownguest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
|
||||
('shutdown-guest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
|
||||
('shutdown_guest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
|
||||
('rebootguest', 'Running', 'running', 'Async.VM.clean_reboot'),
|
||||
('Rebootguest', 'Running', 'running', 'Async.VM.clean_reboot'),
|
||||
('reboot-guest', 'Running', 'running', 'Async.VM.clean_reboot'),
|
||||
('reboot_guest', 'Running', 'running', 'Async.VM.clean_reboot'),
|
||||
],
|
||||
"ids": [
|
||||
"poweredon->shutdownguest",
|
||||
"poweredon->shutdownguest-cap",
|
||||
"poweredon->shutdownguest-dash",
|
||||
"poweredon->shutdownguest-under",
|
||||
"poweredon->rebootguest",
|
||||
"poweredon->rebootguest-cap",
|
||||
"poweredon->rebootguest-dash",
|
||||
"poweredon->rebootguest-under",
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('vm_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
|
||||
def test_set_vm_power_state_bad_vm_ref(fake_ansible_module, xenserver, vm_ref):
|
||||
"""Tests failure on bad vm_ref."""
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.set_vm_power_state(fake_ansible_module, vm_ref, None)
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == "Cannot set VM power state. Invalid VM reference supplied!"
|
||||
|
||||
|
||||
def test_set_vm_power_state_xenapi_failure(mock_xenapi_failure, fake_ansible_module, xenserver):
|
||||
"""Tests catching of XenAPI failures."""
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), "poweredon")
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == "XAPI ERROR: %s" % mock_xenapi_failure[1]
|
||||
|
||||
|
||||
def test_set_vm_power_state_bad_power_state(mocker, fake_ansible_module, XenAPI, xenserver):
|
||||
"""Tests failure on unsupported power state."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)
|
||||
|
||||
mocked_returns = {
|
||||
"VM.get_power_state.return_value": "Running",
|
||||
}
|
||||
|
||||
mocked_xenapi.configure_mock(**mocked_returns)
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), "bad")
|
||||
|
||||
# Besides VM.get_power_state(), no other method should have been called.
|
||||
assert len(mocked_xenapi.method_calls) == 1
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == "Requested VM power state 'bad' is unsupported!"
|
||||
|
||||
|
||||
@pytest.mark.parametrize('power_state_desired, power_state_current, error_msg',
|
||||
testcase_set_vm_power_state_bad_transitions['params'],
|
||||
ids=testcase_set_vm_power_state_bad_transitions['ids'])
|
||||
def test_set_vm_power_state_bad_transition(mocker, fake_ansible_module, XenAPI, xenserver, power_state_desired, power_state_current, error_msg):
|
||||
"""Tests failure on bad power state transition."""
|
||||
mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)
|
||||
|
||||
mocked_returns = {
|
||||
"VM.get_power_state.return_value": power_state_current,
|
||||
}
|
||||
|
||||
mocked_xenapi.configure_mock(**mocked_returns)
|
||||
|
||||
with pytest.raises(FailJsonException) as exc_info:
|
||||
xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired)
|
||||
|
||||
# Besides VM.get_power_state(), no other method should have been called.
|
||||
assert len(mocked_xenapi.method_calls) == 1
|
||||
|
||||
assert exc_info.value.kwargs['msg'] == error_msg
|
||||
|
||||
|
||||
@pytest.mark.parametrize('power_state, error_msg',
                         testcase_set_vm_power_state_task_timeout['params'],
                         ids=testcase_set_vm_power_state_task_timeout['ids'])
def test_set_vm_power_state_task_timeout(mocker, fake_ansible_module, XenAPI, xenserver, power_state, error_msg):
    """Tests failure on async task timeout."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
        "Async.VM.clean_shutdown.return_value": fake_xenapi_ref('task'),
        "Async.VM.clean_reboot.return_value": fake_xenapi_ref('task'),
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.wait_for_task', return_value="timeout")

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state, timeout=1)

    # Besides VM.get_power_state(), only one of Async.VM.clean_shutdown or
    # Async.VM.clean_reboot should have been called.
    assert len(mocked_xenapi.method_calls) == 2

    assert exc_info.value.kwargs['msg'] == error_msg


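# Note that wait_for_task() is patched in the namespace of the module under test
# (plugins.module_utils.xenserver), so set_vm_power_state() picks up the replacement.
# A rough equivalent with plain unittest.mock (illustration only, not part of the suite):
#
#     from unittest.mock import patch
#
#     target = 'ansible_collections.community.general.plugins.module_utils.xenserver.wait_for_task'
#     with patch(target, return_value="timeout"):
#         pass  # any wait_for_task() call made inside the xenserver module_utils now returns "timeout"
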
@pytest.mark.parametrize('power_state_desired, power_state_current',
                         testcase_set_vm_power_state_no_transitions['params'],
                         ids=testcase_set_vm_power_state_no_transitions['ids'])
def test_set_vm_power_state_no_transition(mocker, fake_ansible_module, XenAPI, xenserver, power_state_desired, power_state_current):
    """Tests regular invocation without power state transition."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired)

    # Besides VM.get_power_state(), no other method should have been called.
    assert len(mocked_xenapi.method_calls) == 1

    assert result[0] is False
    assert result[1] == power_state_current.lower()


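# As the assertions above and below show, set_vm_power_state() returns a
# (changed, resulting_power_state) pair: result[0] is the change flag and
# result[1] is the resulting module-style (lowercase) power state.
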
@pytest.mark.parametrize('power_state_desired, power_state_current, power_state_resulting, activated_xenapi_method',
                         testcase_set_vm_power_state_transitions['params'],
                         ids=testcase_set_vm_power_state_transitions['ids'])
def test_set_vm_power_state_transition(mocker,
                                       fake_ansible_module,
                                       XenAPI,
                                       xenserver,
                                       power_state_desired,
                                       power_state_current,
                                       power_state_resulting,
                                       activated_xenapi_method):
    """Tests regular invocation with power state transition."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired, timeout=0)

    mocked_xenapi_method = mocked_xenapi

    for activated_xenapi_class in activated_xenapi_method.split('.'):
        mocked_xenapi_method = getattr(mocked_xenapi_method, activated_xenapi_class)

    mocked_xenapi_method.assert_called_once()

    # Besides VM.get_power_state(), only activated_xenapi_method should have
    # been called.
    assert len(mocked_xenapi.method_calls) == 2

    assert result[0] is True
    assert result[1] == power_state_resulting


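# The getattr() walk above resolves a dotted XenAPI method name into the matching
# nested mock attribute, e.g. 'Async.VM.clean_reboot' becomes
# mocked_xenapi.Async.VM.clean_reboot. A standalone illustration of the same idea:
#
#     from functools import reduce
#     from unittest.mock import MagicMock
#
#     m = MagicMock()
#     m.Async.VM.clean_reboot('OpaqueRef:fake-vm')
#     reduce(getattr, 'Async.VM.clean_reboot'.split('.'), m).assert_called_once()
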
@pytest.mark.parametrize('power_state_desired, power_state_current, power_state_resulting, activated_xenapi_method',
                         testcase_set_vm_power_state_transitions_async['params'],
                         ids=testcase_set_vm_power_state_transitions_async['ids'])
def test_set_vm_power_state_transition_async(mocker,
                                             fake_ansible_module,
                                             XenAPI,
                                             xenserver,
                                             power_state_desired,
                                             power_state_current,
                                             power_state_resulting,
                                             activated_xenapi_method):
    """
    Tests regular invocation with async power state transition
    (shutdownguest and rebootguest only).
    """
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
        "%s.return_value" % activated_xenapi_method: fake_xenapi_ref('task'),
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.wait_for_task', return_value="")

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired, timeout=1)

    mocked_xenapi_method = mocked_xenapi

    for activated_xenapi_class in activated_xenapi_method.split('.'):
        mocked_xenapi_method = getattr(mocked_xenapi_method, activated_xenapi_class)

    mocked_xenapi_method.assert_called_once()

    # Besides VM.get_power_state(), only activated_xenapi_method should have
    # been called.
    assert len(mocked_xenapi.method_calls) == 2

    assert result[0] is True
    assert result[1] == power_state_resulting


@pytest.mark.parametrize('power_state_desired, power_state_current, power_state_resulting, activated_xenapi_method',
                         testcase_set_vm_power_state_transitions['params'],
                         ids=testcase_set_vm_power_state_transitions['ids'])
def test_set_vm_power_state_transition_check_mode(mocker,
                                                  fake_ansible_module,
                                                  XenAPI,
                                                  xenserver,
                                                  power_state_desired,
                                                  power_state_current,
                                                  power_state_resulting,
                                                  activated_xenapi_method):
    """Tests regular invocation with power state transition in check mode."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    fake_ansible_module.check_mode = True
    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired, timeout=0)

    mocked_xenapi_method = mocked_xenapi

    for activated_xenapi_class in activated_xenapi_method.split('.'):
        mocked_xenapi_method = getattr(mocked_xenapi_method, activated_xenapi_class)

    mocked_xenapi_method.assert_not_called()

    # Besides VM.get_power_state(), no other method should have been called.
    assert len(mocked_xenapi.method_calls) == 1

    assert result[0] is True
    assert result[1] == power_state_resulting
@@ -0,0 +1,221 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


import pytest

from .FakeAnsibleModule import FailJsonException
from .common import fake_xenapi_ref, testcase_bad_xenapi_refs


testcase_wait_for_vm_ip_address_bad_power_states = {
    "params": [
        'Halted',
        'Paused',
        'Suspended',
        'Other',
    ],
    "ids": [
        'state-halted',
        'state-paused',
        'state-suspended',
        'state-other',
    ]
}

testcase_wait_for_vm_ip_address_bad_guest_metrics = {
    "params": [
        ('OpaqueRef:NULL', {"networks": {}}),
        (fake_xenapi_ref('VM_guest_metrics'), {"networks": {}}),
    ],
    "ids": [
        'vm_guest_metrics_ref-null, no-ip',
        'vm_guest_metrics_ref-ok, no-ip',
    ],
}

testcase_wait_for_task_all_statuses = {
    "params": [
        ('Success', ''),
        ('Failure', 'failure'),
        ('Cancelling', 'cancelling'),
        ('Cancelled', 'cancelled'),
        ('Other', 'other'),
    ],
    "ids": [
        'task-success',
        'task-failure',
        'task-cancelling',
        'task-cancelled',
        'task-other',
    ]
}


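# In testcase_wait_for_task_all_statuses each tuple is (task_status, result): the
# status reported by the mocked XAPI task and the value test_wait_for_task() below
# expects wait_for_task() to return for it; judging by the test data, an empty
# string denotes success while any other final status is passed through lowercased.
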
@pytest.mark.parametrize('vm_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
def test_wait_for_vm_ip_address_bad_vm_ref(fake_ansible_module, xenserver, vm_ref):
    """Tests failure on bad vm_ref."""
    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, vm_ref)

    assert exc_info.value.kwargs['msg'] == "Cannot wait for VM IP address. Invalid VM reference supplied!"


def test_wait_for_vm_ip_address_xenapi_failure(mock_xenapi_failure, xenserver, fake_ansible_module):
    """Tests catching of XenAPI failures."""
    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'))

    assert exc_info.value.kwargs['msg'] == "XAPI ERROR: %s" % mock_xenapi_failure[1]


@pytest.mark.parametrize('bad_power_state',
                         testcase_wait_for_vm_ip_address_bad_power_states['params'],
                         ids=testcase_wait_for_vm_ip_address_bad_power_states['ids'])
def test_wait_for_vm_ip_address_bad_power_state(mocker, fake_ansible_module, XenAPI, xenserver, bad_power_state):
    """Tests failure on bad power state."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": bad_power_state,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'))

    assert exc_info.value.kwargs['msg'] == ("Cannot wait for VM IP address when VM is in state '%s'!" %
                                            xenserver.xapi_to_module_vm_power_state(bad_power_state.lower()))


@pytest.mark.parametrize('bad_guest_metrics_ref, bad_guest_metrics',
                         testcase_wait_for_vm_ip_address_bad_guest_metrics['params'],
                         ids=testcase_wait_for_vm_ip_address_bad_guest_metrics['ids'])
def test_wait_for_vm_ip_address_timeout(mocker, fake_ansible_module, XenAPI, xenserver, bad_guest_metrics_ref, bad_guest_metrics):
    """Tests timeout."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
        "VM.get_guest_metrics.return_value": bad_guest_metrics_ref,
        "VM_guest_metrics.get_record.return_value": bad_guest_metrics,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('time.sleep')

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'), timeout=1)

    assert exc_info.value.kwargs['msg'] == "Timed out waiting for VM IP address!"


def test_wait_for_vm_ip_address(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests regular invocation."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # This mock simulates a regular VM IP address acquisition lifecycle:
    #
    # 1) First, no guest metrics are available because the VM is not yet fully
    #    booted and the guest agent is not yet started.
    # 2) Next, the guest agent is started and guest metrics are available, but
    #    an IP address has still not been acquired.
    # 3) Lastly, an IP address is acquired by the VM on its primary VIF.
    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
        "VM.get_guest_metrics.side_effect": [
            'OpaqueRef:NULL',
            fake_xenapi_ref('VM_guest_metrics'),
            fake_xenapi_ref('VM_guest_metrics'),
        ],
        "VM_guest_metrics.get_record.side_effect": [
            {
                "networks": {},
            },
            {
                "networks": {
                    "0/ip": "192.168.0.1",
                    "1/ip": "10.0.0.1",
                },
            },
        ],
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('time.sleep')

    fake_guest_metrics = xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'))

    assert fake_guest_metrics == mocked_returns['VM_guest_metrics.get_record.side_effect'][1]


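# The side_effect lists above make consecutive calls return consecutive values,
# which is how the three lifecycle steps from the comment are modelled. A minimal
# standalone illustration:
#
#     from unittest.mock import MagicMock
#
#     m = MagicMock(side_effect=['OpaqueRef:NULL', 'OpaqueRef:fake', 'OpaqueRef:fake'])
#     assert m() == 'OpaqueRef:NULL'
#     assert m() == 'OpaqueRef:fake'
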
@pytest.mark.parametrize('task_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
def test_wait_for_task_bad_task_ref(fake_ansible_module, xenserver, task_ref):
    """Tests failure on bad task_ref."""
    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_task(fake_ansible_module, task_ref)

    assert exc_info.value.kwargs['msg'] == "Cannot wait for task. Invalid task reference supplied!"


def test_wait_for_task_xenapi_failure(mock_xenapi_failure, fake_ansible_module, xenserver):
    """Tests catching of XenAPI failures."""
    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_task(fake_ansible_module, fake_xenapi_ref('task'))

    assert exc_info.value.kwargs['msg'] == "XAPI ERROR: %s" % mock_xenapi_failure[1]


def test_wait_for_task_timeout(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests timeout."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "task.get_status.return_value": "Pending",
        "task.destroy.return_value": None,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('time.sleep')

    fake_result = xenserver.wait_for_task(fake_ansible_module, fake_xenapi_ref('task'), timeout=1)

    mocked_xenapi.task.destroy.assert_called_once()
    assert fake_result == "timeout"


@pytest.mark.parametrize('task_status, result',
                         testcase_wait_for_task_all_statuses['params'],
                         ids=testcase_wait_for_task_all_statuses['ids'])
def test_wait_for_task(mocker, fake_ansible_module, XenAPI, xenserver, task_status, result):
    """Tests regular invocation."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # The mock will first return the Pending status and on the second
    # invocation it will return one of the possible final statuses.
    mocked_returns = {
        "task.get_status.side_effect": [
            'Pending',
            task_status,
        ],
        "task.destroy.return_value": None,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('time.sleep')

    fake_result = xenserver.wait_for_task(fake_ansible_module, fake_xenapi_ref('task'))

    mocked_xenapi.task.destroy.assert_called_once()
    assert fake_result == result
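

# Taken together, the timeout and final-status tests above imply a polling loop
# roughly like the following (a sketch inferred from the tests, not the actual
# module_utils.xenserver implementation; names and intervals are assumptions):
#
#     def wait_for_task(module, task_ref, timeout=0):
#         result = "timeout"
#         time_left = timeout
#         while timeout == 0 or time_left > 0:
#             status = session.xenapi.task.get_status(task_ref).lower()
#             if status != "pending":
#                 result = "" if status == "success" else status
#                 break
#             time.sleep(2)
#             time_left -= 2
#         session.xenapi.task.destroy(task_ref)
#         return result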