mirror of
https://github.com/ansible-collections/community.routeros.git
synced 2025-07-10 02:04:37 +02:00
Add api_info and api_modify modules (#91)
* Add api_info and api_modify modules. * Add documentation, and cross-references for the modules. * Bump version added for new modules. * Ignore dynamic entries. Allow _info module to return them. * Document behavior for dynamic entries. * Update license headers. * Add disclaimer. * Improve formulation. * Auto-generate path list in DOCUMENTATION. * Add test to make sure lists have been generated automatically.
This commit is contained in:
parent
1abaa8e7ca
commit
2911710f1b
21 changed files with 3277 additions and 6 deletions
|
@ -32,6 +32,8 @@ Please note that `community.routeros.api` module does **not** support Windows ju
|
||||||
- `community.routeros.api`
|
- `community.routeros.api`
|
||||||
- `community.routeros.api_facts`
|
- `community.routeros.api_facts`
|
||||||
- `community.routeros.api_find_and_modify`
|
- `community.routeros.api_find_and_modify`
|
||||||
|
- `community.routeros.api_info`
|
||||||
|
- `community.routeros.api_modify`
|
||||||
- `community.routeros.command`
|
- `community.routeros.command`
|
||||||
- `community.routeros.facts`
|
- `community.routeros.facts`
|
||||||
|
|
||||||
|
|
|
@ -8,7 +8,7 @@
|
||||||
How to connect to RouterOS devices with the RouterOS API
|
How to connect to RouterOS devices with the RouterOS API
|
||||||
========================================================
|
========================================================
|
||||||
|
|
||||||
You can use the :ref:`community.routeros.api module <ansible_collections.community.routeros.api_module>` to connect to a RouterOS device with the RouterOS API. A more specific module to modify certain entries is the :ref:`community.routeros.api_find_and_modify module <ansible_collections.community.routeros.api_find_and_modify_module>`. The :ref:`community.routeros.api_facts module <ansible_collections.community.routeros.api_facts_module>` allows to retrieve Ansible facts using the RouterOS API.
|
You can use the :ref:`community.routeros.api module <ansible_collections.community.routeros.api_module>` to connect to a RouterOS device with the RouterOS API. More specific module to modify certain entries are the :ref:`community.routeros.api_modify <ansible_collections.community.routeros.api_modify_module>` and :ref:`community.routeros.api_find_and_modify <ansible_collections.community.routeros.api_find_and_modify_module>` modules. The :ref:`community.routeros.api_info module <ansible_collections.community.routeros.api_info_module>` allows to retrieve information on specific predefined paths that can be used as input for the ``community.routeros.api_modify`` module, and the :ref:`community.routeros.api_facts module <ansible_collections.community.routeros.api_facts_module>` allows to retrieve Ansible facts using the RouterOS API.
|
||||||
|
|
||||||
No special setup is needed; the module needs to be run on a host that can connect to the device's API. The most common case is that the module is run on ``localhost``, either by using ``hosts: localhost`` in the playbook, or by using ``delegate_to: localhost`` for the task. The following example shows how to run the equivalent of ``/ip address print``:
|
No special setup is needed; the module needs to be run on a host that can connect to the device's API. The most common case is that the module is run on ``localhost``, either by using ``hosts: localhost`` in the playbook, or by using ``delegate_to: localhost`` for the task. The following example shows how to run the equivalent of ``/ip address print``:
|
||||||
|
|
||||||
|
|
|
@ -9,3 +9,5 @@ action_groups:
|
||||||
- api
|
- api
|
||||||
- api_facts
|
- api_facts
|
||||||
- api_find_and_modify
|
- api_find_and_modify
|
||||||
|
- api_info
|
||||||
|
- api_modify
|
||||||
|
|
|
@ -61,10 +61,7 @@ class KeyInfo(object):
|
||||||
|
|
||||||
|
|
||||||
def split_path(path):
|
def split_path(path):
|
||||||
parts = path.split()
|
return path.split()
|
||||||
if len(parts) == 1 and parts[0] == '':
|
|
||||||
parts = []
|
|
||||||
return parts
|
|
||||||
|
|
||||||
|
|
||||||
def join_path(path):
|
def join_path(path):
|
||||||
|
|
|
@ -19,6 +19,9 @@ description:
|
||||||
notes:
|
notes:
|
||||||
- I(add), I(remove), I(update), I(cmd) and I(query) are mutually exclusive.
|
- I(add), I(remove), I(update), I(cmd) and I(query) are mutually exclusive.
|
||||||
- I(check_mode) is not supported.
|
- I(check_mode) is not supported.
|
||||||
|
- Use the M(community.routeros.api_modify) and M(community.routeros.api_find_and_modify) modules
|
||||||
|
for more specific modifications, and the M(community.routeros.api_info) module for a more controlled
|
||||||
|
way of returning all entries for a path.
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- community.routeros.api
|
- community.routeros.api
|
||||||
options:
|
options:
|
||||||
|
@ -133,6 +136,10 @@ options:
|
||||||
seealso:
|
seealso:
|
||||||
- ref: ansible_collections.community.routeros.docsite.quoting
|
- ref: ansible_collections.community.routeros.docsite.quoting
|
||||||
description: How to quote and unquote commands and arguments
|
description: How to quote and unquote commands and arguments
|
||||||
|
- module: community.routeros.api_facts
|
||||||
|
- module: community.routeros.api_find_and_modify
|
||||||
|
- module: community.routeros.api_info
|
||||||
|
- module: community.routeros.api_modify
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
|
|
|
@ -44,6 +44,9 @@ options:
|
||||||
seealso:
|
seealso:
|
||||||
- module: community.routeros.facts
|
- module: community.routeros.facts
|
||||||
- module: community.routeros.api
|
- module: community.routeros.api
|
||||||
|
- module: community.routeros.api_find_and_modify
|
||||||
|
- module: community.routeros.api_info
|
||||||
|
- module: community.routeros.api_modify
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = """
|
EXAMPLES = """
|
||||||
|
|
|
@ -17,6 +17,8 @@ short_description: Find and modify information using the API
|
||||||
version_added: 2.1.0
|
version_added: 2.1.0
|
||||||
description:
|
description:
|
||||||
- Allows to find entries for a path by conditions and modify the values of these entries.
|
- Allows to find entries for a path by conditions and modify the values of these entries.
|
||||||
|
- Use the M(community.routeros.api_find_and_modify) module to set all entries of a path to specific values,
|
||||||
|
or change multiple entries in different ways in one step.
|
||||||
notes:
|
notes:
|
||||||
- "If you want to change values based on their old values (like change all comments 'foo' to 'bar') and make sure that
|
- "If you want to change values based on their old values (like change all comments 'foo' to 'bar') and make sure that
|
||||||
there are at least N such values, you can use I(require_matches_min=N) together with I(allow_no_matches=true).
|
there are at least N such values, you can use I(require_matches_min=N) together with I(allow_no_matches=true).
|
||||||
|
@ -65,6 +67,9 @@ options:
|
||||||
type: bool
|
type: bool
|
||||||
seealso:
|
seealso:
|
||||||
- module: community.routeros.api
|
- module: community.routeros.api
|
||||||
|
- module: community.routeros.api_facts
|
||||||
|
- module: community.routeros.api_modify
|
||||||
|
- module: community.routeros.api_info
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
|
|
290
plugins/modules/api_info.py
Normal file
290
plugins/modules/api_info.py
Normal file
|
@ -0,0 +1,290 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright (c) 2022, Felix Fontein (@felixfontein) <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
---
|
||||||
|
module: api_info
|
||||||
|
author:
|
||||||
|
- "Felix Fontein (@felixfontein)"
|
||||||
|
short_description: Retrieve information from API
|
||||||
|
version_added: 2.2.0
|
||||||
|
description:
|
||||||
|
- Allows to retrieve information for a path using the API.
|
||||||
|
- This can be used to backup a path to restore it with the M(community.routeros.api_modify) module.
|
||||||
|
- Entries are normalized, and dynamic entries are not returned. Use the I(handle_disabled) and
|
||||||
|
I(hide_defaults) options to control normalization, the I(include_dynamic) option to also return
|
||||||
|
dynamic entries, and use I(unfiltered) to return all fields including counters.
|
||||||
|
- B(Note) that this module is still heavily in development, and only supports B(some) paths.
|
||||||
|
If you want to support new paths, or think you found problems with existing paths, please first
|
||||||
|
L(create an issue in the community.routeros Issue Tracker,https://github.com/ansible-collections/community.routeros/issues/).
|
||||||
|
notes:
|
||||||
|
- Supports I(check_mode).
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- community.routeros.api
|
||||||
|
options:
|
||||||
|
path:
|
||||||
|
description:
|
||||||
|
- Path to query.
|
||||||
|
- An example value is C(ip address). This is equivalent to running C(/ip address print) in the RouterOS CLI.
|
||||||
|
required: true
|
||||||
|
type: str
|
||||||
|
choices:
|
||||||
|
# BEGIN PATH LIST
|
||||||
|
- caps-man aaa
|
||||||
|
- certificate settings
|
||||||
|
- interface bridge port
|
||||||
|
- interface bridge port-controller
|
||||||
|
- interface bridge port-extender
|
||||||
|
- interface bridge settings
|
||||||
|
- interface detect-internet
|
||||||
|
- interface ethernet
|
||||||
|
- interface ethernet switch
|
||||||
|
- interface ethernet switch port
|
||||||
|
- interface l2tp-server server
|
||||||
|
- interface ovpn-server server
|
||||||
|
- interface pptp-server server
|
||||||
|
- interface sstp-server server
|
||||||
|
- interface wireless align
|
||||||
|
- interface wireless cap
|
||||||
|
- interface wireless sniffer
|
||||||
|
- interface wireless snooper
|
||||||
|
- ip accounting
|
||||||
|
- ip accounting web-access
|
||||||
|
- ip address
|
||||||
|
- ip cloud
|
||||||
|
- ip cloud advanced
|
||||||
|
- ip dhcp-client
|
||||||
|
- ip dhcp-client option
|
||||||
|
- ip dhcp-server
|
||||||
|
- ip dhcp-server config
|
||||||
|
- ip dhcp-server lease
|
||||||
|
- ip dhcp-server network
|
||||||
|
- ip dns
|
||||||
|
- ip dns static
|
||||||
|
- ip firewall address-list
|
||||||
|
- ip firewall connection tracking
|
||||||
|
- ip firewall filter
|
||||||
|
- ip firewall nat
|
||||||
|
- ip firewall service-port
|
||||||
|
- ip hotspot service-port
|
||||||
|
- ip ipsec settings
|
||||||
|
- ip neighbor discovery-settings
|
||||||
|
- ip pool
|
||||||
|
- ip proxy
|
||||||
|
- ip service
|
||||||
|
- ip settings
|
||||||
|
- ip smb
|
||||||
|
- ip socks
|
||||||
|
- ip ssh
|
||||||
|
- ip tftp settings
|
||||||
|
- ip traffic-flow
|
||||||
|
- ip traffic-flow ipfix
|
||||||
|
- ip upnp
|
||||||
|
- ipv6 dhcp-client
|
||||||
|
- ipv6 firewall address-list
|
||||||
|
- ipv6 firewall filter
|
||||||
|
- ipv6 nd prefix default
|
||||||
|
- ipv6 settings
|
||||||
|
- mpls
|
||||||
|
- mpls ldp
|
||||||
|
- port firmware
|
||||||
|
- ppp aaa
|
||||||
|
- queue interface
|
||||||
|
- radius incoming
|
||||||
|
- routing bgp instance
|
||||||
|
- routing mme
|
||||||
|
- routing rip
|
||||||
|
- routing ripng
|
||||||
|
- snmp
|
||||||
|
- system clock
|
||||||
|
- system clock manual
|
||||||
|
- system identity
|
||||||
|
- system leds settings
|
||||||
|
- system note
|
||||||
|
- system ntp client
|
||||||
|
- system package update
|
||||||
|
- system routerboard settings
|
||||||
|
- system upgrade mirror
|
||||||
|
- system watchdog
|
||||||
|
- tool bandwidth-server
|
||||||
|
- tool e-mail
|
||||||
|
- tool graphing
|
||||||
|
- tool mac-server
|
||||||
|
- tool mac-server mac-winbox
|
||||||
|
- tool mac-server ping
|
||||||
|
- tool romon
|
||||||
|
- tool sms
|
||||||
|
- tool sniffer
|
||||||
|
- tool traffic-generator
|
||||||
|
- user aaa
|
||||||
|
- user group
|
||||||
|
# END PATH LIST
|
||||||
|
unfiltered:
|
||||||
|
description:
|
||||||
|
- Whether to output all fields, and not just the ones supported as input for M(community.routeros.api_modify).
|
||||||
|
- Unfiltered output can contain counters and other state information.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
handle_disabled:
|
||||||
|
description:
|
||||||
|
- How to handle unset values.
|
||||||
|
- C(exclamation) prepends the keys with C(!) in the output with value C(null).
|
||||||
|
- C(null-value) uses the regular key with value C(null).
|
||||||
|
- C(omit) omits these values from the result.
|
||||||
|
type: str
|
||||||
|
choices:
|
||||||
|
- exclamation
|
||||||
|
- null-value
|
||||||
|
- omit
|
||||||
|
default: exclamation
|
||||||
|
hide_defaults:
|
||||||
|
description:
|
||||||
|
- Whether to hide default values.
|
||||||
|
type: bool
|
||||||
|
default: true
|
||||||
|
include_dynamic:
|
||||||
|
description:
|
||||||
|
- Whether to include dynamic values.
|
||||||
|
- By default, they are not returned, and the C(dynamic) keys are omitted.
|
||||||
|
- If set to C(true), they are returned as well, and the C(dynamic) keys are returned as well.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
seealso:
|
||||||
|
- module: community.routeros.api
|
||||||
|
- module: community.routeros.api_facts
|
||||||
|
- module: community.routeros.api_find_and_modify
|
||||||
|
- module: community.routeros.api_modify
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = '''
|
||||||
|
---
|
||||||
|
- name: Get IP addresses
|
||||||
|
community.routeros.api_info:
|
||||||
|
hostname: "{{ hostname }}"
|
||||||
|
password: "{{ password }}"
|
||||||
|
username: "{{ username }}"
|
||||||
|
path: ip address
|
||||||
|
register: ip_addresses
|
||||||
|
|
||||||
|
- name: Print data for IP addresses
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: ip_addresses.result
|
||||||
|
'''
|
||||||
|
|
||||||
|
RETURN = '''
|
||||||
|
---
|
||||||
|
result:
|
||||||
|
description: A list of all elements for the current path.
|
||||||
|
sample:
|
||||||
|
- '.id': '*1'
|
||||||
|
actual-interface: bridge
|
||||||
|
address: "192.168.88.1/24"
|
||||||
|
comment: defconf
|
||||||
|
disabled: false
|
||||||
|
dynamic: false
|
||||||
|
interface: bridge
|
||||||
|
invalid: false
|
||||||
|
network: 192.168.88.0
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
returned: always
|
||||||
|
'''
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
|
|
||||||
|
from ansible_collections.community.routeros.plugins.module_utils.api import (
|
||||||
|
api_argument_spec,
|
||||||
|
check_has_library,
|
||||||
|
create_api,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ansible_collections.community.routeros.plugins.module_utils._api_data import (
|
||||||
|
PATHS,
|
||||||
|
join_path,
|
||||||
|
split_path,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from librouteros.exceptions import LibRouterosError
|
||||||
|
except Exception:
|
||||||
|
# Handled in api module_utils
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def compose_api_path(api, path):
|
||||||
|
api_path = api.path()
|
||||||
|
for p in path:
|
||||||
|
api_path = api_path.join(p)
|
||||||
|
return api_path
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
module_args = dict(
|
||||||
|
path=dict(type='str', required=True, choices=sorted([join_path(path) for path in PATHS if PATHS[path].fully_understood])),
|
||||||
|
unfiltered=dict(type='bool', default=False),
|
||||||
|
handle_disabled=dict(type='str', choices=['exclamation', 'null-value', 'omit'], default='exclamation'),
|
||||||
|
hide_defaults=dict(type='bool', default=True),
|
||||||
|
include_dynamic=dict(type='bool', default=False),
|
||||||
|
)
|
||||||
|
module_args.update(api_argument_spec())
|
||||||
|
|
||||||
|
module = AnsibleModule(
|
||||||
|
argument_spec=module_args,
|
||||||
|
supports_check_mode=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
check_has_library(module)
|
||||||
|
api = create_api(module)
|
||||||
|
|
||||||
|
path = split_path(module.params['path'])
|
||||||
|
path_info = PATHS.get(tuple(path))
|
||||||
|
if path_info is None:
|
||||||
|
module.fail_json(msg='Path /{path} is not yet supported'.format(path='/'.join(path)))
|
||||||
|
|
||||||
|
handle_disabled = module.params['handle_disabled']
|
||||||
|
hide_defaults = module.params['hide_defaults']
|
||||||
|
include_dynamic = module.params['include_dynamic']
|
||||||
|
try:
|
||||||
|
api_path = compose_api_path(api, path)
|
||||||
|
|
||||||
|
result = []
|
||||||
|
unfiltered = module.params['unfiltered']
|
||||||
|
for entry in api_path:
|
||||||
|
if not include_dynamic:
|
||||||
|
if entry.get('dynamic', False):
|
||||||
|
continue
|
||||||
|
if not unfiltered:
|
||||||
|
for k in list(entry):
|
||||||
|
if k == '.id':
|
||||||
|
continue
|
||||||
|
if k == 'dynamic' and include_dynamic:
|
||||||
|
continue
|
||||||
|
if k not in path_info.fields:
|
||||||
|
entry.pop(k)
|
||||||
|
if handle_disabled != 'omit':
|
||||||
|
for k in path_info.fields:
|
||||||
|
if k not in entry:
|
||||||
|
if handle_disabled == 'exclamation':
|
||||||
|
k = '!%s' % k
|
||||||
|
entry[k] = None
|
||||||
|
if hide_defaults:
|
||||||
|
for k, field_info in path_info.fields.items():
|
||||||
|
if field_info.default is not None and entry.get(k) == field_info.default:
|
||||||
|
entry.pop(k)
|
||||||
|
result.append(entry)
|
||||||
|
|
||||||
|
module.exit_json(result=result)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(msg=to_native(e))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
918
plugins/modules/api_modify.py
Normal file
918
plugins/modules/api_modify.py
Normal file
|
@ -0,0 +1,918 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright (c) 2022, Felix Fontein (@felixfontein) <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
---
|
||||||
|
module: api_modify
|
||||||
|
author:
|
||||||
|
- "Felix Fontein (@felixfontein)"
|
||||||
|
short_description: Modify data at paths with API
|
||||||
|
version_added: 2.2.0
|
||||||
|
description:
|
||||||
|
- Allows to modify information for a path using the API.
|
||||||
|
- Use the M(community.routeros.api_find_and_modify) module to modify one or multiple entries in a controlled way
|
||||||
|
depending on some search conditions.
|
||||||
|
- To make a backup of a path that can be restored with this module, use the M(community.routeros.api_info) module.
|
||||||
|
- The module ignores dynamic entries.
|
||||||
|
- B(Note) that this module is still heavily in development, and only supports B(some) paths.
|
||||||
|
If you want to support new paths, or think you found problems with existing paths, please first
|
||||||
|
L(create an issue in the community.routeros Issue Tracker,https://github.com/ansible-collections/community.routeros/issues/).
|
||||||
|
notes:
|
||||||
|
- Supports I(check_mode).
|
||||||
|
requirements:
|
||||||
|
- Needs L(ordereddict,https://pypi.org/project/ordereddict) for Python 2.6
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- community.routeros.api
|
||||||
|
options:
|
||||||
|
path:
|
||||||
|
description:
|
||||||
|
- Path to query.
|
||||||
|
- An example value is C(ip address). This is equivalent to running modification commands in C(/ip address) in the RouterOS CLI.
|
||||||
|
required: true
|
||||||
|
type: str
|
||||||
|
choices:
|
||||||
|
# BEGIN PATH LIST
|
||||||
|
- caps-man aaa
|
||||||
|
- certificate settings
|
||||||
|
- interface bridge port
|
||||||
|
- interface bridge port-controller
|
||||||
|
- interface bridge port-extender
|
||||||
|
- interface bridge settings
|
||||||
|
- interface detect-internet
|
||||||
|
- interface ethernet
|
||||||
|
- interface ethernet switch
|
||||||
|
- interface ethernet switch port
|
||||||
|
- interface l2tp-server server
|
||||||
|
- interface ovpn-server server
|
||||||
|
- interface pptp-server server
|
||||||
|
- interface sstp-server server
|
||||||
|
- interface wireless align
|
||||||
|
- interface wireless cap
|
||||||
|
- interface wireless sniffer
|
||||||
|
- interface wireless snooper
|
||||||
|
- ip accounting
|
||||||
|
- ip accounting web-access
|
||||||
|
- ip address
|
||||||
|
- ip cloud
|
||||||
|
- ip cloud advanced
|
||||||
|
- ip dhcp-client
|
||||||
|
- ip dhcp-client option
|
||||||
|
- ip dhcp-server
|
||||||
|
- ip dhcp-server config
|
||||||
|
- ip dhcp-server lease
|
||||||
|
- ip dhcp-server network
|
||||||
|
- ip dns
|
||||||
|
- ip dns static
|
||||||
|
- ip firewall address-list
|
||||||
|
- ip firewall connection tracking
|
||||||
|
- ip firewall filter
|
||||||
|
- ip firewall nat
|
||||||
|
- ip firewall service-port
|
||||||
|
- ip hotspot service-port
|
||||||
|
- ip ipsec settings
|
||||||
|
- ip neighbor discovery-settings
|
||||||
|
- ip pool
|
||||||
|
- ip proxy
|
||||||
|
- ip service
|
||||||
|
- ip settings
|
||||||
|
- ip smb
|
||||||
|
- ip socks
|
||||||
|
- ip ssh
|
||||||
|
- ip tftp settings
|
||||||
|
- ip traffic-flow
|
||||||
|
- ip traffic-flow ipfix
|
||||||
|
- ip upnp
|
||||||
|
- ipv6 dhcp-client
|
||||||
|
- ipv6 firewall address-list
|
||||||
|
- ipv6 firewall filter
|
||||||
|
- ipv6 nd prefix default
|
||||||
|
- ipv6 settings
|
||||||
|
- mpls
|
||||||
|
- mpls ldp
|
||||||
|
- port firmware
|
||||||
|
- ppp aaa
|
||||||
|
- queue interface
|
||||||
|
- radius incoming
|
||||||
|
- routing bgp instance
|
||||||
|
- routing mme
|
||||||
|
- routing rip
|
||||||
|
- routing ripng
|
||||||
|
- snmp
|
||||||
|
- system clock
|
||||||
|
- system clock manual
|
||||||
|
- system identity
|
||||||
|
- system leds settings
|
||||||
|
- system note
|
||||||
|
- system ntp client
|
||||||
|
- system package update
|
||||||
|
- system routerboard settings
|
||||||
|
- system upgrade mirror
|
||||||
|
- system watchdog
|
||||||
|
- tool bandwidth-server
|
||||||
|
- tool e-mail
|
||||||
|
- tool graphing
|
||||||
|
- tool mac-server
|
||||||
|
- tool mac-server mac-winbox
|
||||||
|
- tool mac-server ping
|
||||||
|
- tool romon
|
||||||
|
- tool sms
|
||||||
|
- tool sniffer
|
||||||
|
- tool traffic-generator
|
||||||
|
- user aaa
|
||||||
|
- user group
|
||||||
|
# END PATH LIST
|
||||||
|
data:
|
||||||
|
description:
|
||||||
|
- Data to ensure that is present for this path.
|
||||||
|
- Fields not provided will not be modified.
|
||||||
|
- If C(.id) appears in an entry, it will be ignored.
|
||||||
|
required: true
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
ensure_order:
|
||||||
|
description:
|
||||||
|
- Whether to ensure the same order of the config as present in I(data).
|
||||||
|
- Requires I(handle_absent_entries=remove).
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
handle_absent_entries:
|
||||||
|
description:
|
||||||
|
- How to handle entries that are present in the current config, but not in I(data).
|
||||||
|
- C(ignore) ignores them.
|
||||||
|
- C(remove) removes them.
|
||||||
|
type: str
|
||||||
|
choices:
|
||||||
|
- ignore
|
||||||
|
- remove
|
||||||
|
default: ignore
|
||||||
|
handle_entries_content:
|
||||||
|
description:
|
||||||
|
- For a single entry in I(data), this describes how to handle fields that are not mentioned
|
||||||
|
in that entry, but appear in the actual config.
|
||||||
|
- If C(ignore), they are not modified.
|
||||||
|
- If C(remove), they are removed. If at least one cannot be removed, the module will fail.
|
||||||
|
- If C(remove_as_much_as_possible), all that can be removed will be removed. The ones that
|
||||||
|
cannot be removed will be kept.
|
||||||
|
type: str
|
||||||
|
choices:
|
||||||
|
- ignore
|
||||||
|
- remove
|
||||||
|
- remove_as_much_as_possible
|
||||||
|
default: ignore
|
||||||
|
seealso:
|
||||||
|
- module: community.routeros.api
|
||||||
|
- module: community.routeros.api_facts
|
||||||
|
- module: community.routeros.api_find_and_modify
|
||||||
|
- module: community.routeros.api_info
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = '''
|
||||||
|
---
|
||||||
|
- name: Setup DHCP server networks
|
||||||
|
# Ensures that we have exactly two DHCP server networks (in the specified order)
|
||||||
|
community.routeros.api_modify:
|
||||||
|
path: ip dhcp-server network
|
||||||
|
handle_absent_entries: remove
|
||||||
|
handle_entries_content: remove_as_much_as_possible
|
||||||
|
ensure_order: true
|
||||||
|
data:
|
||||||
|
- address: 192.168.88.0/24
|
||||||
|
comment: admin network
|
||||||
|
dns-server: 192.168.88.1
|
||||||
|
gateway: 192.168.88.1
|
||||||
|
- address: 192.168.1.0/24
|
||||||
|
comment: customer network 1
|
||||||
|
dns-server: 192.168.1.1
|
||||||
|
gateway: 192.168.1.1
|
||||||
|
netmask: 24
|
||||||
|
|
||||||
|
- name: Adjust NAT
|
||||||
|
community.routeros.api_modify:
|
||||||
|
hostname: "{{ hostname }}"
|
||||||
|
password: "{{ password }}"
|
||||||
|
username: "{{ username }}"
|
||||||
|
path: ip firewall nat
|
||||||
|
data:
|
||||||
|
- action: masquerade
|
||||||
|
chain: srcnat
|
||||||
|
comment: NAT to WAN
|
||||||
|
out-interface-list: WAN
|
||||||
|
# Three ways to unset values:
|
||||||
|
# - nothing after `:`
|
||||||
|
# - "empty" value (null/~/None)
|
||||||
|
# - prepend '!'
|
||||||
|
out-interface:
|
||||||
|
to-addresses: ~
|
||||||
|
'!to-ports':
|
||||||
|
'''
|
||||||
|
|
||||||
|
RETURN = '''
|
||||||
|
---
|
||||||
|
old_data:
|
||||||
|
description:
|
||||||
|
- A list of all elements for the current path before a change was made.
|
||||||
|
sample:
|
||||||
|
- '.id': '*1'
|
||||||
|
actual-interface: bridge
|
||||||
|
address: "192.168.88.1/24"
|
||||||
|
comment: defconf
|
||||||
|
disabled: false
|
||||||
|
dynamic: false
|
||||||
|
interface: bridge
|
||||||
|
invalid: false
|
||||||
|
network: 192.168.88.0
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
returned: always
|
||||||
|
new_data:
|
||||||
|
description:
|
||||||
|
- A list of all elements for the current path after a change was made.
|
||||||
|
sample:
|
||||||
|
- '.id': '*1'
|
||||||
|
actual-interface: bridge
|
||||||
|
address: "192.168.1.1/24"
|
||||||
|
comment: awesome
|
||||||
|
disabled: false
|
||||||
|
dynamic: false
|
||||||
|
interface: bridge
|
||||||
|
invalid: false
|
||||||
|
network: 192.168.1.0
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
returned: always
|
||||||
|
'''
|
||||||
|
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||||
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
|
|
||||||
|
from ansible_collections.community.routeros.plugins.module_utils.api import (
|
||||||
|
api_argument_spec,
|
||||||
|
check_has_library,
|
||||||
|
create_api,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ansible_collections.community.routeros.plugins.module_utils._api_data import (
|
||||||
|
PATHS,
|
||||||
|
join_path,
|
||||||
|
split_path,
|
||||||
|
)
|
||||||
|
|
||||||
|
HAS_ORDEREDDICT = True
|
||||||
|
try:
|
||||||
|
from collections import OrderedDict
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
from ordereddict import OrderedDict
|
||||||
|
except ImportError:
|
||||||
|
HAS_ORDEREDDICT = False
|
||||||
|
OrderedDict = dict
|
||||||
|
|
||||||
|
try:
|
||||||
|
from librouteros.exceptions import LibRouterosError
|
||||||
|
except Exception:
|
||||||
|
# Handled in api module_utils
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def compose_api_path(api, path):
|
||||||
|
api_path = api.path()
|
||||||
|
for p in path:
|
||||||
|
api_path = api_path.join(p)
|
||||||
|
return api_path
|
||||||
|
|
||||||
|
|
||||||
|
def find_modifications(old_entry, new_entry, path_info, module, for_text='', return_none_instead_of_fail=False):
|
||||||
|
modifications = OrderedDict()
|
||||||
|
updated_entry = old_entry.copy()
|
||||||
|
for k, v in new_entry.items():
|
||||||
|
if k == '.id':
|
||||||
|
continue
|
||||||
|
disabled_k = None
|
||||||
|
if k.startswith('!'):
|
||||||
|
disabled_k = k[1:]
|
||||||
|
elif v is None or v == path_info.fields[k].remove_value:
|
||||||
|
disabled_k = k
|
||||||
|
if disabled_k is not None:
|
||||||
|
if disabled_k in old_entry:
|
||||||
|
if path_info.fields[disabled_k].remove_value is not None:
|
||||||
|
modifications[disabled_k] = path_info.fields[disabled_k].remove_value
|
||||||
|
else:
|
||||||
|
modifications['!%s' % disabled_k] = ''
|
||||||
|
del updated_entry[disabled_k]
|
||||||
|
continue
|
||||||
|
if k not in old_entry and path_info.fields[k].default == v:
|
||||||
|
continue
|
||||||
|
if k not in old_entry or old_entry[k] != v:
|
||||||
|
modifications[k] = v
|
||||||
|
updated_entry[k] = v
|
||||||
|
handle_entries_content = module.params['handle_entries_content']
|
||||||
|
if handle_entries_content != 'ignore':
|
||||||
|
for k in old_entry:
|
||||||
|
if k == '.id' or k in new_entry or ('!%s' % k) in new_entry or k not in path_info.fields:
|
||||||
|
continue
|
||||||
|
field_info = path_info.fields[k]
|
||||||
|
if field_info.default is not None and field_info.default == old_entry[k]:
|
||||||
|
continue
|
||||||
|
if field_info.remove_value is not None and field_info.remove_value == old_entry[k]:
|
||||||
|
continue
|
||||||
|
if field_info.can_disable:
|
||||||
|
if field_info.remove_value is not None:
|
||||||
|
modifications[k] = field_info.remove_value
|
||||||
|
else:
|
||||||
|
modifications['!%s' % k] = ''
|
||||||
|
del updated_entry[k]
|
||||||
|
elif field_info.default is not None:
|
||||||
|
modifications[k] = field_info.default
|
||||||
|
updated_entry[k] = field_info.default
|
||||||
|
elif handle_entries_content == 'remove':
|
||||||
|
if return_none_instead_of_fail:
|
||||||
|
return None, None
|
||||||
|
module.fail_json(msg='Key "{key}" cannot be removed{for_text}.'.format(key=k, for_text=for_text))
|
||||||
|
return modifications, updated_entry
|
||||||
|
|
||||||
|
|
||||||
|
def essentially_same_weight(old_entry, new_entry, path_info, module):
|
||||||
|
for k, v in new_entry.items():
|
||||||
|
if k == '.id':
|
||||||
|
continue
|
||||||
|
disabled_k = None
|
||||||
|
if k.startswith('!'):
|
||||||
|
disabled_k = k[1:]
|
||||||
|
elif v is None or v == path_info.fields[k].remove_value:
|
||||||
|
disabled_k = k
|
||||||
|
if disabled_k is not None:
|
||||||
|
if disabled_k in old_entry:
|
||||||
|
return None
|
||||||
|
continue
|
||||||
|
if k not in old_entry and path_info.fields[k].default == v:
|
||||||
|
continue
|
||||||
|
if k not in old_entry or old_entry[k] != v:
|
||||||
|
return None
|
||||||
|
handle_entries_content = module.params['handle_entries_content']
|
||||||
|
weight = 0
|
||||||
|
for k in old_entry:
|
||||||
|
if k == '.id' or k in new_entry or ('!%s' % k) in new_entry or k not in path_info.fields:
|
||||||
|
continue
|
||||||
|
field_info = path_info.fields[k]
|
||||||
|
if field_info.default is not None and field_info.default == old_entry[k]:
|
||||||
|
continue
|
||||||
|
if handle_entries_content != 'ignore':
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
weight += 1
|
||||||
|
return weight
|
||||||
|
|
||||||
|
|
||||||
|
def format_pk(primary_keys, values):
|
||||||
|
return ', '.join('{pk}="{value}"'.format(pk=pk, value=value) for pk, value in zip(primary_keys, values))
|
||||||
|
|
||||||
|
|
||||||
|
def polish_entry(entry, path_info, module, for_text):
|
||||||
|
if '.id' in entry:
|
||||||
|
entry.pop('.id')
|
||||||
|
for key, value in entry.items():
|
||||||
|
real_key = key
|
||||||
|
disabled_key = False
|
||||||
|
if key.startswith('!'):
|
||||||
|
disabled_key = True
|
||||||
|
key = key[1:]
|
||||||
|
if key in entry:
|
||||||
|
module.fail_json(msg='Not both "{key}" and "!{key}" must appear{for_text}.'.format(key=key, for_text=for_text))
|
||||||
|
key_info = path_info.fields.get(key)
|
||||||
|
if key_info is None:
|
||||||
|
module.fail_json(msg='Unknown key "{key}"{for_text}.'.format(key=real_key, for_text=for_text))
|
||||||
|
if disabled_key:
|
||||||
|
if not key_info.can_disable:
|
||||||
|
module.fail_json(msg='Key "!{key}" must not be disabled (leading "!"){for_text}.'.format(key=key, for_text=for_text))
|
||||||
|
if value not in (None, '', key_info.remove_value):
|
||||||
|
module.fail_json(msg='Disabled key "!{key}" must not have a value{for_text}.'.format(key=key, for_text=for_text))
|
||||||
|
elif value is None:
|
||||||
|
if not key_info.can_disable:
|
||||||
|
module.fail_json(msg='Key "{key}" must not be disabled (value null/~/None){for_text}.'.format(key=key, for_text=for_text))
|
||||||
|
for key, field_info in path_info.fields.items():
|
||||||
|
if field_info.required and key not in entry:
|
||||||
|
module.fail_json(msg='Key "{key}" must be present{for_text}.'.format(key=key, for_text=for_text))
|
||||||
|
|
||||||
|
|
||||||
|
def remove_irrelevant_data(entry, path_info):
|
||||||
|
for k, v in list(entry.items()):
|
||||||
|
if k == '.id':
|
||||||
|
continue
|
||||||
|
if k not in path_info.fields or v is None:
|
||||||
|
del entry[k]
|
||||||
|
|
||||||
|
|
||||||
|
def match_entries(new_entries, old_entries, path_info, module):
|
||||||
|
matching_old_entries = [None for entry in new_entries]
|
||||||
|
old_entries = list(old_entries)
|
||||||
|
matches = []
|
||||||
|
handle_absent_entries = module.params['handle_absent_entries']
|
||||||
|
if handle_absent_entries == 'remove':
|
||||||
|
for new_index, (unused, new_entry) in enumerate(new_entries):
|
||||||
|
for old_index, (unused, old_entry) in enumerate(old_entries):
|
||||||
|
modifications, unused = find_modifications(old_entry, new_entry, path_info, module, return_none_instead_of_fail=True)
|
||||||
|
if modifications is not None:
|
||||||
|
matches.append((new_index, old_index, len(modifications)))
|
||||||
|
else:
|
||||||
|
for new_index, (unused, new_entry) in enumerate(new_entries):
|
||||||
|
for old_index, (unused, old_entry) in enumerate(old_entries):
|
||||||
|
weight = essentially_same_weight(old_entry, new_entry, path_info, module)
|
||||||
|
if weight is not None:
|
||||||
|
matches.append((new_index, old_index, weight))
|
||||||
|
matches.sort(key=lambda entry: entry[2])
|
||||||
|
for new_index, old_index, rating in matches:
|
||||||
|
if matching_old_entries[new_index] is not None or old_entries[old_index] is None:
|
||||||
|
continue
|
||||||
|
matching_old_entries[new_index], old_entries[old_index] = old_entries[old_index], None
|
||||||
|
unmatched_old_entries = [index_entry for index_entry in old_entries if index_entry is not None]
|
||||||
|
return matching_old_entries, unmatched_old_entries
|
||||||
|
|
||||||
|
|
||||||
|
def remove_dynamic(entries):
|
||||||
|
result = []
|
||||||
|
for entry in entries:
|
||||||
|
if entry.get('dynamic', False):
|
||||||
|
continue
|
||||||
|
result.append(entry)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def sync_list(module, api, path, path_info):
|
||||||
|
handle_absent_entries = module.params['handle_absent_entries']
|
||||||
|
handle_entries_content = module.params['handle_entries_content']
|
||||||
|
if handle_absent_entries == 'remove':
|
||||||
|
if handle_entries_content == 'ignore':
|
||||||
|
module.fail_json('For this path, handle_absent_entries=remove cannot be combined with handle_entries_content=ignore')
|
||||||
|
|
||||||
|
stratify_keys = path_info.stratify_keys or ()
|
||||||
|
|
||||||
|
data = module.params['data']
|
||||||
|
stratified_data = defaultdict(list)
|
||||||
|
for index, entry in enumerate(data):
|
||||||
|
for stratify_key in stratify_keys:
|
||||||
|
if stratify_key not in entry:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Every element in data must contain "{stratify_key}". For example, the element at index #{index} does not provide it.'.format(
|
||||||
|
stratify_key=stratify_key,
|
||||||
|
index=index + 1,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
sks = tuple(entry[stratify_key] for stratify_key in stratify_keys)
|
||||||
|
polish_entry(
|
||||||
|
entry, path_info, module,
|
||||||
|
' at index {index}'.format(index=index + 1),
|
||||||
|
)
|
||||||
|
stratified_data[sks].append((index, entry))
|
||||||
|
stratified_data = dict(stratified_data)
|
||||||
|
|
||||||
|
api_path = compose_api_path(api, path)
|
||||||
|
|
||||||
|
old_data = list(api_path)
|
||||||
|
old_data = remove_dynamic(old_data)
|
||||||
|
stratified_old_data = defaultdict(list)
|
||||||
|
for index, entry in enumerate(old_data):
|
||||||
|
sks = tuple(entry[stratify_key] for stratify_key in stratify_keys)
|
||||||
|
stratified_old_data[sks].append((index, entry))
|
||||||
|
stratified_old_data = dict(stratified_old_data)
|
||||||
|
|
||||||
|
create_list = []
|
||||||
|
modify_list = []
|
||||||
|
remove_list = []
|
||||||
|
|
||||||
|
new_data = []
|
||||||
|
for key, indexed_entries in stratified_data.items():
|
||||||
|
old_entries = stratified_old_data.pop(key, [])
|
||||||
|
|
||||||
|
# Try to match indexed_entries with old_entries
|
||||||
|
matching_old_entries, unmatched_old_entries = match_entries(indexed_entries, old_entries, path_info, module)
|
||||||
|
|
||||||
|
# Update existing entries
|
||||||
|
for (index, new_entry), potential_old_entry in zip(indexed_entries, matching_old_entries):
|
||||||
|
if potential_old_entry is not None:
|
||||||
|
old_index, old_entry = potential_old_entry
|
||||||
|
modifications, updated_entry = find_modifications(
|
||||||
|
old_entry, new_entry, path_info, module,
|
||||||
|
' at index {index}'.format(index=index + 1),
|
||||||
|
)
|
||||||
|
# Add to modification list if there are changes
|
||||||
|
if modifications:
|
||||||
|
modifications['.id'] = old_entry['.id']
|
||||||
|
modify_list.append(modifications)
|
||||||
|
new_data.append((old_index, updated_entry))
|
||||||
|
new_entry['.id'] = old_entry['.id']
|
||||||
|
else:
|
||||||
|
create_list.append(new_entry)
|
||||||
|
|
||||||
|
if handle_absent_entries == 'remove':
|
||||||
|
remove_list.extend(entry['.id'] for index, entry in unmatched_old_entries)
|
||||||
|
else:
|
||||||
|
new_data.extend(unmatched_old_entries)
|
||||||
|
|
||||||
|
for key, entries in stratified_old_data.items():
|
||||||
|
if handle_absent_entries == 'remove':
|
||||||
|
remove_list.extend(entry['.id'] for index, entry in entries)
|
||||||
|
else:
|
||||||
|
new_data.extend(entries)
|
||||||
|
|
||||||
|
new_data = [entry for index, entry in sorted(new_data, key=lambda entry: entry[0])]
|
||||||
|
new_data.extend(create_list)
|
||||||
|
|
||||||
|
reorder_list = []
|
||||||
|
if module.params['ensure_order']:
|
||||||
|
for index, entry in enumerate(data):
|
||||||
|
if '.id' in entry:
|
||||||
|
def match(current_entry):
|
||||||
|
return current_entry['.id'] == entry['.id']
|
||||||
|
|
||||||
|
else:
|
||||||
|
def match(current_entry):
|
||||||
|
return current_entry is entry
|
||||||
|
|
||||||
|
current_index = next(current_index + index for current_index, current_entry in enumerate(new_data[index:]) if match(current_entry))
|
||||||
|
if current_index != index:
|
||||||
|
reorder_list.append((index, new_data[current_index], new_data[index]))
|
||||||
|
new_data.insert(index, new_data.pop(current_index))
|
||||||
|
|
||||||
|
if not module.check_mode:
|
||||||
|
if remove_list:
|
||||||
|
try:
|
||||||
|
api_path.remove(*remove_list)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while removing {remove_list}: {error}'.format(
|
||||||
|
remove_list=', '.join(['ID {id}'.format(id=id) for id in remove_list]),
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for modifications in modify_list:
|
||||||
|
try:
|
||||||
|
api_path.update(**modifications)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while modifying for ID {id}: {error}'.format(
|
||||||
|
id=modifications['.id'],
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for entry in create_list:
|
||||||
|
try:
|
||||||
|
entry['.id'] = api_path.add(**entry)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while creating entry: {error}'.format(
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for new_index, new_entry, old_entry in reorder_list:
|
||||||
|
try:
|
||||||
|
for res in api_path('move', numbers=new_entry['.id'], destination=old_entry['.id']):
|
||||||
|
pass
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while moving entry ID {element_id} to position #{new_index} ID ({new_id}): {error}'.format(
|
||||||
|
element_id=new_entry['.id'],
|
||||||
|
new_index=new_index,
|
||||||
|
new_id=old_entry['.id'],
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# For sake of completeness, retrieve the full new data:
|
||||||
|
if modify_list or create_list or reorder_list:
|
||||||
|
new_data = remove_dynamic(list(api_path))
|
||||||
|
|
||||||
|
# Remove 'irrelevant' data
|
||||||
|
for entry in old_data:
|
||||||
|
remove_irrelevant_data(entry, path_info)
|
||||||
|
for entry in new_data:
|
||||||
|
remove_irrelevant_data(entry, path_info)
|
||||||
|
|
||||||
|
# Produce return value
|
||||||
|
more = {}
|
||||||
|
if module._diff:
|
||||||
|
more['diff'] = {
|
||||||
|
'before': {
|
||||||
|
'data': old_data,
|
||||||
|
},
|
||||||
|
'after': {
|
||||||
|
'data': new_data,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
module.exit_json(
|
||||||
|
changed=bool(create_list or modify_list or remove_list or reorder_list),
|
||||||
|
old_data=old_data,
|
||||||
|
new_data=new_data,
|
||||||
|
**more
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def sync_with_primary_keys(module, api, path, path_info):
|
||||||
|
primary_keys = path_info.primary_keys
|
||||||
|
|
||||||
|
if path_info.fixed_entries:
|
||||||
|
if module.params['ensure_order']:
|
||||||
|
module.fail_json(msg='ensure_order=true cannot be used with this path')
|
||||||
|
if module.params['handle_absent_entries'] == 'remove':
|
||||||
|
module.fail_json(msg='handle_absent_entries=remove cannot be used with this path')
|
||||||
|
|
||||||
|
data = module.params['data']
|
||||||
|
new_data_by_key = OrderedDict()
|
||||||
|
for index, entry in enumerate(data):
|
||||||
|
for primary_key in primary_keys:
|
||||||
|
if primary_key not in entry:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Every element in data must contain "{primary_key}". For example, the element at index #{index} does not provide it.'.format(
|
||||||
|
primary_key=primary_key,
|
||||||
|
index=index + 1,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
pks = tuple(entry[primary_key] for primary_key in primary_keys)
|
||||||
|
if pks in new_data_by_key:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Every element in data must contain a unique value for {primary_keys}. The value {value} appears at least twice.'.format(
|
||||||
|
primary_keys=','.join(primary_keys),
|
||||||
|
value=','.join(['"{0}"'.format(pk) for pk in pks]),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
polish_entry(
|
||||||
|
entry, path_info, module,
|
||||||
|
' for {values}'.format(
|
||||||
|
values=', '.join([
|
||||||
|
'{primary_key}="{value}"'.format(primary_key=primary_key, value=value)
|
||||||
|
for primary_key, value in zip(primary_keys, pks)
|
||||||
|
])
|
||||||
|
),
|
||||||
|
)
|
||||||
|
new_data_by_key[pks] = entry
|
||||||
|
|
||||||
|
api_path = compose_api_path(api, path)
|
||||||
|
|
||||||
|
old_data = list(api_path)
|
||||||
|
old_data = remove_dynamic(old_data)
|
||||||
|
old_data_by_key = OrderedDict()
|
||||||
|
id_by_key = {}
|
||||||
|
for entry in old_data:
|
||||||
|
pks = tuple(entry[primary_key] for primary_key in primary_keys)
|
||||||
|
old_data_by_key[pks] = entry
|
||||||
|
id_by_key[pks] = entry['.id']
|
||||||
|
new_data = []
|
||||||
|
|
||||||
|
create_list = []
|
||||||
|
modify_list = []
|
||||||
|
remove_list = []
|
||||||
|
remove_keys = []
|
||||||
|
handle_absent_entries = module.params['handle_absent_entries']
|
||||||
|
for key, old_entry in old_data_by_key.items():
|
||||||
|
new_entry = new_data_by_key.pop(key, None)
|
||||||
|
if new_entry is None:
|
||||||
|
if handle_absent_entries == 'remove':
|
||||||
|
remove_list.append(old_entry['.id'])
|
||||||
|
remove_keys.append(key)
|
||||||
|
else:
|
||||||
|
new_data.append(old_entry)
|
||||||
|
else:
|
||||||
|
modifications, updated_entry = find_modifications(
|
||||||
|
old_entry, new_entry, path_info, module,
|
||||||
|
' for {values}'.format(
|
||||||
|
values=', '.join([
|
||||||
|
'{primary_key}="{value}"'.format(primary_key=primary_key, value=value)
|
||||||
|
for primary_key, value in zip(primary_keys, key)
|
||||||
|
])
|
||||||
|
)
|
||||||
|
)
|
||||||
|
new_data.append(updated_entry)
|
||||||
|
# Add to modification list if there are changes
|
||||||
|
if modifications:
|
||||||
|
modifications['.id'] = old_entry['.id']
|
||||||
|
modify_list.append((key, modifications))
|
||||||
|
for new_entry in new_data_by_key.values():
|
||||||
|
if path_info.fixed_entries:
|
||||||
|
module.fail_json(msg='Cannot add new entry {values} to this path'.format(
|
||||||
|
values=', '.join([
|
||||||
|
'{primary_key}="{value}"'.format(primary_key=primary_key, value=new_entry[primary_key])
|
||||||
|
for primary_key in primary_keys
|
||||||
|
]),
|
||||||
|
))
|
||||||
|
create_list.append(new_entry)
|
||||||
|
new_entry = new_entry.copy()
|
||||||
|
for key in list(new_entry):
|
||||||
|
if key.startswith('!'):
|
||||||
|
new_entry.pop(key)
|
||||||
|
new_data.append(new_entry)
|
||||||
|
|
||||||
|
reorder_list = []
|
||||||
|
if module.params['ensure_order']:
|
||||||
|
index_by_key = dict()
|
||||||
|
for index, entry in enumerate(new_data):
|
||||||
|
index_by_key[tuple(entry[primary_key] for primary_key in primary_keys)] = index
|
||||||
|
for index, source_entry in enumerate(data):
|
||||||
|
source_pks = tuple(source_entry[primary_key] for primary_key in primary_keys)
|
||||||
|
source_index = index_by_key.pop(source_pks)
|
||||||
|
if index == source_index:
|
||||||
|
continue
|
||||||
|
entry = new_data[index]
|
||||||
|
pks = tuple(entry[primary_key] for primary_key in primary_keys)
|
||||||
|
reorder_list.append((source_pks, index, pks))
|
||||||
|
for k, v in index_by_key.items():
|
||||||
|
if v >= index and v < source_index:
|
||||||
|
index_by_key[k] = v + 1
|
||||||
|
new_data.insert(index, new_data.pop(source_index))
|
||||||
|
|
||||||
|
if not module.check_mode:
|
||||||
|
if remove_list:
|
||||||
|
try:
|
||||||
|
api_path.remove(*remove_list)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while removing {remove_list}: {error}'.format(
|
||||||
|
remove_list=', '.join([
|
||||||
|
'{identifier} (ID {id})'.format(identifier=format_pk(primary_keys, key), id=id)
|
||||||
|
for id, key in zip(remove_list, remove_keys)
|
||||||
|
]),
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for key, modifications in modify_list:
|
||||||
|
try:
|
||||||
|
api_path.update(**modifications)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while modifying for {identifier} (ID {id}): {error}'.format(
|
||||||
|
identifier=format_pk(primary_keys, key),
|
||||||
|
id=modifications['.id'],
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for entry in create_list:
|
||||||
|
try:
|
||||||
|
entry['.id'] = api_path.add(**entry)
|
||||||
|
# Store ID for primary keys
|
||||||
|
pks = tuple(entry[primary_key] for primary_key in primary_keys)
|
||||||
|
id_by_key[pks] = entry['.id']
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while creating entry for {identifier}: {error}'.format(
|
||||||
|
identifier=format_pk(primary_keys, [entry[pk] for pk in primary_keys]),
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for element_pks, new_index, new_pks in reorder_list:
|
||||||
|
try:
|
||||||
|
element_id = id_by_key[element_pks]
|
||||||
|
new_id = id_by_key[new_pks]
|
||||||
|
for res in api_path('move', numbers=element_id, destination=new_id):
|
||||||
|
pass
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Error while moving entry ID {element_id} to position of ID {new_id}: {error}'.format(
|
||||||
|
element_id=element_id,
|
||||||
|
new_id=new_id,
|
||||||
|
error=to_native(e),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# For sake of completeness, retrieve the full new data:
|
||||||
|
if modify_list or create_list or reorder_list:
|
||||||
|
new_data = remove_dynamic(list(api_path))
|
||||||
|
|
||||||
|
# Remove 'irrelevant' data
|
||||||
|
for entry in old_data:
|
||||||
|
remove_irrelevant_data(entry, path_info)
|
||||||
|
for entry in new_data:
|
||||||
|
remove_irrelevant_data(entry, path_info)
|
||||||
|
|
||||||
|
# Produce return value
|
||||||
|
more = {}
|
||||||
|
if module._diff:
|
||||||
|
more['diff'] = {
|
||||||
|
'before': {
|
||||||
|
'data': old_data,
|
||||||
|
},
|
||||||
|
'after': {
|
||||||
|
'data': new_data,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
module.exit_json(
|
||||||
|
changed=bool(create_list or modify_list or remove_list or reorder_list),
|
||||||
|
old_data=old_data,
|
||||||
|
new_data=new_data,
|
||||||
|
**more
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def sync_single_value(module, api, path, path_info):
|
||||||
|
data = module.params['data']
|
||||||
|
if len(data) != 1:
|
||||||
|
module.fail_json(msg='Data must be a list with exactly one element.')
|
||||||
|
new_entry = data[0]
|
||||||
|
polish_entry(new_entry, path_info, module, '')
|
||||||
|
|
||||||
|
api_path = compose_api_path(api, path)
|
||||||
|
|
||||||
|
old_data = list(api_path)
|
||||||
|
if len(old_data) != 1:
|
||||||
|
module.fail_json(
|
||||||
|
msg='Internal error: retrieving /{path} resulted in {count} elements. Expected exactly 1.'.format(
|
||||||
|
path=join_path(path),
|
||||||
|
count=len(old_data)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
old_entry = old_data[0]
|
||||||
|
|
||||||
|
# Determine modifications
|
||||||
|
modifications, updated_entry = find_modifications(old_entry, new_entry, path_info, module, '')
|
||||||
|
# Do modifications
|
||||||
|
if modifications:
|
||||||
|
if not module.check_mode:
|
||||||
|
# Actually do modification
|
||||||
|
try:
|
||||||
|
api_path.update(**modifications)
|
||||||
|
except (LibRouterosError, UnicodeEncodeError) as e:
|
||||||
|
module.fail_json(msg='Error while modifying: {error}'.format(error=to_native(e)))
|
||||||
|
# Retrieve latest version
|
||||||
|
new_data = list(api_path)
|
||||||
|
if len(new_data) == 1:
|
||||||
|
updated_entry = new_data[0]
|
||||||
|
|
||||||
|
# Remove 'irrelevant' data
|
||||||
|
remove_irrelevant_data(old_entry, path_info)
|
||||||
|
remove_irrelevant_data(updated_entry, path_info)
|
||||||
|
|
||||||
|
# Produce return value
|
||||||
|
more = {}
|
||||||
|
if module._diff:
|
||||||
|
more['diff'] = {
|
||||||
|
'before': old_entry,
|
||||||
|
'after': updated_entry,
|
||||||
|
}
|
||||||
|
module.exit_json(
|
||||||
|
changed=bool(modifications),
|
||||||
|
old_data=[old_entry],
|
||||||
|
new_data=[updated_entry],
|
||||||
|
**more
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_backend(path_info):
|
||||||
|
if path_info is None:
|
||||||
|
return None
|
||||||
|
if not path_info.fully_understood:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if path_info.primary_keys:
|
||||||
|
return sync_with_primary_keys
|
||||||
|
|
||||||
|
if path_info.single_value:
|
||||||
|
return sync_single_value
|
||||||
|
|
||||||
|
if not path_info.has_identifier:
|
||||||
|
return sync_list
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
path_choices = sorted([join_path(path) for path, path_info in PATHS.items() if get_backend(path_info) is not None])
|
||||||
|
module_args = dict(
|
||||||
|
path=dict(type='str', required=True, choices=path_choices),
|
||||||
|
data=dict(type='list', elements='dict', required=True),
|
||||||
|
handle_absent_entries=dict(type='str', choices=['ignore', 'remove'], default='ignore'),
|
||||||
|
handle_entries_content=dict(type='str', choices=['ignore', 'remove', 'remove_as_much_as_possible'], default='ignore'),
|
||||||
|
ensure_order=dict(type='bool', default=False),
|
||||||
|
)
|
||||||
|
module_args.update(api_argument_spec())
|
||||||
|
|
||||||
|
module = AnsibleModule(
|
||||||
|
argument_spec=module_args,
|
||||||
|
supports_check_mode=True,
|
||||||
|
)
|
||||||
|
if module.params['ensure_order'] and module.params['handle_absent_entries'] == 'ignore':
|
||||||
|
module.fail_json(msg='ensure_order=true requires handle_absent_entries=remove')
|
||||||
|
|
||||||
|
if not HAS_ORDEREDDICT:
|
||||||
|
# This should never happen for Python 2.7+
|
||||||
|
module.fail_json(msg=missing_required_lib('ordereddict'))
|
||||||
|
|
||||||
|
check_has_library(module)
|
||||||
|
api = create_api(module)
|
||||||
|
|
||||||
|
path = split_path(module.params['path'])
|
||||||
|
path_info = PATHS.get(tuple(path))
|
||||||
|
backend = get_backend(path_info)
|
||||||
|
if path_info is None or backend is None:
|
||||||
|
module.fail_json(msg='Path /{path} is not yet supported'.format(path='/'.join(path)))
|
||||||
|
|
||||||
|
backend(module, api, path, path_info)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
8
tests/sanity/extra/update-docs.json
Normal file
8
tests/sanity/extra/update-docs.json
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
{
|
||||||
|
"include_symlinks": false,
|
||||||
|
"prefixes": [
|
||||||
|
"docs/docsite/rst/api-guide.rst",
|
||||||
|
"plugins/modules/",
|
||||||
|
],
|
||||||
|
"output": "path-line-column-message",
|
||||||
|
}
|
22
tests/sanity/extra/update-docs.py
Normal file
22
tests/sanity/extra/update-docs.py
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
"""Check whether update-docs.py modifies something."""
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""Main entry point."""
|
||||||
|
p = subprocess.run(['./update-docs.py'], check=False)
|
||||||
|
if p.returncode not in (0, 1):
|
||||||
|
print('{0}:0:0: unexpected return code {1}'.format(sys.argv[0], p.returncode))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
6  tests/sanity/ignore-2.10.txt  Normal file
@@ -0,0 +1,6 @@
update-docs.py compile-2.6
update-docs.py compile-2.7
update-docs.py compile-3.5
update-docs.py future-import-boilerplate
update-docs.py metaclass-boilerplate
update-docs.py shebang

6  tests/sanity/ignore-2.11.txt  Normal file
@@ -0,0 +1,6 @@
update-docs.py compile-2.6
update-docs.py compile-2.7
update-docs.py compile-3.5
update-docs.py future-import-boilerplate
update-docs.py metaclass-boilerplate
update-docs.py shebang

1  tests/sanity/ignore-2.12.txt  Normal file
@@ -0,0 +1 @@
update-docs.py shebang

1  tests/sanity/ignore-2.13.txt  Normal file
@@ -0,0 +1 @@
update-docs.py shebang

1  tests/sanity/ignore-2.14.txt  Normal file
@@ -0,0 +1 @@
update-docs.py shebang

6  tests/sanity/ignore-2.9.txt  Normal file
@@ -0,0 +1,6 @@
update-docs.py compile-2.6
update-docs.py compile-2.7
update-docs.py compile-3.5
update-docs.py future-import-boilerplate
update-docs.py metaclass-boilerplate
update-docs.py shebang
@@ -130,8 +130,10 @@ def _normalize_entry(entry, path_info):
            del entry['!%s' % key]


-def massage_expected_result_data(values, path, keep_all=False):
+def massage_expected_result_data(values, path, keep_all=False, remove_dynamic=False):
     path_info = PATHS[path]
+    if remove_dynamic:
+        values = [entry for entry in values if not entry.get('dynamic', False)]
     values = [entry.copy() for entry in values]
     for entry in values:
         _normalize_entry(entry, path_info)
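The new remove_dynamic parameter above drops dynamically created entries before the remaining entries are copied and normalized. A minimal usage sketch, assuming the helper lives in tests/unit/plugins/modules/fake_api.py (the file header for this hunk is outside this part of the diff) and using made-up sample entries:

# Sketch only: the import location, the sample entries, and the chosen path
# ('ip', 'firewall', 'filter') are assumptions for illustration, not part of the commit.
from ansible_collections.community.routeros.tests.unit.plugins.modules.fake_api import massage_expected_result_data

entries = [
    {'chain': 'input', 'in-interface-list': 'LAN', '.id': '*1', 'dynamic': False},
    {'chain': 'forward', 'action': 'drop', '.id': '*2', 'dynamic': True},
]

# With remove_dynamic=True the dynamically created second entry is filtered out
# before normalization; only static entries remain in the expected data.
expected = massage_expected_result_data(entries, ('ip', 'firewall', 'filter'), remove_dynamic=True)
assert all(not entry.get('dynamic', False) for entry in expected)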
408  tests/unit/plugins/modules/test_api_info.py  Normal file
@@ -0,0 +1,408 @@
# Copyright (c) 2022, Felix Fontein (@felixfontein) <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import pytest

from ansible_collections.community.routeros.tests.unit.compat.mock import patch, MagicMock
from ansible_collections.community.routeros.tests.unit.plugins.modules.fake_api import FakeLibRouterosError, Key, fake_ros_api
from ansible_collections.community.routeros.tests.unit.plugins.modules.utils import set_module_args, AnsibleExitJson, AnsibleFailJson, ModuleTestCase
from ansible_collections.community.routeros.plugins.modules import api_info


class TestRouterosApiInfoModule(ModuleTestCase):

    def setUp(self):
        super(TestRouterosApiInfoModule, self).setUp()
        self.module = api_info
        self.module.LibRouterosError = FakeLibRouterosError
        self.module.connect = MagicMock(new=fake_ros_api)
        self.module.check_has_library = MagicMock()
        self.patch_create_api = patch('ansible_collections.community.routeros.plugins.modules.api_info.create_api', MagicMock(new=fake_ros_api))
        self.patch_create_api.start()
        self.module.Key = MagicMock(new=Key)
        self.config_module_args = {
            'username': 'admin',
            'password': 'pаss',
            'hostname': '127.0.0.1',
        }

    def tearDown(self):
        self.patch_create_api.stop()

    def test_module_fail_when_required_args_missing(self):
        with self.assertRaises(AnsibleFailJson) as exc:
            set_module_args({})
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['failed'], True)

    def test_invalid_path(self):
        with self.assertRaises(AnsibleFailJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'something invalid'
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['failed'], True)
        self.assertEqual(result['msg'].startswith('value of path must be one of: '), True)

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_empty_result(self, mock_compose_api_path):
        mock_compose_api_path.return_value = []
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'ip dns static'
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_regular_result(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'called-format': 'mac:ssid',
                'interim-update': 'enabled',
                'mac-caching': 'disabled',
                'mac-format': 'XX:XX:XX:XX:XX:XX',
                'mac-mode': 'as-username',
                'foo': 'bar',
                '.id': '*1',
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'caps-man aaa',
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [{
            'interim-update': 'enabled',
            '.id': '*1',
        }])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_result_with_defaults(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'called-format': 'mac:ssid',
                'interim-update': 'enabled',
                'mac-caching': 'disabled',
                'mac-format': 'XX:XX:XX:XX:XX:XX',
                'mac-mode': 'as-username',
                'foo': 'bar',
                '.id': '*1',
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'caps-man aaa',
                'hide_defaults': False,
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [{
            'called-format': 'mac:ssid',
            'interim-update': 'enabled',
            'mac-caching': 'disabled',
            'mac-format': 'XX:XX:XX:XX:XX:XX',
            'mac-mode': 'as-username',
            '.id': '*1',
        }])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_full_result(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'called-format': 'mac:ssid',
                'interim-update': 'enabled',
                'mac-caching': 'disabled',
                'mac-format': 'XX:XX:XX:XX:XX:XX',
                'mac-mode': 'as-username',
                'foo': 'bar',
                '.id': '*1',
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'caps-man aaa',
                'unfiltered': True,
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [{
            'interim-update': 'enabled',
            'foo': 'bar',
            '.id': '*1',
        }])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_disabled_exclamation(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'chain': 'input',
                'in-interface-list': 'LAN',
                '.id': '*1',
                'dynamic': False,
            },
            {
                'chain': 'forward',
                'action': 'drop',
                'in-interface': 'sfp1',
                '.id': '*2',
                'dynamic': True,
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'ip firewall filter',
                'handle_disabled': 'exclamation',
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [{
            'chain': 'input',
            'in-interface-list': 'LAN',
            '!action': None,
            '!comment': None,
            '!connection-bytes': None,
            '!connection-limit': None,
            '!connection-mark': None,
            '!connection-nat-state': None,
            '!connection-rate': None,
            '!connection-state': None,
            '!connection-type': None,
            '!content': None,
            '!disabled': None,
            '!dscp': None,
            '!dst-address': None,
            '!dst-address-list': None,
            '!dst-address-type': None,
            '!dst-limit': None,
            '!dst-port': None,
            '!fragment': None,
            '!hotspot': None,
            '!icmp-options': None,
            '!in-bridge-port': None,
            '!in-bridge-port-list': None,
            '!in-interface': None,
            '!ingress-priority': None,
            '!ipsec-policy': None,
            '!ipv4-options': None,
            '!layer7-protocol': None,
            '!limit': None,
            '!log': None,
            '!log-prefix': None,
            '!nth': None,
            '!out-bridge-port': None,
            '!out-bridge-port-list': None,
            '!out-interface': None,
            '!out-interface-list': None,
            '!p2p': None,
            '!packet-mark': None,
            '!packet-size': None,
            '!per-connection-classifier': None,
            '!port': None,
            '!priority': None,
            '!protocol': None,
            '!psd': None,
            '!random': None,
            '!routing-mark': None,
            '!routing-table': None,
            '!src-address': None,
            '!src-address-list': None,
            '!src-address-type': None,
            '!src-mac-address': None,
            '!src-port': None,
            '!tcp-flags': None,
            '!tcp-mss': None,
            '!time': None,
            '!tls-host': None,
            '!ttl': None,
            '.id': '*1',
        }])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_disabled_null_value(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'chain': 'input',
                'in-interface-list': 'LAN',
                '.id': '*1',
                'dynamic': False,
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'ip firewall filter',
                'handle_disabled': 'null-value',
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [{
            'chain': 'input',
            'in-interface-list': 'LAN',
            'action': None,
            'comment': None,
            'connection-bytes': None,
            'connection-limit': None,
            'connection-mark': None,
            'connection-nat-state': None,
            'connection-rate': None,
            'connection-state': None,
            'connection-type': None,
            'content': None,
            'disabled': None,
            'dscp': None,
            'dst-address': None,
            'dst-address-list': None,
            'dst-address-type': None,
            'dst-limit': None,
            'dst-port': None,
            'fragment': None,
            'hotspot': None,
            'icmp-options': None,
            'in-bridge-port': None,
            'in-bridge-port-list': None,
            'in-interface': None,
            'ingress-priority': None,
            'ipsec-policy': None,
            'ipv4-options': None,
            'layer7-protocol': None,
            'limit': None,
            'log': None,
            'log-prefix': None,
            'nth': None,
            'out-bridge-port': None,
            'out-bridge-port-list': None,
            'out-interface': None,
            'out-interface-list': None,
            'p2p': None,
            'packet-mark': None,
            'packet-size': None,
            'per-connection-classifier': None,
            'port': None,
            'priority': None,
            'protocol': None,
            'psd': None,
            'random': None,
            'routing-mark': None,
            'routing-table': None,
            'src-address': None,
            'src-address-list': None,
            'src-address-type': None,
            'src-mac-address': None,
            'src-port': None,
            'tcp-flags': None,
            'tcp-mss': None,
            'time': None,
            'tls-host': None,
            'ttl': None,
            '.id': '*1',
        }])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_disabled_omit(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'chain': 'input',
                'in-interface-list': 'LAN',
                '.id': '*1',
                'dynamic': False,
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'ip firewall filter',
                'handle_disabled': 'omit',
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [{
            'chain': 'input',
            'in-interface-list': 'LAN',
            '.id': '*1',
        }])

    @patch('ansible_collections.community.routeros.plugins.modules.api_info.compose_api_path')
    def test_dynamic(self, mock_compose_api_path):
        mock_compose_api_path.return_value = [
            {
                'chain': 'input',
                'in-interface-list': 'LAN',
                'dynamic': False,
                '.id': '*1',
            },
            {
                'chain': 'forward',
                'action': 'drop',
                'in-interface': 'sfp1',
                '.id': '*2',
                'dynamic': True,
            },
        ]
        with self.assertRaises(AnsibleExitJson) as exc:
            args = self.config_module_args.copy()
            args.update({
                'path': 'ip firewall filter',
                'handle_disabled': 'omit',
                'include_dynamic': True,
            })
            set_module_args(args)
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], False)
        self.assertEqual(result['result'], [
            {
                'chain': 'input',
                'in-interface-list': 'LAN',
                '.id': '*1',
                'dynamic': False,
            },
            {
                'chain': 'forward',
                'action': 'drop',
                'in-interface': 'sfp1',
                '.id': '*2',
                'dynamic': True,
            },
        ])
1540  tests/unit/plugins/modules/test_api_modify.py  Normal file
File diff suppressed because it is too large
46  update-docs.py  Executable file
@@ -0,0 +1,46 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Copyright (c) 2022, Felix Fontein (@felixfontein) <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

'''
Updates DOCUMENTATION of modules using module_utils._api_data with the correct list of supported paths.
'''

import os

from plugins.module_utils._api_data import (
    PATHS,
    join_path,
)


MODULES = [
    'plugins/modules/api_info.py',
    'plugins/modules/api_modify.py',
]


def update_file(file, begin_line, end_line, choice_line, path_choices):
    with open(file, 'r', encoding='utf-8') as f:
        lines = f.read().splitlines()
    begin_index = lines.index(begin_line)
    end_index = lines.index(end_line, begin_index + 1)
    new_lines = lines[:begin_index + 1] + [choice_line.format(choice=choice) for choice in path_choices] + lines[end_index:]
    if lines != new_lines:
        print(f'{file} has been updated')
        with open(file, 'w', encoding='utf-8') as f:
            f.write('\n'.join(new_lines) + '\n')


def main():
    path_choices = sorted([join_path(path) for path, path_info in PATHS.items() if path_info.fully_understood])

    for file in MODULES:
        update_file(file, ' # BEGIN PATH LIST', ' # END PATH LIST', ' - {choice}', path_choices)


if __name__ == '__main__':
    main()
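The markers passed to update_file() above imply a generated region inside each module's DOCUMENTATION string. A rough sketch of what that region might look like; only the BEGIN/END PATH LIST comments and the per-path list lines follow from the call, while the indentation, the surrounding option layout, and the concrete paths shown are illustrative:

# Sketch only: the exact layout of the path option in api_info.py / api_modify.py
# is assumed; the listed paths are examples of supported paths seen in the tests above.
DOCUMENTATION = '''
options:
  path:
    required: true
    type: str
    choices:
        # BEGIN PATH LIST
        - caps-man aaa
        - ip dns static
        - ip firewall filter
        # END PATH LIST
'''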