mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-04-07 20:47:16 +00:00
Initial commit
This commit is contained in:
commit
aebc1b03fd
4861 changed files with 812621 additions and 0 deletions
443
plugins/modules/web_infrastructure/apache2_mod_proxy.py
Normal file
443
plugins/modules/web_infrastructure/apache2_mod_proxy.py
Normal file
|
|
@ -0,0 +1,443 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2016, Olivier Boukili <boukili.olivier@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: apache2_mod_proxy
|
||||
author: Olivier Boukili (@oboukili)
|
||||
short_description: Set and/or get members' attributes of an Apache httpd 2.4 mod_proxy balancer pool
|
||||
description:
|
||||
- Set and/or get members' attributes of an Apache httpd 2.4 mod_proxy balancer
|
||||
pool, using HTTP POST and GET requests. The httpd mod_proxy balancer-member
|
||||
status page has to be enabled and accessible, as this module relies on parsing
|
||||
this page. This module supports ansible check_mode, and requires BeautifulSoup
|
||||
python module.
|
||||
options:
|
||||
balancer_url_suffix:
|
||||
description:
|
||||
- Suffix of the balancer pool url required to access the balancer pool
|
||||
status page (e.g. balancer_vhost[:port]/balancer_url_suffix).
|
||||
default: /balancer-manager/
|
||||
balancer_vhost:
|
||||
description:
|
||||
- (ipv4|ipv6|fqdn):port of the Apache httpd 2.4 mod_proxy balancer pool.
|
||||
required: true
|
||||
member_host:
|
||||
description:
|
||||
- (ipv4|ipv6|fqdn) of the balancer member to get or to set attributes to.
|
||||
Port number is autodetected and should not be specified here.
|
||||
If undefined, apache2_mod_proxy module will return a members list of
|
||||
dictionaries of all the current balancer pool members' attributes.
|
||||
state:
|
||||
description:
|
||||
- Desired state of the member host.
|
||||
(absent|disabled),drained,hot_standby,ignore_errors can be
|
||||
simultaneously invoked by separating them with a comma (e.g. state=drained,ignore_errors).
|
||||
choices: ["present", "absent", "enabled", "disabled", "drained", "hot_standby", "ignore_errors"]
|
||||
tls:
|
||||
description:
|
||||
- Use https to access balancer management page.
|
||||
type: bool
|
||||
default: 'no'
|
||||
validate_certs:
|
||||
description:
|
||||
- Validate ssl/tls certificates.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Get all current balancer pool members' attributes:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: 10.0.0.2
|
||||
|
||||
# Get a specific member's attributes:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: myws.mydomain.org
|
||||
balancer_suffix: /lb/
|
||||
member_host: node1.myws.mydomain.org
|
||||
|
||||
# Enable all balancer pool members:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ myloadbalancer_host }}'
|
||||
register: result
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ myloadbalancer_host }}'
|
||||
member_host: '{{ item.host }}'
|
||||
state: present
|
||||
with_items: '{{ result.members }}'
|
||||
|
||||
# Gracefully disable a member from a loadbalancer node:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ vhost_host }}'
|
||||
member_host: '{{ member.host }}'
|
||||
state: drained
|
||||
delegate_to: myloadbalancernode
|
||||
- wait_for:
|
||||
host: '{{ member.host }}'
|
||||
port: '{{ member.port }}'
|
||||
state: drained
|
||||
delegate_to: myloadbalancernode
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ vhost_host }}'
|
||||
member_host: '{{ member.host }}'
|
||||
state: absent
|
||||
delegate_to: myloadbalancernode
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
member:
|
||||
description: specific balancer member information dictionary, returned when apache2_mod_proxy module is invoked with member_host parameter.
|
||||
type: dict
|
||||
returned: success
|
||||
sample:
|
||||
{"attributes":
|
||||
{"Busy": "0",
|
||||
"Elected": "42",
|
||||
"Factor": "1",
|
||||
"From": "136K",
|
||||
"Load": "0",
|
||||
"Route": null,
|
||||
"RouteRedir": null,
|
||||
"Set": "0",
|
||||
"Status": "Init Ok ",
|
||||
"To": " 47K",
|
||||
"Worker URL": null
|
||||
},
|
||||
"balancer_url": "http://10.10.0.2/balancer-manager/",
|
||||
"host": "10.10.0.20",
|
||||
"management_url": "http://10.10.0.2/lb/?b=mywsbalancer&w=http://10.10.0.20:8080/ws&nonce=8925436c-79c6-4841-8936-e7d13b79239b",
|
||||
"path": "/ws",
|
||||
"port": 8080,
|
||||
"protocol": "http",
|
||||
"status": {
|
||||
"disabled": false,
|
||||
"drained": false,
|
||||
"hot_standby": false,
|
||||
"ignore_errors": false
|
||||
}
|
||||
}
|
||||
members:
|
||||
description: list of member (defined above) dictionaries, returned when apache2_mod_proxy is invoked with no member_host and state args.
|
||||
returned: success
|
||||
type: list
|
||||
sample:
|
||||
[{"attributes": {
|
||||
"Busy": "0",
|
||||
"Elected": "42",
|
||||
"Factor": "1",
|
||||
"From": "136K",
|
||||
"Load": "0",
|
||||
"Route": null,
|
||||
"RouteRedir": null,
|
||||
"Set": "0",
|
||||
"Status": "Init Ok ",
|
||||
"To": " 47K",
|
||||
"Worker URL": null
|
||||
},
|
||||
"balancer_url": "http://10.10.0.2/balancer-manager/",
|
||||
"host": "10.10.0.20",
|
||||
"management_url": "http://10.10.0.2/lb/?b=mywsbalancer&w=http://10.10.0.20:8080/ws&nonce=8925436c-79c6-4841-8936-e7d13b79239b",
|
||||
"path": "/ws",
|
||||
"port": 8080,
|
||||
"protocol": "http",
|
||||
"status": {
|
||||
"disabled": false,
|
||||
"drained": false,
|
||||
"hot_standby": false,
|
||||
"ignore_errors": false
|
||||
}
|
||||
},
|
||||
{"attributes": {
|
||||
"Busy": "0",
|
||||
"Elected": "42",
|
||||
"Factor": "1",
|
||||
"From": "136K",
|
||||
"Load": "0",
|
||||
"Route": null,
|
||||
"RouteRedir": null,
|
||||
"Set": "0",
|
||||
"Status": "Init Ok ",
|
||||
"To": " 47K",
|
||||
"Worker URL": null
|
||||
},
|
||||
"balancer_url": "http://10.10.0.2/balancer-manager/",
|
||||
"host": "10.10.0.21",
|
||||
"management_url": "http://10.10.0.2/lb/?b=mywsbalancer&w=http://10.10.0.21:8080/ws&nonce=8925436c-79c6-4841-8936-e7d13b79239b",
|
||||
"path": "/ws",
|
||||
"port": 8080,
|
||||
"protocol": "http",
|
||||
"status": {
|
||||
"disabled": false,
|
||||
"drained": false,
|
||||
"hot_standby": false,
|
||||
"ignore_errors": false}
|
||||
}
|
||||
]
|
||||
'''
|
||||
|
||||
import re
|
||||
import traceback
|
||||
|
||||
BEAUTIFUL_SOUP_IMP_ERR = None
|
||||
try:
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
except ImportError:
|
||||
BEAUTIFUL_SOUP_IMP_ERR = traceback.format_exc()
|
||||
HAS_BEAUTIFULSOUP = False
|
||||
else:
|
||||
HAS_BEAUTIFULSOUP = True
|
||||
|
||||
# balancer member attributes extraction regexp:
|
||||
EXPRESSION = r"(b=([\w\.\-]+)&w=(https?|ajp|wss?|ftp|[sf]cgi)://([\w\.\-]+):?(\d*)([/\w\.\-]*)&?[\w\-\=]*)"
|
||||
# Apache2 server version extraction regexp:
|
||||
APACHE_VERSION_EXPRESSION = r"SERVER VERSION: APACHE/([\d.]+)"
|
||||
|
||||
|
||||
def regexp_extraction(string, _regexp, groups=1):
    """Apply *_regexp* to *string* and return one capture group.

    Returns capture group number *groups* (default: 1) as a string, or
    None when the pattern does not match or the captured text is empty.
    """
    match = re.search(pattern=str(_regexp), string=str(string))
    if match is None:
        return None
    captured = match.group(groups)
    if captured == '':
        return None
    return str(captured)
|
||||
|
||||
|
||||
class BalancerMember(object):
    """ Apache 2.4 mod_proxy LB balancer member.
    attributes:
        read-only:
            host -> member host (string),
            management_url -> member management url (string),
            protocol -> member protocol (string)
            port -> member port (string),
            path -> member location (string),
            balancer_url -> url of this member's parent balancer (string),
            attributes -> whole member attributes (dictionary)
            module -> ansible module instance (AnsibleModule object).
        writable:
            status -> status of the member (dictionary)
    """

    def __init__(self, management_url, balancer_url, module):
        # Every member property is parsed out of the management url via the
        # capture groups of EXPRESSION (protocol=3, host=4, port=5, path=6).
        self.host = regexp_extraction(management_url, str(EXPRESSION), 4)
        self.management_url = str(management_url)
        self.protocol = regexp_extraction(management_url, EXPRESSION, 3)
        self.port = regexp_extraction(management_url, EXPRESSION, 5)
        self.path = regexp_extraction(management_url, EXPRESSION, 6)
        self.balancer_url = str(balancer_url)
        self.module = module

    def get_member_attributes(self):
        """ Returns a dictionary of a balancer member's attributes."""

        balancer_member_page = fetch_url(self.module, self.management_url)

        if balancer_member_page[1]['status'] != 200:
            # FIX: the response info is a dict, not a string; it must be
            # stringified or this error path raises a TypeError of its own.
            self.module.fail_json(msg="Could not get balancer_member_page, check for connectivity! " + str(balancer_member_page[1]))
        else:
            try:
                soup = BeautifulSoup(balancer_member_page[0])
            except TypeError:
                # FIX: 'soup' is never bound when BeautifulSoup() raises, so
                # referencing it here was a NameError; report the raw page.
                self.module.fail_json(msg="Cannot parse balancer_member_page HTML! " + str(balancer_member_page[0]))
            else:
                # The second table on the member page holds the attributes:
                # first row is the header (keys), the row matching this
                # member's host holds the values.
                subsoup = soup.findAll('table')[1].findAll('tr')
                keys = subsoup[0].findAll('th')
                for valuesset in subsoup[1::1]:
                    if re.search(pattern=self.host, string=str(valuesset)):
                        values = valuesset.findAll('td')
                        return dict((keys[x].string, values[x].string) for x in range(0, len(keys)))

    def get_member_status(self):
        """ Returns a dictionary of a balancer member's status attributes."""
        # Apache abbreviations as they appear in the member 'Status' field.
        status_mapping = {'disabled': 'Dis',
                          'drained': 'Drn',
                          'hot_standby': 'Stby',
                          'ignore_errors': 'Ign'}
        status = {}
        actual_status = str(self.attributes['Status'])
        for mode in status_mapping.keys():
            if re.search(pattern=status_mapping[mode], string=actual_status):
                status[mode] = True
            else:
                status[mode] = False
        return status

    def set_member_status(self, values):
        """ Sets a balancer member's status attributes amongst pre-mapped values."""
        # POST body flags understood by the balancer-manager form handler.
        values_mapping = {'disabled': '&w_status_D',
                          'drained': '&w_status_N',
                          'hot_standby': '&w_status_H',
                          'ignore_errors': '&w_status_I'}

        request_body = regexp_extraction(self.management_url, EXPRESSION, 1)
        for k in values_mapping.keys():
            if values[str(k)]:
                request_body = request_body + str(values_mapping[k]) + '=1'
            else:
                request_body = request_body + str(values_mapping[k]) + '=0'

        response = fetch_url(self.module, self.management_url, data=str(request_body))
        if response[1]['status'] != 200:
            # FIX: the HTTP status is an int; stringify before concatenating
            # or this fail_json call itself raises a TypeError.
            self.module.fail_json(msg="Could not set the member status! " + self.host + " " + str(response[1]['status']))

    attributes = property(get_member_attributes)
    status = property(get_member_status, set_member_status)
|
||||
|
||||
|
||||
class Balancer(object):
    """ Apache httpd 2.4 mod_proxy balancer object.

    Fetches and parses the balancer-manager status page; exposes the pool
    members through the (read-only, generator-backed) 'members' property.
    """

    def __init__(self, host, suffix, module, members=None, tls=False):
        # Build both the base url (scheme://host) and the full management
        # page url (scheme://host + suffix) up front.
        if tls:
            self.base_url = str(str('https://') + str(host))
            self.url = str(str('https://') + str(host) + str(suffix))
        else:
            self.base_url = str(str('http://') + str(host))
            self.url = str(str('http://') + str(host) + str(suffix))
        self.module = module
        # Fetch eagerly: this validates connectivity and the Apache version
        # (and fails the module run) before any member operation happens.
        self.page = self.fetch_balancer_page()
        if members is None:
            self._members = []

    def fetch_balancer_page(self):
        """ Returns the balancer management html page as a string for later parsing."""
        page = fetch_url(self.module, str(self.url))
        if page[1]['status'] != 200:
            self.module.fail_json(msg="Could not get balancer page! HTTP status response: " + str(page[1]['status']))
        else:
            content = page[0].read()
            # The page is only trusted when it reports an Apache 2.4.x
            # server, since the balancer-manager markup differs per version.
            apache_version = regexp_extraction(content.upper(), APACHE_VERSION_EXPRESSION, 1)
            if apache_version:
                if not re.search(pattern=r"2\.4\.[\d]*", string=apache_version):
                    self.module.fail_json(msg="This module only acts on an Apache2 2.4+ instance, current Apache2 version: " + str(apache_version))
                return content
            else:
                self.module.fail_json(msg="Could not get the Apache server version from the balancer-manager")

    def get_balancer_members(self):
        """ Returns members of the balancer as a generator object for later iteration."""
        try:
            soup = BeautifulSoup(self.page)
        except TypeError:
            self.module.fail_json(msg="Cannot parse balancer page HTML! " + str(self.page))
        else:
            # Skip the first anchor (the balancer itself); every following
            # anchor's href is a member management url suffix.
            for element in soup.findAll('a')[1::1]:
                balancer_member_suffix = str(element.get('href'))
                if not balancer_member_suffix:
                    self.module.fail_json(msg="Argument 'balancer_member_suffix' is empty!")
                else:
                    yield BalancerMember(str(self.base_url + balancer_member_suffix), str(self.url), self.module)

    members = property(get_balancer_members)
|
||||
|
||||
|
||||
def main():
    """ Initiates module."""
    module = AnsibleModule(
        argument_spec=dict(
            # FIX: 'required=True' must not be combined with a default;
            # AnsibleModule flags that spec combination as an error.
            balancer_vhost=dict(required=True, type='str'),
            balancer_url_suffix=dict(default="/balancer-manager/", type='str'),
            member_host=dict(type='str'),
            state=dict(type='str'),
            tls=dict(default=False, type='bool'),
            validate_certs=dict(default=True, type='bool')
        ),
        supports_check_mode=True
    )

    if HAS_BEAUTIFULSOUP is False:
        module.fail_json(msg=missing_required_lib('BeautifulSoup'), exception=BEAUTIFUL_SOUP_IMP_ERR)

    if module.params['state'] is not None:
        # Several states may be requested at once, comma-separated
        # (e.g. 'drained,ignore_errors'); present/enabled must stand alone.
        states = module.params['state'].split(',')
        if (len(states) > 1) and (("present" in states) or ("enabled" in states)):
            module.fail_json(msg="state present/enabled is mutually exclusive with other states!")
        else:
            for _state in states:
                if _state not in ['present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors']:
                    module.fail_json(
                        msg="State can only take values amongst 'present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors'."
                    )
    else:
        # Sentinel that matches no real state name in the loops below.
        states = ['None']

    mybalancer = Balancer(module.params['balancer_vhost'],
                          module.params['balancer_url_suffix'],
                          module=module,
                          tls=module.params['tls'])

    if module.params['member_host'] is None:
        # No member selected: report every member of the pool.
        json_output_list = []
        for member in mybalancer.members:
            json_output_list.append({
                "host": member.host,
                "status": member.status,
                "protocol": member.protocol,
                "port": member.port,
                "path": member.path,
                "attributes": member.attributes,
                "management_url": member.management_url,
                "balancer_url": member.balancer_url
            })
        module.exit_json(
            changed=False,
            members=json_output_list
        )
    else:
        changed = False
        member_exists = False
        # Desired per-flag status, derived from the requested state names.
        member_status = {'disabled': False, 'drained': False, 'hot_standby': False, 'ignore_errors': False}
        for mode in member_status.keys():
            for state in states:
                if mode == state:
                    member_status[mode] = True
                elif mode == 'disabled' and state == 'absent':
                    # 'absent' is implemented by disabling the member.
                    member_status[mode] = True

        for member in mybalancer.members:
            if str(member.host) == str(module.params['member_host']):
                member_exists = True
                if module.params['state'] is not None:
                    member_status_before = member.status
                    if not module.check_mode:
                        # Assigning the property issues the management POST.
                        member_status_after = member.status = member_status
                    else:
                        member_status_after = member_status
                    if member_status_before != member_status_after:
                        changed = True
                json_output = {
                    "host": member.host,
                    "status": member.status,
                    "protocol": member.protocol,
                    "port": member.port,
                    "path": member.path,
                    "attributes": member.attributes,
                    "management_url": member.management_url,
                    "balancer_url": member.balancer_url
                }
        if member_exists:
            module.exit_json(
                changed=changed,
                member=json_output
            )
        else:
            module.fail_json(msg=str(module.params['member_host']) + ' is not a member of the balancer ' + str(module.params['balancer_vhost']) + '!')
|
||||
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
263
plugins/modules/web_infrastructure/apache2_module.py
Normal file
263
plugins/modules/web_infrastructure/apache2_module.py
Normal file
|
|
@ -0,0 +1,263 @@
|
|||
#!/usr/bin/python
|
||||
# coding: utf-8 -*-
|
||||
|
||||
# (c) 2013-2014, Christian Berendt <berendt@b1-systems.de>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: apache2_module
|
||||
author:
|
||||
- Christian Berendt (@berendt)
|
||||
- Ralf Hertel (@n0trax)
|
||||
- Robin Roth (@robinro)
|
||||
short_description: Enables/disables a module of the Apache2 webserver.
|
||||
description:
|
||||
- Enables or disables a specified module of the Apache2 webserver.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of the module to enable/disable as given to C(a2enmod/a2dismod).
|
||||
required: true
|
||||
identifier:
|
||||
description:
|
||||
- Identifier of the module as listed by C(apache2ctl -M).
|
||||
This is optional and usually determined automatically by the common convention of
|
||||
appending C(_module) to I(name) as well as custom exception for popular modules.
|
||||
required: False
|
||||
force:
|
||||
description:
|
||||
- Force disabling of default modules and override Debian warnings.
|
||||
required: false
|
||||
type: bool
|
||||
default: False
|
||||
state:
|
||||
description:
|
||||
- Desired state of the module.
|
||||
choices: ['present', 'absent']
|
||||
default: present
|
||||
ignore_configcheck:
|
||||
description:
|
||||
- Ignore configuration checks about inconsistent module configuration. Especially for mpm_* modules.
|
||||
type: bool
|
||||
default: False
|
||||
requirements: ["a2enmod","a2dismod"]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# enables the Apache2 module "wsgi"
|
||||
- apache2_module:
|
||||
state: present
|
||||
name: wsgi
|
||||
# disables the Apache2 module "wsgi"
|
||||
- apache2_module:
|
||||
state: absent
|
||||
name: wsgi
|
||||
# disable default modules for Debian
|
||||
- apache2_module:
|
||||
state: absent
|
||||
name: autoindex
|
||||
force: True
|
||||
# disable mpm_worker and ignore warnings about missing mpm module
|
||||
- apache2_module:
|
||||
state: absent
|
||||
name: mpm_worker
|
||||
ignore_configcheck: True
|
||||
# enable dump_io module, which is identified as dumpio_module inside apache2
|
||||
- apache2_module:
|
||||
state: present
|
||||
name: dump_io
|
||||
identifier: dumpio_module
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
result:
|
||||
description: message about action taken
|
||||
returned: always
|
||||
type: str
|
||||
warnings:
|
||||
description: list of warning messages
|
||||
returned: when needed
|
||||
type: list
|
||||
rc:
|
||||
description: return code of underlying command
|
||||
returned: failed
|
||||
type: int
|
||||
stdout:
|
||||
description: stdout of underlying command
|
||||
returned: failed
|
||||
type: str
|
||||
stderr:
|
||||
description: stderr of underlying command
|
||||
returned: failed
|
||||
type: str
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def _run_threaded(module):
    """Return True when the running Apache build reports a threaded MPM.

    Runs ``<control binary> -V`` and scans its output for 'threaded: yes'.
    """
    ctl = _get_ctl_binary(module)

    dummy_rc, stdout, dummy_err = module.run_command("%s -V" % ctl)

    return re.search(r'threaded:[ ]*yes', stdout) is not None
|
||||
|
||||
|
||||
def _get_ctl_binary(module):
|
||||
for command in ['apache2ctl', 'apachectl']:
|
||||
ctl_binary = module.get_bin_path(command)
|
||||
if ctl_binary is not None:
|
||||
return ctl_binary
|
||||
|
||||
module.fail_json(
|
||||
msg="Neither of apache2ctl nor apachctl found."
|
||||
" At least one apache control binary is necessary."
|
||||
)
|
||||
|
||||
|
||||
def _module_is_enabled(module):
    """Return True when the configured module identifier is currently loaded.

    Runs ``<control binary> -M`` and searches the output for the identifier.
    When the config check fails, the error is either downgraded to a warning
    (ignore_configcheck=True) or aborts the module run via fail_json.
    """
    control_binary = _get_ctl_binary(module)
    result, stdout, stderr = module.run_command("%s -M" % control_binary)

    if result != 0:
        error_msg = "Error executing %s: %s" % (control_binary, stderr)
        if module.params['ignore_configcheck']:
            # AH00534 is apache's "no MPM loaded" error, which is expected
            # mid-switch of mpm_* modules: give a targeted warning instead.
            if 'AH00534' in stderr and 'mpm_' in module.params['name']:
                module.warnings.append(
                    "No MPM module loaded! apache2 reload AND other module actions"
                    " will fail if no MPM module is loaded immediately."
                )
            else:
                module.warnings.append(error_msg)
            return False
        else:
            module.fail_json(msg=error_msg)

    # Leading space avoids matching identifiers that merely end with ours
    # (e.g. 'proxy_module' would otherwise match inside 'lbmethod_...').
    searchstring = ' ' + module.params['identifier']
    return searchstring in stdout
|
||||
|
||||
|
||||
def create_apache_identifier(name):
    """Map an a2enmod-style module *name* to its ``apache2ctl -M`` identifier.

    By convention a module enabled as ``name`` is listed as ``name_module``;
    a few popular modules deviate from that and are handled through the
    lookup tables below.
    """

    # Exact-substring replacements: a2enmod spelling -> apache2ctl -M name.
    substring_replacements = [
        ('shib2', 'mod_shib'),
        ('evasive', 'evasive20_module'),
    ]

    # Regex replacements for names that embed a version, e.g. 'php7.1'.
    regex_replacements = [
        ('php', r'^(php\d)\.'),
    ]

    for spelling, identifier in substring_replacements:
        if spelling in name:
            return identifier

    for needle, pattern in regex_replacements:
        if needle not in name:
            continue
        found = re.search(pattern, name)
        if found is not None:
            return found.group(1) + '_module'

    return name + '_module'
|
||||
|
||||
|
||||
def _set_state(module, state):
    """Bring the module to *state* and exit with the result.

    state is 'present' (enable via a2enmod) or 'absent' (disable via
    a2dismod). Honors check mode. Always terminates the module run through
    module.exit_json or module.fail_json.
    """
    name = module.params['name']
    force = module.params['force']

    want_enabled = state == 'present'
    state_string = {'present': 'enabled', 'absent': 'disabled'}[state]
    a2mod_binary = {'present': 'a2enmod', 'absent': 'a2dismod'}[state]
    success_msg = "Module %s %s" % (name, state_string)

    if _module_is_enabled(module) != want_enabled:
        if module.check_mode:
            # Check mode: report the change without touching the system.
            module.exit_json(changed=True,
                             result=success_msg,
                             warnings=module.warnings)

        a2mod_binary = module.get_bin_path(a2mod_binary)
        if a2mod_binary is None:
            module.fail_json(msg="%s not found. Perhaps this system does not use %s to manage apache" % (a2mod_binary, a2mod_binary))

        if not want_enabled and force:
            # force exists only for a2dismod on debian
            a2mod_binary += ' -f'

        result, stdout, stderr = module.run_command("%s %s" % (a2mod_binary, name))

        # Re-query apache rather than trusting the a2enmod/a2dismod exit
        # code: success is defined by the module actually (un)loading.
        if _module_is_enabled(module) == want_enabled:
            module.exit_json(changed=True,
                             result=success_msg,
                             warnings=module.warnings)
        else:
            msg = (
                'Failed to set module {name} to {state}:\n'
                '{stdout}\n'
                'Maybe the module identifier ({identifier}) was guessed incorrectly.'
                'Consider setting the "identifier" option.'
            ).format(
                name=name,
                state=state_string,
                stdout=stdout,
                identifier=module.params['identifier']
            )
            module.fail_json(msg=msg,
                             rc=result,
                             stdout=stdout,
                             stderr=stderr)
    else:
        # Already in the desired state: nothing to do.
        module.exit_json(changed=False,
                         result=success_msg,
                         warnings=module.warnings)
|
||||
|
||||
|
||||
def main():
    """Entry point: parse arguments and enable/disable the apache module."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            identifier=dict(required=False, type='str'),
            force=dict(required=False, type='bool', default=False),
            state=dict(default='present', choices=['absent', 'present']),
            ignore_configcheck=dict(required=False, type='bool', default=False),
        ),
        supports_check_mode=True,
    )

    # Warnings are collected here by the helpers and attached to the final
    # exit_json result.
    module.warnings = []

    name = module.params['name']
    # mod_cgi cannot be managed under a threaded MPM (mod_cgid is used there).
    if name == 'cgi' and _run_threaded(module):
        module.fail_json(msg="Your MPM seems to be threaded. No automatic actions on module %s possible." % name)

    if not module.params['identifier']:
        # Derive the 'apache2ctl -M' identifier from the a2enmod name.
        module.params['identifier'] = create_apache_identifier(module.params['name'])

    if module.params['state'] in ['present', 'absent']:
        _set_state(module, module.params['state'])


if __name__ == '__main__':
    main()
|
||||
520
plugins/modules/web_infrastructure/deploy_helper.py
Normal file
520
plugins/modules/web_infrastructure/deploy_helper.py
Normal file
|
|
@ -0,0 +1,520 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Jasper N. Brouwer <jasper@nerdsweide.nl>
|
||||
# (c) 2014, Ramon de la Fuente <ramon@delafuente.nl>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: deploy_helper
|
||||
author: "Ramon de la Fuente (@ramondelafuente)"
|
||||
short_description: Manages some of the steps common in deploying projects.
|
||||
description:
|
||||
- The Deploy Helper manages some of the steps common in deploying software.
|
||||
It creates a folder structure, manages a symlink for the current release
|
||||
and cleans up old releases.
|
||||
- "Running it with the C(state=query) or C(state=present) will return the C(deploy_helper) fact.
|
||||
C(project_path), whatever you set in the path parameter,
|
||||
C(current_path), the path to the symlink that points to the active release,
|
||||
C(releases_path), the path to the folder to keep releases in,
|
||||
C(shared_path), the path to the folder to keep shared resources in,
|
||||
C(unfinished_filename), the file to check for to recognize unfinished builds,
|
||||
C(previous_release), the release the 'current' symlink is pointing to,
|
||||
C(previous_release_path), the full path to the 'current' symlink target,
|
||||
C(new_release), either the 'release' parameter or a generated timestamp,
|
||||
C(new_release_path), the path to the new release folder (not created by the module)."
|
||||
|
||||
options:
|
||||
path:
|
||||
required: True
|
||||
aliases: ['dest']
|
||||
description:
|
||||
- the root path of the project. Alias I(dest).
|
||||
Returned in the C(deploy_helper.project_path) fact.
|
||||
|
||||
state:
|
||||
description:
|
||||
- the state of the project.
|
||||
C(query) will only gather facts,
|
||||
C(present) will create the project I(root) folder, and in it the I(releases) and I(shared) folders,
|
||||
C(finalize) will remove the unfinished_filename file, create a symlink to the newly
|
||||
deployed release and optionally clean old releases,
|
||||
C(clean) will remove failed & old releases,
|
||||
C(absent) will remove the project folder (synonymous to the M(file) module with C(state=absent))
|
||||
choices: [ present, finalize, absent, clean, query ]
|
||||
default: present
|
||||
|
||||
release:
|
||||
description:
|
||||
- the release version that is being deployed. Defaults to a timestamp format %Y%m%d%H%M%S (i.e. '20141119223359').
|
||||
This parameter is optional during C(state=present), but needs to be set explicitly for C(state=finalize).
|
||||
You can use the generated fact C(release={{ deploy_helper.new_release }}).
|
||||
|
||||
releases_path:
|
||||
description:
|
||||
- the name of the folder that will hold the releases. This can be relative to C(path) or absolute.
|
||||
Returned in the C(deploy_helper.releases_path) fact.
|
||||
default: releases
|
||||
|
||||
shared_path:
|
||||
description:
|
||||
- the name of the folder that will hold the shared resources. This can be relative to C(path) or absolute.
|
||||
If this is set to an empty string, no shared folder will be created.
|
||||
Returned in the C(deploy_helper.shared_path) fact.
|
||||
default: shared
|
||||
|
||||
current_path:
|
||||
description:
|
||||
- the name of the symlink that is created when the deploy is finalized. Used in C(finalize) and C(clean).
|
||||
Returned in the C(deploy_helper.current_path) fact.
|
||||
default: current
|
||||
|
||||
unfinished_filename:
|
||||
description:
|
||||
- the name of the file that indicates a deploy has not finished. All folders in the releases_path that
|
||||
contain this file will be deleted on C(state=finalize) with clean=True, or C(state=clean). This file is
|
||||
automatically deleted from the I(new_release_path) during C(state=finalize).
|
||||
default: DEPLOY_UNFINISHED
|
||||
|
||||
clean:
|
||||
description:
|
||||
- Whether to run the clean procedure in case of C(state=finalize).
|
||||
type: bool
|
||||
default: 'yes'
|
||||
|
||||
keep_releases:
|
||||
description:
|
||||
- the number of old releases to keep when cleaning. Used in C(finalize) and C(clean). Any unfinished builds
|
||||
will be deleted first, so only correct releases will count. The current version will not count.
|
||||
default: 5
|
||||
|
||||
notes:
|
||||
- Facts are only returned for C(state=query) and C(state=present). If you use both, you should pass any overridden
|
||||
parameters to both calls, otherwise the second call will overwrite the facts of the first one.
|
||||
- When using C(state=clean), the releases are ordered by I(creation date). You should be able to switch to a
|
||||
new naming strategy without problems.
|
||||
- Because of the default behaviour of generating the I(new_release) fact, this module will not be idempotent
|
||||
unless you pass your own release name with C(release). Due to the nature of deploying software, this should not
|
||||
be much of a problem.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
|
||||
# General explanation, starting with an example folder structure for a project:
|
||||
|
||||
# root:
|
||||
# releases:
|
||||
# - 20140415234508
|
||||
# - 20140415235146
|
||||
# - 20140416082818
|
||||
#
|
||||
# shared:
|
||||
# - sessions
|
||||
# - uploads
|
||||
#
|
||||
# current: releases/20140416082818
|
||||
|
||||
|
||||
# The 'releases' folder holds all the available releases. A release is a complete build of the application being
|
||||
# deployed. This can be a clone of a repository for example, or a sync of a local folder on your filesystem.
|
||||
# Having timestamped folders is one way of having distinct releases, but you could choose your own strategy like
|
||||
# git tags or commit hashes.
|
||||
#
|
||||
# During a deploy, a new folder should be created in the releases folder and any build steps required should be
|
||||
# performed. Once the new build is ready, the deploy procedure is 'finalized' by replacing the 'current' symlink
|
||||
# with a link to this build.
|
||||
#
|
||||
# The 'shared' folder holds any resource that is shared between releases. Examples of this are web-server
|
||||
# session files, or files uploaded by users of your application. It's quite common to have symlinks from a release
|
||||
# folder pointing to a shared/subfolder, and creating these links would be automated as part of the build steps.
|
||||
#
|
||||
# The 'current' symlink points to one of the releases. Probably the latest one, unless a deploy is in progress.
|
||||
# The web-server's root for the project will go through this symlink, so the 'downtime' when switching to a new
|
||||
# release is reduced to the time it takes to switch the link.
|
||||
#
|
||||
# To distinguish between successful builds and unfinished ones, a file can be placed in the folder of the release
|
||||
# that is currently in progress. The existence of this file will mark it as unfinished, and allow an automated
|
||||
# procedure to remove it during cleanup.
|
||||
|
||||
|
||||
# Typical usage
|
||||
- name: Initialize the deploy root and gather facts
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
- name: Clone the project to the new release folder
|
||||
git:
|
||||
repo: git://foosball.example.org/path/to/repo.git
|
||||
dest: '{{ deploy_helper.new_release_path }}'
|
||||
version: v1.1.1
|
||||
- name: Add an unfinished file, to allow cleanup on successful finalize
|
||||
file:
|
||||
path: '{{ deploy_helper.new_release_path }}/{{ deploy_helper.unfinished_filename }}'
|
||||
state: touch
|
||||
- name: Perform some build steps, like running your dependency manager for example
|
||||
composer:
|
||||
command: install
|
||||
working_dir: '{{ deploy_helper.new_release_path }}'
|
||||
- name: Create some folders in the shared folder
|
||||
file:
|
||||
path: '{{ deploy_helper.shared_path }}/{{ item }}'
|
||||
state: directory
|
||||
with_items:
|
||||
- sessions
|
||||
- uploads
|
||||
- name: Add symlinks from the new release to the shared folder
|
||||
file:
|
||||
path: '{{ deploy_helper.new_release_path }}/{{ item.path }}'
|
||||
src: '{{ deploy_helper.shared_path }}/{{ item.src }}'
|
||||
state: link
|
||||
with_items:
|
||||
- path: app/sessions
|
||||
src: sessions
|
||||
- path: web/uploads
|
||||
src: uploads
|
||||
- name: Finalize the deploy, removing the unfinished file and switching the symlink
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
|
||||
# Retrieving facts before running a deploy
|
||||
- name: Run 'state=query' to gather facts without changing anything
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
state: query
|
||||
# Remember to set the 'release' parameter when you actually call 'state=present' later
|
||||
- name: Initialize the deploy root
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: present
|
||||
|
||||
# all paths can be absolute or relative (to the 'path' parameter)
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
releases_path: /var/www/project/releases
|
||||
shared_path: /var/www/shared
|
||||
current_path: /var/www/active
|
||||
|
||||
# Using your own naming strategy for releases (a version tag in this case):
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: v1.1.1
|
||||
state: present
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
|
||||
# Using a different unfinished_filename:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
unfinished_filename: README.md
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
|
||||
# Postponing the cleanup of older builds:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
clean: False
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: clean
|
||||
# Or running the cleanup ahead of the new deploy
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: clean
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: present
|
||||
|
||||
# Keeping more old releases:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
keep_releases: 10
|
||||
# Or, if you use 'clean=false' on finalize:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: clean
|
||||
keep_releases: 10
|
||||
|
||||
# Removing the entire project root folder
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: absent
|
||||
|
||||
# Debugging the facts returned by the module
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
- debug:
|
||||
var: deploy_helper
|
||||
'''
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class DeployHelper(object):
    """Implements the filesystem operations behind the deploy_helper module.

    Holds the module parameters as attributes and provides the primitives
    (path creation/deletion, symlink switching, release cleanup) that the
    module's state machine composes. All mutating methods honour Ansible
    check mode via ``self.module.check_mode`` and return a changed
    indicator (bool or a count of changes).
    """

    def __init__(self, module):
        # Keep a handle on the AnsibleModule for check_mode, fail_json, etc.
        self.module = module
        # File attribute args (owner/group/mode...) shared by created paths.
        self.file_args = module.load_file_common_arguments(module.params)

        self.clean = module.params['clean']
        self.current_path = module.params['current_path']
        self.keep_releases = module.params['keep_releases']
        self.path = module.params['path']
        self.release = module.params['release']
        self.releases_path = module.params['releases_path']
        self.shared_path = module.params['shared_path']
        self.state = module.params['state']
        self.unfinished_filename = module.params['unfinished_filename']

    def gather_facts(self):
        """Build the ``deploy_helper`` facts dict from parameters and disk state.

        Also generates a timestamp release name for query/present when the
        user did not supply one (side effect: sets ``self.release``).
        """
        current_path = os.path.join(self.path, self.current_path)
        releases_path = os.path.join(self.path, self.releases_path)
        if self.shared_path:
            shared_path = os.path.join(self.path, self.shared_path)
        else:
            # shared_path param set to '' means "no shared folder".
            shared_path = None

        previous_release, previous_release_path = self._get_last_release(current_path)

        # Only query/present auto-generate a release name; finalize requires
        # an explicit one (enforced in main()).
        if not self.release and (self.state == 'query' or self.state == 'present'):
            self.release = time.strftime("%Y%m%d%H%M%S")

        if self.release:
            new_release_path = os.path.join(releases_path, self.release)
        else:
            new_release_path = None

        return {
            'project_path': self.path,
            'current_path': current_path,
            'releases_path': releases_path,
            'shared_path': shared_path,
            'previous_release': previous_release,
            'previous_release_path': previous_release_path,
            'new_release': self.release,
            'new_release_path': new_release_path,
            'unfinished_filename': self.unfinished_filename
        }

    def delete_path(self, path):
        """Recursively delete *path* (a directory); return True if it existed.

        Fails the module when *path* exists but is not a directory.
        In check mode nothing is removed but True is still returned.
        """
        # lexists so a dangling symlink at *path* is still detected.
        if not os.path.lexists(path):
            return False

        if not os.path.isdir(path):
            self.module.fail_json(msg="%s exists but is not a directory" % path)

        if not self.module.check_mode:
            try:
                shutil.rmtree(path, ignore_errors=False)
            except Exception as e:
                self.module.fail_json(msg="rmtree failed: %s" % to_native(e), exception=traceback.format_exc())

        return True

    def create_path(self, path):
        """Create directory *path* (with parents) and apply file attributes.

        Returns a truthy value when the directory was created or its
        attributes changed.
        """
        changed = False

        if not os.path.lexists(path):
            changed = True
            if not self.module.check_mode:
                os.makedirs(path)

        elif not os.path.isdir(path):
            self.module.fail_json(msg="%s exists but is not a directory" % path)

        # bool += bool yields an int; any non-zero value counts as changed.
        changed += self.module.set_directory_attributes_if_different(self._get_file_args(path), changed)

        return changed

    def check_link(self, path):
        """Fail the module if *path* exists but is not a symlink."""
        if os.path.lexists(path):
            if not os.path.islink(path):
                self.module.fail_json(msg="%s exists but is not a symbolic link" % path)

    def create_link(self, source, link_name):
        """Point symlink *link_name* at *source*; return True when it changed.

        An existing link is replaced atomically: a temporary link is created
        next to it and then rename()d over the old one, so readers never see
        a missing 'current' link.
        """
        changed = False

        if os.path.islink(link_name):
            # Compare fully-resolved targets so equivalent spellings match.
            norm_link = os.path.normpath(os.path.realpath(link_name))
            norm_source = os.path.normpath(os.path.realpath(source))
            if norm_link == norm_source:
                changed = False
            else:
                changed = True
                if not self.module.check_mode:
                    if not os.path.lexists(source):
                        self.module.fail_json(msg="the symlink target %s doesn't exists" % source)
                    tmp_link_name = link_name + '.' + self.unfinished_filename
                    # Clear a leftover temp link from a previous failed run.
                    if os.path.islink(tmp_link_name):
                        os.unlink(tmp_link_name)
                    os.symlink(source, tmp_link_name)
                    # rename over the old link is atomic on POSIX.
                    os.rename(tmp_link_name, link_name)
        else:
            changed = True
            if not self.module.check_mode:
                os.symlink(source, link_name)

        return changed

    def remove_unfinished_file(self, new_release_path):
        """Delete the unfinished-marker file in *new_release_path* if present."""
        changed = False
        unfinished_file_path = os.path.join(new_release_path, self.unfinished_filename)
        if os.path.lexists(unfinished_file_path):
            changed = True
            if not self.module.check_mode:
                os.remove(unfinished_file_path)

        return changed

    def remove_unfinished_builds(self, releases_path):
        """Delete every release folder still containing the unfinished marker.

        Returns the number of releases removed (or that would be removed in
        check mode).
        """
        changes = 0

        for release in os.listdir(releases_path):
            if os.path.isfile(os.path.join(releases_path, release, self.unfinished_filename)):
                if self.module.check_mode:
                    changes += 1
                else:
                    changes += self.delete_path(os.path.join(releases_path, release))

        return changes

    def remove_unfinished_link(self, path):
        """Remove a leftover temporary symlink from an interrupted finalize."""
        changed = False

        tmp_link_name = os.path.join(path, self.release + '.' + self.unfinished_filename)
        # NOTE(review): in check mode this never reports a change, and
        # os.path.exists() is False for a dangling symlink (os.path.lexists
        # would detect it) — confirm whether either is intended.
        if not self.module.check_mode and os.path.exists(tmp_link_name):
            changed = True
            os.remove(tmp_link_name)

        return changed

    def cleanup(self, releases_path, reserve_version):
        """Delete old releases beyond keep_releases, never touching
        *reserve_version* (the release being deployed/kept current).

        Releases are ordered by ctime, newest first; returns the number of
        deletions (estimated in check mode).
        """
        changes = 0

        if os.path.lexists(releases_path):
            releases = [f for f in os.listdir(releases_path) if os.path.isdir(os.path.join(releases_path, f))]
            try:
                releases.remove(reserve_version)
            except ValueError:
                # reserve_version not on disk (yet) — nothing to protect.
                pass

            if not self.module.check_mode:
                releases.sort(key=lambda x: os.path.getctime(os.path.join(releases_path, x)), reverse=True)
                for release in releases[self.keep_releases:]:
                    changes += self.delete_path(os.path.join(releases_path, release))
            elif len(releases) > self.keep_releases:
                # Check mode: just report how many would go.
                changes += (len(releases) - self.keep_releases)

        return changes

    def _get_file_args(self, path):
        """Return a copy of the common file args rebased onto *path*."""
        file_args = self.file_args.copy()
        file_args['path'] = path
        return file_args

    def _get_last_release(self, current_path):
        """Resolve the 'current' symlink to (release name, release path).

        Both values are None when the symlink does not exist.
        """
        previous_release = None
        previous_release_path = None

        if os.path.lexists(current_path):
            previous_release_path = os.path.realpath(current_path)
            previous_release = os.path.basename(previous_release_path)

        return previous_release, previous_release_path
|
||||
|
||||
|
||||
def main():
    """Module entry point: gather facts, run the requested state, report changes."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(aliases=['dest'], required=True, type='path'),
            release=dict(required=False, type='str', default=None),
            releases_path=dict(required=False, type='str', default='releases'),
            shared_path=dict(required=False, type='path', default='shared'),
            current_path=dict(required=False, type='path', default='current'),
            keep_releases=dict(required=False, type='int', default=5),
            clean=dict(required=False, type='bool', default=True),
            unfinished_filename=dict(required=False, type='str', default='DEPLOY_UNFINISHED'),
            state=dict(required=False, choices=['present', 'absent', 'clean', 'finalize', 'query'], default='present')
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    helper = DeployHelper(module)
    facts = helper.gather_facts()
    state = helper.state

    result = {'state': state}
    change_count = 0

    if state == 'query':
        # Read-only: just expose the facts.
        result['ansible_facts'] = {'deploy_helper': facts}

    elif state == 'present':
        helper.check_link(facts['current_path'])
        change_count += helper.create_path(facts['project_path'])
        change_count += helper.create_path(facts['releases_path'])
        if helper.shared_path:
            change_count += helper.create_path(facts['shared_path'])

        result['ansible_facts'] = {'deploy_helper': facts}

    elif state == 'finalize':
        if not helper.release:
            module.fail_json(msg="'release' is a required parameter for state=finalize (try the 'deploy_helper.new_release' fact)")
        if helper.keep_releases <= 0:
            module.fail_json(msg="'keep_releases' should be at least 1")

        change_count += helper.remove_unfinished_file(facts['new_release_path'])
        change_count += helper.create_link(facts['new_release_path'], facts['current_path'])
        if helper.clean:
            change_count += helper.remove_unfinished_link(facts['project_path'])
            change_count += helper.remove_unfinished_builds(facts['releases_path'])
            change_count += helper.cleanup(facts['releases_path'], facts['new_release'])

    elif state == 'clean':
        change_count += helper.remove_unfinished_link(facts['project_path'])
        change_count += helper.remove_unfinished_builds(facts['releases_path'])
        change_count += helper.cleanup(facts['releases_path'], facts['new_release'])

    elif state == 'absent':
        # destroy the facts
        result['ansible_facts'] = {'deploy_helper': []}
        change_count += helper.delete_path(facts['project_path'])

    result['changed'] = change_count > 0

    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
||||
310
plugins/modules/web_infrastructure/django_manage.py
Normal file
310
plugins/modules/web_infrastructure/django_manage.py
Normal file
|
|
@ -0,0 +1,310 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Scott Anderson <scottanderson42@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: django_manage
|
||||
short_description: Manages a Django application.
|
||||
description:
|
||||
- Manages a Django application using the I(manage.py) application frontend to I(django-admin). With the I(virtualenv) parameter, all
|
||||
management commands will be executed by the given I(virtualenv) installation.
|
||||
options:
|
||||
command:
|
||||
choices: [ 'cleanup', 'collectstatic', 'flush', 'loaddata', 'migrate', 'runfcgi', 'syncdb', 'test', 'validate', ]
|
||||
description:
|
||||
- The name of the Django management command to run. Built in commands are cleanup, collectstatic, flush, loaddata, migrate, runfcgi, syncdb,
|
||||
test, and validate.
|
||||
- Other commands can be entered, but will fail if they're unknown to Django. Other commands that may prompt for user input should be run
|
||||
with the I(--noinput) flag.
|
||||
required: true
|
||||
app_path:
|
||||
description:
|
||||
- The path to the root of the Django application where B(manage.py) lives.
|
||||
required: true
|
||||
settings:
|
||||
description:
|
||||
- The Python path to the application's settings module, such as 'myapp.settings'.
|
||||
required: false
|
||||
pythonpath:
|
||||
description:
|
||||
- A directory to add to the Python path. Typically used to include the settings module if it is located external to the application directory.
|
||||
required: false
|
||||
virtualenv:
|
||||
description:
|
||||
- An optional path to a I(virtualenv) installation to use while running the manage application.
|
||||
    aliases: [virtual_env]
|
||||
apps:
|
||||
description:
|
||||
- A list of space-delimited apps to target. Used by the 'test' command.
|
||||
required: false
|
||||
cache_table:
|
||||
description:
|
||||
- The name of the table used for database-backed caching. Used by the 'createcachetable' command.
|
||||
required: false
|
||||
clear:
|
||||
description:
|
||||
- Clear the existing files before trying to copy or link the original file.
|
||||
- Used only with the 'collectstatic' command. The C(--noinput) argument will be added automatically.
|
||||
required: false
|
||||
default: no
|
||||
type: bool
|
||||
database:
|
||||
description:
|
||||
- The database to target. Used by the 'createcachetable', 'flush', 'loaddata', and 'syncdb' commands.
|
||||
required: false
|
||||
failfast:
|
||||
description:
|
||||
- Fail the command immediately if a test fails. Used by the 'test' command.
|
||||
required: false
|
||||
default: "no"
|
||||
type: bool
|
||||
fixtures:
|
||||
description:
|
||||
- A space-delimited list of fixture file names to load in the database. B(Required) by the 'loaddata' command.
|
||||
required: false
|
||||
skip:
|
||||
description:
|
||||
- Will skip over out-of-order missing migrations, you can only use this parameter with I(migrate)
|
||||
required: false
|
||||
type: bool
|
||||
merge:
|
||||
description:
|
||||
- Will run out-of-order or missing migrations as they are not rollback migrations, you can only use this parameter with 'migrate' command
|
||||
required: false
|
||||
type: bool
|
||||
link:
|
||||
description:
|
||||
- Will create links to the files instead of copying them, you can only use this parameter with 'collectstatic' command
|
||||
required: false
|
||||
type: bool
|
||||
notes:
|
||||
- I(virtualenv) (U(http://www.virtualenv.org)) must be installed on the remote host if the virtualenv parameter is specified.
|
||||
- This module will create a virtualenv if the virtualenv parameter is specified and a virtualenv does not already exist at the given location.
|
||||
- This module assumes English error messages for the 'createcachetable' command to detect table existence, unfortunately.
|
||||
- To be able to use the migrate command with django versions < 1.7, you must have south installed and added as an app in your settings.
|
||||
- To be able to use the collectstatic command, you must have enabled staticfiles in your settings.
|
||||
- As of ansible 2.x, your I(manage.py) application must be executable (rwxr-xr-x), and must have a valid I(shebang), i.e. "#!/usr/bin/env python",
|
||||
for invoking the appropriate Python interpreter.
|
||||
requirements: [ "virtualenv", "django" ]
|
||||
author: "Scott Anderson (@tastychutney)"
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
# Run cleanup on the application installed in 'django_dir'.
|
||||
- django_manage:
|
||||
command: cleanup
|
||||
app_path: "{{ django_dir }}"
|
||||
|
||||
# Load the initial_data fixture into the application
|
||||
- django_manage:
|
||||
command: loaddata
|
||||
app_path: "{{ django_dir }}"
|
||||
fixtures: "{{ initial_data }}"
|
||||
|
||||
# Run syncdb on the application
|
||||
- django_manage:
|
||||
command: syncdb
|
||||
app_path: "{{ django_dir }}"
|
||||
settings: "{{ settings_app_name }}"
|
||||
pythonpath: "{{ settings_dir }}"
|
||||
virtualenv: "{{ virtualenv_dir }}"
|
||||
|
||||
# Run the SmokeTest test case from the main app. Useful for testing deploys.
|
||||
- django_manage:
|
||||
command: test
|
||||
app_path: "{{ django_dir }}"
|
||||
apps: main.SmokeTest
|
||||
|
||||
# Create an initial superuser.
|
||||
- django_manage:
|
||||
command: "createsuperuser --noinput --username=admin --email=admin@example.com"
|
||||
app_path: "{{ django_dir }}"
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def _fail(module, cmd, out, err, **kwargs):
|
||||
msg = ''
|
||||
if out:
|
||||
msg += "stdout: %s" % (out, )
|
||||
if err:
|
||||
msg += "\n:stderr: %s" % (err, )
|
||||
module.fail_json(cmd=cmd, msg=msg, **kwargs)
|
||||
|
||||
|
||||
def _ensure_virtualenv(module):
    """Create the configured virtualenv if missing, then 'activate' it.

    Activation is approximated the way bin/activate does it: prepend the
    venv's bin directory to PATH and export VIRTUAL_ENV. Does nothing when
    the 'virtualenv' parameter is unset.

    :param module: AnsibleModule carrying the 'virtualenv' parameter
    """
    venv_param = module.params['virtualenv']
    if venv_param is None:
        return

    vbin = os.path.join(venv_param, 'bin')
    activate = os.path.join(vbin, 'activate')

    # Existence of bin/activate is the marker for "venv already created".
    if not os.path.exists(activate):
        virtualenv = module.get_bin_path('virtualenv', True)
        # Pass argv as a list so run_command needs no shell quoting.
        # (Removed a dead string-form assignment of this command that the
        # original immediately overwrote.)
        vcmd = [virtualenv, venv_param]
        rc, out_venv, err_venv = module.run_command(vcmd)
        if rc != 0:
            _fail(module, vcmd, out_venv, err_venv)

    os.environ["PATH"] = "%s:%s" % (vbin, os.environ["PATH"])
    os.environ["VIRTUAL_ENV"] = venv_param
|
||||
|
||||
|
||||
def createcachetable_filter_output(line):
    """Keep only output lines that do not report an already-existing table."""
    return line.find("Already exists") == -1
|
||||
|
||||
|
||||
def flush_filter_output(line):
    """A flush changed something when objects were (re)installed."""
    if "Installed" not in line:
        return False
    return "Installed 0 object" not in line
|
||||
|
||||
|
||||
def loaddata_filter_output(line):
    """loaddata changed something when a non-zero object count was installed."""
    return ("Installed" in line) and not ("Installed 0 object" in line)
|
||||
|
||||
|
||||
def syncdb_filter_output(line):
    """syncdb changed something when it created tables or installed objects."""
    if "Creating table " in line:
        return True
    return "Installed" in line and "Installed 0 object" not in line
|
||||
|
||||
|
||||
def migrate_filter_output(line):
    """migrate changed something when it migrated, applied, or installed objects."""
    for marker in ("Migrating forwards ", "Applying"):
        if marker in line:
            return True
    return "Installed" in line and "Installed 0 object" not in line
|
||||
|
||||
|
||||
def collectstatic_filter_output(line):
    """collectstatic changed something when any static files were processed."""
    if not line:
        # Preserve the original short-circuit: falsy input is returned as-is.
        return line
    return "0 static files" not in line
|
||||
|
||||
|
||||
def main():
    """Module entry point: validate params, assemble and run manage.py.

    Builds a "./manage.py <command> [flags...]" command line from the module
    parameters, runs it inside app_path (optionally inside a virtualenv),
    and derives 'changed' by filtering the command's stdout through a
    per-command <command>_filter_output() helper looked up via globals().
    """
    # Which optional params each built-in command accepts.
    command_allowed_param_map = dict(
        cleanup=(),
        createcachetable=('cache_table', 'database', ),
        flush=('database', ),
        loaddata=('database', 'fixtures', ),
        syncdb=('database', ),
        test=('failfast', 'testrunner', 'liveserver', 'apps', ),
        validate=(),
        migrate=('apps', 'skip', 'merge', 'database',),
        collectstatic=('clear', 'link', ),
    )

    # Params that MUST be set for a given command.
    command_required_param_map = dict(
        loaddata=('fixtures', ),
    )

    # forces --noinput on every command that needs it
    noinput_commands = (
        'flush',
        'syncdb',
        'migrate',
        'test',
        'collectstatic',
    )

    # These params are allowed for certain commands only
    specific_params = ('apps', 'clear', 'database', 'failfast', 'fixtures', 'liveserver', 'testrunner')

    # These params are automatically added to the command if present
    general_params = ('settings', 'pythonpath', 'database',)
    specific_boolean_params = ('clear', 'failfast', 'skip', 'merge', 'link')
    end_of_command_params = ('apps', 'cache_table', 'fixtures')

    module = AnsibleModule(
        argument_spec=dict(
            command=dict(default=None, required=True),
            app_path=dict(default=None, required=True, type='path'),
            settings=dict(default=None, required=False),
            pythonpath=dict(default=None, required=False, aliases=['python_path']),
            virtualenv=dict(default=None, required=False, type='path', aliases=['virtual_env']),

            apps=dict(default=None, required=False),
            cache_table=dict(default=None, required=False),
            clear=dict(default=None, required=False, type='bool'),
            database=dict(default=None, required=False),
            failfast=dict(default='no', required=False, type='bool', aliases=['fail_fast']),
            fixtures=dict(default=None, required=False),
            liveserver=dict(default=None, required=False, aliases=['live_server']),
            testrunner=dict(default=None, required=False, aliases=['test_runner']),
            skip=dict(default=None, required=False, type='bool'),
            merge=dict(default=None, required=False, type='bool'),
            link=dict(default=None, required=False, type='bool'),
        ),
    )

    command = module.params['command']
    app_path = module.params['app_path']
    virtualenv = module.params['virtualenv']

    # Reject command-specific params used with the wrong command.
    # NOTE(review): for a custom (non-built-in) command combined with a set
    # specific param, command_allowed_param_map[command] raises KeyError
    # instead of a clean failure — confirm whether that path can occur.
    for param in specific_params:
        value = module.params[param]
        if param in specific_boolean_params:
            value = module.boolean(value)
        if value and param not in command_allowed_param_map[command]:
            module.fail_json(msg='%s param is incompatible with command=%s' % (param, command))

    for param in command_required_param_map.get(command, ()):
        if not module.params[param]:
            module.fail_json(msg='%s param is required for command=%s' % (param, command))

    _ensure_virtualenv(module)

    # Assemble the manage.py invocation; 'command' may itself carry extra
    # arguments (e.g. "createsuperuser --noinput --username=admin").
    cmd = "./manage.py %s" % (command, )

    if command in noinput_commands:
        cmd = '%s --noinput' % cmd

    for param in general_params:
        if module.params[param]:
            cmd = '%s --%s=%s' % (cmd, param, module.params[param])

    for param in specific_boolean_params:
        if module.boolean(module.params[param]):
            cmd = '%s --%s' % (cmd, param)

    # these params always get tacked on the end of the command
    for param in end_of_command_params:
        if module.params[param]:
            cmd = '%s %s' % (cmd, module.params[param])

    rc, out, err = module.run_command(cmd, cwd=app_path)
    if rc != 0:
        # English-only heuristic: treat an existing cache table as success.
        if command == 'createcachetable' and 'table' in err and 'already exists' in err:
            out = 'Already exists.'
        else:
            # NOTE(review): this call passes err as _fail's 'out' argument and
            # the message as 'err' — the failure text ends up mislabelled;
            # confirm intended argument order.
            if "Unknown command:" in err:
                _fail(module, cmd, err, "Unknown django command: %s" % command)
            _fail(module, cmd, out, err, path=os.environ["PATH"], syspath=sys.path)

    changed = False

    # Look up the per-command output filter (e.g. migrate_filter_output);
    # any surviving line means the command changed something.
    lines = out.split('\n')
    filt = globals().get(command + "_filter_output", None)
    if filt:
        filtered_output = list(filter(filt, lines))
        if len(filtered_output):
            changed = True

    module.exit_json(changed=changed, out=out, cmd=cmd, app_path=app_path, virtualenv=virtualenv,
                     settings=module.params['settings'], pythonpath=module.params['pythonpath'])


if __name__ == '__main__':
    main()
|
||||
219
plugins/modules/web_infrastructure/ejabberd_user.py
Normal file
219
plugins/modules/web_infrastructure/ejabberd_user.py
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2013, Peter Sprygada <sprygada@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: ejabberd_user
|
||||
author: "Peter Sprygada (@privateip)"
|
||||
short_description: Manages users for ejabberd servers
|
||||
requirements:
|
||||
- ejabberd with mod_admin_extra
|
||||
description:
|
||||
- This module provides user management for ejabberd servers
|
||||
options:
|
||||
username:
|
||||
description:
|
||||
- the name of the user to manage
|
||||
required: true
|
||||
host:
|
||||
description:
|
||||
- the ejabberd host associated with this username
|
||||
required: true
|
||||
password:
|
||||
description:
|
||||
- the password to assign to the username
|
||||
required: false
|
||||
logging:
|
||||
description:
|
||||
- enables or disables the local syslog facility for this module
|
||||
required: false
|
||||
default: false
|
||||
type: bool
|
||||
state:
|
||||
description:
|
||||
- describe the desired state of the user to be managed
|
||||
required: false
|
||||
default: 'present'
|
||||
choices: [ 'present', 'absent' ]
|
||||
notes:
|
||||
- Password parameter is required for state == present only
|
||||
- Passwords must be stored in clear text for this release
|
||||
- The ejabberd configuration file must include mod_admin_extra as a module.
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
# Example playbook entries using the ejabberd_user module to manage users state.
|
||||
|
||||
- name: create a user if it does not exist
|
||||
ejabberd_user:
|
||||
username: test
|
||||
host: server
|
||||
password: password
|
||||
|
||||
- name: delete a user if it exists
|
||||
ejabberd_user:
|
||||
username: test
|
||||
host: server
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import syslog
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
class EjabberdUserException(Exception):
    """Base exception raised by EjabberdUser when required attributes are missing."""
|
||||
|
||||
|
||||
class EjabberdUser(object):
    """ This object represents a user resource for an ejabberd server. The
    object manages user creation and deletion using ejabberdctl. The following
    commands are currently supported:
        * ejabberdctl register
        * ejabberdctl deregister
        * ejabberdctl change_password
        * ejabberdctl check_account
        * ejabberdctl check_password
    """

    def __init__(self, module):
        # module: the AnsibleModule instance; used for run_command and logging.
        self.module = module
        self.logging = module.params.get('logging')
        self.state = module.params.get('state')
        self.host = module.params.get('host')
        self.user = module.params.get('username')
        self.pwd = module.params.get('password')

    @property
    def changed(self):
        """ This method will check the current user and see if the password has
        changed. It returns True if the stored credentials differ from the
        supplied ones (`ejabberdctl check_password` exits non-zero) and False
        when they match.
        """
        try:
            options = [self.user, self.host, self.pwd]
            (rc, out, err) = self.run_command('check_password', options)
        except EjabberdUserException:
            (rc, out, err) = (1, None, "required attribute(s) missing")
        # check_password exits 0 on a match; any non-zero rc means the
        # password must be updated. Return a proper boolean, as documented.
        return bool(int(rc))

    @property
    def exists(self):
        """ This method will check to see if the supplied username exists for
        host specified. If the user exists True is returned, otherwise False
        is returned
        """
        try:
            options = [self.user, self.host]
            (rc, out, err) = self.run_command('check_account', options)
        except EjabberdUserException:
            (rc, out, err) = (1, None, "required attribute(s) missing")
        # check_account exits 0 when the account is present.
        return not bool(int(rc))

    def log(self, entry):
        """ This method will log information to the local syslog facility """
        if self.logging:
            syslog.openlog('ansible-%s' % self.module._name)
            syslog.syslog(syslog.LOG_NOTICE, entry)

    def run_command(self, cmd, options):
        """ Run `ejabberdctl <cmd> <options...>` and return (rc, out, err).

        Raises EjabberdUserException when any option is missing or empty.
        """
        if not all(options):
            raise EjabberdUserException

        cmd = 'ejabberdctl %s ' % cmd
        cmd += " ".join(options)
        self.log('command: %s' % cmd)
        return self.module.run_command(cmd.split())

    def update(self):
        """ Update the user's password; return (rc, out, err). """
        try:
            options = [self.user, self.host, self.pwd]
            (rc, out, err) = self.run_command('change_password', options)
        except EjabberdUserException:
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return (rc, out, err)

    def create(self):
        """ Register a new user on the host with the password provided;
        return (rc, out, err).
        """
        try:
            options = [self.user, self.host, self.pwd]
            (rc, out, err) = self.run_command('register', options)
        except EjabberdUserException:
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return (rc, out, err)

    def delete(self):
        """ Unregister the user from the host; return (rc, out, err). """
        try:
            options = [self.user, self.host]
            (rc, out, err) = self.run_command('unregister', options)
        except EjabberdUserException:
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return (rc, out, err)
|
||||
|
||||
|
||||
def main():
    """Ansible entry point: ensure the requested ejabberd user state."""
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(default=None, type='str'),
            username=dict(default=None, type='str'),
            password=dict(default=None, type='str', no_log=True),
            state=dict(default='present', choices=['present', 'absent']),
            logging=dict(default=False, type='bool')
        ),
        supports_check_mode=True
    )

    user = EjabberdUser(module)

    # rc stays None while no ejabberdctl command has been executed; it
    # doubles as the "nothing changed" marker below.
    rc = None
    result = dict(changed=False)

    if user.state == 'absent':
        if user.exists:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = user.delete()
            if rc != 0:
                module.fail_json(msg=err, rc=rc)

    elif user.state == 'present':
        if not user.exists:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = user.create()
        elif user.changed:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = user.update()
        if rc is not None and rc != 0:
            module.fail_json(msg=err, rc=rc)

    # Any executed command implies a change.
    result['changed'] = rc is not None

    module.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
227
plugins/modules/web_infrastructure/gunicorn.py
Normal file
227
plugins/modules/web_infrastructure/gunicorn.py
Normal file
|
|
@ -0,0 +1,227 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2017, Alejandro Gomez <alexgomez2202@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: gunicorn
|
||||
short_description: Run gunicorn with various settings.
|
||||
description:
|
||||
- Starts gunicorn with the parameters specified. Common settings for gunicorn
|
||||
configuration are supported. For additional configuration use a config file
|
||||
See U(https://gunicorn-docs.readthedocs.io/en/latest/settings.html) for more
|
||||
options. It's recommended to always use the chdir option to avoid problems
|
||||
with the location of the app.
|
||||
requirements: [gunicorn]
|
||||
author:
|
||||
- "Alejandro Gomez (@agmezr)"
|
||||
options:
|
||||
app:
|
||||
required: true
|
||||
aliases: ['name']
|
||||
description:
|
||||
- The app module. A name refers to a WSGI callable that should be found in the specified module.
|
||||
venv:
|
||||
aliases: ['virtualenv']
|
||||
description:
|
||||
- 'Path to the virtualenv directory.'
|
||||
config:
|
||||
description:
|
||||
- 'Path to the gunicorn configuration file.'
|
||||
chdir:
|
||||
description:
|
||||
- 'Chdir to specified directory before apps loading.'
|
||||
pid:
|
||||
description:
|
||||
- 'A filename to use for the PID file. If not set and not found on the configuration file a tmp
|
||||
pid file will be created to check a successful run of gunicorn.'
|
||||
worker:
|
||||
choices: ['sync', 'eventlet', 'gevent', 'tornado ', 'gthread', 'gaiohttp']
|
||||
description:
|
||||
- 'The type of workers to use. The default class (sync) should handle most "normal" types of workloads.'
|
||||
user:
|
||||
description:
|
||||
- 'Switch worker processes to run as this user.'
|
||||
notes:
|
||||
- If not specified on config file, a temporary error log will be created on /tmp dir.
|
||||
Please make sure you have write access in /tmp dir. Not needed but will help you to
|
||||
identify any problem with configuration.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: simple gunicorn run example
|
||||
gunicorn:
|
||||
app: 'wsgi'
|
||||
chdir: '/workspace/example'
|
||||
|
||||
- name: run gunicorn on a virtualenv
|
||||
gunicorn:
|
||||
app: 'wsgi'
|
||||
chdir: '/workspace/example'
|
||||
venv: '/workspace/example/venv'
|
||||
|
||||
- name: run gunicorn with a config file
|
||||
gunicorn:
|
||||
app: 'wsgi'
|
||||
chdir: '/workspace/example'
|
||||
conf: '/workspace/example/gunicorn.cfg'
|
||||
|
||||
- name: run gunicorn as ansible user with specified pid and config file
|
||||
gunicorn:
|
||||
app: 'wsgi'
|
||||
chdir: '/workspace/example'
|
||||
conf: '/workspace/example/gunicorn.cfg'
|
||||
venv: '/workspace/example/venv'
|
||||
pid: '/workspace/example/gunicorn.pid'
|
||||
user: 'ansible'
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
gunicorn:
|
||||
description: process id of gunicorn
|
||||
returned: changed
|
||||
type: str
|
||||
sample: "1234"
|
||||
'''
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
# import ansible utils
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def search_existing_config(config, option):
    """Return the first line of *config* that contains *option*.

    Returns None when *config* is unset, is not a regular file, or no
    line mentions the option. Note the match is a plain substring test.
    """
    if config and os.path.isfile(config):
        with open(config, 'r') as handle:
            for cfg_line in handle:
                if option in cfg_line:
                    return cfg_line
        return None
|
||||
|
||||
|
||||
def remove_tmp_file(file_path):
    """Delete *file_path* when it is an existing regular file; otherwise no-op."""
    path_is_file = os.path.isfile(file_path)
    if path_is_file:
        os.remove(file_path)
|
||||
|
||||
|
||||
def main():
    """Ansible entry point: launch a daemonized gunicorn and report its PID.

    Builds the gunicorn command line from the module parameters, runs it
    with -D (daemonize), then confirms startup by reading the PID file.
    On failure, surfaces the gunicorn error log content to the caller.
    """

    # available gunicorn options on module
    # (maps module parameter names to the gunicorn CLI flags they become)
    gunicorn_options = {
        'config': '-c',
        'chdir': '--chdir',
        'worker': '-k',
        'user': '-u',
    }

    module = AnsibleModule(
        argument_spec=dict(
            app=dict(required=True, type='str', aliases=['name']),
            venv=dict(required=False, type='path', default=None, aliases=['virtualenv']),
            config=dict(required=False, default=None, type='path', aliases=['conf']),
            chdir=dict(required=False, type='path', default=None),
            pid=dict(required=False, type='path', default=None),
            user=dict(required=False, type='str'),
            worker=dict(required=False,
                        type='str',
                        choices=['sync', 'eventlet', 'gevent', 'tornado ', 'gthread', 'gaiohttp']
                        ),
        )
    )

    # temporary files in case no option provided
    tmp_error_log = os.path.join(module.tmpdir, 'gunicorn.temp.error.log')
    tmp_pid_file = os.path.join(module.tmpdir, 'gunicorn.temp.pid')

    # remove temp file if exists (stale leftovers from a previous run)
    remove_tmp_file(tmp_pid_file)
    remove_tmp_file(tmp_error_log)

    # obtain app name and venv
    params = module.params
    app = params['app']
    venv = params['venv']
    pid = params['pid']

    # use venv path if exists: prefer <venv>/bin/gunicorn over PATH lookup
    if venv:
        gunicorn_command = "/".join((venv, 'bin', 'gunicorn'))
    else:
        gunicorn_command = 'gunicorn'

    # to daemonize the process
    options = ["-D"]

    # fill options from the parameter-to-flag map above
    for option in gunicorn_options:
        param = params[option]
        if param:
            options.append(gunicorn_options[option])
            options.append(param)

    # only add an error-log flag when the config file does not already set one
    error_log = search_existing_config(params['config'], 'errorlog')
    if not error_log:
        # place error log somewhere in case of fail
        options.append("--error-logfile")
        options.append(tmp_error_log)

    # fall back to a temp pid file so startup can always be verified
    pid_file = search_existing_config(params['config'], 'pid')
    if not params['pid'] and not pid_file:
        pid = tmp_pid_file

    # add option for pid file if not found on config file
    if not pid_file:
        options.append('--pid')
        options.append(pid)

    # put args together
    args = [gunicorn_command] + options + [app]
    rc, out, err = module.run_command(args, use_unsafe_shell=False, encoding=None)

    # NOTE(review): success is judged by empty stderr plus the PID file
    # appearing within 0.5s — presumably enough for gunicorn to daemonize;
    # confirm on slow hosts.
    if not err:
        # wait for gunicorn to dump to log
        time.sleep(0.5)
        if os.path.isfile(pid):
            with open(pid, 'r') as f:
                result = f.readline().strip()

            # the temp pid file was ours only for verification; clean it up
            if not params['pid']:
                os.remove(pid)

            module.exit_json(changed=True, pid=result, debug=" ".join(args))
        else:
            # if user defined own error log, check that
            if error_log:
                error = 'Please check your {0}'.format(error_log.strip())
            else:
                if os.path.isfile(tmp_error_log):
                    with open(tmp_error_log, 'r') as f:
                        error = f.read()
                    # delete tmp log
                    os.remove(tmp_error_log)
                else:
                    error = "Log not found"

            module.fail_json(msg='Failed to start gunicorn. {0}'.format(error), error=err)

    else:
        module.fail_json(msg='Failed to start gunicorn {0}'.format(err), error=err)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
274
plugins/modules/web_infrastructure/htpasswd.py
Normal file
274
plugins/modules/web_infrastructure/htpasswd.py
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Nimbis Services, Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: htpasswd
|
||||
short_description: manage user files for basic authentication
|
||||
description:
|
||||
- Add and remove username/password entries in a password file using htpasswd.
|
||||
- This is used by web servers such as Apache and Nginx for basic authentication.
|
||||
options:
|
||||
path:
|
||||
required: true
|
||||
aliases: [ dest, destfile ]
|
||||
description:
|
||||
- Path to the file that contains the usernames and passwords
|
||||
name:
|
||||
required: true
|
||||
aliases: [ username ]
|
||||
description:
|
||||
- User name to add or remove
|
||||
password:
|
||||
required: false
|
||||
description:
|
||||
- Password associated with user.
|
||||
- Must be specified if user does not exist yet.
|
||||
crypt_scheme:
|
||||
required: false
|
||||
choices: ["apr_md5_crypt", "des_crypt", "ldap_sha1", "plaintext"]
|
||||
default: "apr_md5_crypt"
|
||||
description:
|
||||
- Encryption scheme to be used. As well as the four choices listed
|
||||
here, you can also use any other hash supported by passlib, such as
|
||||
md5_crypt and sha256_crypt, which are linux passwd hashes. If you
|
||||
do so the password file will not be compatible with Apache or Nginx
|
||||
state:
|
||||
required: false
|
||||
choices: [ present, absent ]
|
||||
default: "present"
|
||||
description:
|
||||
- Whether the user entry should be present or not
|
||||
create:
|
||||
required: false
|
||||
type: bool
|
||||
default: "yes"
|
||||
description:
|
||||
- Used with C(state=present). If specified, the file will be created
|
||||
if it does not already exist. If set to "no", will fail if the
|
||||
file does not exist
|
||||
notes:
|
||||
- "This module depends on the I(passlib) Python library, which needs to be installed on all target systems."
|
||||
- "On Debian, Ubuntu, or Fedora: install I(python-passlib)."
|
||||
- "On RHEL or CentOS: Enable EPEL, then install I(python-passlib)."
|
||||
requirements: [ passlib>=1.6 ]
|
||||
author: "Ansible Core Team"
|
||||
extends_documentation_fragment: files
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
# Add a user to a password file and ensure permissions are set
|
||||
- htpasswd:
|
||||
path: /etc/nginx/passwdfile
|
||||
name: janedoe
|
||||
password: '9s36?;fyNp'
|
||||
owner: root
|
||||
group: www-data
|
||||
mode: 0640
|
||||
|
||||
# Remove a user from a password file
|
||||
- htpasswd:
|
||||
path: /etc/apache2/passwdfile
|
||||
name: foobar
|
||||
state: absent
|
||||
|
||||
# Add a user to a password file suitable for use by libpam-pwdfile
|
||||
- htpasswd:
|
||||
path: /etc/mail/passwords
|
||||
name: alex
|
||||
password: oedu2eGh
|
||||
crypt_scheme: md5_crypt
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import traceback
|
||||
from distutils.version import LooseVersion
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
PASSLIB_IMP_ERR = None
|
||||
try:
|
||||
from passlib.apache import HtpasswdFile, htpasswd_context
|
||||
from passlib.context import CryptContext
|
||||
import passlib
|
||||
except ImportError:
|
||||
PASSLIB_IMP_ERR = traceback.format_exc()
|
||||
passlib_installed = False
|
||||
else:
|
||||
passlib_installed = True
|
||||
|
||||
apache_hashes = ["apr_md5_crypt", "des_crypt", "ldap_sha1", "plaintext"]
|
||||
|
||||
|
||||
def create_missing_directories(dest):
    """Create any missing parent directories of *dest*.

    Handles two defects of the naive exists/makedirs pattern:
    a dest with no directory component (os.makedirs('') raises), and the
    TOCTOU race when another process creates the directory between the
    existence check and the makedirs call.
    """
    destpath = os.path.dirname(dest)
    if destpath and not os.path.exists(destpath):
        try:
            os.makedirs(destpath)
        except OSError:
            # Another process may have created it concurrently; only
            # re-raise when the directory still does not exist.
            if not os.path.isdir(destpath):
                raise
|
||||
|
||||
|
||||
def present(dest, username, password, crypt_scheme, create, check_mode):
    """ Ensures user is present

    Returns (msg, changed) """
    # Apache-native schemes use passlib's stock htpasswd context; any other
    # scheme (e.g. md5_crypt) needs a custom CryptContext built around it.
    if crypt_scheme in apache_hashes:
        context = htpasswd_context
    else:
        context = CryptContext(schemes=[crypt_scheme] + apache_hashes)
    if not os.path.exists(dest):
        if not create:
            raise ValueError('Destination %s does not exist' % dest)
        if check_mode:
            return ("Create %s" % dest, True)
        create_missing_directories(dest)
        # passlib renamed the HtpasswdFile constructor arguments in 1.6
        if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
            ht = HtpasswdFile(dest, new=True, default_scheme=crypt_scheme, context=context)
        else:
            ht = HtpasswdFile(dest, autoload=False, default=crypt_scheme, context=context)
        # set_password (passlib >= 1.6) supersedes the older update()
        if getattr(ht, 'set_password', None):
            ht.set_password(username, password)
        else:
            ht.update(username, password)
        ht.save()
        return ("Created %s and added %s" % (dest, username), True)
    else:
        # Load the existing file with the passlib-version-appropriate API
        if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
            ht = HtpasswdFile(dest, new=False, default_scheme=crypt_scheme, context=context)
        else:
            ht = HtpasswdFile(dest, default=crypt_scheme, context=context)

        # check_password (passlib >= 1.6) supersedes the older verify()
        found = None
        if getattr(ht, 'check_password', None):
            found = ht.check_password(username, password)
        else:
            found = ht.verify(username, password)

        if found:
            # Same user with the same password: nothing to do
            return ("%s already present" % username, False)
        else:
            # Add or re-hash; skipped under check mode, but still reported
            # as a change
            if not check_mode:
                if getattr(ht, 'set_password', None):
                    ht.set_password(username, password)
                else:
                    ht.update(username, password)
                ht.save()
            return ("Add/update %s" % username, True)
|
||||
|
||||
|
||||
def absent(dest, username, check_mode):
    """ Ensures user is absent

    Returns (msg, changed) """
    # passlib renamed the HtpasswdFile constructor arguments in 1.6
    if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
        ht = HtpasswdFile(dest, new=False)
    else:
        ht = HtpasswdFile(dest)

    if username not in ht.users():
        return ("%s not present" % username, False)
    else:
        # Deletion is skipped under check mode but still reported as a change
        if not check_mode:
            ht.delete(username)
            ht.save()
        return ("Remove %s" % username, True)
|
||||
|
||||
|
||||
def check_file_attrs(module, changed, message):
    """Apply requested file attributes (owner/group/mode/SELinux context)
    and fold the outcome into the running (message, changed) pair.
    """
    file_args = module.load_file_common_arguments(module.params)
    attrs_changed = module.set_fs_attributes_if_different(file_args, False)

    if attrs_changed:
        if changed:
            message += " and "
        changed = True
        message += "ownership, perms or SE linux context changed"

    return message, changed
|
||||
|
||||
|
||||
def main():
    """Ansible entry point: add/update or remove an htpasswd entry,
    normalizing blank lines in the file first so passlib can parse it.
    """
    arg_spec = dict(
        path=dict(required=True, aliases=["dest", "destfile"]),
        name=dict(required=True, aliases=["username"]),
        password=dict(required=False, default=None, no_log=True),
        crypt_scheme=dict(required=False, default="apr_md5_crypt"),
        state=dict(required=False, default="present"),
        create=dict(type='bool', default='yes'),

    )
    module = AnsibleModule(argument_spec=arg_spec,
                           add_file_common_args=True,
                           supports_check_mode=True)

    path = module.params['path']
    username = module.params['name']
    password = module.params['password']
    crypt_scheme = module.params['crypt_scheme']
    state = module.params['state']
    create = module.params['create']
    check_mode = module.check_mode

    if not passlib_installed:
        module.fail_json(msg=missing_required_lib("passlib"), exception=PASSLIB_IMP_ERR)

    # Check file for blank lines in effort to avoid "need more than 1 value to unpack" error.
    try:
        f = open(path, "r")
    except IOError:
        # No preexisting file to remove blank lines from
        f = None
    else:
        try:
            lines = f.readlines()
        finally:
            f.close()

        # If the file gets edited, it returns true, so only edit the file if it has blank lines
        strip = False
        for line in lines:
            if not line.strip():
                strip = True
                break

        if strip:
            # If check mode, create a temporary file
            # NOTE(review): under check mode, path is redirected to the temp
            # file, so present()/absent() below operate on the copy and the
            # real file stays untouched — confirm this is the intent.
            if check_mode:
                temp = tempfile.NamedTemporaryFile()
                path = temp.name
            f = open(path, "w")
            try:
                # Rewrite the file without the blank lines
                [f.write(line) for line in lines if line.strip()]
            finally:
                f.close()

    try:
        if state == 'present':
            (msg, changed) = present(path, username, password, crypt_scheme, create, check_mode)
        elif state == 'absent':
            if not os.path.exists(path):
                module.exit_json(msg="%s not present" % username,
                                 warnings="%s does not exist" % path, changed=False)
            (msg, changed) = absent(path, username, check_mode)
        else:
            module.fail_json(msg="Invalid state: %s" % state)

        # NOTE(review): the (message, changed) returned by check_file_attrs
        # is discarded here, so attribute-only changes never reach the
        # exit_json result below — verify whether that is intended.
        check_file_attrs(module, changed, msg)
        module.exit_json(msg=msg, changed=changed)
    except Exception as e:
        module.fail_json(msg=to_native(e))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
183
plugins/modules/web_infrastructure/jboss.py
Normal file
183
plugins/modules/web_infrastructure/jboss.py
Normal file
|
|
@ -0,0 +1,183 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Jeroen Hoekx <jeroen.hoekx@dsquare.be>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
module: jboss
|
||||
short_description: Deploy applications to JBoss
|
||||
description:
|
||||
- Deploy applications to JBoss standalone using the filesystem.
|
||||
options:
|
||||
deployment:
|
||||
required: true
|
||||
description:
|
||||
- The name of the deployment.
|
||||
type: str
|
||||
src:
|
||||
description:
|
||||
- The remote path of the application ear or war to deploy.
|
||||
- Required when I(state=present).
|
||||
- Ignored when I(state=absent).
|
||||
type: path
|
||||
deploy_path:
|
||||
default: /var/lib/jbossas/standalone/deployments
|
||||
description:
|
||||
- The location in the filesystem where the deployment scanner listens.
|
||||
type: path
|
||||
state:
|
||||
choices: [ present, absent ]
|
||||
default: "present"
|
||||
description:
|
||||
- Whether the application should be deployed or undeployed.
|
||||
type: str
|
||||
notes:
|
||||
- The JBoss standalone deployment-scanner has to be enabled in standalone.xml
|
||||
- The module can wait until I(deployment) file is deployed/undeployed by deployment-scanner.
|
||||
Duration of waiting time depends on scan-interval parameter from standalone.xml.
|
||||
- Ensure no identically named application is deployed through the JBoss CLI
|
||||
seealso:
|
||||
- name: WildFly reference
|
||||
description: Complete reference of the WildFly documentation.
|
||||
link: https://docs.wildfly.org
|
||||
author:
|
||||
- Jeroen Hoekx (@jhoekx)
|
||||
'''
|
||||
|
||||
EXAMPLES = r"""
|
||||
- name: Deploy a hello world application to the default deploy_path
|
||||
jboss:
|
||||
src: /tmp/hello-1.0-SNAPSHOT.war
|
||||
deployment: hello.war
|
||||
state: present
|
||||
|
||||
- name: Update the hello world application to the non-default deploy_path
|
||||
jboss:
|
||||
src: /tmp/hello-1.1-SNAPSHOT.war
|
||||
deploy_path: /opt/wildfly/deployment
|
||||
deployment: hello.war
|
||||
state: present
|
||||
|
||||
- name: Undeploy the hello world application from the default deploy_path
|
||||
jboss:
|
||||
deployment: hello.war
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = r""" # """
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
DEFAULT_DEPLOY_PATH = '/var/lib/jbossas/standalone/deployments'
|
||||
|
||||
|
||||
def is_deployed(deploy_path, deployment):
    """Return True when the deployment scanner wrote a ``.deployed`` marker."""
    marker = "%s.deployed" % deployment
    return os.path.exists(os.path.join(deploy_path, marker))
|
||||
|
||||
|
||||
def is_undeployed(deploy_path, deployment):
    """Return True when the deployment scanner wrote an ``.undeployed`` marker."""
    marker = "%s.undeployed" % deployment
    return os.path.exists(os.path.join(deploy_path, marker))
|
||||
|
||||
|
||||
def is_failed(deploy_path, deployment):
    """Return True when the deployment scanner wrote a ``.failed`` marker."""
    marker = "%s.failed" % deployment
    return os.path.exists(os.path.join(deploy_path, marker))
|
||||
|
||||
|
||||
def main():
    """Ansible entry point: deploy or undeploy an application by copying it
    into the JBoss standalone deployment-scanner directory and polling the
    marker files (.deployed / .undeployed / .failed) the scanner writes.
    """
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(type='path'),
            deployment=dict(type='str', required=True),
            deploy_path=dict(type='path', default=DEFAULT_DEPLOY_PATH),
            state=dict(type='str', choices=['absent', 'present'], default='present'),
        ),
        required_if=[('state', 'present', ('src',))],
        supports_check_mode=True
    )

    result = dict(changed=False)

    src = module.params['src']
    deployment = module.params['deployment']
    deploy_path = module.params['deploy_path']
    state = module.params['state']

    if not os.path.exists(deploy_path):
        module.fail_json(msg="deploy_path does not exist.")

    if state == 'absent' and src:
        module.warn('Parameter src is ignored when state=absent')
    elif state == 'present' and not os.path.exists(src):
        module.fail_json(msg='Source file %s does not exist.' % src)

    deployed = is_deployed(deploy_path, deployment)

    # === when check_mode ===
    # Predict the change without touching the filesystem, then exit early.
    if module.check_mode:
        if state == 'present':
            if not deployed:
                result['changed'] = True

            elif deployed:
                # Content comparison by SHA-1: a differing artifact would be
                # redeployed.
                if module.sha1(src) != module.sha1(os.path.join(deploy_path, deployment)):
                    result['changed'] = True

        elif state == 'absent' and deployed:
            result['changed'] = True

        module.exit_json(**result)
    # =======================

    # Fresh deployment: copy the artifact and poll until the scanner reports
    # success or failure.
    # NOTE(review): these polling loops have no timeout — if the scanner
    # never writes a marker, the task hangs; confirm acceptable.
    if state == 'present' and not deployed:
        if is_failed(deploy_path, deployment):
            # Clean up old failed deployment
            os.remove(os.path.join(deploy_path, "%s.failed" % deployment))

        shutil.copyfile(src, os.path.join(deploy_path, deployment))
        while not deployed:
            deployed = is_deployed(deploy_path, deployment)
            if is_failed(deploy_path, deployment):
                module.fail_json(msg='Deploying %s failed.' % deployment)
            time.sleep(1)
        result['changed'] = True

    # Redeployment: only when the artifact content differs from what is
    # already deployed (SHA-1 comparison).
    if state == 'present' and deployed:
        if module.sha1(src) != module.sha1(os.path.join(deploy_path, deployment)):
            # Removing the .deployed marker triggers the scanner to redeploy
            os.remove(os.path.join(deploy_path, "%s.deployed" % deployment))
            shutil.copyfile(src, os.path.join(deploy_path, deployment))
            deployed = False
            while not deployed:
                deployed = is_deployed(deploy_path, deployment)
                if is_failed(deploy_path, deployment):
                    module.fail_json(msg='Deploying %s failed.' % deployment)
                time.sleep(1)
            result['changed'] = True

    # Undeployment: remove the marker and wait for the scanner to confirm.
    if state == 'absent' and deployed:
        os.remove(os.path.join(deploy_path, "%s.deployed" % deployment))
        while deployed:
            deployed = not is_undeployed(deploy_path, deployment)
            if is_failed(deploy_path, deployment):
                module.fail_json(msg='Undeploying %s failed.' % deployment)
            time.sleep(1)
        result['changed'] = True

    module.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
365
plugins/modules/web_infrastructure/jenkins_job.py
Normal file
365
plugins/modules/web_infrastructure/jenkins_job.py
Normal file
|
|
@ -0,0 +1,365 @@
|
|||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright: (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: jenkins_job
|
||||
short_description: Manage jenkins jobs
|
||||
description:
|
||||
- Manage Jenkins jobs by using Jenkins REST API.
|
||||
requirements:
|
||||
- "python-jenkins >= 0.4.12"
|
||||
author: "Sergio Millan Rodriguez (@sermilrod)"
|
||||
options:
|
||||
config:
|
||||
description:
|
||||
- config in XML format.
|
||||
- Required if job does not yet exist.
|
||||
- Mutually exclusive with C(enabled).
|
||||
- Considered if C(state=present).
|
||||
required: false
|
||||
enabled:
|
||||
description:
|
||||
- Whether the job should be enabled or disabled.
|
||||
- Mutually exclusive with C(config).
|
||||
- Considered if C(state=present).
|
||||
type: bool
|
||||
required: false
|
||||
name:
|
||||
description:
|
||||
- Name of the Jenkins job.
|
||||
required: true
|
||||
password:
|
||||
description:
|
||||
- Password to authenticate with the Jenkins server.
|
||||
required: false
|
||||
state:
|
||||
description:
|
||||
- Attribute that specifies if the job has to be created or deleted.
|
||||
required: false
|
||||
default: present
|
||||
choices: ['present', 'absent']
|
||||
token:
|
||||
description:
|
||||
- API token used to authenticate alternatively to password.
|
||||
required: false
|
||||
url:
|
||||
description:
|
||||
- URL where the Jenkins server is accessible.
|
||||
required: false
|
||||
default: http://localhost:8080
|
||||
user:
|
||||
description:
|
||||
- User to authenticate with the Jenkins server.
|
||||
required: false
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Create a jenkins job using basic authentication
|
||||
- jenkins_job:
|
||||
config: "{{ lookup('file', 'templates/test.xml') }}"
|
||||
name: test
|
||||
password: admin
|
||||
url: http://localhost:8080
|
||||
user: admin
|
||||
|
||||
# Create a jenkins job using the token
|
||||
- jenkins_job:
|
||||
config: "{{ lookup('template', 'templates/test.xml.j2') }}"
|
||||
name: test
|
||||
token: asdfasfasfasdfasdfadfasfasdfasdfc
|
||||
url: http://localhost:8080
|
||||
user: admin
|
||||
|
||||
# Delete a jenkins job using basic authentication
|
||||
- jenkins_job:
|
||||
name: test
|
||||
password: admin
|
||||
state: absent
|
||||
url: http://localhost:8080
|
||||
user: admin
|
||||
|
||||
# Delete a jenkins job using the token
|
||||
- jenkins_job:
|
||||
name: test
|
||||
token: asdfasfasfasdfasdfadfasfasdfasdfc
|
||||
state: absent
|
||||
url: http://localhost:8080
|
||||
user: admin
|
||||
|
||||
# Disable a jenkins job using basic authentication
|
||||
- jenkins_job:
|
||||
name: test
|
||||
password: admin
|
||||
enabled: False
|
||||
url: http://localhost:8080
|
||||
user: admin
|
||||
|
||||
# Disable a jenkins job using the token
|
||||
- jenkins_job:
|
||||
name: test
|
||||
token: asdfasfasfasdfasdfadfasfasdfasdfc
|
||||
enabled: False
|
||||
url: http://localhost:8080
|
||||
user: admin
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
name:
|
||||
description: Name of the jenkins job.
|
||||
returned: success
|
||||
type: str
|
||||
sample: test-job
|
||||
state:
|
||||
description: State of the jenkins job.
|
||||
returned: success
|
||||
type: str
|
||||
sample: present
|
||||
enabled:
|
||||
description: Whether the jenkins job is enabled or not.
|
||||
returned: success
|
||||
type: bool
|
||||
sample: true
|
||||
user:
|
||||
description: User used for authentication.
|
||||
returned: success
|
||||
type: str
|
||||
sample: admin
|
||||
url:
|
||||
description: Url to connect to the Jenkins server.
|
||||
returned: success
|
||||
type: str
|
||||
sample: https://jenkins.mydomain.com
|
||||
'''
|
||||
|
||||
import traceback
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
JENKINS_IMP_ERR = None
|
||||
try:
|
||||
import jenkins
|
||||
python_jenkins_installed = True
|
||||
except ImportError:
|
||||
JENKINS_IMP_ERR = traceback.format_exc()
|
||||
python_jenkins_installed = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class JenkinsJob:
    """Manage one Jenkins job through the python-jenkins REST client.

    Holds the module parameters, a live server connection and the result
    dict that is ultimately returned to Ansible. All fatal errors are
    reported via module.fail_json().
    """

    def __init__(self, module):
        self.module = module

        params = module.params
        self.config = params.get('config')
        self.name = params.get('name')
        self.password = params.get('password')
        self.state = params.get('state')
        self.enabled = params.get('enabled')
        self.token = params.get('token')
        self.user = params.get('user')
        self.jenkins_url = params.get('url')
        self.server = self.get_jenkins_connection()

        # Result skeleton returned to Ansible; the diff entries are filled
        # in lazily by has_config_changed()/create_job()/absent_job().
        self.result = {
            'changed': False,
            'url': self.jenkins_url,
            'name': self.name,
            'user': self.user,
            'state': self.state,
            'diff': {
                'before': "",
                'after': ""
            }
        }

        # Sentinel status for jobs whose info carries no "color" field.
        self.EXCL_STATE = "excluded state"

    def get_jenkins_connection(self):
        """Open a connection to the Jenkins server (password or token auth)."""
        try:
            # password and token are mutually exclusive at the arg-spec
            # level, so at most one of them is set; password keeps the
            # same precedence as the original elif chain.
            secret = self.password or self.token
            if self.user and secret:
                return jenkins.Jenkins(self.jenkins_url, self.user, secret)
            if self.user:
                return jenkins.Jenkins(self.jenkins_url, self.user)
            return jenkins.Jenkins(self.jenkins_url)
        except Exception as e:
            self.module.fail_json(msg='Unable to connect to Jenkins server, %s' % to_native(e), exception=traceback.format_exc())

    def get_job_status(self):
        """Return the job's colour status, or EXCL_STATE when none is reported."""
        try:
            response = self.server.get_job_info(self.name)
            if "color" not in response:
                return self.EXCL_STATE
            return to_native(response['color'])
        except Exception as e:
            self.module.fail_json(msg='Unable to fetch job information, %s' % to_native(e), exception=traceback.format_exc())

    def job_exists(self):
        """Return True when a job of this name exists on the server."""
        try:
            return bool(self.server.job_exists(self.name))
        except Exception as e:
            self.module.fail_json(msg='Unable to validate if job exists, %s for %s' % (to_native(e), self.jenkins_url),
                                  exception=traceback.format_exc())

    def get_config(self):
        """Return the desired job config, canonicalised for comparison."""
        return job_config_to_string(self.config)

    def get_current_config(self):
        """Return the server-side job config, canonicalised for comparison."""
        return job_config_to_string(self.server.get_job_config(self.name).encode('utf-8'))

    def has_config_changed(self):
        """Compare desired and current config; also populate the diff output."""
        # config is optional: without it the current config is kept as is.
        if self.config is None:
            return False

        desired = self.get_config()
        current = self.get_current_config()

        self.result['diff']['after'] = desired
        self.result['diff']['before'] = current

        return desired != current

    def present_job(self):
        """Ensure the job exists and matches the requested config/state."""
        if self.config is None and self.enabled is None:
            self.module.fail_json(msg='one of the following params is required on state=present: config,enabled')

        if self.job_exists():
            self.update_job()
        else:
            self.create_job()

    def has_state_changed(self, status):
        """Return True when the desired enabled flag disagrees with *status*."""
        # Without an 'enabled' argument the job keeps its current state.
        if self.enabled is None:
            return False

        # Out of sync exactly when "want enabled" matches "is disabled".
        return bool(self.enabled) == (status == "disabled")

    def switch_state(self):
        """Flip the job to the requested enabled/disabled state."""
        if self.enabled is False:
            self.server.disable_job(self.name)
        else:
            self.server.enable_job(self.name)

    def update_job(self):
        """Reconfigure and/or toggle an existing job as needed."""
        try:
            status = self.get_job_status()

            if self.has_config_changed():
                self.result['changed'] = True
                if not self.module.check_mode:
                    self.server.reconfig_job(self.name, self.get_config())
            # Only toggle enable/disable when the job reports a status.
            elif status != self.EXCL_STATE and self.has_state_changed(status):
                self.result['changed'] = True
                if not self.module.check_mode:
                    self.switch_state()

        except Exception as e:
            self.module.fail_json(msg='Unable to reconfigure job, %s for %s' % (to_native(e), self.jenkins_url),
                                  exception=traceback.format_exc())

    def create_job(self):
        """Create the job from the provided config XML."""
        if self.config is None:
            self.module.fail_json(msg='missing required param: config')

        self.result['changed'] = True
        try:
            config_file = self.get_config()
            self.result['diff']['after'] = config_file
            if not self.module.check_mode:
                self.server.create_job(self.name, config_file)
        except Exception as e:
            self.module.fail_json(msg='Unable to create job, %s for %s' % (to_native(e), self.jenkins_url),
                                  exception=traceback.format_exc())

    def absent_job(self):
        """Delete the job when it exists; a missing job is a no-op."""
        if not self.job_exists():
            return

        self.result['changed'] = True
        self.result['diff']['before'] = self.get_current_config()
        if self.module.check_mode:
            return
        try:
            self.server.delete_job(self.name)
        except Exception as e:
            self.module.fail_json(msg='Unable to delete job, %s for %s' % (to_native(e), self.jenkins_url),
                                  exception=traceback.format_exc())

    def get_result(self):
        """Finalise the result dict, adding the job's current enabled flag."""
        result = self.result
        if self.job_exists():
            result['enabled'] = self.get_job_status() != "disabled"
        else:
            result['enabled'] = None
        return result
|
||||
|
||||
|
||||
def test_dependencies(module):
    """Fail the module with install guidance when python-jenkins is missing."""
    if python_jenkins_installed:
        return
    module.fail_json(
        msg=missing_required_lib("python-jenkins",
                                 url="https://python-jenkins.readthedocs.io/en/latest/install.html"),
        exception=JENKINS_IMP_ERR)
|
||||
|
||||
|
||||
def job_config_to_string(xml_str):
    """Canonicalise a job-config XML document for reliable comparison.

    Parsing and re-serialising normalises whitespace/quoting differences so
    that logically equal configs compare equal as strings.
    """
    root = ET.fromstring(xml_str)
    return ET.tostring(root).decode('ascii')
|
||||
|
||||
|
||||
def main():
    """Entry point: build the arg spec, dispatch on state, report the result."""
    module = AnsibleModule(
        argument_spec=dict(
            config=dict(required=False),
            name=dict(required=True),
            password=dict(required=False, no_log=True),
            state=dict(required=False, choices=['present', 'absent'], default="present"),
            enabled=dict(required=False, type='bool'),
            token=dict(required=False, no_log=True),
            url=dict(required=False, default="http://localhost:8080"),
            user=dict(required=False)
        ),
        mutually_exclusive=[
            ['password', 'token'],
            ['config', 'enabled'],
        ],
        supports_check_mode=True,
    )

    test_dependencies(module)
    jenkins_job = JenkinsJob(module)

    if module.params.get('state') == "present":
        jenkins_job.present_job()
    else:
        jenkins_job.absent_job()

    module.exit_json(**jenkins_job.get_result())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
1
plugins/modules/web_infrastructure/jenkins_job_facts.py
Symbolic link
1
plugins/modules/web_infrastructure/jenkins_job_facts.py
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
jenkins_job_info.py
|
||||
256
plugins/modules/web_infrastructure/jenkins_job_info.py
Normal file
256
plugins/modules/web_infrastructure/jenkins_job_info.py
Normal file
|
|
@ -0,0 +1,256 @@
|
|||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright: (c) Ansible Project
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: jenkins_job_info
|
||||
short_description: Get information about Jenkins jobs
|
||||
description:
|
||||
- This module can be used to query information about which Jenkins jobs which already exists.
|
||||
- This module was called C(jenkins_job_info) before Ansible 2.9. The usage did not change.
|
||||
requirements:
|
||||
- "python-jenkins >= 0.4.12"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Exact name of the Jenkins job to fetch information about.
|
||||
glob:
|
||||
description:
|
||||
- A shell glob of Jenkins job names to fetch information about.
|
||||
color:
|
||||
description:
|
||||
- Only fetch jobs with the given status color.
|
||||
password:
|
||||
description:
|
||||
- Password to authenticate with the Jenkins server.
|
||||
- This is a required parameter, if C(token) is not provided.
|
||||
token:
|
||||
description:
|
||||
- API token used to authenticate with the Jenkins server.
|
||||
- This is a required parameter, if C(password) is not provided.
|
||||
url:
|
||||
description:
|
||||
- URL where the Jenkins server is accessible.
|
||||
default: http://localhost:8080
|
||||
user:
|
||||
description:
|
||||
- User to authenticate with the Jenkins server.
|
||||
validate_certs:
|
||||
description:
|
||||
- If set to C(False), the SSL certificates will not be validated.
|
||||
- This should only set to C(False) used on personally controlled sites using self-signed certificates.
|
||||
default: true
|
||||
type: bool
|
||||
author:
|
||||
- "Chris St. Pierre (@stpierre)"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Get all Jenkins jobs using basic auth
|
||||
- jenkins_job_info:
|
||||
user: admin
|
||||
password: hunter2
|
||||
register: my_jenkins_job_info
|
||||
|
||||
# Get all Jenkins jobs using the token
|
||||
- jenkins_job_info:
|
||||
user: admin
|
||||
token: abcdefghijklmnop
|
||||
register: my_jenkins_job_info
|
||||
|
||||
# Get info about a single job using basic auth
|
||||
- jenkins_job_info:
|
||||
name: some-job-name
|
||||
user: admin
|
||||
password: hunter2
|
||||
register: my_jenkins_job_info
|
||||
|
||||
# Get info about a single job in a folder using basic auth
|
||||
- jenkins_job_info:
|
||||
name: some-folder-name/some-job-name
|
||||
user: admin
|
||||
password: hunter2
|
||||
register: my_jenkins_job_info
|
||||
|
||||
# Get info about jobs matching a shell glob using basic auth
|
||||
- jenkins_job_info:
|
||||
glob: some-job-*
|
||||
user: admin
|
||||
password: hunter2
|
||||
register: my_jenkins_job_info
|
||||
|
||||
# Get info about all failing jobs using basic auth
|
||||
- jenkins_job_info:
|
||||
color: red
|
||||
user: admin
|
||||
password: hunter2
|
||||
register: my_jenkins_job_info
|
||||
|
||||
# Get info about passing jobs matching a shell glob using basic auth
|
||||
- jenkins_job_info:
|
||||
name: some-job-*
|
||||
color: blue
|
||||
user: admin
|
||||
password: hunter2
|
||||
register: my_jenkins_job_info
|
||||
|
||||
- name: Get the info from custom URL with token and validate_certs=False
|
||||
jenkins_job_info:
|
||||
user: admin
|
||||
token: 126df5c60d66c66e3b75b11104a16a8a
|
||||
url: https://jenkins.example.com
|
||||
validate_certs: False
|
||||
register: my_jenkins_job_info
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
jobs:
|
||||
description: All jobs found matching the specified criteria
|
||||
returned: success
|
||||
type: list
|
||||
sample:
|
||||
[
|
||||
{
|
||||
"name": "test-job",
|
||||
"fullname": "test-folder/test-job",
|
||||
"url": "http://localhost:8080/job/test-job/",
|
||||
"color": "blue"
|
||||
},
|
||||
]
|
||||
'''
|
||||
|
||||
import ssl
|
||||
import fnmatch
|
||||
import traceback
|
||||
|
||||
JENKINS_IMP_ERR = None
|
||||
try:
|
||||
import jenkins
|
||||
HAS_JENKINS = True
|
||||
except ImportError:
|
||||
JENKINS_IMP_ERR = traceback.format_exc()
|
||||
HAS_JENKINS = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def get_jenkins_connection(module):
    """Build a python-jenkins connection from module params, handling TLS checks."""
    params = module.params
    url = params["url"]
    username = params.get("user")
    # password and token are mutually exclusive in the arg spec, so at most
    # one is set; password keeps precedence as in the original branch order.
    secret = params.get("password") or params.get("token")

    validate_certs = params.get('validate_certs')
    has_ssl_context = hasattr(ssl, 'SSLContext')
    if not validate_certs and has_ssl_context:
        # Globally disable certificate verification for this process.
        ssl._create_default_https_context = ssl._create_unverified_context
    if validate_certs and not has_ssl_context:
        module.fail_json(msg="Module does not support changing verification mode with python < 2.7.9."
                             " Either update Python or use validate_certs=false.")

    if username and secret:
        return jenkins.Jenkins(url, username, secret)
    if username:
        return jenkins.Jenkins(url, username)
    return jenkins.Jenkins(url)
|
||||
|
||||
|
||||
def test_dependencies(module):
    """Fail the module with install guidance when python-jenkins is missing."""
    if HAS_JENKINS:
        return
    module.fail_json(
        msg=missing_required_lib("python-jenkins",
                                 url="https://python-jenkins.readthedocs.io/en/latest/install.html"),
        exception=JENKINS_IMP_ERR)
|
||||
|
||||
|
||||
def get_jobs(module):
    """Collect job summaries matching the name/glob/color module params."""
    jenkins_conn = get_jenkins_connection(module)
    name = module.params.get("name")
    glob = module.params.get("glob")
    color = module.params.get("color")

    jobs = []
    if name:
        try:
            job_info = jenkins_conn.get_job_info(name)
        except jenkins.NotFoundException:
            # An unknown job name yields an empty result, not a failure.
            pass
        else:
            jobs.append({
                "name": job_info["name"],
                "fullname": job_info["fullName"],
                "url": job_info["url"],
                "color": job_info["color"]
            })

    else:
        all_jobs = jenkins_conn.get_all_jobs()
        if glob:
            jobs = [j for j in all_jobs
                    if fnmatch.fnmatch(j["fullname"], glob)]
        else:
            jobs = all_jobs
        # python-jenkins includes its internal Jenkins class in each job
        # entry; strip it because the leading underscore (and its absence
        # from the python-jenkins docs) marks it as non-public interface.
        for job in jobs:
            job.pop("_class", None)

    if color:
        jobs = [j for j in jobs if j["color"] == color]

    return jobs
|
||||
|
||||
|
||||
def main():
    """Entry point: parse args, query Jenkins, return the matching jobs."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(),
            glob=dict(),
            color=dict(),
            password=dict(no_log=True),
            token=dict(no_log=True),
            url=dict(default="http://localhost:8080"),
            user=dict(),
            validate_certs=dict(type='bool', default=True),
        ),
        mutually_exclusive=[
            ['password', 'token'],
            ['name', 'glob'],
        ],
        required_one_of=[
            ['password', 'token'],
        ],
        supports_check_mode=True,
    )
    # Legacy alias kept for playbooks still using the old module name.
    if module._name == 'jenkins_job_facts':
        module.deprecate("The 'jenkins_job_facts' module has been renamed to 'jenkins_job_info'", version='2.13')

    test_dependencies(module)

    try:
        jobs = get_jobs(module)
    except jenkins.JenkinsException as err:
        module.fail_json(
            msg='Unable to connect to Jenkins server, %s' % to_native(err),
            exception=traceback.format_exc())

    module.exit_json(changed=False, jobs=jobs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
790
plugins/modules/web_infrastructure/jenkins_plugin.py
Normal file
790
plugins/modules/web_infrastructure/jenkins_plugin.py
Normal file
|
|
@ -0,0 +1,790 @@
|
|||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
|
||||
# (c) 2016, Jiri Tyr <jiri.tyr@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: jenkins_plugin
|
||||
author: Jiri Tyr (@jtyr)
|
||||
short_description: Add or remove Jenkins plugin
|
||||
description:
|
||||
- Ansible module which helps to manage Jenkins plugins.
|
||||
|
||||
options:
|
||||
group:
|
||||
description:
|
||||
- Name of the Jenkins group on the OS.
|
||||
default: jenkins
|
||||
jenkins_home:
|
||||
description:
|
||||
- Home directory of the Jenkins user.
|
||||
default: /var/lib/jenkins
|
||||
mode:
|
||||
description:
|
||||
- File mode applied on versioned plugins.
|
||||
default: '0644'
|
||||
name:
|
||||
description:
|
||||
- Plugin name.
|
||||
required: yes
|
||||
owner:
|
||||
description:
|
||||
- Name of the Jenkins user on the OS.
|
||||
default: jenkins
|
||||
state:
|
||||
description:
|
||||
- Desired plugin state.
|
||||
- If the C(latest) is set, the check for new version will be performed
|
||||
every time. This is suitable to keep the plugin up-to-date.
|
||||
choices: [absent, present, pinned, unpinned, enabled, disabled, latest]
|
||||
default: present
|
||||
timeout:
|
||||
description:
|
||||
- Server connection timeout in secs.
|
||||
default: 30
|
||||
updates_expiration:
|
||||
description:
|
||||
- Number of seconds after which a new copy of the I(update-center.json)
|
||||
file is downloaded. This is used to avoid the need to download the
|
||||
plugin to calculate its checksum when C(latest) is specified.
|
||||
- Set it to C(0) if no cache file should be used. In that case, the
|
||||
plugin file will always be downloaded to calculate its checksum when
|
||||
C(latest) is specified.
|
||||
default: 86400
|
||||
updates_url:
|
||||
description:
|
||||
- URL of the Update Centre.
|
||||
- Used as the base URL to download the plugins and the
|
||||
I(update-center.json) JSON file.
|
||||
default: https://updates.jenkins.io
|
||||
url:
|
||||
description:
|
||||
- URL of the Jenkins server.
|
||||
default: http://localhost:8080
|
||||
version:
|
||||
description:
|
||||
- Plugin version number.
|
||||
- If this option is specified, all plugin dependencies must be installed
|
||||
manually.
|
||||
- It might take longer to verify that the correct version is installed.
|
||||
This is especially true if a specific version number is specified.
|
||||
- Quote the version to prevent the value to be interpreted as float. For
|
||||
example if C(1.20) would be unquoted, it would become C(1.2).
|
||||
with_dependencies:
|
||||
description:
|
||||
- Defines whether to install plugin dependencies.
|
||||
- This option takes effect only if the I(version) is not defined.
|
||||
type: bool
|
||||
default: yes
|
||||
|
||||
notes:
|
||||
- Plugin installation should be run under root or the same user which owns
|
||||
the plugin files on the disk. Only if the plugin is not installed yet and
|
||||
no version is specified, the API installation is performed which requires
|
||||
only the Web UI credentials.
|
||||
- It's necessary to notify the handler or call the I(service) module to
|
||||
restart the Jenkins service after a new plugin was installed.
|
||||
- Pinning works only if the plugin is installed and Jenkins service was
|
||||
successfully restarted after the plugin installation.
|
||||
- It is not possible to run the module remotely by changing the I(url)
|
||||
parameter to point to the Jenkins server. The module must be used on the
|
||||
host where Jenkins runs as it needs direct access to the plugin files.
|
||||
- "The C(params) option was removed in Ansible 2.5 due to circumventing Ansible's
|
||||
option handling"
|
||||
extends_documentation_fragment:
|
||||
- url
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install plugin
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
|
||||
- name: Install plugin without its dependencies
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
with_dependencies: no
|
||||
|
||||
- name: Make sure the plugin is always up-to-date
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: latest
|
||||
|
||||
- name: Install specific version of the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
version: "1.15"
|
||||
|
||||
- name: Pin the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: pinned
|
||||
|
||||
- name: Unpin the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: unpinned
|
||||
|
||||
- name: Enable the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: enabled
|
||||
|
||||
- name: Disable the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: disabled
|
||||
|
||||
- name: Uninstall plugin
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
state: absent
|
||||
|
||||
#
|
||||
# Example of how to authenticate
|
||||
#
|
||||
- name: Install plugin
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
url_username: admin
|
||||
url_password: p4ssw0rd
|
||||
url: http://localhost:8888
|
||||
|
||||
#
|
||||
# Example of a Play which handles Jenkins restarts during the state changes
|
||||
#
|
||||
- name: Jenkins Master play
|
||||
hosts: jenkins-master
|
||||
vars:
|
||||
my_jenkins_plugins:
|
||||
token-macro:
|
||||
enabled: yes
|
||||
build-pipeline-plugin:
|
||||
version: "1.4.9"
|
||||
pinned: no
|
||||
enabled: yes
|
||||
tasks:
|
||||
- name: Install plugins without a specific version
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
register: my_jenkins_plugin_unversioned
|
||||
when: >
|
||||
'version' not in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
|
||||
- name: Install plugins with a specific version
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
version: "{{ item.value['version'] }}"
|
||||
register: my_jenkins_plugin_versioned
|
||||
when: >
|
||||
'version' in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
|
||||
- name: Initiate the fact
|
||||
set_fact:
|
||||
jenkins_restart_required: no
|
||||
|
||||
- name: Check if restart is required by any of the versioned plugins
|
||||
set_fact:
|
||||
jenkins_restart_required: yes
|
||||
when: item.changed
|
||||
with_items: "{{ my_jenkins_plugin_versioned.results }}"
|
||||
|
||||
- name: Check if restart is required by any of the unversioned plugins
|
||||
set_fact:
|
||||
jenkins_restart_required: yes
|
||||
when: item.changed
|
||||
with_items: "{{ my_jenkins_plugin_unversioned.results }}"
|
||||
|
||||
- name: Restart Jenkins if required
|
||||
service:
|
||||
name: jenkins
|
||||
state: restarted
|
||||
when: jenkins_restart_required
|
||||
|
||||
- name: Wait for Jenkins to start up
|
||||
uri:
|
||||
url: http://localhost:8080
|
||||
status_code: 200
|
||||
timeout: 5
|
||||
register: jenkins_service_status
|
||||
# Keep trying for 5 mins in 5 sec intervals
|
||||
retries: 60
|
||||
delay: 5
|
||||
until: >
|
||||
'status' in jenkins_service_status and
|
||||
jenkins_service_status['status'] == 200
|
||||
when: jenkins_restart_required
|
||||
|
||||
- name: Reset the fact
|
||||
set_fact:
|
||||
jenkins_restart_required: no
|
||||
when: jenkins_restart_required
|
||||
|
||||
- name: Plugin pinning
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
state: "{{ 'pinned' if item.value['pinned'] else 'unpinned'}}"
|
||||
when: >
|
||||
'pinned' in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
|
||||
- name: Plugin enabling
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
state: "{{ 'enabled' if item.value['enabled'] else 'disabled'}}"
|
||||
when: >
|
||||
'enabled' in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
plugin:
|
||||
description: plugin name
|
||||
returned: success
|
||||
type: str
|
||||
sample: build-pipeline-plugin
|
||||
state:
|
||||
description: state of the target, after execution
|
||||
returned: success
|
||||
type: str
|
||||
sample: "present"
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, to_bytes
|
||||
from ansible.module_utils.six.moves import http_cookiejar as cookiejar
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils.urls import fetch_url, url_argument_spec
|
||||
from ansible.module_utils._text import to_native, text_type, binary_type
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
|
||||
class JenkinsPlugin(object):
|
||||
def __init__(self, module):
    """Prepare the connection context and discover the plugin's current state."""
    # Keep the module handle so helper methods can call fail_json().
    self.module = module

    # Shortcuts for the most frequently used parameters.
    params = self.module.params
    self.params = params
    self.url = params['url']
    self.timeout = params['timeout']

    # CSRF crumb header and its session cookie jar; populated only when
    # the server reports that crumbs are enabled.
    self.crumb = {}
    self.cookies = None
    if self._csrf_enabled():
        self.cookies = cookiejar.LWPCookieJar()
        self.crumb = self._get_crumb()

    # Populate the is_installed/is_pinned/is_enabled flags.
    self._get_installed_plugins()
|
||||
|
||||
def _csrf_enabled(self):
    """Ask the server's root API whether CSRF crumbs are required."""
    csrf_data = self._get_json_data(
        "%s/%s" % (self.url, "api/json"), 'CSRF')

    try:
        return csrf_data['useCrumbs']
    except KeyError:
        # fail_json() terminates the module run.
        self.module.fail_json(
            msg="Required fields not found in the Crumbs response.",
            details=csrf_data)
|
||||
|
||||
def _get_json_data(self, url, what, **kwargs):
    """Fetch *url* and decode the response body as JSON.

    *what* names the resource for error messages; any failure ends the
    module run via fail_json().
    """
    response = self._get_url_data(url, what, **kwargs)

    try:
        return json.loads(to_native(response.read()))
    except Exception as e:
        self.module.fail_json(
            msg="Cannot parse %s JSON data." % what,
            details=to_native(e))
|
||||
|
||||
def _get_url_data(
        self, url, what=None, msg_status=None, msg_exception=None,
        **kwargs):
    """Fetch *url*, sending the CSRF crumb header and session cookies.

    Non-200 responses and transport errors terminate the module run with
    *msg_status* / *msg_exception* (both default to messages built from
    *what*).
    """
    if msg_status is None:
        msg_status = "Cannot get %s" % what
    if msg_exception is None:
        msg_exception = "Retrieval of %s failed." % what

    try:
        response, info = fetch_url(
            self.module, url, timeout=self.timeout, cookies=self.cookies,
            headers=self.crumb, **kwargs)
        if info['status'] != 200:
            self.module.fail_json(msg=msg_status, details=info['msg'])
    except Exception as e:
        self.module.fail_json(msg=msg_exception, details=to_native(e))

    return response
|
||||
|
||||
def _get_crumb(self):
    """Request a CSRF crumb and return it as a ready-to-send header dict.

    Returns a one-entry dict mapping the server-announced header field
    name to the crumb value; fails the module when either field is
    missing from the crumbIssuer response.
    """
    crumb_data = self._get_json_data(
        "%s/%s" % (self.url, "crumbIssuer/api/json"), 'Crumb')

    if 'crumbRequestField' not in crumb_data or 'crumb' not in crumb_data:
        # Bug fix: the error message previously said "Crum response".
        self.module.fail_json(
            msg="Required fields not found in the Crumb response.",
            details=crumb_data)

    return {
        crumb_data['crumbRequestField']: crumb_data['crumb']
    }
|
||||
|
||||
def _get_installed_plugins(self):
    """Query the plugin manager and record installed/pinned/enabled flags.

    Sets self.is_installed, self.is_pinned and self.is_enabled for the
    plugin named in the module params.
    """
    plugins_data = self._get_json_data(
        "%s/%s" % (self.url, "pluginManager/api/json?depth=1"),
        'list of plugins')

    # Check if we got valid data
    if 'plugins' not in plugins_data:
        self.module.fail_json(msg="No valid plugin data found.")

    # Defaults: all flags stay False when the plugin is not present.
    self.is_installed = False
    self.is_pinned = False
    self.is_enabled = False

    target = self.params['name']
    for plugin in plugins_data['plugins']:
        if plugin['shortName'] != target:
            continue
        self.is_installed = True
        self.is_pinned = bool(plugin['pinned'])
        self.is_enabled = bool(plugin['enabled'])
        break
|
||||
|
||||
    def install(self):
        """Install or update the plugin; return True when a change was made.

        Two strategies are used:
        - If the plugin is absent and no specific version is requested,
          installation is delegated to Jenkins itself via a Groovy script
          posted to /scriptText (optionally deploying dependencies first).
        - Otherwise the .hpi archive is downloaded from the update center
          (latest or pinned version) and written to JENKINS_HOME/plugins,
          comparing MD5/SHA1 checksums to detect whether an update is
          actually needed.

        File ownership/mode attributes are reconciled at the end.
        """
        changed = False
        # Target path of the installed plugin archive.
        plugin_file = (
            '%s/plugins/%s.jpi' % (
                self.params['jenkins_home'],
                self.params['name']))

        if not self.is_installed and self.params['version'] in [None, 'latest']:
            if not self.module.check_mode:
                # Install the plugin (with dependencies) by running a
                # Groovy snippet inside Jenkins via the script console.
                install_script = (
                    'd = Jenkins.instance.updateCenter.getPlugin("%s")'
                    '.deploy(); d.get();' % self.params['name'])

                if self.params['with_dependencies']:
                    install_script = (
                        'Jenkins.instance.updateCenter.getPlugin("%s")'
                        '.getNeededDependencies().each{it.deploy()}; %s' % (
                            self.params['name'], install_script))

                script_data = {
                    'script': install_script
                }
                data = urlencode(script_data)

                # Send the installation request
                r = self._get_url_data(
                    "%s/scriptText" % self.url,
                    msg_status="Cannot install plugin.",
                    msg_exception="Plugin installation has failed.",
                    data=data)

                # Jenkins deploys as .hpi; remove it so only the managed
                # .jpi file remains. NOTE(review): presumably to avoid a
                # duplicate plugin file — confirm against Jenkins docs.
                hpi_file = '%s/plugins/%s.hpi' % (
                    self.params['jenkins_home'],
                    self.params['name'])

                if os.path.isfile(hpi_file):
                    os.remove(hpi_file)

            changed = True
        else:
            # Check if the plugin directory exists
            if not os.path.isdir(self.params['jenkins_home']):
                self.module.fail_json(
                    msg="Jenkins home directory doesn't exist.")

            md5sum_old = None
            if os.path.isfile(plugin_file):
                # Make the checksum of the currently installed plugin
                with open(plugin_file, 'rb') as md5_plugin_fh:
                    md5_plugin_content = md5_plugin_fh.read()
                md5sum_old = hashlib.md5(md5_plugin_content).hexdigest()

            if self.params['version'] in [None, 'latest']:
                # Take latest version
                plugin_url = (
                    "%s/latest/%s.hpi" % (
                        self.params['updates_url'],
                        self.params['name']))
            else:
                # Take specific version
                plugin_url = (
                    "{0}/download/plugins/"
                    "{1}/{2}/{1}.hpi".format(
                        self.params['updates_url'],
                        self.params['name'],
                        self.params['version']))

            # Download directly when caching is disabled, a specific
            # version is pinned, or there is no local copy to compare.
            if (
                    self.params['updates_expiration'] == 0 or
                    self.params['version'] not in [None, 'latest'] or
                    md5sum_old is None):

                # Download the plugin file directly
                r = self._download_plugin(plugin_url)

                # Write downloaded plugin into file if checksums don't match
                if md5sum_old is None:
                    # No previously installed plugin
                    if not self.module.check_mode:
                        self._write_file(plugin_file, r)

                    changed = True
                else:
                    # Get data for the MD5
                    data = r.read()

                    # Make new checksum
                    md5sum_new = hashlib.md5(data).hexdigest()

                    # If the checksum is different from the currently installed
                    # plugin, store the new plugin
                    if md5sum_old != md5sum_new:
                        if not self.module.check_mode:
                            self._write_file(plugin_file, data)

                        changed = True
            elif self.params['version'] == 'latest':
                # Check for update from the updates JSON file
                plugin_data = self._download_updates()

                try:
                    with open(plugin_file, 'rb') as sha1_plugin_fh:
                        sha1_plugin_content = sha1_plugin_fh.read()
                    sha1_old = hashlib.sha1(sha1_plugin_content)
                except Exception as e:
                    self.module.fail_json(
                        msg="Cannot calculate SHA1 of the old plugin.",
                        details=to_native(e))

                # Update center publishes base64-encoded SHA1 digests.
                sha1sum_old = base64.b64encode(sha1_old.digest())

                # If the latest version changed, download it
                if sha1sum_old != to_bytes(plugin_data['sha1']):
                    if not self.module.check_mode:
                        r = self._download_plugin(plugin_url)
                        self._write_file(plugin_file, r)

                    changed = True

        # Change file attributes (owner/group/mode) if needed
        if os.path.isfile(plugin_file):
            params = {
                'dest': plugin_file
            }
            params.update(self.params)
            file_args = self.module.load_file_common_arguments(params)

            if not self.module.check_mode:
                # Not sure how to run this in the check mode
                changed = self.module.set_fs_attributes_if_different(
                    file_args, changed)
            else:
                # See the comment above
                changed = True

        return changed
|
||||
|
||||
def _download_updates(self):
|
||||
updates_filename = 'jenkins-plugin-cache.json'
|
||||
updates_dir = os.path.expanduser('~/.ansible/tmp')
|
||||
updates_file = "%s/%s" % (updates_dir, updates_filename)
|
||||
download_updates = True
|
||||
|
||||
# Check if we need to download new updates file
|
||||
if os.path.isfile(updates_file):
|
||||
# Get timestamp when the file was changed last time
|
||||
ts_file = os.stat(updates_file).st_mtime
|
||||
ts_now = time.time()
|
||||
|
||||
if ts_now - ts_file < self.params['updates_expiration']:
|
||||
download_updates = False
|
||||
|
||||
updates_file_orig = updates_file
|
||||
|
||||
# Download the updates file if needed
|
||||
if download_updates:
|
||||
url = "%s/update-center.json" % self.params['updates_url']
|
||||
|
||||
# Get the data
|
||||
r = self._get_url_data(
|
||||
url,
|
||||
msg_status="Remote updates not found.",
|
||||
msg_exception="Updates download failed.")
|
||||
|
||||
# Write the updates file
|
||||
update_fd, updates_file = tempfile.mkstemp()
|
||||
os.write(update_fd, r.read())
|
||||
|
||||
try:
|
||||
os.close(update_fd)
|
||||
except IOError as e:
|
||||
self.module.fail_json(
|
||||
msg="Cannot close the tmp updates file %s." % updates_file,
|
||||
details=to_native(e))
|
||||
|
||||
# Open the updates file
|
||||
try:
|
||||
f = open(updates_file, encoding='utf-8')
|
||||
except IOError as e:
|
||||
self.module.fail_json(
|
||||
msg="Cannot open temporal updates file.",
|
||||
details=to_native(e))
|
||||
|
||||
i = 0
|
||||
for line in f:
|
||||
# Read only the second line
|
||||
if i == 1:
|
||||
try:
|
||||
data = json.loads(line)
|
||||
except Exception as e:
|
||||
self.module.fail_json(
|
||||
msg="Cannot load JSON data from the tmp updates file.",
|
||||
details=to_native(e))
|
||||
|
||||
break
|
||||
|
||||
i += 1
|
||||
|
||||
# Move the updates file to the right place if we could read it
|
||||
if download_updates:
|
||||
# Make sure the destination directory exists
|
||||
if not os.path.isdir(updates_dir):
|
||||
try:
|
||||
os.makedirs(updates_dir, int('0700', 8))
|
||||
except OSError as e:
|
||||
self.module.fail_json(
|
||||
msg="Cannot create temporal directory.",
|
||||
details=to_native(e))
|
||||
|
||||
self.module.atomic_move(updates_file, updates_file_orig)
|
||||
|
||||
# Check if we have the plugin data available
|
||||
if 'plugins' not in data or self.params['name'] not in data['plugins']:
|
||||
self.module.fail_json(
|
||||
msg="Cannot find plugin data in the updates file.")
|
||||
|
||||
return data['plugins'][self.params['name']]
|
||||
|
||||
def _download_plugin(self, plugin_url):
|
||||
# Download the plugin
|
||||
r = self._get_url_data(
|
||||
plugin_url,
|
||||
msg_status="Plugin not found.",
|
||||
msg_exception="Plugin download failed.")
|
||||
|
||||
return r
|
||||
|
||||
def _write_file(self, f, data):
|
||||
# Store the plugin into a temp file and then move it
|
||||
tmp_f_fd, tmp_f = tempfile.mkstemp()
|
||||
|
||||
if isinstance(data, (text_type, binary_type)):
|
||||
os.write(tmp_f_fd, data)
|
||||
else:
|
||||
os.write(tmp_f_fd, data.read())
|
||||
|
||||
try:
|
||||
os.close(tmp_f_fd)
|
||||
except IOError as e:
|
||||
self.module.fail_json(
|
||||
msg='Cannot close the temporal plugin file %s.' % tmp_f,
|
||||
details=to_native(e))
|
||||
|
||||
# Move the file onto the right place
|
||||
self.module.atomic_move(tmp_f, f)
|
||||
|
||||
def uninstall(self):
|
||||
changed = False
|
||||
|
||||
# Perform the action
|
||||
if self.is_installed:
|
||||
if not self.module.check_mode:
|
||||
self._pm_query('doUninstall', 'Uninstallation')
|
||||
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
|
||||
def pin(self):
|
||||
return self._pinning('pin')
|
||||
|
||||
def unpin(self):
|
||||
return self._pinning('unpin')
|
||||
|
||||
def _pinning(self, action):
|
||||
changed = False
|
||||
|
||||
# Check if the plugin is pinned/unpinned
|
||||
if (
|
||||
action == 'pin' and not self.is_pinned or
|
||||
action == 'unpin' and self.is_pinned):
|
||||
|
||||
# Perform the action
|
||||
if not self.module.check_mode:
|
||||
self._pm_query(action, "%sning" % action.capitalize())
|
||||
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
|
||||
def enable(self):
|
||||
return self._enabling('enable')
|
||||
|
||||
def disable(self):
|
||||
return self._enabling('disable')
|
||||
|
||||
def _enabling(self, action):
|
||||
changed = False
|
||||
|
||||
# Check if the plugin is pinned/unpinned
|
||||
if (
|
||||
action == 'enable' and not self.is_enabled or
|
||||
action == 'disable' and self.is_enabled):
|
||||
|
||||
# Perform the action
|
||||
if not self.module.check_mode:
|
||||
self._pm_query(
|
||||
"make%sd" % action.capitalize(),
|
||||
"%sing" % action[:-1].capitalize())
|
||||
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
|
||||
def _pm_query(self, action, msg):
|
||||
url = "%s/pluginManager/plugin/%s/%s" % (
|
||||
self.params['url'], self.params['name'], action)
|
||||
|
||||
# Send the request
|
||||
self._get_url_data(
|
||||
url,
|
||||
msg_status="Plugin not found. %s" % url,
|
||||
msg_exception="%s has failed." % msg)
|
||||
|
||||
|
||||
def main():
    """Entry point: parse module arguments and apply the requested state."""
    # Module arguments; url_argument_spec() supplies the common HTTP options.
    argument_spec = url_argument_spec()
    argument_spec.update(
        group=dict(default='jenkins'),
        jenkins_home=dict(default='/var/lib/jenkins'),
        mode=dict(default='0644', type='raw'),
        name=dict(required=True),
        owner=dict(default='jenkins'),
        params=dict(type='dict'),
        state=dict(
            choices=[
                'present',
                'absent',
                'pinned',
                'unpinned',
                'enabled',
                'disabled',
                'latest'],
            default='present'),
        timeout=dict(default=30, type="int"),
        updates_expiration=dict(default=86400, type="int"),
        updates_url=dict(default='https://updates.jenkins.io'),
        url=dict(default='http://localhost:8080'),
        url_password=dict(no_log=True),
        version=dict(),
        with_dependencies=dict(default=True, type='bool'),
    )
    # Module settings
    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # The 'params' option was removed for security reasons; see
    # https://meetbot.fedoraproject.org/ansible-meeting/2017-09-28/ansible_dev_meeting.2017-09-28-15.00.log.html
    if module.params['params']:
        module.fail_json(msg="The params option to jenkins_plugin was removed in Ansible 2.5 since it circumvents Ansible's option handling")

    # Force basic authentication (JIRA-style challenge is not sent by Jenkins).
    module.params['force_basic_auth'] = True

    # Convert timeout to float (fetch_url expects a numeric timeout)
    try:
        module.params['timeout'] = float(module.params['timeout'])
    except ValueError as e:
        module.fail_json(
            msg='Cannot convert %s to float.' % module.params['timeout'],
            details=to_native(e))

    # state=latest is shorthand for state=present + version=latest
    if module.params['state'] == 'latest':
        module.params['state'] = 'present'
        module.params['version'] = 'latest'

    # Create some shortcuts
    name = module.params['name']
    state = module.params['state']

    # Initial change state of the task
    changed = False

    # Instantiate the JenkinsPlugin object
    jp = JenkinsPlugin(module)

    # Perform action depending on the requested state
    if state == 'present':
        changed = jp.install()
    elif state == 'absent':
        changed = jp.uninstall()
    elif state == 'pinned':
        changed = jp.pin()
    elif state == 'unpinned':
        changed = jp.unpin()
    elif state == 'enabled':
        changed = jp.enable()
    elif state == 'disabled':
        changed = jp.disable()

    # Print status of the change
    module.exit_json(changed=changed, plugin=name, state=state)
|
||||
196
plugins/modules/web_infrastructure/jenkins_script.py
Normal file
196
plugins/modules/web_infrastructure/jenkins_script.py
Normal file
|
|
@ -0,0 +1,196 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# encoding: utf-8
|
||||
|
||||
# (c) 2016, James Hogarth <james.hogarth@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
author: James Hogarth (@hogarthj)
|
||||
module: jenkins_script
|
||||
short_description: Executes a groovy script in the jenkins instance
|
||||
description:
|
||||
- The C(jenkins_script) module takes a script plus a dict of values
|
||||
to use within the script and returns the result of the script being run.
|
||||
|
||||
options:
|
||||
script:
|
||||
description:
|
||||
- The groovy script to be executed.
|
||||
This gets passed as a string Template if args is defined.
|
||||
required: true
|
||||
url:
|
||||
description:
|
||||
- The jenkins server to execute the script against. The default is a local
|
||||
jenkins instance that is not being proxied through a webserver.
|
||||
default: http://localhost:8080
|
||||
validate_certs:
|
||||
description:
|
||||
- If set to C(no), the SSL certificates will not be validated.
|
||||
This should only set to C(no) used on personally controlled sites
|
||||
using self-signed certificates as it avoids verifying the source site.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
user:
|
||||
description:
|
||||
- The username to connect to the jenkins server with.
|
||||
password:
|
||||
description:
|
||||
- The password to connect to the jenkins server with.
|
||||
timeout:
|
||||
description:
|
||||
- The request timeout in seconds
|
||||
default: 10
|
||||
args:
|
||||
description:
|
||||
- A dict of key-value pairs used in formatting the script using string.Template (see https://docs.python.org/2/library/string.html#template-strings).
|
||||
|
||||
notes:
|
||||
- Since the script can do anything this does not report on changes.
|
||||
Knowing the script is being run it's important to set changed_when
|
||||
for the ansible output to be clear on any alterations made.
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Obtaining a list of plugins
|
||||
jenkins_script:
|
||||
script: 'println(Jenkins.instance.pluginManager.plugins)'
|
||||
user: admin
|
||||
password: admin
|
||||
|
||||
- name: Setting master using a variable to hold a more complicate script
|
||||
set_fact:
|
||||
setmaster_mode: |
|
||||
import jenkins.model.*
|
||||
instance = Jenkins.getInstance()
|
||||
instance.setMode(${jenkins_mode})
|
||||
instance.save()
|
||||
|
||||
- name: use the variable as the script
|
||||
jenkins_script:
|
||||
script: "{{ setmaster_mode }}"
|
||||
args:
|
||||
jenkins_mode: Node.Mode.EXCLUSIVE
|
||||
|
||||
- name: interacting with an untrusted HTTPS connection
|
||||
jenkins_script:
|
||||
script: "println(Jenkins.instance.pluginManager.plugins)"
|
||||
user: admin
|
||||
password: admin
|
||||
url: https://localhost
|
||||
validate_certs: no
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
output:
|
||||
description: Result of script
|
||||
returned: success
|
||||
type: str
|
||||
sample: 'Result: true'
|
||||
'''
|
||||
|
||||
import json
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import http_cookiejar as cookiejar
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def is_csrf_protection_enabled(module):
    """Return True when the Jenkins instance requires CSRF crumbs."""
    resp, info = fetch_url(module,
                           module.params['url'] + '/api/json',
                           timeout=module.params['timeout'],
                           method='GET')
    status = info["status"]
    if status != 200:
        module.fail_json(msg="HTTP error " + str(status) + " " + info["msg"], output='')

    body = to_native(resp.read())
    # 'useCrumbs' is reported by the Jenkins root API endpoint.
    return json.loads(body).get('useCrumbs', False)
|
||||
|
||||
|
||||
def get_crumb(module, cookies):
    """Fetch the CSRF crumb from Jenkins and return the decoded JSON dict."""
    resp, info = fetch_url(module,
                           module.params['url'] + '/crumbIssuer/api/json',
                           method='GET',
                           timeout=module.params['timeout'],
                           cookies=cookies)
    status = info["status"]
    if status != 200:
        module.fail_json(msg="HTTP error " + str(status) + " " + info["msg"], output='')

    body = to_native(resp.read())
    return json.loads(body)
|
||||
|
||||
|
||||
def main():
    """Entry point: run a (templated) Groovy script via /scriptText."""

    module = AnsibleModule(
        argument_spec=dict(
            script=dict(required=True, type="str"),
            url=dict(required=False, type="str", default="http://localhost:8080"),
            validate_certs=dict(required=False, type="bool", default=True),
            user=dict(required=False, type="str", default=None),
            password=dict(required=False, no_log=True, type="str", default=None),
            timeout=dict(required=False, type="int", default=10),
            args=dict(required=False, type="dict", default=None)
        )
    )

    # Map user/password onto the fetch_url basic-auth parameters.
    if module.params['user'] is not None:
        if module.params['password'] is None:
            module.fail_json(msg="password required when user provided", output='')
        module.params['url_username'] = module.params['user']
        module.params['url_password'] = module.params['password']
        module.params['force_basic_auth'] = True

    # Substitute 'args' into the script via string.Template ($var syntax).
    if module.params['args'] is not None:
        from string import Template
        try:
            script_contents = Template(module.params['script']).substitute(module.params['args'])
        except KeyError as err:
            module.fail_json(msg="Error with templating variable: %s" % err, output='')
    else:
        script_contents = module.params['script']

    # If CSRF protection is on, obtain a crumb and send it as a header
    # together with the session cookie jar.
    headers = {}
    cookies = None
    if is_csrf_protection_enabled(module):
        cookies = cookiejar.LWPCookieJar()
        crumb = get_crumb(module, cookies)
        headers = {crumb['crumbRequestField']: crumb['crumb']}

    resp, info = fetch_url(module,
                           module.params['url'] + "/scriptText",
                           data=urlencode({'script': script_contents}),
                           headers=headers,
                           method="POST",
                           timeout=module.params['timeout'],
                           cookies=cookies)

    if info["status"] != 200:
        module.fail_json(msg="HTTP error " + str(info["status"]) + " " + info["msg"], output='')

    result = to_native(resp.read())

    # Jenkins returns HTTP 200 even when the script threw; detect a Java
    # stacktrace in the output and report it as a failure.
    if 'Exception:' in result and 'at java.lang.Thread' in result:
        module.fail_json(msg="script failed with stacktrace:\n " + result, output='')

    module.exit_json(
        output=result,
    )
|
||||
443
plugins/modules/web_infrastructure/jira.py
Normal file
443
plugins/modules/web_infrastructure/jira.py
Normal file
|
|
@ -0,0 +1,443 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Steve Smith <ssmith@atlassian.com>
|
||||
# Atlassian open-source approval reference OSR-76.
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: jira
|
||||
short_description: create and modify issues in a JIRA instance
|
||||
description:
|
||||
- Create and modify issues in a JIRA instance.
|
||||
|
||||
options:
|
||||
uri:
|
||||
required: true
|
||||
description:
|
||||
- Base URI for the JIRA instance.
|
||||
|
||||
operation:
|
||||
required: true
|
||||
aliases: [ command ]
|
||||
choices: [ create, comment, edit, fetch, transition , link ]
|
||||
description:
|
||||
- The operation to perform.
|
||||
|
||||
username:
|
||||
required: true
|
||||
description:
|
||||
- The username to log-in with.
|
||||
|
||||
password:
|
||||
required: true
|
||||
description:
|
||||
- The password to log-in with.
|
||||
|
||||
project:
|
||||
required: false
|
||||
description:
|
||||
- The project for this operation. Required for issue creation.
|
||||
|
||||
summary:
|
||||
required: false
|
||||
description:
|
||||
- The issue summary, where appropriate.
|
||||
|
||||
description:
|
||||
required: false
|
||||
description:
|
||||
- The issue description, where appropriate.
|
||||
|
||||
issuetype:
|
||||
required: false
|
||||
description:
|
||||
- The issue type, for issue creation.
|
||||
|
||||
issue:
|
||||
required: false
|
||||
description:
|
||||
- An existing issue key to operate on.
|
||||
|
||||
comment:
|
||||
required: false
|
||||
description:
|
||||
- The comment text to add.
|
||||
|
||||
status:
|
||||
required: false
|
||||
description:
|
||||
- The desired status; only relevant for the transition operation.
|
||||
|
||||
assignee:
|
||||
required: false
|
||||
description:
|
||||
- Sets the assignee on create or transition operations. Note not all transitions will allow this.
|
||||
|
||||
linktype:
|
||||
required: false
|
||||
description:
|
||||
- Set type of link, when action 'link' selected.
|
||||
|
||||
inwardissue:
|
||||
required: false
|
||||
description:
|
||||
- Set issue from which link will be created.
|
||||
|
||||
outwardissue:
|
||||
required: false
|
||||
description:
|
||||
- Set issue to which link will be created.
|
||||
|
||||
fields:
|
||||
required: false
|
||||
description:
|
||||
- This is a free-form data structure that can contain arbitrary data. This is passed directly to the JIRA REST API
|
||||
(possibly after merging with other required data, as when passed to create). See examples for more information,
|
||||
and the JIRA REST API for the structure required for various fields.
|
||||
|
||||
timeout:
|
||||
required: false
|
||||
description:
|
||||
- Set timeout, in seconds, on requests to JIRA API.
|
||||
default: 10
|
||||
|
||||
validate_certs:
|
||||
required: false
|
||||
description:
|
||||
- Require valid SSL certificates (set to `false` if you'd like to use self-signed certificates)
|
||||
default: true
|
||||
type: bool
|
||||
|
||||
notes:
|
||||
- "Currently this only works with basic-auth."
|
||||
|
||||
author: "Steve Smith (@tarka)"
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
# Create a new issue and add a comment to it:
|
||||
- name: Create an issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
project: ANS
|
||||
operation: create
|
||||
summary: Example Issue
|
||||
description: Created using Ansible
|
||||
issuetype: Task
|
||||
register: issue
|
||||
|
||||
- name: Comment on issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key }}'
|
||||
operation: comment
|
||||
comment: A comment added by Ansible
|
||||
|
||||
# Assign an existing issue using edit
|
||||
- name: Assign an issue using free-form fields
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key}}'
|
||||
operation: edit
|
||||
assignee: ssmith
|
||||
|
||||
# Create an issue with an existing assignee
|
||||
- name: Create an assigned issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
project: ANS
|
||||
operation: create
|
||||
summary: Assigned issue
|
||||
description: Created and assigned using Ansible
|
||||
issuetype: Task
|
||||
assignee: ssmith
|
||||
|
||||
# Edit an issue
|
||||
- name: Set the labels on an issue using free-form fields
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key }}'
|
||||
operation: edit
|
||||
args:
|
||||
fields:
|
||||
labels:
|
||||
- autocreated
|
||||
- ansible
|
||||
|
||||
# Retrieve metadata for an issue and use it to create an account
|
||||
- name: Get an issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
project: ANS
|
||||
operation: fetch
|
||||
issue: ANS-63
|
||||
register: issue
|
||||
|
||||
- name: Create a unix account for the reporter
|
||||
become: true
|
||||
user:
|
||||
name: '{{ issue.meta.fields.creator.name }}'
|
||||
comment: '{{ issue.meta.fields.creator.displayName }}'
|
||||
|
||||
# You can get list of valid linktypes at /rest/api/2/issueLinkType
|
||||
# url of your jira installation.
|
||||
- name: Create link from HSP-1 to MKY-1
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
operation: link
|
||||
linktype: Relates
|
||||
inwardissue: HSP-1
|
||||
outwardissue: MKY-1
|
||||
|
||||
# Transition an issue by target status
|
||||
- name: Close the issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key }}'
|
||||
operation: transition
|
||||
status: Done
|
||||
"""
|
||||
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
from ansible.module_utils._text import to_text, to_bytes
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def request(url, user, passwd, timeout, data=None, method=None):
    """Issue an authenticated JSON request to JIRA and return the decoded body.

    Returns {} for an empty response body; fails the module on any HTTP
    status other than 200, 201 or 204.
    """
    payload = json.dumps(data) if data else data

    # NOTE: fetch_url uses a password manager, which follows the
    # standard request-then-challenge basic-auth semantics. However as
    # JIRA allows some unauthorised operations it doesn't necessarily
    # send the challenge, so the request occurs as the anonymous user,
    # resulting in unexpected results. To work around this we manually
    # inject the basic-auth header up-front to ensure that JIRA treats
    # the requests as authorized for this user.
    auth = to_text(base64.b64encode(to_bytes('{0}:{1}'.format(user, passwd), errors='surrogate_or_strict')))

    response, info = fetch_url(module, url, data=payload, method=method, timeout=timeout,
                               headers={'Content-Type': 'application/json',
                                        'Authorization': "Basic %s" % auth})

    if info['status'] not in (200, 201, 204):
        module.fail_json(msg=info['msg'])

    body = response.read()
    if not body:
        return {}
    return json.loads(to_text(body, errors='surrogate_or_strict'))
|
||||
|
||||
|
||||
def post(url, user, passwd, timeout, data):
    """POST *data* as JSON to *url* and return the decoded response."""
    return request(url, user, passwd, timeout, data=data, method='POST')
|
||||
|
||||
|
||||
def put(url, user, passwd, timeout, data):
    """PUT *data* as JSON to *url* and return the decoded response."""
    return request(url, user, passwd, timeout, data=data, method='PUT')
|
||||
|
||||
|
||||
def get(url, user, passwd, timeout):
    """GET *url* and return the decoded JSON response."""
    return request(url, user, passwd, timeout)
|
||||
|
||||
|
||||
def create(restbase, user, passwd, params):
    """Create a new issue and return the API response."""
    fields = {
        'project': {'key': params['project']},
        'summary': params['summary'],
        'issuetype': {'name': params['issuetype']},
    }

    if params['description']:
        fields['description'] = params['description']

    # Merge in any additional or overridden fields supplied by the user.
    if params['fields']:
        fields.update(params['fields'])

    return post(restbase + '/issue/', user, passwd, params['timeout'],
                {'fields': fields})
|
||||
|
||||
|
||||
def comment(restbase, user, passwd, params):
    """Add a comment to an existing issue and return the API response."""
    url = '%s/issue/%s/comment' % (restbase, params['issue'])
    return post(url, user, passwd, params['timeout'],
                {'body': params['comment']})
|
||||
|
||||
|
||||
def edit(restbase, user, passwd, params):
    """Update fields on an existing issue and return the API response."""
    url = '%s/issue/%s' % (restbase, params['issue'])
    return put(url, user, passwd, params['timeout'],
               {'fields': params['fields']})
|
||||
|
||||
|
||||
def fetch(restbase, user, passwd, params):
    """Retrieve an existing issue and return its JSON representation."""
    return get('%s/issue/%s' % (restbase, params['issue']),
               user, passwd, params['timeout'])
|
||||
|
||||
|
||||
def transition(restbase, user, passwd, params):
    """Move an issue to the workflow status named in params['status'].

    Looks up the transition id whose name matches the target status and
    posts it together with params['fields']. Raises ValueError when no
    matching transition is available from the issue's current state.
    """
    # Find the transition id
    turl = restbase + '/issue/' + params['issue'] + "/transitions"
    tmeta = get(turl, user, passwd, params['timeout'])

    target = params['status']
    tid = None
    for t in tmeta['transitions']:
        if t['name'] == target:
            tid = t['id']
            break

    if not tid:
        # Error message grammar fixed ("Failed find" -> "Failed to find").
        raise ValueError("Failed to find valid transition for '%s'" % target)

    # Perform it
    url = restbase + '/issue/' + params['issue'] + "/transitions"
    data = {'transition': {"id": tid},
            'fields': params['fields']}

    return post(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def link(restbase, user, passwd, params):
    """Create a link of type linktype between inwardissue and outwardissue."""
    payload = {
        'type': {'name': params['linktype']},
        'inwardIssue': {'key': params['inwardissue']},
        'outwardIssue': {'key': params['outwardissue']},
    }
    return post(restbase + '/issueLink/', user, passwd,
                params['timeout'], payload)
|
||||
|
||||
|
||||
# Some parameters are required depending on the operation: this maps each
# operation name to the module parameters it cannot run without (checked
# in main() before dispatch).
OP_REQUIRED = dict(create=['project', 'issuetype', 'summary'],
                   comment=['issue', 'comment'],
                   edit=[],
                   fetch=['issue'],
                   transition=['status'],
                   link=['linktype', 'inwardissue', 'outwardissue'])
|
||||
|
||||
|
||||
def main():
    """Entry point: parse arguments and dispatch to the operation handler."""

    # 'module' is global so request() can call fetch_url/fail_json on it.
    global module
    module = AnsibleModule(
        argument_spec=dict(
            uri=dict(required=True),
            operation=dict(choices=['create', 'comment', 'edit', 'fetch', 'transition', 'link'],
                           aliases=['command'], required=True),
            username=dict(required=True),
            password=dict(required=True, no_log=True),
            project=dict(),
            summary=dict(),
            description=dict(),
            issuetype=dict(),
            issue=dict(aliases=['ticket']),
            comment=dict(),
            status=dict(),
            assignee=dict(),
            fields=dict(default={}, type='dict'),
            linktype=dict(),
            inwardissue=dict(),
            outwardissue=dict(),
            timeout=dict(type='float', default=10),
            validate_certs=dict(default=True, type='bool'),
        ),
        supports_check_mode=False
    )

    op = module.params['operation']

    # Check we have the necessary per-operation parameters
    missing = []
    for parm in OP_REQUIRED[op]:
        if not module.params[parm]:
            missing.append(parm)
    if missing:
        module.fail_json(msg="Operation %s require the following missing parameters: %s" % (op, ",".join(missing)))

    # Handle rest of parameters
    uri = module.params['uri']
    user = module.params['username']
    passwd = module.params['password']
    if module.params['assignee']:
        module.params['fields']['assignee'] = {'name': module.params['assignee']}

    if not uri.endswith('/'):
        uri = uri + '/'
    restbase = uri + 'rest/api/2'

    # Dispatch
    try:

        # Lookup the corresponding method for this operation. This is
        # safe as the AnsibleModule should remove any unknown operations.
        thismod = sys.modules[__name__]
        method = getattr(thismod, op)

        ret = method(restbase, user, passwd, module.params)

    except Exception as e:
        # BUG FIX: exceptions have no '.message' attribute on Python 3,
        # so 'e.message' raised AttributeError and masked the real error.
        # Render the exception portably instead.
        return module.fail_json(msg=to_text(e))

    module.exit_json(changed=True, meta=ret)
|
||||
163
plugins/modules/web_infrastructure/nginx_status_facts.py
Normal file
163
plugins/modules/web_infrastructure/nginx_status_facts.py
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2016, René Moser <mail@renemoser.net>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['deprecated'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: nginx_status_facts
|
||||
deprecated:
|
||||
removed_in: '2.13'
|
||||
why: Deprecated in favour of C(_info) module.
|
||||
alternative: Use M(nginx_status_info) instead.
|
||||
short_description: Retrieve nginx status facts.
|
||||
description:
|
||||
- Gathers facts from nginx from a URL having C(stub_status) enabled.
|
||||
author: "René Moser (@resmo)"
|
||||
options:
|
||||
url:
|
||||
description:
|
||||
- URL of the nginx status.
|
||||
required: true
|
||||
timeout:
|
||||
description:
|
||||
- HTTP connection timeout in seconds.
|
||||
required: false
|
||||
default: 10
|
||||
|
||||
notes:
|
||||
- See http://nginx.org/en/docs/http/ngx_http_stub_status_module.html for more information.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Gather status facts from nginx on localhost
|
||||
- name: get current http stats
|
||||
nginx_status_facts:
|
||||
url: http://localhost/nginx_status
|
||||
|
||||
# Gather status facts from nginx on localhost with a custom timeout of 20 seconds
|
||||
- name: get current http stats
|
||||
nginx_status_facts:
|
||||
url: http://localhost/nginx_status
|
||||
timeout: 20
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
nginx_status_facts.active_connections:
|
||||
description: Active connections.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 2340
|
||||
nginx_status_facts.accepts:
|
||||
description: The total number of accepted client connections.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 81769947
|
||||
nginx_status_facts.handled:
|
||||
description: The total number of handled connections. Generally, the parameter value is the same as accepts unless some resource limits have been reached.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 81769947
|
||||
nginx_status_facts.requests:
|
||||
description: The total number of client requests.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 144332345
|
||||
nginx_status_facts.reading:
|
||||
description: The current number of connections where nginx is reading the request header.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 0
|
||||
nginx_status_facts.writing:
|
||||
description: The current number of connections where nginx is writing the response back to the client.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 241
|
||||
nginx_status_facts.waiting:
|
||||
description: The current number of idle client connections waiting for a request.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 2092
|
||||
nginx_status_facts.data:
|
||||
description: HTTP response as is.
|
||||
returned: success
|
||||
type: str
|
||||
sample: "Active connections: 2340 \nserver accepts handled requests\n 81769947 81769947 144332345 \nReading: 0 Writing: 241 Waiting: 2092 \n"
|
||||
'''
|
||||
|
||||
import re
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils._text import to_text
|
||||
|
||||
|
||||
class NginxStatusFacts(object):
    """Fetch the nginx C(stub_status) page and parse it into a facts dict."""

    def __init__(self):
        # Connection settings come from the module-level AnsibleModule instance.
        self.url = module.params.get('url')
        self.timeout = module.params.get('timeout')

    def run(self):
        """Query the status URL and return the parsed counters under 'nginx_status_facts'."""
        facts = {
            'active_connections': None,
            'accepts': None,
            'handled': None,
            'requests': None,
            'reading': None,
            'writing': None,
            'waiting': None,
            'data': None,
        }
        result = {'nginx_status_facts': facts}

        (response, info) = fetch_url(module=module, url=self.url, force=True, timeout=self.timeout)
        if not response:
            module.fail_json(msg="No valid or no response from url %s within %s seconds (timeout)" % (self.url, self.timeout))

        raw = to_text(response.read(), errors='surrogate_or_strict')
        if not raw:
            return result

        facts['data'] = raw
        expr = r'Active connections: ([0-9]+) \nserver accepts handled requests\n ([0-9]+) ([0-9]+) ([0-9]+) \n' \
            r'Reading: ([0-9]+) Writing: ([0-9]+) Waiting: ([0-9]+)'
        match = re.match(expr, raw, re.S)
        if match:
            # Regex groups appear in the same order as the stub_status output.
            keys = ('active_connections', 'accepts', 'handled', 'requests',
                    'reading', 'writing', 'waiting')
            for index, key in enumerate(keys, start=1):
                facts[key] = int(match.group(index))
        return result
||||
|
||||
|
||||
def main():
    """Module entry point: gather nginx stub_status facts and exit."""
    global module
    module = AnsibleModule(
        argument_spec=dict(
            url=dict(required=True),
            timeout=dict(type='int', default=10),
        ),
        supports_check_mode=True,
    )

    # A read-only fact-gathering module never reports a change.
    facts = NginxStatusFacts().run()
    module.exit_json(changed=False, ansible_facts=facts)


if __name__ == '__main__':
    main()
|
||||
160
plugins/modules/web_infrastructure/nginx_status_info.py
Normal file
160
plugins/modules/web_infrastructure/nginx_status_info.py
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2016, René Moser <mail@renemoser.net>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: nginx_status_info
|
||||
short_description: Retrieve information on nginx status.
|
||||
description:
|
||||
- Gathers information from nginx from a URL having C(stub_status) enabled.
|
||||
author: "René Moser (@resmo)"
|
||||
options:
|
||||
url:
|
||||
type: str
|
||||
description:
|
||||
- URL of the nginx status.
|
||||
required: true
|
||||
timeout:
|
||||
type: int
|
||||
description:
|
||||
- HTTP connection timeout in seconds.
|
||||
required: false
|
||||
default: 10
|
||||
|
||||
notes:
|
||||
- See U(http://nginx.org/en/docs/http/ngx_http_stub_status_module.html) for more information.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Gather status info from nginx on localhost
|
||||
- name: get current http stats
|
||||
nginx_status_info:
|
||||
url: http://localhost/nginx_status
|
||||
register: result
|
||||
|
||||
# Gather status info from nginx on localhost with a custom timeout of 20 seconds
|
||||
- name: get current http stats
|
||||
nginx_status_info:
|
||||
url: http://localhost/nginx_status
|
||||
timeout: 20
|
||||
register: result
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
---
|
||||
active_connections:
|
||||
description: Active connections.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 2340
|
||||
accepts:
|
||||
description: The total number of accepted client connections.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 81769947
|
||||
handled:
|
||||
description: The total number of handled connections. Generally, the parameter value is the same as accepts unless some resource limits have been reached.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 81769947
|
||||
requests:
|
||||
description: The total number of client requests.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 144332345
|
||||
reading:
|
||||
description: The current number of connections where nginx is reading the request header.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 0
|
||||
writing:
|
||||
description: The current number of connections where nginx is writing the response back to the client.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 241
|
||||
waiting:
|
||||
description: The current number of idle client connections waiting for a request.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 2092
|
||||
data:
|
||||
description: HTTP response as is.
|
||||
returned: success
|
||||
type: str
|
||||
sample: "Active connections: 2340 \nserver accepts handled requests\n 81769947 81769947 144332345 \nReading: 0 Writing: 241 Waiting: 2092 \n"
|
||||
'''
|
||||
|
||||
import re
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils._text import to_text
|
||||
|
||||
|
||||
class NginxStatusInfo(object):
    """Fetch the nginx C(stub_status) page and parse it into a result dict."""

    def __init__(self):
        # Connection settings come from the module-level AnsibleModule instance.
        self.url = module.params.get('url')
        self.timeout = module.params.get('timeout')

    def run(self):
        """Query the status URL and return the parsed counters."""
        result = {
            'active_connections': None,
            'accepts': None,
            'handled': None,
            'requests': None,
            'reading': None,
            'writing': None,
            'waiting': None,
            'data': None,
        }
        (response, info) = fetch_url(module=module, url=self.url, force=True, timeout=self.timeout)
        if not response:
            module.fail_json(msg="No valid or no response from url %s within %s seconds (timeout)" % (self.url, self.timeout))

        raw = to_text(response.read(), errors='surrogate_or_strict')
        if not raw:
            return result

        result['data'] = raw
        expr = r'Active connections: ([0-9]+) \nserver accepts handled requests\n ([0-9]+) ([0-9]+) ([0-9]+) \n' \
            r'Reading: ([0-9]+) Writing: ([0-9]+) Waiting: ([0-9]+)'
        match = re.match(expr, raw, re.S)
        if match:
            # Regex groups appear in the same order as the stub_status output.
            keys = ('active_connections', 'accepts', 'handled', 'requests',
                    'reading', 'writing', 'waiting')
            for index, key in enumerate(keys, start=1):
                result[key] = int(match.group(index))
        return result
||||
|
||||
|
||||
def main():
    """Module entry point: gather nginx stub_status information and exit."""
    global module
    module = AnsibleModule(
        argument_spec=dict(
            url=dict(type='str', required=True),
            timeout=dict(type='int', default=10),
        ),
        supports_check_mode=True,
    )

    # A read-only info module never reports a change.
    info = NginxStatusInfo().run()
    module.exit_json(changed=False, **info)


if __name__ == '__main__':
    main()
|
||||
230
plugins/modules/web_infrastructure/rundeck_acl_policy.py
Normal file
230
plugins/modules/web_infrastructure/rundeck_acl_policy.py
Normal file
|
|
@ -0,0 +1,230 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2017, Loic Blot <loic.blot@unix-experience.fr>
|
||||
# Sponsored by Infopro Digital. http://www.infopro-digital.com/
|
||||
# Sponsored by E.T.A.I. http://www.etai.fr/
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: rundeck_acl_policy
|
||||
|
||||
short_description: Manage Rundeck ACL policies.
|
||||
description:
|
||||
- Create, update and remove Rundeck ACL policies through HTTP API.
|
||||
author: "Loic Blot (@nerzhul)"
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Create or remove Rundeck project.
|
||||
choices: ['present', 'absent']
|
||||
default: 'present'
|
||||
name:
|
||||
description:
|
||||
- Sets the project name.
|
||||
required: True
|
||||
url:
|
||||
description:
|
||||
- Sets the rundeck instance URL.
|
||||
required: True
|
||||
api_version:
|
||||
description:
|
||||
- Sets the API version used by module.
|
||||
- API version must be at least 14.
|
||||
default: 14
|
||||
token:
|
||||
description:
|
||||
- Sets the token to authenticate against Rundeck API.
|
||||
required: True
|
||||
project:
|
||||
description:
|
||||
- Sets the project which receive the ACL policy.
|
||||
- If unset, it's a system ACL policy.
|
||||
policy:
|
||||
description:
|
||||
- Sets the ACL policy content.
|
||||
- ACL policy content is a YAML object as described in http://rundeck.org/docs/man5/aclpolicy.html.
|
||||
- It can be a YAML string or a pure Ansible inventory YAML object.
|
||||
extends_documentation_fragment: url
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Create or update a rundeck ACL policy in project Ansible
|
||||
rundeck_acl_policy:
|
||||
name: "Project_01"
|
||||
api_version: 18
|
||||
url: "https://rundeck.example.org"
|
||||
token: "mytoken"
|
||||
state: present
|
||||
project: "Ansible"
|
||||
policy:
|
||||
description: "my policy"
|
||||
context:
|
||||
application: rundeck
|
||||
for:
|
||||
project:
|
||||
- allow: read
|
||||
by:
|
||||
group: "build"
|
||||
|
||||
- name: Remove a rundeck system policy
|
||||
rundeck_acl_policy:
|
||||
name: "Project_02"
|
||||
url: "https://rundeck.example.org"
|
||||
token: "mytoken"
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
rundeck_response:
|
||||
description: Rundeck response when a failure occurs.
|
||||
returned: failed
|
||||
type: str
|
||||
before:
|
||||
description: Dictionary containing ACL policy information before modification.
|
||||
returned: success
|
||||
type: dict
|
||||
after:
|
||||
description: Dictionary containing ACL policy information after modification.
|
||||
returned: success
|
||||
type: dict
|
||||
'''
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url, url_argument_spec
|
||||
from ansible.module_utils._text import to_text
|
||||
import json
|
||||
|
||||
|
||||
class RundeckACLManager:
    """Thin wrapper around the Rundeck system-ACL HTTP API."""

    def __init__(self, module):
        self.module = module

    def handle_http_code_if_needed(self, infos):
        # Fail fast on authorization problems and server-side errors; every
        # other status code is interpreted by the caller.
        if infos["status"] == 403:
            self.module.fail_json(msg="Token not allowed. Please ensure token is allowed or has the correct "
                                      "permissions.", rundeck_response=infos["body"])
        elif infos["status"] >= 500:
            self.module.fail_json(msg="Fatal Rundeck API error.", rundeck_response=infos["body"])

    def request_rundeck_api(self, query, data=None, method="GET"):
        """Perform an authenticated JSON request against the Rundeck API.

        Returns a tuple of (parsed JSON body or raw bytes, fetch_url info dict).
        """
        resp, info = fetch_url(self.module,
                               "%s/api/%d/%s" % (self.module.params["url"], self.module.params["api_version"], query),
                               data=json.dumps(data),
                               method=method,
                               headers={
                                   "Content-Type": "application/json",
                                   "Accept": "application/json",
                                   "X-Rundeck-Auth-Token": self.module.params["token"]
                               })

        self.handle_http_code_if_needed(info)
        if resp is not None:
            resp = resp.read()
            if resp != b"":
                try:
                    json_resp = json.loads(to_text(resp, errors='surrogate_or_strict'))
                    return json_resp, info
                except ValueError as e:
                    self.module.fail_json(msg="Rundeck response was not a valid JSON. Exception was: %s. "
                                              "Object was: %s" % (str(e), resp))
        return resp, info

    def get_acl(self):
        """Return the current ACL policy document, or None when it does not exist."""
        resp, info = self.request_rundeck_api("system/acl/%s.aclpolicy" % self.module.params["name"])
        return resp

    def create_or_update_acl(self):
        """Create the ACL if absent, update it if its contents differ; exits the module."""
        facts = self.get_acl()
        if facts is None:
            # If in check mode don't create project, simulate a fake project creation
            if self.module.check_mode:
                self.module.exit_json(changed=True, before={}, after=self.module.params["policy"])

            _, info = self.request_rundeck_api("system/acl/%s.aclpolicy" % self.module.params["name"],
                                               method="POST",
                                               data={"contents": self.module.params["policy"]})

            if info["status"] == 201:
                self.module.exit_json(changed=True, before={}, after=self.get_acl())
            elif info["status"] == 400:
                self.module.fail_json(msg="Unable to validate acl %s. Please ensure it's a valid ACL" %
                                          self.module.params["name"])
            elif info["status"] == 409:
                self.module.fail_json(msg="ACL %s already exists" % self.module.params["name"])
            else:
                self.module.fail_json(msg="Unhandled HTTP status %d, please report the bug" % info["status"],
                                      before={}, after=self.get_acl())
        else:
            if facts["contents"] == self.module.params["policy"]:
                self.module.exit_json(changed=False, before=facts, after=facts)

            if self.module.check_mode:
                self.module.exit_json(changed=True, before=facts, after=facts)

            _, info = self.request_rundeck_api("system/acl/%s.aclpolicy" % self.module.params["name"],
                                               method="PUT",
                                               data={"contents": self.module.params["policy"]})

            if info["status"] == 200:
                self.module.exit_json(changed=True, before=facts, after=self.get_acl())
            elif info["status"] == 400:
                self.module.fail_json(msg="Unable to validate acl %s. Please ensure it's a valid ACL" %
                                          self.module.params["name"])
            elif info["status"] == 404:
                self.module.fail_json(msg="ACL %s doesn't exists. Cannot update." % self.module.params["name"])
            else:
                # Mirror the creation path: never let the module end silently
                # on an unexpected HTTP status (previously this fell through
                # without calling exit_json/fail_json).
                self.module.fail_json(msg="Unhandled HTTP status %d, please report the bug" % info["status"],
                                      before=facts, after=self.get_acl())

    def remove_acl(self):
        """Delete the ACL when present; exits the module with before/after facts."""
        facts = self.get_acl()
        if facts is None:
            self.module.exit_json(changed=False, before={}, after={})
        else:
            # If not in check mode, remove the project
            if not self.module.check_mode:
                self.request_rundeck_api("system/acl/%s.aclpolicy" % self.module.params["name"], method="DELETE")
            self.module.exit_json(changed=True, before=facts, after={})
|
||||
|
||||
|
||||
def main():
    """Module entry point for managing Rundeck ACL policies."""
    # Start from fetch_url's argument spec so callers can tune HTTP behaviour.
    argument_spec = url_argument_spec()
    argument_spec.update(dict(
        state=dict(type='str', choices=['present', 'absent'], default='present'),
        name=dict(required=True, type='str'),
        url=dict(required=True, type='str'),
        api_version=dict(type='int', default=14),
        token=dict(required=True, type='str', no_log=True),
        policy=dict(type='str'),
        project=dict(type='str'),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=[
            ['state', 'present', ['policy']],
        ],
        supports_check_mode=True
    )

    # The ACL endpoints used here only exist from API version 14 onwards.
    if module.params["api_version"] < 14:
        module.fail_json(msg="API version should be at least 14")

    manager = RundeckACLManager(module)
    state = module.params['state']
    if state == 'present':
        manager.create_or_update_acl()
    elif state == 'absent':
        manager.remove_acl()


if __name__ == '__main__':
    main()
|
||||
188
plugins/modules/web_infrastructure/rundeck_project.py
Normal file
188
plugins/modules/web_infrastructure/rundeck_project.py
Normal file
|
|
@ -0,0 +1,188 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Ansible module to manage rundeck projects
|
||||
# (c) 2017, Loic Blot <loic.blot@unix-experience.fr>
|
||||
# Sponsored by Infopro Digital. http://www.infopro-digital.com/
|
||||
# Sponsored by E.T.A.I. http://www.etai.fr/
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: rundeck_project
|
||||
|
||||
short_description: Manage Rundeck projects.
|
||||
description:
|
||||
- Create and remove Rundeck projects through HTTP API.
|
||||
author: "Loic Blot (@nerzhul)"
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Create or remove Rundeck project.
|
||||
choices: ['present', 'absent']
|
||||
default: 'present'
|
||||
name:
|
||||
description:
|
||||
- Sets the project name.
|
||||
required: True
|
||||
url:
|
||||
description:
|
||||
- Sets the rundeck instance URL.
|
||||
required: True
|
||||
api_version:
|
||||
description:
|
||||
- Sets the API version used by module.
|
||||
- API version must be at least 14.
|
||||
default: 14
|
||||
token:
|
||||
description:
|
||||
- Sets the token to authenticate against Rundeck API.
|
||||
required: True
|
||||
extends_documentation_fragment: url
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Create a rundeck project
|
||||
rundeck_project:
|
||||
name: "Project_01"
|
||||
api_version: 18
|
||||
url: "https://rundeck.example.org"
|
||||
token: "mytoken"
|
||||
state: present
|
||||
|
||||
- name: Remove a rundeck project
|
||||
rundeck_project:
|
||||
name: "Project_02"
|
||||
url: "https://rundeck.example.org"
|
||||
token: "mytoken"
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
rundeck_response:
|
||||
description: Rundeck response when a failure occurs
|
||||
returned: failed
|
||||
type: str
|
||||
before:
|
||||
description: dictionary containing project information before modification
|
||||
returned: success
|
||||
type: dict
|
||||
after:
|
||||
description: dictionary containing project information after modification
|
||||
returned: success
|
||||
type: dict
|
||||
'''
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import fetch_url, url_argument_spec
|
||||
import json
|
||||
|
||||
|
||||
class RundeckProjectManager(object):
    """Thin wrapper around the Rundeck project HTTP API."""

    def __init__(self, module):
        self.module = module

    def handle_http_code_if_needed(self, infos):
        # Fail fast on authorization problems and server-side errors; every
        # other status code is interpreted by the caller.
        if infos["status"] == 403:
            self.module.fail_json(msg="Token not allowed. Please ensure token is allowed or has the correct "
                                      "permissions.", rundeck_response=infos["body"])
        elif infos["status"] >= 500:
            self.module.fail_json(msg="Fatal Rundeck API error.", rundeck_response=infos["body"])

    def request_rundeck_api(self, query, data=None, method="GET"):
        """Perform an authenticated JSON request against the Rundeck API.

        Returns a tuple of (parsed JSON body or raw bytes, fetch_url info dict).
        """
        resp, info = fetch_url(self.module,
                               "%s/api/%d/%s" % (self.module.params["url"], self.module.params["api_version"], query),
                               data=json.dumps(data),
                               method=method,
                               headers={
                                   "Content-Type": "application/json",
                                   "Accept": "application/json",
                                   "X-Rundeck-Auth-Token": self.module.params["token"]
                               })

        self.handle_http_code_if_needed(info)
        if resp is not None:
            resp = resp.read()
            # resp is bytes on Python 3, so it must be compared against b"".
            # The previous comparison with "" was always true, which sent
            # empty response bodies into json.loads and made the module fail.
            if resp != b"":
                try:
                    json_resp = json.loads(to_native(resp))
                    return json_resp, info
                except ValueError as e:
                    self.module.fail_json(msg="Rundeck response was not a valid JSON. Exception was: %s. "
                                              "Object was: %s" % (to_native(e), resp))
        return resp, info

    def get_project_facts(self):
        """Return the project description, or None when the project does not exist."""
        resp, info = self.request_rundeck_api("project/%s" % self.module.params["name"])
        return resp

    def create_or_update_project(self):
        """Create the project when absent; exits the module with before/after facts."""
        facts = self.get_project_facts()
        if facts is None:
            # If in check mode don't create project, simulate a fake project creation
            if self.module.check_mode:
                self.module.exit_json(changed=True, before={}, after={"name": self.module.params["name"]})

            resp, info = self.request_rundeck_api("projects", method="POST", data={
                "name": self.module.params["name"],
                "config": {}
            })

            if info["status"] == 201:
                self.module.exit_json(changed=True, before={}, after=self.get_project_facts())
            else:
                self.module.fail_json(msg="Unhandled HTTP status %d, please report the bug" % info["status"],
                                      before={}, after=self.get_project_facts())
        else:
            self.module.exit_json(changed=False, before=facts, after=facts)

    def remove_project(self):
        """Delete the project when present; exits the module with before/after facts."""
        facts = self.get_project_facts()
        if facts is None:
            self.module.exit_json(changed=False, before={}, after={})
        else:
            # If not in check mode, remove the project
            if not self.module.check_mode:
                self.request_rundeck_api("project/%s" % self.module.params["name"], method="DELETE")
            self.module.exit_json(changed=True, before=facts, after={})
|
||||
|
||||
|
||||
def main():
    """Module entry point for managing Rundeck projects."""
    # Start from fetch_url's argument spec so callers can tune HTTP behaviour.
    argument_spec = url_argument_spec()
    argument_spec.update(dict(
        state=dict(type='str', choices=['present', 'absent'], default='present'),
        name=dict(required=True, type='str'),
        url=dict(required=True, type='str'),
        api_version=dict(type='int', default=14),
        token=dict(required=True, type='str', no_log=True),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    # The project endpoints used here only exist from API version 14 onwards.
    if module.params["api_version"] < 14:
        module.fail_json(msg="API version should be at least 14")

    manager = RundeckProjectManager(module)
    state = module.params['state']
    if state == 'present':
        manager.create_or_update_project()
    elif state == 'absent':
        manager.remove_project()


if __name__ == '__main__':
    main()
main()
|
||||
226
plugins/modules/web_infrastructure/sophos_utm/utm_aaa_group.py
Normal file
226
plugins/modules/web_infrastructure/sophos_utm/utm_aaa_group.py
Normal file
|
|
@ -0,0 +1,226 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_aaa_group
|
||||
|
||||
author:
|
||||
- Johannes Brunswicker (@MatrixCrawler)
|
||||
|
||||
short_description: Create, update or destroy an aaa group object in Sophos UTM.
|
||||
|
||||
description:
|
||||
- Create, update or destroy an aaa group object in Sophos UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry.
|
||||
type: str
|
||||
required: true
|
||||
adirectory_groups:
|
||||
description:
|
||||
- List of adirectory group strings.
|
||||
type: list
|
||||
adirectory_groups_sids:
|
||||
description:
|
||||
- Dictionary of group sids.
|
||||
type: dict
|
||||
backend_match:
|
||||
description:
|
||||
- The backend for the group.
|
||||
type: str
|
||||
choices:
|
||||
- none
|
||||
- adirectory
|
||||
- edirectory
|
||||
- radius
|
||||
- tacacs
|
||||
- ldap
|
||||
default: none
|
||||
comment:
|
||||
description:
|
||||
- Comment that describes the AAA group.
|
||||
type: str
|
||||
default: ''
|
||||
dynamic:
|
||||
description:
|
||||
- Group type. Is static if none is selected.
|
||||
type: str
|
||||
default: none
|
||||
choices:
|
||||
- none
|
||||
- ipsec_dn
|
||||
- directory_groups
|
||||
edirectory_groups:
|
||||
description:
|
||||
- List of edirectory group strings.
|
||||
type: list
|
||||
ipsec_dn:
|
||||
description:
|
||||
- The ipsec dn string.
|
||||
type: str
|
||||
ldap_attribute:
|
||||
description:
|
||||
- The ldap attribute to check against.
|
||||
type: str
|
||||
ldap_attribute_value:
|
||||
description:
|
||||
- The ldap attribute value to check against.
|
||||
type: str
|
||||
members:
|
||||
description:
|
||||
- A list of user ref names (aaa/user).
|
||||
type: list
|
||||
default: []
|
||||
network:
|
||||
description:
|
||||
- The network reference name. The objects contains the known ip addresses for the authentication object (network/aaa).
|
||||
type: str
|
||||
default: ""
|
||||
radius_groups:
|
||||
description:
|
||||
- A list of radius group strings.
|
||||
type: list
|
||||
default: []
|
||||
tacacs_groups:
|
||||
description:
|
||||
- A list of tacacs group strings.
|
||||
type: list
|
||||
default: []
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Create UTM aaa_group
|
||||
utm_aaa_group:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestAAAGroupEntry
|
||||
backend_match: ldap
|
||||
dynamic: directory_groups
|
||||
ldap_attributes: memberof
|
||||
ldap_attributes_value: "cn=groupname,ou=Groups,dc=mydomain,dc=com"
|
||||
network: REF_OBJECT_STRING
|
||||
state: present
|
||||
|
||||
- name: Remove UTM aaa_group
|
||||
utm_aaa_group:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestAAAGroupEntry
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created.
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object.
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked.
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object.
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object.
|
||||
type: str
|
||||
adirectory_groups:
|
||||
description: List of Active Directory Groups.
|
||||
type: str
|
||||
adirectory_groups_sids:
|
||||
description: List of Active Directory Groups SIDS.
|
||||
type: list
|
||||
backend_match:
|
||||
description: The backend to use.
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string.
|
||||
type: str
|
||||
dynamic:
|
||||
description: Whether the group match is ipsec_dn or directory_group.
|
||||
type: str
|
||||
edirectory_groups:
|
||||
description: List of eDirectory Groups.
|
||||
type: str
|
||||
ipsec_dn:
|
||||
description: ipsec_dn identifier to match.
|
||||
type: str
|
||||
ldap_attribute:
|
||||
description: The LDAP Attribute to match against.
|
||||
type: str
|
||||
ldap_attribute_value:
|
||||
description: The LDAP Attribute Value to match against.
|
||||
type: str
|
||||
members:
|
||||
description: List of member identifiers of the group.
|
||||
type: list
|
||||
network:
|
||||
description: The identifier of the network (network/aaa).
|
||||
type: str
|
||||
radius_group:
|
||||
description: The radius group identifier.
|
||||
type: str
|
||||
tacacs_group:
|
||||
description: The tacacs group identifier.
|
||||
type: str
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Module entry point: create, update or remove a Sophos UTM aaa/group object."""
    endpoint = "aaa/group"
    # Attributes whose remote value is compared against the module arguments
    # to decide whether an update is required.
    change_relevant_keys = ["comment", "adirectory_groups", "adirectory_groups_sids", "backend_match", "dynamic",
                            "edirectory_groups", "ipsec_dn", "ldap_attribute", "ldap_attribute_value", "members",
                            "network", "radius_groups", "tacacs_groups"]

    group_spec = dict(
        name=dict(type='str', required=True),
        adirectory_groups=dict(type='list', elements='str', required=False, default=[]),
        adirectory_groups_sids=dict(type='dict', required=False, default={}),
        backend_match=dict(type='str', required=False, default="none",
                           choices=["none", "adirectory", "edirectory", "radius", "tacacs", "ldap"]),
        comment=dict(type='str', required=False, default=""),
        dynamic=dict(type='str', required=False, default="none", choices=["none", "ipsec_dn", "directory_groups"]),
        edirectory_groups=dict(type='list', elements='str', required=False, default=[]),
        ipsec_dn=dict(type='str', required=False, default=""),
        ldap_attribute=dict(type='str', required=False, default=""),
        ldap_attribute_value=dict(type='str', required=False, default=""),
        members=dict(type='list', elements='str', required=False, default=[]),
        network=dict(type='str', required=False, default=""),
        radius_groups=dict(type='list', elements='str', required=False, default=[]),
        tacacs_groups=dict(type='list', elements='str', required=False, default=[]),
    )
    module = UTMModule(argument_spec=group_spec)

    try:
        UTM(module, endpoint, change_relevant_keys).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,127 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_aaa_group_info
|
||||
|
||||
author:
|
||||
- Johannes Brunswicker (@MatrixCrawler)
|
||||
|
||||
short_description: get info for reverse_proxy frontend entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- get info for a reverse_proxy frontend entry in SOPHOS UTM.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Get info for UTM aaa_group
|
||||
utm_aaa_group_info:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestAAAGroupEntry
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
adirectory_groups:
|
||||
description: List of Active Directory Groups
|
||||
type: str
|
||||
adirectory_groups_sids:
|
||||
description: List of Active Directory Groups SIDS
|
||||
type: list
|
||||
backend_match:
|
||||
description: The backend to use
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
dynamic:
|
||||
description: Whether the group match is ipsec_dn or directory_group
|
||||
type: str
|
||||
edirectory_groups:
|
||||
description: List of eDirectory Groups
|
||||
type: str
|
||||
ipsec_dn:
|
||||
description: ipsec_dn identifier to match
|
||||
type: str
|
||||
ldap_attribute:
|
||||
description: The LDAP Attribute to match against
|
||||
type: str
|
||||
ldap_attribute_value:
|
||||
description: The LDAP Attribute Value to match against
|
||||
type: str
|
||||
members:
|
||||
description: List of member identifiers of the group
|
||||
type: list
|
||||
network:
|
||||
description: The identifier of the network (network/aaa)
|
||||
type: str
|
||||
radius_group:
|
||||
description: The radius group identifier
|
||||
type: str
|
||||
tacacs_group:
|
||||
description: The tacacs group identifier
|
||||
type: str
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Look up a single aaa/group object on a Sophos UTM appliance by name."""
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True)
        )
    )
    try:
        # Read-only lookup: no attributes need to be compared for change detection.
        UTM(module, "aaa/group", [], info_only=True).execute()
    except Exception as exc:
        module.fail_json(msg=to_native(exc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,163 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Stephan Schwarz <stearz@gmx.de>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_ca_host_key_cert
|
||||
|
||||
author:
|
||||
- Stephan Schwarz (@stearz)
|
||||
|
||||
short_description: create, update or destroy ca host_key_cert entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a ca host_key_cert entry in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry.
|
||||
required: true
|
||||
ca:
|
||||
description:
|
||||
- A reference to an existing utm_ca_signing_ca or utm_ca_verification_ca object.
|
||||
required: true
|
||||
meta:
|
||||
description:
|
||||
- A reference to an existing utm_ca_meta_x509 object.
|
||||
required: true
|
||||
certificate:
|
||||
description:
|
||||
- The certificate in PEM format.
|
||||
required: true
|
||||
comment:
|
||||
description:
|
||||
- Optional comment string.
|
||||
encrypted:
|
||||
description:
|
||||
- Optionally enable encryption.
|
||||
default: False
|
||||
type: bool
|
||||
key:
|
||||
description:
|
||||
- Optional private key in PEM format.
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
# Create a ca_host_key_cert entry
|
||||
- name: utm ca_host_key_cert
|
||||
utm_ca_host_key_cert:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestHostKeyCertEntry
|
||||
ca: REF_ca/signing_ca_OBJECT_STRING
|
||||
meta: REF_ca/meta_x509_OBJECT_STRING
|
||||
certificate: |
|
||||
--- BEGIN CERTIFICATE ---
|
||||
. . .
|
||||
. . .
|
||||
. . .
|
||||
--- END CERTIFICATE ---
|
||||
state: present
|
||||
|
||||
# Remove a ca_host_key_cert entry
|
||||
- name: utm ca_host_key_cert
|
||||
utm_ca_host_key_cert:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestHostKeyCertEntry
|
||||
state: absent
|
||||
|
||||
# Read a ca_host_key_cert entry
|
||||
- name: utm ca_host_key_cert
|
||||
utm_ca_host_key_cert:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestHostKeyCertEntry
|
||||
state: info
|
||||
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
ca:
|
||||
description: A reference to an existing utm_ca_signing_ca or utm_ca_verification_ca object.
|
||||
type: str
|
||||
meta:
|
||||
description: A reference to an existing utm_ca_meta_x509 object.
|
||||
type: str
|
||||
certificate:
|
||||
description: The certificate in PEM format
|
||||
type: str
|
||||
comment:
|
||||
description: Comment string (may be empty string)
|
||||
type: str
|
||||
encrypted:
|
||||
description: If encryption is enabled
|
||||
type: bool
|
||||
key:
|
||||
description: Private key in PEM format (may be empty string)
|
||||
type: str
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a ca/host_key_cert object on a Sophos UTM appliance."""
    argument_spec = dict(
        name=dict(type='str', required=True),
        ca=dict(type='str', required=True),
        meta=dict(type='str', required=True),
        certificate=dict(type='str', required=True),
        comment=dict(type='str', required=False),
        encrypted=dict(type='bool', required=False, default=False),
        key=dict(type='str', required=False, no_log=True),
    )
    module = UTMModule(argument_spec=argument_spec)
    # Attributes compared against the existing object to decide whether an
    # update request has to be sent to the UTM REST API.
    change_relevant_keys = ["ca", "certificate", "comment", "encrypted", "key", "meta"]
    try:
        UTM(module, "ca/host_key_cert", change_relevant_keys).execute()
    except Exception as exc:
        module.fail_json(msg=to_native(exc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Stephan Schwarz <stearz@gmx.de>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_ca_host_key_cert_info
|
||||
|
||||
author:
|
||||
- Stephan Schwarz (@stearz)
|
||||
|
||||
short_description: Get info for a ca host_key_cert entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Get info for a ca host_key_cert entry in SOPHOS UTM.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: utm ca_host_key_cert_info
|
||||
utm_ca_host_key_cert_info:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestHostKeyCertEntry
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
ca:
|
||||
description: A reference to an existing utm_ca_signing_ca or utm_ca_verification_ca object.
|
||||
type: str
|
||||
meta:
|
||||
description: A reference to an existing utm_ca_meta_x509 object.
|
||||
type: str
|
||||
certificate:
|
||||
description: The certificate in PEM format
|
||||
type: str
|
||||
comment:
|
||||
description: Comment string (may be empty string)
|
||||
type: str
|
||||
encrypted:
|
||||
description: If encryption is enabled
|
||||
type: bool
|
||||
key:
|
||||
description: Private key in PEM format (may be empty string)
|
||||
type: str
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Look up a single ca/host_key_cert object on a Sophos UTM appliance by name."""
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True)
        )
    )
    try:
        # Read-only lookup: no attributes need to be compared for change detection.
        UTM(module, "ca/host_key_cert", [], info_only=True).execute()
    except Exception as exc:
        module.fail_json(msg=to_native(exc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
156
plugins/modules/web_infrastructure/sophos_utm/utm_dns_host.py
Normal file
156
plugins/modules/web_infrastructure/sophos_utm/utm_dns_host.py
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_dns_host
|
||||
|
||||
author:
|
||||
- Johannes Brunswicker (@MatrixCrawler)
|
||||
|
||||
short_description: create, update or destroy dns entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a dns entry in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
address:
|
||||
description:
|
||||
- The IPV4 Address of the entry. Can be left empty for automatic resolving.
|
||||
default: 0.0.0.0
|
||||
address6:
|
||||
description:
|
||||
- The IPV6 Address of the entry. Can be left empty for automatic resolving.
|
||||
default: "::"
|
||||
comment:
|
||||
description:
|
||||
- An optional comment to add to the dns host object
|
||||
hostname:
|
||||
description:
|
||||
- The hostname for the dns host object
|
||||
interface:
|
||||
description:
|
||||
- The reference name of the interface to use. If not provided the default interface will be used
|
||||
resolved:
|
||||
description:
|
||||
- whether the hostname's ipv4 address is already resolved or not
|
||||
default: False
|
||||
type: bool
|
||||
resolved6:
|
||||
description:
|
||||
- whether the hostname's ipv6 address is already resolved or not
|
||||
default: False
|
||||
type: bool
|
||||
timeout:
|
||||
description:
|
||||
- the timeout for the utm to resolve the ip address for the hostname again
|
||||
default: 0
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Create UTM dns host entry
|
||||
utm_dns_host:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestDNSEntry
|
||||
hostname: testentry.some.tld
|
||||
state: present
|
||||
|
||||
- name: Remove UTM dns host entry
|
||||
utm_dns_host:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestDNSEntry
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
address:
|
||||
description: The ipv4 address of the object
|
||||
type: str
|
||||
address6:
|
||||
description: The ipv6 address of the object
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
hostname:
|
||||
description: The hostname of the object
|
||||
type: str
|
||||
interface:
|
||||
description: The reference name of the interface the object is associated with
|
||||
type: str
|
||||
resolved:
|
||||
description: Whether the ipv4 address is resolved or not
|
||||
type: bool
|
||||
resolved6:
|
||||
description: Whether the ipv6 address is resolved or not
|
||||
type: bool
|
||||
timeout:
|
||||
description: The timeout until a new resolving will be attempted
|
||||
type: int
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a network/dns_host object on a Sophos UTM appliance."""
    argument_spec = dict(
        name=dict(type='str', required=True),
        address=dict(type='str', required=False, default='0.0.0.0'),
        address6=dict(type='str', required=False, default='::'),
        comment=dict(type='str', required=False, default=""),
        hostname=dict(type='str', required=False),
        interface=dict(type='str', required=False, default=""),
        resolved=dict(type='bool', required=False, default=False),
        resolved6=dict(type='bool', required=False, default=False),
        timeout=dict(type='int', required=False, default=0),
    )
    module = UTMModule(argument_spec=argument_spec)
    # Attributes compared against the existing object to decide whether an
    # update request has to be sent to the UTM REST API.
    change_relevant_keys = ["comment", "hostname", "interface"]
    try:
        UTM(module, "network/dns_host", change_relevant_keys).execute()
    except Exception as exc:
        module.fail_json(msg=to_native(exc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,136 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Juergen Wiebe <wiebe@e-spirit.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_network_interface_address
|
||||
|
||||
author:
|
||||
- Juergen Wiebe (@steamx)
|
||||
|
||||
short_description: Create, update or destroy network/interface_address object
|
||||
|
||||
description:
|
||||
- Create, update or destroy a network/interface_address object in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
address:
|
||||
description:
|
||||
- The ip4 address of the network/interface_address object.
|
||||
required: true
|
||||
address6:
|
||||
description:
|
||||
- The ip6 address of the network/interface_address object.
|
||||
required: false
|
||||
comment:
|
||||
description:
|
||||
- An optional comment to add to the object
|
||||
resolved:
|
||||
description:
|
||||
- Whether or not the object is resolved
|
||||
resolved6:
|
||||
description:
|
||||
- Whether or not the object is resolved
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
# Create a network interface address
|
||||
- name: utm network interface address
|
||||
utm_network_interface_address:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestNetworkInterfaceAddress
|
||||
address: 0.0.0.0
|
||||
state: present
|
||||
|
||||
# Remove a network interface address
|
||||
- name: utm network interface address
|
||||
utm_network_interface_address:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestNetworkInterfaceAddress
|
||||
address: 0.0.0.0
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
address:
|
||||
description: The ip4 address of the network/interface_address object
|
||||
type: str
|
||||
address6:
|
||||
description: The ip6 address of the network/interface_address object
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
resolved:
|
||||
description: Whether or not the object is resolved
|
||||
type: bool
|
||||
resolved6:
|
||||
description: Whether or not the object is resolved
|
||||
type: bool
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a network/interface_address object on a Sophos UTM appliance."""
    endpoint = "network/interface_address"
    # Attributes compared against the existing object to decide whether an
    # update request has to be sent to the UTM REST API.
    key_to_check_for_changes = ["comment", "address"]
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            address=dict(type='str', required=True),
            comment=dict(type='str', required=False, default=""),
            address6=dict(type='str', required=False),
            # Fixed: 'boolean' is not a valid Ansible argument-spec type and makes
            # AnsibleModule abort with an implementation error; the valid type is 'bool'.
            resolved=dict(type='bool', required=False),
            resolved6=dict(type='bool', required=False)
        )
    )
    try:
        UTM(module, endpoint, key_to_check_for_changes).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Juergen Wiebe <wiebe@e-spirit.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_network_interface_address_info
|
||||
|
||||
author:
|
||||
- Juergen Wiebe (@steamx)
|
||||
|
||||
short_description: Get info for a network/interface_address object
|
||||
|
||||
description:
|
||||
- Get info for a network/interface_address object in SOPHOS UTM.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: utm network interface address
|
||||
utm_network_interface_address_info:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestNetworkInterfaceAddress
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
address:
|
||||
description: The ip4 address of the network/interface_address object
|
||||
type: str
|
||||
address6:
|
||||
description: The ip6 address of the network/interface_address object
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
resolved:
|
||||
description: Whether or not the object is resolved
|
||||
type: bool
|
||||
resolved6:
|
||||
description: Whether or not the object is resolved
|
||||
type: bool
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Look up a single network/interface_address object on a Sophos UTM appliance by name."""
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True)
        )
    )
    try:
        # Read-only lookup: no attributes need to be compared for change detection.
        UTM(module, "network/interface_address", [], info_only=True).execute()
    except Exception as exc:
        module.fail_json(msg=to_native(exc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,348 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Stephan Schwarz <stearz@gmx.de>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_proxy_auth_profile
|
||||
|
||||
author:
|
||||
- Stephan Schwarz (@stearz)
|
||||
|
||||
short_description: create, update or destroy reverse_proxy auth_profile entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a reverse_proxy auth_profile entry in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
aaa:
|
||||
description:
|
||||
- List of references to utm_aaa objects (allowed users or groups)
|
||||
required: true
|
||||
basic_prompt:
|
||||
description:
|
||||
- The message in the basic authentication prompt
|
||||
required: true
|
||||
backend_mode:
|
||||
description:
|
||||
- Specifies if the backend server needs authentication ([Basic|None])
|
||||
default: None
|
||||
choices:
|
||||
- Basic
|
||||
- None
|
||||
backend_strip_basic_auth:
|
||||
description:
|
||||
- Should the login data be stripped when proxying the request to the backend host
|
||||
type: bool
|
||||
default: True
|
||||
choices:
|
||||
- True
|
||||
- False
|
||||
backend_user_prefix:
|
||||
description:
|
||||
- Prefix string to prepend to the username for backend authentication
|
||||
default: ""
|
||||
backend_user_suffix:
|
||||
description:
|
||||
- Suffix string to append to the username for backend authentication
|
||||
default: ""
|
||||
comment:
|
||||
description:
|
||||
- Optional comment string
|
||||
default: ""
|
||||
frontend_cookie:
|
||||
description:
|
||||
- Frontend cookie name
|
||||
frontend_cookie_secret:
|
||||
description:
|
||||
- Frontend cookie secret
|
||||
frontend_form:
|
||||
description:
|
||||
- Frontend authentication form name
|
||||
frontend_form_template:
|
||||
description:
|
||||
- Frontend authentication form template
|
||||
default: ""
|
||||
frontend_login:
|
||||
description:
|
||||
- Frontend login name
|
||||
frontend_logout:
|
||||
description:
|
||||
- Frontend logout name
|
||||
frontend_mode:
|
||||
description:
|
||||
- Frontend authentication mode (Form|Basic)
|
||||
default: Basic
|
||||
choices:
|
||||
- Basic
|
||||
- Form
|
||||
frontend_realm:
|
||||
description:
|
||||
- Frontend authentication realm
|
||||
frontend_session_allow_persistency:
|
||||
description:
|
||||
- Allow session persistency
|
||||
type: bool
|
||||
default: False
|
||||
choices:
|
||||
- True
|
||||
- False
|
||||
frontend_session_lifetime:
|
||||
description:
|
||||
- session lifetime
|
||||
required: true
|
||||
frontend_session_lifetime_limited:
|
||||
description:
|
||||
- Specifies if limitation of session lifetime is active
|
||||
type: bool
|
||||
default: True
|
||||
choices:
|
||||
- True
|
||||
- False
|
||||
frontend_session_lifetime_scope:
|
||||
description:
|
||||
- scope for frontend_session_lifetime (days|hours|minutes)
|
||||
default: hours
|
||||
choices:
|
||||
- days
|
||||
- hours
|
||||
- minutes
|
||||
frontend_session_timeout:
|
||||
description:
|
||||
- session timeout
|
||||
required: true
|
||||
frontend_session_timeout_enabled:
|
||||
description:
|
||||
- Specifies if session timeout is active
|
||||
type: bool
|
||||
default: True
|
||||
choices:
|
||||
- True
|
||||
- False
|
||||
frontend_session_timeout_scope:
|
||||
description:
|
||||
- scope for frontend_session_timeout (days|hours|minutes)
|
||||
default: minutes
|
||||
choices:
|
||||
- days
|
||||
- hours
|
||||
- minutes
|
||||
logout_delegation_urls:
|
||||
description:
|
||||
- List of logout URLs that logouts are delegated to
|
||||
default: []
|
||||
logout_mode:
|
||||
description:
|
||||
- Mode of logout (None|Delegation)
|
||||
default: None
|
||||
choices:
|
||||
- None
|
||||
- Delegation
|
||||
redirect_to_requested_url:
|
||||
description:
|
||||
- Should a redirect to the requested URL be made
|
||||
type: bool
|
||||
default: False
|
||||
choices:
|
||||
- True
|
||||
- False
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Create UTM proxy_auth_profile
|
||||
utm_proxy_auth_profile:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestAuthProfileEntry
|
||||
aaa: [REF_OBJECT_STRING,REF_ANOTHEROBJECT_STRING]
|
||||
basic_prompt: "Authentication required: Please login"
|
||||
frontend_session_lifetime: 1
|
||||
frontend_session_timeout: 1
|
||||
state: present
|
||||
|
||||
- name: Remove UTM proxy_auth_profile
|
||||
utm_proxy_auth_profile:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestAuthProfileEntry
|
||||
state: absent
|
||||
|
||||
- name: Read UTM proxy_auth_profile
|
||||
utm_proxy_auth_profile:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestAuthProfileEntry
|
||||
state: info
|
||||
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
aaa:
|
||||
description: List of references to utm_aaa objects (allowed users or groups)
|
||||
type: list
|
||||
basic_prompt:
|
||||
description: The message in the basic authentication prompt
|
||||
type: str
|
||||
backend_mode:
|
||||
description: Specifies if the backend server needs authentication ([Basic|None])
|
||||
type: str
|
||||
backend_strip_basic_auth:
|
||||
description: Should the login data be stripped when proxying the request to the backend host
|
||||
type: bool
|
||||
backend_user_prefix:
|
||||
description: Prefix string to prepend to the username for backend authentication
|
||||
type: str
|
||||
backend_user_suffix:
|
||||
description: Suffix string to append to the username for backend authentication
|
||||
type: str
|
||||
comment:
|
||||
description: Optional comment string
|
||||
type: str
|
||||
frontend_cookie:
|
||||
description: Frontend cookie name
|
||||
type: str
|
||||
frontend_cookie_secret:
|
||||
description: Frontend cookie secret
|
||||
type: str
|
||||
frontend_form:
|
||||
description: Frontend authentication form name
|
||||
type: str
|
||||
frontend_form_template:
|
||||
description: Frontend authentication form template
|
||||
type: str
|
||||
frontend_login:
|
||||
description: Frontend login name
|
||||
type: str
|
||||
frontend_logout:
|
||||
description: Frontend logout name
|
||||
type: str
|
||||
frontend_mode:
|
||||
description: Frontend authentication mode (Form|Basic)
|
||||
type: str
|
||||
frontend_realm:
|
||||
description: Frontend authentication realm
|
||||
type: str
|
||||
frontend_session_allow_persistency:
|
||||
description: Allow session persistency
|
||||
type: bool
|
||||
frontend_session_lifetime:
|
||||
description: session lifetime
|
||||
type: int
|
||||
frontend_session_lifetime_limited:
|
||||
description: Specifies if limitation of session lifetime is active
|
||||
type: bool
|
||||
frontend_session_lifetime_scope:
|
||||
description: scope for frontend_session_lifetime (days|hours|minutes)
|
||||
type: str
|
||||
frontend_session_timeout:
|
||||
description: session timeout
|
||||
type: int
|
||||
frontend_session_timeout_enabled:
|
||||
description: Specifies if session timeout is active
|
||||
type: bool
|
||||
frontend_session_timeout_scope:
|
||||
description: scope for frontend_session_timeout (days|hours|minutes)
|
||||
type: str
|
||||
logout_delegation_urls:
|
||||
description: List of logout URLs that logouts are delegated to
|
||||
type: list
|
||||
logout_mode:
|
||||
description: Mode of logout (None|Delegation)
|
||||
type: str
|
||||
redirect_to_requested_url:
|
||||
description: Should a redirect to the requested URL be made
|
||||
type: bool
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a reverse_proxy auth_profile object in Sophos UTM.

    Builds the Ansible argument spec, then delegates the REST interaction
    (lookup, diff, create/update/delete) to the shared UTM helper. Any
    failure from the helper is reported via fail_json.
    """
    endpoint = "reverse_proxy/auth_profile"
    # Attributes compared against the existing UTM object to decide whether
    # an update request is necessary (i.e. whether the task reports changed).
    key_to_check_for_changes = ["aaa", "basic_prompt", "backend_mode", "backend_strip_basic_auth",
                                "backend_user_prefix", "backend_user_suffix", "comment", "frontend_cookie",
                                "frontend_cookie_secret", "frontend_form", "frontend_form_template",
                                "frontend_login", "frontend_logout", "frontend_mode", "frontend_realm",
                                "frontend_session_allow_persistency", "frontend_session_lifetime",
                                "frontend_session_lifetime_limited", "frontend_session_lifetime_scope",
                                "frontend_session_timeout", "frontend_session_timeout_enabled",
                                "frontend_session_timeout_scope", "logout_delegation_urls", "logout_mode",
                                "redirect_to_requested_url"]

    # NOTE(fix): 'choices' is not a valid constraint for type='bool' options in an
    # Ansible argument spec (booleans are already restricted to true/false), so the
    # redundant choices=[True, False] entries have been removed.
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            aaa=dict(type='list', elements='str', required=True),
            basic_prompt=dict(type='str', required=True),
            backend_mode=dict(type='str', required=False, default="None", choices=['Basic', 'None']),
            backend_strip_basic_auth=dict(type='bool', required=False, default=True),
            backend_user_prefix=dict(type='str', required=False, default=""),
            backend_user_suffix=dict(type='str', required=False, default=""),
            comment=dict(type='str', required=False, default=""),
            frontend_cookie=dict(type='str', required=False),
            frontend_cookie_secret=dict(type='str', required=False),
            frontend_form=dict(type='str', required=False),
            frontend_form_template=dict(type='str', required=False, default=""),
            frontend_login=dict(type='str', required=False),
            frontend_logout=dict(type='str', required=False),
            frontend_mode=dict(type='str', required=False, default="Basic", choices=['Basic', 'Form']),
            frontend_realm=dict(type='str', required=False),
            frontend_session_allow_persistency=dict(type='bool', required=False, default=False),
            frontend_session_lifetime=dict(type='int', required=True),
            frontend_session_lifetime_limited=dict(type='bool', required=False, default=True),
            frontend_session_lifetime_scope=dict(type='str', required=False, default="hours", choices=['days', 'hours', 'minutes']),
            frontend_session_timeout=dict(type='int', required=True),
            frontend_session_timeout_enabled=dict(type='bool', required=False, default=True),
            frontend_session_timeout_scope=dict(type='str', required=False, default="minutes", choices=['days', 'hours', 'minutes']),
            logout_delegation_urls=dict(type='list', elements='str', required=False, default=[]),
            logout_mode=dict(type='str', required=False, default="None", choices=['None', 'Delegation']),
            redirect_to_requested_url=dict(type='bool', required=False, default=False)
        )
    )
    try:
        UTM(module, endpoint, key_to_check_for_changes).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,242 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Sebastian Schenzel <sebastian.schenzel@mailbox.org>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_proxy_exception
|
||||
|
||||
author:
|
||||
- Sebastian Schenzel (@RickS-C137)
|
||||
|
||||
short_description: Create, update or destroy reverse_proxy exception entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a reverse_proxy exception entry in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: True
|
||||
type: str
|
||||
op:
|
||||
description:
|
||||
- The operand to be used with the entries of the path parameter
|
||||
default: 'AND'
|
||||
choices:
|
||||
- 'AND'
|
||||
- 'OR'
|
||||
required: False
|
||||
type: str
|
||||
path:
|
||||
description:
|
||||
- The paths the exception in the reverse proxy is defined for
|
||||
type: list
|
||||
default: []
|
||||
required: False
|
||||
skip_custom_threats_filters:
|
||||
description:
|
||||
- A list of threats to be skipped
|
||||
type: list
|
||||
default: []
|
||||
required: False
|
||||
skip_threats_filter_categories:
|
||||
description:
|
||||
- Define which categories of threats are skipped
|
||||
type: list
|
||||
default: []
|
||||
required: False
|
||||
skipav:
|
||||
description:
|
||||
- Skip the Antivirus Scanning
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skipbadclients:
|
||||
description:
|
||||
- Block clients with bad reputation
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skipcookie:
|
||||
description:
|
||||
- Skip the Cookie Signing check
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skipform:
|
||||
description:
|
||||
- Enable form hardening
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skipform_missingtoken:
|
||||
description:
|
||||
- Enable form hardening with missing tokens
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skiphtmlrewrite:
|
||||
description:
|
||||
- Protection against SQL
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skiptft:
|
||||
description:
|
||||
- Enable true file type control
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
skipurl:
|
||||
description:
|
||||
- Enable static URL hardening
|
||||
default: False
|
||||
type: bool
|
||||
required: False
|
||||
source:
|
||||
description:
|
||||
- Define which categories of threats are skipped
|
||||
type: list
|
||||
default: []
|
||||
required: False
|
||||
status:
|
||||
description:
|
||||
- Status of the exception rule set
|
||||
default: True
|
||||
type: bool
|
||||
required: False
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Create UTM proxy_exception
|
||||
utm_proxy_exception:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestExceptionEntry
|
||||
backend: REF_OBJECT_STRING
|
||||
state: present
|
||||
|
||||
- name: Remove UTM proxy_exception
|
||||
utm_proxy_exception:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestExceptionEntry
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
comment:
|
||||
description: The optional comment string
|
||||
op:
|
||||
description: The operand to be used with the entries of the path parameter
|
||||
type: str
|
||||
path:
|
||||
description: The paths the exception in the reverse proxy is defined for
|
||||
type: list
|
||||
skip_custom_threats_filters:
|
||||
description: A list of threats to be skipped
|
||||
type: list
|
||||
skip_threats_filter_categories:
|
||||
description: Define which categories of threats are skipped
|
||||
type: list
|
||||
skipav:
|
||||
description: Skip the Antivirus Scanning
|
||||
type: bool
|
||||
skipbadclients:
|
||||
description: Block clients with bad reputation
|
||||
type: bool
|
||||
skipcookie:
|
||||
description: Skip the Cookie Signing check
|
||||
type: bool
|
||||
skipform:
|
||||
description: Enable form hardening
|
||||
type: bool
|
||||
skipform_missingtoken:
|
||||
description: Enable form hardening with missing tokens
|
||||
type: bool
|
||||
skiphtmlrewrite:
|
||||
description: Protection against SQL
|
||||
type: bool
|
||||
skiptft:
|
||||
description: Enable true file type control
|
||||
type: bool
|
||||
skipurl:
|
||||
description: Enable static URL hardening
|
||||
type: bool
|
||||
source:
|
||||
description: Define which categories of threats are skipped
|
||||
type: list
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a reverse_proxy exception object in Sophos UTM.

    Builds the Ansible argument spec, then delegates the REST interaction
    (lookup, diff, create/update/delete) to the shared UTM helper. Any
    failure from the helper is reported via fail_json.
    """
    endpoint = "reverse_proxy/exception"
    # Attributes compared against the existing UTM object to decide whether
    # an update request is necessary (i.e. whether the task reports changed).
    key_to_check_for_changes = ["op", "path", "skip_custom_threats_filters", "skip_threats_filter_categories", "skipav",
                                "comment", "skipbadclients", "skipcookie", "skipform", "status", "skipform_missingtoken",
                                "skiphtmlrewrite", "skiptft", "skipurl", "source"]
    # NOTE(fix): list options previously declared elements='string', which is not a
    # valid Ansible element type; the correct type name is 'str'.
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            op=dict(type='str', required=False, default='AND', choices=['AND', 'OR']),
            path=dict(type='list', elements='str', required=False, default=[]),
            skip_custom_threats_filters=dict(type='list', elements='str', required=False, default=[]),
            skip_threats_filter_categories=dict(type='list', elements='str', required=False, default=[]),
            skipav=dict(type='bool', required=False, default=False),
            skipbadclients=dict(type='bool', required=False, default=False),
            skipcookie=dict(type='bool', required=False, default=False),
            skipform=dict(type='bool', required=False, default=False),
            skipform_missingtoken=dict(type='bool', required=False, default=False),
            skiphtmlrewrite=dict(type='bool', required=False, default=False),
            skiptft=dict(type='bool', required=False, default=False),
            skipurl=dict(type='bool', required=False, default=False),
            source=dict(type='list', elements='str', required=False, default=[]),
            status=dict(type='bool', required=False, default=True),
        )
    )
    try:
        UTM(module, endpoint, key_to_check_for_changes).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,268 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_proxy_frontend
|
||||
|
||||
author:
|
||||
- Johannes Brunswicker (@MatrixCrawler)
|
||||
|
||||
short_description: create, update or destroy reverse_proxy frontend entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a reverse_proxy frontend entry in Sophos UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
add_content_type_header :
|
||||
description:
|
||||
- Whether to add the content type header or not
|
||||
type: bool
|
||||
default: False
|
||||
address:
|
||||
description:
|
||||
- The reference name of the network/interface_address object.
|
||||
default: REF_DefaultInternalAddress
|
||||
allowed_networks:
|
||||
description:
|
||||
- A list of reference names for the allowed networks.
|
||||
default: ['REF_NetworkAny']
|
||||
certificate:
|
||||
description:
|
||||
- The reference name of the ca/host_key_cert object.
|
||||
default: ""
|
||||
comment:
|
||||
description:
|
||||
- An optional comment to add to the object
|
||||
default: ""
|
||||
disable_compression:
|
||||
description:
|
||||
- Whether to enable the compression
|
||||
type: bool
|
||||
default: False
|
||||
domain:
|
||||
description:
|
||||
- A list of domain names for the frontend object
|
||||
exceptions:
|
||||
description:
|
||||
- A list of exception ref names (reverse_proxy/exception)
|
||||
default: []
|
||||
htmlrewrite:
|
||||
description:
|
||||
- Whether to enable html rewrite or not
|
||||
type: bool
|
||||
default: False
|
||||
htmlrewrite_cookies:
|
||||
description:
|
||||
- Whether to enable html rewrite cookie or not
|
||||
type: bool
|
||||
default: False
|
||||
implicitredirect:
|
||||
description:
|
||||
- Whether to enable implicit redirection or not
|
||||
type: bool
|
||||
default: False
|
||||
lbmethod:
|
||||
description:
|
||||
- Which loadbalancer method should be used
|
||||
choices:
|
||||
- ""
|
||||
- bybusyness
|
||||
- bytraffic
|
||||
- byrequests
|
||||
default: bybusyness
|
||||
locations:
|
||||
description:
|
||||
- A list of location ref names (reverse_proxy/location)
|
||||
default: []
|
||||
port:
|
||||
description:
|
||||
- The frontend http port
|
||||
default: 80
|
||||
preservehost:
|
||||
description:
|
||||
- Whether to preserve host header
|
||||
type: bool
|
||||
default: False
|
||||
profile:
|
||||
description:
|
||||
- The reference string of the reverse_proxy/profile
|
||||
default: ""
|
||||
status:
|
||||
description:
|
||||
- Whether to activate the frontend entry or not
|
||||
type: bool
|
||||
default: True
|
||||
type:
|
||||
description:
|
||||
- Which protocol should be used
|
||||
choices:
|
||||
- http
|
||||
- https
|
||||
default: http
|
||||
xheaders:
|
||||
description:
|
||||
- Whether to pass the host header or not
|
||||
type: bool
|
||||
default: False
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Create utm proxy_frontend
|
||||
utm_proxy_frontend:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestFrontendEntry
|
||||
host: REF_OBJECT_STRING
|
||||
state: present
|
||||
|
||||
- name: Remove utm proxy_frontend
|
||||
utm_proxy_frontend:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestFrontendEntry
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
add_content_type_header:
|
||||
description: Whether to add the content type header
|
||||
type: bool
|
||||
address:
|
||||
description: The reference name of the address
|
||||
type: str
|
||||
allowed_networks:
|
||||
description: List of reference names of networks associated
|
||||
type: list
|
||||
certificate:
|
||||
description: Reference name of certificate (ca/host_key_cert)
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
disable_compression:
|
||||
description: State of compression support
|
||||
type: bool
|
||||
domain:
|
||||
description: List of hostnames
|
||||
type: list
|
||||
exceptions:
|
||||
description: List of associated proxy exceptions
|
||||
type: list
|
||||
htmlrewrite:
|
||||
description: State of html rewrite
|
||||
type: bool
|
||||
htmlrewrite_cookies:
|
||||
description: Whether the html rewrite cookie will be set
|
||||
type: bool
|
||||
implicitredirect:
|
||||
description: Whether to use implicit redirection
|
||||
type: bool
|
||||
lbmethod:
|
||||
description: The method of loadbalancer to use
|
||||
type: str
|
||||
locations:
|
||||
description: The reference names of reverse_proxy/locations associated with the object
|
||||
type: list
|
||||
port:
|
||||
description: The port of the frontend connection
|
||||
type: int
|
||||
preservehost:
|
||||
description: Preserve host header
|
||||
type: bool
|
||||
profile:
|
||||
description: The associated reverse_proxy/profile
|
||||
type: str
|
||||
status:
|
||||
description: Whether the frontend object is active or not
|
||||
type: bool
|
||||
type:
|
||||
description: The connection type
|
||||
type: str
|
||||
xheaders:
|
||||
description: The xheaders state
|
||||
type: bool
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a reverse_proxy frontend object in Sophos UTM.

    Assembles the argument spec and hands the REST workflow over to the
    shared UTM helper; helper errors are surfaced through fail_json.
    """
    # Attributes that, when different from the remote object, trigger an update.
    change_relevant_keys = ["add_content_type_header", "address", "allowed_networks", "certificate",
                            "comment", "disable_compression", "domain", "exceptions", "htmlrewrite",
                            "htmlrewrite_cookies", "implicitredirect", "lbmethod", "locations",
                            "port", "preservehost", "profile", "status", "type", "xheaders"]

    frontend_arguments = dict(
        name=dict(type='str', required=True),
        add_content_type_header=dict(type='bool', required=False, default=False),
        address=dict(type='str', required=False, default="REF_DefaultInternalAddress"),
        allowed_networks=dict(type='list', elements='str', required=False, default=["REF_NetworkAny"]),
        certificate=dict(type='str', required=False, default=""),
        comment=dict(type='str', required=False, default=""),
        disable_compression=dict(type='bool', required=False, default=False),
        domain=dict(type='list', elements='str', required=False),
        exceptions=dict(type='list', elements='str', required=False, default=[]),
        htmlrewrite=dict(type='bool', required=False, default=False),
        htmlrewrite_cookies=dict(type='bool', required=False, default=False),
        implicitredirect=dict(type='bool', required=False, default=False),
        lbmethod=dict(type='str', required=False, default="bybusyness",
                      choices=['bybusyness', 'bytraffic', 'byrequests', '']),
        locations=dict(type='list', elements='str', required=False, default=[]),
        port=dict(type='int', required=False, default=80),
        preservehost=dict(type='bool', required=False, default=False),
        profile=dict(type='str', required=False, default=""),
        status=dict(type='bool', required=False, default=True),
        type=dict(type='str', required=False, default="http", choices=['http', 'https']),
        xheaders=dict(type='bool', required=False, default=False),
    )
    module = UTMModule(argument_spec=frontend_arguments)

    try:
        UTM(module, "reverse_proxy/frontend", change_relevant_keys).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,146 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_proxy_frontend_info
|
||||
|
||||
author:
|
||||
- Johannes Brunswicker (@MatrixCrawler)
|
||||
|
||||
short_description: create, update or destroy reverse_proxy frontend entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a reverse_proxy frontend entry in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Get utm proxy_frontend
|
||||
utm_proxy_frontend_info:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestBackendEntry
|
||||
host: REF_OBJECT_STRING
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
add_content_type_header:
|
||||
description: Whether to add the content type header
|
||||
type: bool
|
||||
address:
|
||||
description: The reference name of the address
|
||||
type: str
|
||||
allowed_networks:
|
||||
description: List of reference names of networks associated
|
||||
type: list
|
||||
certificate:
|
||||
description: Reference name of certificate (ca/host_key_cert)
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
disable_compression:
|
||||
description: State of compression support
|
||||
type: bool
|
||||
domain:
|
||||
description: List of hostnames
|
||||
type: list
|
||||
exceptions:
|
||||
description: List of associated proxy exceptions
|
||||
type: list
|
||||
htmlrewrite:
|
||||
description: State of html rewrite
|
||||
type: bool
|
||||
htmlrewrite_cookies:
|
||||
description: whether the html rewrite cookie will be set
|
||||
type: bool
|
||||
implicitredirect:
|
||||
description: whether to use implicit redirection
|
||||
type: bool
|
||||
lbmethod:
|
||||
description: The method of loadbalancer to use
|
||||
type: str
|
||||
locations:
|
||||
description: The reference names of reverse_proxy/locations associated with the object
|
||||
type: list
|
||||
port:
|
||||
description: The port of the frontend connection
|
||||
type: int
|
||||
preservehost:
|
||||
description: Preserve host header
|
||||
type: bool
|
||||
profile:
|
||||
description: The associated reverse_proxy/profile
|
||||
type: str
|
||||
status:
|
||||
description: Whether the frontend object is active or not
|
||||
type: bool
|
||||
type:
|
||||
description: The connection type
|
||||
type: str
|
||||
xheaders:
|
||||
description: The xheaders state
|
||||
type: bool
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Look up a reverse_proxy frontend object by name on the Sophos UTM.

    Info-only: the helper fetches and reports the object without modifying it,
    so no change-detection keys are needed.
    """
    info_module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True)
        )
    )
    try:
        # info_only=True makes the helper a pure read; the empty list means
        # no attributes participate in change detection.
        UTM(info_module, "reverse_proxy/frontend", [], info_only=True).execute()
    except Exception as exc:
        info_module.fail_json(msg=to_native(exc))


if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,209 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: utm_proxy_location
|
||||
|
||||
author:
|
||||
- Johannes Brunswicker (@MatrixCrawler)
|
||||
|
||||
short_description: create, update or destroy reverse_proxy location entry in Sophos UTM
|
||||
|
||||
description:
|
||||
- Create, update or destroy a reverse_proxy location entry in SOPHOS UTM.
|
||||
- This module needs to have the REST Ability of the UTM to be activated.
|
||||
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the object. Will be used to identify the entry
|
||||
required: true
|
||||
access_control:
|
||||
description:
|
||||
- whether to activate the access control for the location
|
||||
type: str
|
||||
default: '0'
|
||||
choices:
|
||||
- '0'
|
||||
- '1'
|
||||
allowed_networks:
|
||||
description:
|
||||
- A list of allowed networks
|
||||
type: list
|
||||
default: REF_NetworkAny
|
||||
auth_profile:
|
||||
description:
|
||||
- The reference name of the auth profile
|
||||
backend:
|
||||
description:
|
||||
- A list of backends that are connected with this location declaration
|
||||
default: []
|
||||
be_path:
|
||||
description:
|
||||
- The path of the backend
|
||||
comment:
|
||||
description:
|
||||
- The optional comment string
|
||||
denied_networks:
|
||||
description:
|
||||
- A list of denied network references
|
||||
default: []
|
||||
hot_standby:
|
||||
description:
|
||||
- Activate hot standby mode
|
||||
type: bool
|
||||
default: False
|
||||
path:
|
||||
description:
|
||||
- The path of the location
|
||||
default: "/"
|
||||
status:
|
||||
description:
|
||||
- Whether the location is active or not
|
||||
type: bool
|
||||
default: True
|
||||
stickysession_id:
|
||||
description:
|
||||
- The stickysession id
|
||||
default: ROUTEID
|
||||
stickysession_status:
|
||||
description:
|
||||
- Enable the stickysession
|
||||
type: bool
|
||||
default: False
|
||||
websocket_passthrough:
|
||||
description:
|
||||
- Enable the websocket passthrough
|
||||
type: bool
|
||||
default: False
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.utm
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Create UTM proxy_location
|
||||
utm_proxy_location:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestLocationEntry
|
||||
backend: REF_OBJECT_STRING
|
||||
state: present
|
||||
|
||||
- name: Remove UTM proxy_location
|
||||
utm_proxy_location:
|
||||
utm_host: sophos.host.name
|
||||
utm_token: abcdefghijklmno1234
|
||||
name: TestLocationEntry
|
||||
state: absent
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
result:
|
||||
description: The utm object that was created
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_ref:
|
||||
description: The reference name of the object
|
||||
type: str
|
||||
_locked:
|
||||
description: Whether or not the object is currently locked
|
||||
type: bool
|
||||
_type:
|
||||
description: The type of the object
|
||||
type: str
|
||||
name:
|
||||
description: The name of the object
|
||||
type: str
|
||||
access_control:
|
||||
description: Whether to use access control state
|
||||
type: str
|
||||
allowed_networks:
|
||||
description: List of allowed network reference names
|
||||
type: list
|
||||
auth_profile:
|
||||
description: The auth profile reference name
|
||||
type: str
|
||||
backend:
|
||||
description: The backend reference name
|
||||
type: str
|
||||
be_path:
|
||||
description: The backend path
|
||||
type: str
|
||||
comment:
|
||||
description: The comment string
|
||||
type: str
|
||||
denied_networks:
|
||||
description: The list of the denied network names
|
||||
type: list
|
||||
hot_standby:
|
||||
description: Use hot standby
|
||||
type: bool
|
||||
path:
|
||||
description: Path name
|
||||
type: str
|
||||
status:
|
||||
description: Whether the object is active or not
|
||||
type: bool
|
||||
stickysession_id:
|
||||
description: The identifier of the stickysession
|
||||
type: str
|
||||
stickysession_status:
|
||||
description: Whether to use stickysession or not
|
||||
type: bool
|
||||
websocket_passthrough:
|
||||
description: Whether websocket passthrough will be used or not
|
||||
type: bool
|
||||
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Create, update or remove a reverse_proxy location object in Sophos UTM.

    Assembles the argument spec and hands the REST workflow over to the
    shared UTM helper; helper errors are surfaced through fail_json.
    """
    # Attributes that, when different from the remote object, trigger an update.
    change_relevant_keys = ["access_control", "allowed_networks", "auth_profile", "backend", "be_path", "comment",
                            "denied_networks", "hot_standby", "path", "status", "stickysession_id",
                            "stickysession_status", "websocket_passthrough"]

    location_arguments = dict(
        name=dict(type='str', required=True),
        access_control=dict(type='str', required=False, default="0", choices=['0', '1']),
        allowed_networks=dict(type='list', elements='str', required=False, default=['REF_NetworkAny']),
        auth_profile=dict(type='str', required=False, default=""),
        backend=dict(type='list', elements='str', required=False, default=[]),
        be_path=dict(type='str', required=False, default=""),
        comment=dict(type='str', required=False, default=""),
        denied_networks=dict(type='list', elements='str', required=False, default=[]),
        hot_standby=dict(type='bool', required=False, default=False),
        path=dict(type='str', required=False, default="/"),
        status=dict(type='bool', required=False, default=True),
        stickysession_id=dict(type='str', required=False, default='ROUTEID'),
        stickysession_status=dict(type='bool', required=False, default=False),
        websocket_passthrough=dict(type='bool', required=False, default=False),
    )
    module = UTMModule(argument_spec=location_arguments)

    try:
        UTM(module, "reverse_proxy/location", change_relevant_keys).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,127 @@
|
|||
#!/usr/bin/python

# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function

__metaclass__ = type

ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}

# NOTE: this is an info-only module (the implementation below calls UTM with
# info_only=True); the documentation previously claimed it could create,
# update or destroy entries, which was copy-pasted from utm_proxy_location.
DOCUMENTATION = '''
---
module: utm_proxy_location_info

author:
    - Johannes Brunswicker (@MatrixCrawler)

short_description: get information about a reverse_proxy location entry in Sophos UTM

description:
    - Get information about a reverse_proxy location entry in SOPHOS UTM.
    - This module needs to have the REST Ability of the UTM to be activated.

options:
    name:
        description:
          - The name of the object. Will be used to identify the entry
        required: true

extends_documentation_fragment:
- community.general.utm

'''

EXAMPLES = """
- name: Get UTM proxy_location info
  utm_proxy_location_info:
    utm_host: sophos.host.name
    utm_token: abcdefghijklmno1234
    name: TestLocationEntry
"""

RETURN = """
result:
    description: The utm object that was found
    returned: success
    type: complex
    contains:
        _ref:
            description: The reference name of the object
            type: str
        _locked:
            description: Whether or not the object is currently locked
            type: bool
        _type:
            description: The type of the object
            type: str
        name:
            description: The name of the object
            type: str
        access_control:
            description: Whether to use access control state
            type: str
        allowed_networks:
            description: List of allowed network reference names
            type: list
        auth_profile:
            description: The auth profile reference name
            type: str
        backend:
            description: The backend reference name
            type: str
        be_path:
            description: The backend path
            type: str
        comment:
            description: The comment string
            type: str
        denied_networks:
            description: The list of the denied network names
            type: list
        hot_standby:
            description: Use hot standy
            type: bool
        path:
            description: Path name
            type: str
        status:
            description: Whether the object is active or not
            type: bool
        stickysession_id:
            description: The identifier of the stickysession
            type: str
        stickysession_status:
            description: Whether to use stickysession or not
            type: bool
        websocket_passthrough:
            description: Whether websocket passthrough will be used or not
            type: bool
"""
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM, UTMModule
from ansible.module_utils._text import to_native


def main():
    """Entry point: look up a reverse_proxy location entry on a Sophos UTM."""
    rest_endpoint = "reverse_proxy/location"
    # Info-only lookup: no attributes participate in change detection.
    change_relevant_keys = []
    argument_spec = dict(
        name=dict(type='str', required=True)
    )
    module = UTMModule(argument_spec=argument_spec)
    try:
        UTM(module, rest_endpoint, change_relevant_keys, info_only=True).execute()
    except Exception as exc:
        # Surface any UTM/REST failure through Ansible's failure channel.
        module.fail_json(msg=to_native(exc))


if __name__ == '__main__':
    main()
|
||||
254
plugins/modules/web_infrastructure/supervisorctl.py
Normal file
254
plugins/modules/web_infrastructure/supervisorctl.py
Normal file
|
|
@ -0,0 +1,254 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2012, Matt Wright <matt@nobien.net>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: supervisorctl
|
||||
short_description: Manage the state of a program or group of programs running via supervisord
|
||||
description:
|
||||
- Manage the state of a program or group of programs running via supervisord
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the supervisord program or group to manage.
|
||||
- The name will be taken as group name when it ends with a colon I(:)
|
||||
- Group support is only available in Ansible version 1.6 or later.
|
||||
required: true
|
||||
config:
|
||||
description:
|
||||
- The supervisor configuration file path
|
||||
server_url:
|
||||
description:
|
||||
- URL on which supervisord server is listening
|
||||
username:
|
||||
description:
|
||||
- username to use for authentication
|
||||
password:
|
||||
description:
|
||||
- password to use for authentication
|
||||
state:
|
||||
description:
|
||||
- The desired state of program/group.
|
||||
required: true
|
||||
choices: [ "present", "started", "stopped", "restarted", "absent", "signalled" ]
|
||||
signal:
|
||||
description:
|
||||
- The signal to send to the program/group, when combined with the 'signalled' state. Required when l(state=signalled).
|
||||
supervisorctl_path:
|
||||
description:
|
||||
- path to supervisorctl executable
|
||||
notes:
|
||||
- When C(state) = I(present), the module will call C(supervisorctl reread) then C(supervisorctl add) if the program/group does not exist.
|
||||
- When C(state) = I(restarted), the module will call C(supervisorctl update) then call C(supervisorctl restart).
|
||||
- When C(state) = I(absent), the module will call C(supervisorctl reread) then C(supervisorctl remove) to remove the target program/group.
|
||||
requirements: [ "supervisorctl" ]
|
||||
author:
|
||||
- "Matt Wright (@mattupstate)"
|
||||
- "Aaron Wang (@inetfuture) <inetfuture@gmail.com>"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Manage the state of program to be in 'started' state.
|
||||
- supervisorctl:
|
||||
name: my_app
|
||||
state: started
|
||||
|
||||
# Manage the state of program group to be in 'started' state.
|
||||
- supervisorctl:
|
||||
name: 'my_apps:'
|
||||
state: started
|
||||
|
||||
# Restart my_app, reading supervisorctl configuration from a specified file.
|
||||
- supervisorctl:
|
||||
name: my_app
|
||||
state: restarted
|
||||
config: /var/opt/my_project/supervisord.conf
|
||||
|
||||
# Restart my_app, connecting to supervisord with credentials and server URL.
|
||||
- supervisorctl:
|
||||
name: my_app
|
||||
state: restarted
|
||||
username: test
|
||||
password: testpass
|
||||
server_url: http://localhost:9001
|
||||
|
||||
# Send a signal to my_app via supervisorctl
|
||||
- supervisorctl:
|
||||
name: my_app
|
||||
state: signalled
|
||||
signal: USR1
|
||||
'''
|
||||
|
||||
import os
|
||||
from ansible.module_utils.basic import AnsibleModule, is_executable
|
||||
|
||||
|
||||
def main():
|
||||
arg_spec = dict(
|
||||
name=dict(required=True),
|
||||
config=dict(required=False, type='path'),
|
||||
server_url=dict(required=False),
|
||||
username=dict(required=False),
|
||||
password=dict(required=False, no_log=True),
|
||||
supervisorctl_path=dict(required=False, type='path'),
|
||||
state=dict(required=True, choices=['present', 'started', 'restarted', 'stopped', 'absent', 'signalled']),
|
||||
signal=dict(required=False)
|
||||
)
|
||||
|
||||
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
|
||||
|
||||
name = module.params['name']
|
||||
is_group = False
|
||||
if name.endswith(':'):
|
||||
is_group = True
|
||||
name = name.rstrip(':')
|
||||
state = module.params['state']
|
||||
config = module.params.get('config')
|
||||
server_url = module.params.get('server_url')
|
||||
username = module.params.get('username')
|
||||
password = module.params.get('password')
|
||||
supervisorctl_path = module.params.get('supervisorctl_path')
|
||||
signal = module.params.get('signal')
|
||||
|
||||
# we check error message for a pattern, so we need to make sure that's in C locale
|
||||
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
|
||||
|
||||
if supervisorctl_path:
|
||||
if os.path.exists(supervisorctl_path) and is_executable(supervisorctl_path):
|
||||
supervisorctl_args = [supervisorctl_path]
|
||||
else:
|
||||
module.fail_json(
|
||||
msg="Provided path to supervisorctl does not exist or isn't executable: %s" % supervisorctl_path)
|
||||
else:
|
||||
supervisorctl_args = [module.get_bin_path('supervisorctl', True)]
|
||||
|
||||
if config:
|
||||
supervisorctl_args.extend(['-c', config])
|
||||
if server_url:
|
||||
supervisorctl_args.extend(['-s', server_url])
|
||||
if username:
|
||||
supervisorctl_args.extend(['-u', username])
|
||||
if password:
|
||||
supervisorctl_args.extend(['-p', password])
|
||||
|
||||
if state == 'signalled' and not signal:
|
||||
module.fail_json(msg="State 'signalled' requires a 'signal' value")
|
||||
|
||||
def run_supervisorctl(cmd, name=None, **kwargs):
|
||||
args = list(supervisorctl_args) # copy the master args
|
||||
args.append(cmd)
|
||||
if name:
|
||||
args.append(name)
|
||||
return module.run_command(args, **kwargs)
|
||||
|
||||
def get_matched_processes():
|
||||
matched = []
|
||||
rc, out, err = run_supervisorctl('status')
|
||||
for line in out.splitlines():
|
||||
# One status line may look like one of these two:
|
||||
# process not in group:
|
||||
# echo_date_lonely RUNNING pid 7680, uptime 13:22:18
|
||||
# process in group:
|
||||
# echo_date_group:echo_date_00 RUNNING pid 7681, uptime 13:22:18
|
||||
fields = [field for field in line.split(' ') if field != '']
|
||||
process_name = fields[0]
|
||||
status = fields[1]
|
||||
|
||||
if is_group:
|
||||
# If there is ':', this process must be in a group.
|
||||
if ':' in process_name:
|
||||
group = process_name.split(':')[0]
|
||||
if group != name:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
if process_name != name:
|
||||
continue
|
||||
|
||||
matched.append((process_name, status))
|
||||
return matched
|
||||
|
||||
def take_action_on_processes(processes, status_filter, action, expected_result):
|
||||
to_take_action_on = []
|
||||
for process_name, status in processes:
|
||||
if status_filter(status):
|
||||
to_take_action_on.append(process_name)
|
||||
|
||||
if len(to_take_action_on) == 0:
|
||||
module.exit_json(changed=False, name=name, state=state)
|
||||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
for process_name in to_take_action_on:
|
||||
rc, out, err = run_supervisorctl(action, process_name, check_rc=True)
|
||||
if '%s: %s' % (process_name, expected_result) not in out:
|
||||
module.fail_json(msg=out)
|
||||
|
||||
module.exit_json(changed=True, name=name, state=state, affected=to_take_action_on)
|
||||
|
||||
if state == 'restarted':
|
||||
rc, out, err = run_supervisorctl('update', check_rc=True)
|
||||
processes = get_matched_processes()
|
||||
if len(processes) == 0:
|
||||
module.fail_json(name=name, msg="ERROR (no such process)")
|
||||
|
||||
take_action_on_processes(processes, lambda s: True, 'restart', 'started')
|
||||
|
||||
processes = get_matched_processes()
|
||||
|
||||
if state == 'absent':
|
||||
if len(processes) == 0:
|
||||
module.exit_json(changed=False, name=name, state=state)
|
||||
|
||||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
run_supervisorctl('reread', check_rc=True)
|
||||
rc, out, err = run_supervisorctl('remove', name)
|
||||
if '%s: removed process group' % name in out:
|
||||
module.exit_json(changed=True, name=name, state=state)
|
||||
else:
|
||||
module.fail_json(msg=out, name=name, state=state)
|
||||
|
||||
if state == 'present':
|
||||
if len(processes) > 0:
|
||||
module.exit_json(changed=False, name=name, state=state)
|
||||
|
||||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
run_supervisorctl('reread', check_rc=True)
|
||||
rc, out, err = run_supervisorctl('add', name)
|
||||
if '%s: added process group' % name in out:
|
||||
module.exit_json(changed=True, name=name, state=state)
|
||||
else:
|
||||
module.fail_json(msg=out, name=name, state=state)
|
||||
|
||||
if state == 'started':
|
||||
if len(processes) == 0:
|
||||
module.fail_json(name=name, msg="ERROR (no such process)")
|
||||
take_action_on_processes(processes, lambda s: s not in ('RUNNING', 'STARTING'), 'start', 'started')
|
||||
|
||||
if state == 'stopped':
|
||||
if len(processes) == 0:
|
||||
module.fail_json(name=name, msg="ERROR (no such process)")
|
||||
take_action_on_processes(processes, lambda s: s in ('RUNNING', 'STARTING'), 'stop', 'stopped')
|
||||
|
||||
if state == 'signalled':
|
||||
if len(processes) == 0:
|
||||
module.fail_json(name=name, msg="ERROR (no such process)")
|
||||
take_action_on_processes(processes, lambda s: s in ('RUNNING'), "signal %s" % signal, 'signalled')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
305
plugins/modules/web_infrastructure/taiga_issue.py
Normal file
305
plugins/modules/web_infrastructure/taiga_issue.py
Normal file
|
|
@ -0,0 +1,305 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2015, Alejandro Guirao <lekumberri@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: taiga_issue
|
||||
short_description: Creates/deletes an issue in a Taiga Project Management Platform
|
||||
description:
|
||||
- Creates/deletes an issue in a Taiga Project Management Platform (U(https://taiga.io)).
|
||||
- An issue is identified by the combination of project, issue subject and issue type.
|
||||
- This module implements the creation or deletion of issues (not the update).
|
||||
options:
|
||||
taiga_host:
|
||||
description:
|
||||
- The hostname of the Taiga instance.
|
||||
default: https://api.taiga.io
|
||||
project:
|
||||
description:
|
||||
- Name of the project containing the issue. Must exist previously.
|
||||
required: True
|
||||
subject:
|
||||
description:
|
||||
- The issue subject.
|
||||
required: True
|
||||
issue_type:
|
||||
description:
|
||||
- The issue type. Must exist previously.
|
||||
required: True
|
||||
priority:
|
||||
description:
|
||||
- The issue priority. Must exist previously.
|
||||
default: Normal
|
||||
status:
|
||||
description:
|
||||
- The issue status. Must exist previously.
|
||||
default: New
|
||||
severity:
|
||||
description:
|
||||
- The issue severity. Must exist previously.
|
||||
default: Normal
|
||||
description:
|
||||
description:
|
||||
- The issue description.
|
||||
default: ""
|
||||
attachment:
|
||||
description:
|
||||
- Path to a file to be attached to the issue.
|
||||
attachment_description:
|
||||
description:
|
||||
- A string describing the file to be attached to the issue.
|
||||
default: ""
|
||||
tags:
|
||||
description:
|
||||
- A lists of tags to be assigned to the issue.
|
||||
default: []
|
||||
state:
|
||||
description:
|
||||
- Whether the issue should be present or not.
|
||||
choices: ["present", "absent"]
|
||||
default: present
|
||||
author: Alejandro Guirao (@lekum)
|
||||
requirements: [python-taiga]
|
||||
notes:
|
||||
- The authentication is achieved either by the environment variable TAIGA_TOKEN or by the pair of environment variables TAIGA_USERNAME and TAIGA_PASSWORD
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Create an issue in the my hosted Taiga environment and attach an error log
|
||||
- taiga_issue:
|
||||
taiga_host: https://mytaigahost.example.com
|
||||
project: myproject
|
||||
subject: An error has been found
|
||||
issue_type: Bug
|
||||
priority: High
|
||||
status: New
|
||||
severity: Important
|
||||
description: An error has been found. Please check the attached error log for details.
|
||||
attachment: /path/to/error.log
|
||||
attachment_description: Error log file
|
||||
tags:
|
||||
- Error
|
||||
- Needs manual check
|
||||
state: present
|
||||
|
||||
# Deletes the previously created issue
|
||||
- taiga_issue:
|
||||
taiga_host: https://mytaigahost.example.com
|
||||
project: myproject
|
||||
subject: An error has been found
|
||||
issue_type: Bug
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''# '''
|
||||
import traceback
|
||||
|
||||
from os import getenv
|
||||
from os.path import isfile
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
TAIGA_IMP_ERR = None
|
||||
try:
|
||||
from taiga import TaigaAPI
|
||||
from taiga.exceptions import TaigaException
|
||||
TAIGA_MODULE_IMPORTED = True
|
||||
except ImportError:
|
||||
TAIGA_IMP_ERR = traceback.format_exc()
|
||||
TAIGA_MODULE_IMPORTED = False
|
||||
|
||||
|
||||
def manage_issue(module, taiga_host, project_name, issue_subject, issue_priority,
|
||||
issue_status, issue_type, issue_severity, issue_description,
|
||||
issue_attachment, issue_attachment_description,
|
||||
issue_tags, state, check_mode=False):
|
||||
"""
|
||||
Method that creates/deletes issues depending whether they exist and the state desired
|
||||
|
||||
The credentials should be passed via environment variables:
|
||||
- TAIGA_TOKEN
|
||||
- TAIGA_USERNAME and TAIGA_PASSWORD
|
||||
|
||||
Returns a tuple with these elements:
|
||||
- A boolean representing the success of the operation
|
||||
- A descriptive message
|
||||
- A dict with the issue attributes, in case of issue creation, otherwise empty dict
|
||||
"""
|
||||
|
||||
changed = False
|
||||
|
||||
try:
|
||||
token = getenv('TAIGA_TOKEN')
|
||||
if token:
|
||||
api = TaigaAPI(host=taiga_host, token=token)
|
||||
else:
|
||||
api = TaigaAPI(host=taiga_host)
|
||||
username = getenv('TAIGA_USERNAME')
|
||||
password = getenv('TAIGA_PASSWORD')
|
||||
if not any([username, password]):
|
||||
return (False, changed, "Missing credentials", {})
|
||||
api.auth(username=username, password=password)
|
||||
|
||||
user_id = api.me().id
|
||||
project_list = filter(lambda x: x.name == project_name, api.projects.list(member=user_id))
|
||||
if len(project_list) != 1:
|
||||
return (False, changed, "Unable to find project %s" % project_name, {})
|
||||
project = project_list[0]
|
||||
project_id = project.id
|
||||
|
||||
priority_list = filter(lambda x: x.name == issue_priority, api.priorities.list(project=project_id))
|
||||
if len(priority_list) != 1:
|
||||
return (False, changed, "Unable to find issue priority %s for project %s" % (issue_priority, project_name), {})
|
||||
priority_id = priority_list[0].id
|
||||
|
||||
status_list = filter(lambda x: x.name == issue_status, api.issue_statuses.list(project=project_id))
|
||||
if len(status_list) != 1:
|
||||
return (False, changed, "Unable to find issue status %s for project %s" % (issue_status, project_name), {})
|
||||
status_id = status_list[0].id
|
||||
|
||||
type_list = filter(lambda x: x.name == issue_type, project.list_issue_types())
|
||||
if len(type_list) != 1:
|
||||
return (False, changed, "Unable to find issue type %s for project %s" % (issue_type, project_name), {})
|
||||
type_id = type_list[0].id
|
||||
|
||||
severity_list = filter(lambda x: x.name == issue_severity, project.list_severities())
|
||||
if len(severity_list) != 1:
|
||||
return (False, changed, "Unable to find severity %s for project %s" % (issue_severity, project_name), {})
|
||||
severity_id = severity_list[0].id
|
||||
|
||||
issue = {
|
||||
"project": project_name,
|
||||
"subject": issue_subject,
|
||||
"priority": issue_priority,
|
||||
"status": issue_status,
|
||||
"type": issue_type,
|
||||
"severity": issue_severity,
|
||||
"description": issue_description,
|
||||
"tags": issue_tags,
|
||||
}
|
||||
|
||||
# An issue is identified by the project_name, the issue_subject and the issue_type
|
||||
matching_issue_list = filter(lambda x: x.subject == issue_subject and x.type == type_id, project.list_issues())
|
||||
matching_issue_list_len = len(matching_issue_list)
|
||||
|
||||
if matching_issue_list_len == 0:
|
||||
# The issue does not exist in the project
|
||||
if state == "present":
|
||||
# This implies a change
|
||||
changed = True
|
||||
if not check_mode:
|
||||
# Create the issue
|
||||
new_issue = project.add_issue(issue_subject, priority_id, status_id, type_id, severity_id, tags=issue_tags, description=issue_description)
|
||||
if issue_attachment:
|
||||
new_issue.attach(issue_attachment, description=issue_attachment_description)
|
||||
issue["attachment"] = issue_attachment
|
||||
issue["attachment_description"] = issue_attachment_description
|
||||
return (True, changed, "Issue created", issue)
|
||||
|
||||
else:
|
||||
# If does not exist, do nothing
|
||||
return (True, changed, "Issue does not exist", {})
|
||||
|
||||
elif matching_issue_list_len == 1:
|
||||
# The issue exists in the project
|
||||
if state == "absent":
|
||||
# This implies a change
|
||||
changed = True
|
||||
if not check_mode:
|
||||
# Delete the issue
|
||||
matching_issue_list[0].delete()
|
||||
return (True, changed, "Issue deleted", {})
|
||||
|
||||
else:
|
||||
# Do nothing
|
||||
return (True, changed, "Issue already exists", {})
|
||||
|
||||
else:
|
||||
# More than 1 matching issue
|
||||
return (False, changed, "More than one issue with subject %s in project %s" % (issue_subject, project_name), {})
|
||||
|
||||
except TaigaException as exc:
|
||||
msg = "An exception happened: %s" % to_native(exc)
|
||||
return (False, changed, msg, {})
|
||||
|
||||
|
||||
def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec=dict(
|
||||
taiga_host=dict(required=False, default="https://api.taiga.io"),
|
||||
project=dict(required=True),
|
||||
subject=dict(required=True),
|
||||
issue_type=dict(required=True),
|
||||
priority=dict(required=False, default="Normal"),
|
||||
status=dict(required=False, default="New"),
|
||||
severity=dict(required=False, default="Normal"),
|
||||
description=dict(required=False, default=""),
|
||||
attachment=dict(required=False, default=None),
|
||||
attachment_description=dict(required=False, default=""),
|
||||
tags=dict(required=False, default=[], type='list'),
|
||||
state=dict(required=False, choices=['present', 'absent'],
|
||||
default='present'),
|
||||
),
|
||||
supports_check_mode=True
|
||||
)
|
||||
|
||||
if not TAIGA_MODULE_IMPORTED:
|
||||
module.fail_json(msg=missing_required_lib("python-taiga"),
|
||||
exception=TAIGA_IMP_ERR)
|
||||
|
||||
taiga_host = module.params['taiga_host']
|
||||
project_name = module.params['project']
|
||||
issue_subject = module.params['subject']
|
||||
issue_priority = module.params['priority']
|
||||
issue_status = module.params['status']
|
||||
issue_type = module.params['issue_type']
|
||||
issue_severity = module.params['severity']
|
||||
issue_description = module.params['description']
|
||||
issue_attachment = module.params['attachment']
|
||||
issue_attachment_description = module.params['attachment_description']
|
||||
if issue_attachment:
|
||||
if not isfile(issue_attachment):
|
||||
msg = "%s is not a file" % issue_attachment
|
||||
module.fail_json(msg=msg)
|
||||
issue_tags = module.params['tags']
|
||||
state = module.params['state']
|
||||
|
||||
return_status, changed, msg, issue_attr_dict = manage_issue(
|
||||
module,
|
||||
taiga_host,
|
||||
project_name,
|
||||
issue_subject,
|
||||
issue_priority,
|
||||
issue_status,
|
||||
issue_type,
|
||||
issue_severity,
|
||||
issue_description,
|
||||
issue_attachment,
|
||||
issue_attachment_description,
|
||||
issue_tags,
|
||||
state,
|
||||
check_mode=module.check_mode
|
||||
)
|
||||
if return_status:
|
||||
if len(issue_attr_dict) > 0:
|
||||
module.exit_json(changed=changed, msg=msg, issue=issue_attr_dict)
|
||||
else:
|
||||
module.exit_json(changed=changed, msg=msg)
|
||||
else:
|
||||
module.fail_json(msg=msg)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
Loading…
Add table
Add a link
Reference in a new issue