commit ba358db1041cd555c2232b0a4ee83f25672fcb57
Author: Nicolas Chauvet <kwizart@gmail.com>
Date: Wed Aug 18 22:58:21 2021 +0200
Scripts library plugins
callback_plugins/logdetail.py | 203 ++++++++++++-------------
filter_plugins/fedmsg.py | 4 +-
inventory/group_vars/all | 1 +
inventory/host_vars/hv01.online.rpmfusion.net | 1 +
library/virt_boot | 10 +-
scripts/auth-keys-from-fas | 16 +-
scripts/freezelist | 13 +-
scripts/generate-oidc-token | 97 ++++++++++++
scripts/hosts_with_var_set | 61 ++++----
9 files changed, 247 insertions(+), 159 deletions(-)
---
diff --git a/callback_plugins/logdetail.py b/callback_plugins/logdetail.py
index 289e7ed..902ecad 100644
--- a/callback_plugins/logdetail.py
+++ b/callback_plugins/logdetail.py
@@ -15,13 +15,36 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import absolute_import
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = r'''
+callback: logdetail
+callback_type: notification
+short_description: Logs playbook results, per date, playbook and host.
+description: Logs playbook results, per date, playbook and host, in I(log_path).
+options:
+ log_path:
+ description: The path where log files will be created.
+ default: /var/log/ansible
+ ini:
+ - section: callback_logdetail
+ key: log_path
+ env:
+ - name: ANSIBLE_LOGDETAIL_PATH
+'''
import os
import time
import json
import pwd
-from ansible import utils
+import gzip
+
+try:
+ from ansible.utils.hashing import secure_hash
+except ImportError:
+ from ansible.utils import md5 as secure_hash
try:
from ansible.plugins.callback import CallbackBase
@@ -29,31 +52,31 @@ except ImportError:
# Ansible v1 compat
CallbackBase = object
-TIME_FORMAT="%b %d %Y %H:%M:%S"
+TIME_FORMAT = "%b %d %Y %H:%M:%S"
-MSG_FORMAT="%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
+MSG_FORMAT = "%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
-LOG_PATH = '/var/log/ansible'
def getlogin():
try:
user = os.getlogin()
- except OSError, e:
+ except OSError as e:
user = pwd.getpwuid(os.geteuid())[0]
return user
+
class LogMech(object):
- def __init__(self):
+ def __init__(self, logpath):
self.started = time.time()
self.pid = str(os.getpid())
self._pb_fn = None
self._last_task_start = None
self.play_info = {}
- self.logpath = LOG_PATH
+ self.logpath = logpath
if not os.path.exists(self.logpath):
try:
os.makedirs(self.logpath, mode=0750)
- except OSError, e:
+ except OSError as e:
if e.errno != 17:
raise
@@ -74,13 +97,13 @@ class LogMech(object):
def logpath_play(self):
# this is all to get our path to look nice ish
 tstamp = time.strftime('%Y/%m/%d/%H.%M.%S', time.localtime(self.started))
- path = os.path.normpath(self.logpath + '/' + self.playbook_id + '/' + tstamp + '/')
+ path = os.path.normpath(self.logpath + '/' + self.playbook_id + '/' + tstamp + '/')
if not os.path.exists(path):
try:
os.makedirs(path)
- except OSError, e:
- if e.errno != 17: # if it is not dir exists then raise it up
+ except OSError as e:
+ if e.errno != 17: # if it is not dir exists then raise it up
raise
return path
@@ -96,8 +119,8 @@ class LogMech(object):
def task_to_json(self, task):
res = {}
res['task_name'] = task.name
- res['task_module'] = task.module_name
- res['task_args'] = task.module_args
+ res['task_module'] = task.action
+ res['task_args'] = task.args
if self.playbook_id == 'ansible-cmd':
res['task_userid'] = getlogin()
for k in ("delegate_to", "environment",
"with_first_found",
@@ -115,22 +138,21 @@ class LogMech(object):
host = 'HOSTMISSING'
if type(data) == dict:
- name = data.get('module_name',None)
+ name = data.get('module_name', None)
else:
name = "unknown"
-
# we're in setup - move the invocation info up one level
if 'invocation' in data:
invoc = data['invocation']
if not name and 'module_name' in invoc:
name = invoc['module_name']
- #don't add this since it can often contain complete passwords :(
+ # don't add this since it can often contain complete passwords :(
del(data['invocation'])
if task:
- name = task.name
+ name = task._name
data['task_start'] = self._last_task_start
data['task_end'] = time.time()
data.update(self.task_to_json(task))
@@ -143,7 +165,7 @@ class LogMech(object):
 if self.play_info.get('check', False) and self.play_info.get('diff', False):
category = 'CHECK_DIFF:' + category
- elif self.play_info.get('check', False):
+ elif self.play_info.get('check', False):
category = 'CHECK:' + category
# Sometimes this is None.. othertimes it's fine. Othertimes it has
@@ -152,14 +174,12 @@ class LogMech(object):
name = name.strip()
 sanitize_host = host.replace(' ', '_').replace('>', '-')
- fd = open(self.logpath_play + '/' + sanitize_host + '.log', 'a')
+ fd = gzip.open(self.logpath_play + '/' + sanitize_host + '.log.gz', 'at')
now = time.strftime(TIME_FORMAT, time.localtime())
fd.write(MSG_FORMAT % dict(now=now, name=name, count=count, category=category,
data=json.dumps(data)))
fd.close()
-logmech = LogMech()
-
class CallbackModule(CallbackBase):
"""
logs playbook results, per host, in /var/log/ansible/hosts
@@ -172,101 +192,72 @@ class CallbackModule(CallbackBase):
def __init__(self):
self._task_count = 0
self._play_count = 0
+ self.task = None
+ self.playbook = None
- def on_any(self, *args, **kwargs):
- pass
+ super(CallbackModule, self).__init__()
+ self.set_options()
+ self.logmech = LogMech(self.get_option('log_path'))
+ def set_play_context(self, play_context):
+ self.play_context = play_context
- def runner_on_failed(self, host, res, ignore_errors=False):
+ def v2_runner_on_failed(self, result, ignore_errors=False):
category = 'FAILED'
- task = getattr(self,'task', None)
- logmech.log(host, category, res, task, self._task_count)
-
+ self.logmech.log(result._host.get_name(), category, result._result, self.task, self._task_count)
- def runner_on_ok(self, host, res):
+ def v2_runner_on_ok(self, result):
category = 'OK'
- task = getattr(self,'task', None)
- logmech.log(host, category, res, task, self._task_count)
-
+ self.logmech.log(result._host.get_name(), category, result._result, self.task, self._task_count)
- def runner_on_error(self, host, res):
- category = 'ERROR'
- task = getattr(self,'task', None)
- logmech.log(host, category, res, task, self._task_count)
-
- def runner_on_skipped(self, host, item=None):
+ def v2_runner_on_skipped(self, result):
category = 'SKIPPED'
- task = getattr(self,'task', None)
res = {}
- res['item'] = item
- logmech.log(host, category, res, task, self._task_count)
+ res['item'] = self._get_item_label(getattr(result._result, 'results', {}))
+ self.logmech.log(result._host.get_name(), category, res, self.task, self._task_count)
- def runner_on_unreachable(self, host, output):
+ def v2_runner_on_unreachable(self, result):
category = 'UNREACHABLE'
- task = getattr(self,'task', None)
res = {}
- res['output'] = output
- logmech.log(host, category, res, task, self._task_count)
-
- def runner_on_no_hosts(self):
- pass
+ res['output'] = result._result
+ self.logmech.log(result._host.get_name(), category, res, self.task, self._task_count)
- def runner_on_async_poll(self, host, res, jid, clock):
- pass
-
- def runner_on_async_ok(self, host, res, jid):
- pass
-
- def runner_on_async_failed(self, host, res, jid):
+ def v2_runner_on_async_failed(self, result):
category = 'ASYNC_FAILED'
- task = getattr(self,'task', None)
- logmech.log(host, category, res, task, self._task_count)
-
- def playbook_on_start(self):
- pass
-
- def playbook_on_notify(self, host, handler):
- pass
-
- def playbook_on_no_hosts_matched(self):
- pass
+ self.logmech.log(result._host.get_name(), category, result._result, self.task, self._task_count)
- def playbook_on_no_hosts_remaining(self):
- pass
+ def v2_playbook_on_start(self, playbook):
+ self.playbook = playbook
- def playbook_on_task_start(self, name, is_conditional):
- logmech._last_task_start = time.time()
+ def v2_playbook_on_task_start(self, task, is_conditional):
+ self.task = task
+ if self.task:
+ self.task._name = task.get_name().strip()
+ self.logmech._last_task_start = time.time()
self._task_count += 1
- def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
- pass
-
- def playbook_on_setup(self):
+ def v2_playbook_on_setup(self):
self._task_count += 1
- pass
- def playbook_on_import_for_host(self, host, imported_file):
- task = getattr(self,'task', None)
+ def v2_playbook_on_import_for_host(self, result, imported_file):
res = {}
res['imported_file'] = imported_file
- logmech.log(host, 'IMPORTED', res, task)
+ self.logmech.log(result._host.get_name(), 'IMPORTED', res, self.task)
- def playbook_on_not_import_for_host(self, host, missing_file):
- task = getattr(self,'task', None)
+ def v2_playbook_on_not_import_for_host(self, result, missing_file):
res = {}
res['missing_file'] = missing_file
- logmech.log(host, 'NOTIMPORTED', res, task)
+ self.logmech.log(result._host.get_name(), 'NOTIMPORTED', res, self.task)
- def playbook_on_play_start(self, pattern):
+ def v2_playbook_on_play_start(self, play):
self._task_count = 0
- play = getattr(self, 'play', None)
if play:
# figure out where the playbook FILE is
- path = os.path.abspath(play.playbook.filename)
+ path = os.path.abspath(self.playbook._file_name)
# tel the logger what the playbook is
- logmech.playbook_id = path
+ self.logmech.playbook_id = path
# if play count == 0
# write out playbook info now
@@ -275,33 +266,35 @@ class CallbackModule(CallbackBase):
pb_info['playbook_start'] = time.time()
pb_info['playbook'] = path
pb_info['userid'] = getlogin()
- pb_info['extra_vars'] = play.playbook.extra_vars
- pb_info['inventory'] = play.playbook.inventory.host_list
- pb_info['playbook_checksum'] = utils.md5(path)
- pb_info['check'] = play.playbook.check
- pb_info['diff'] = play.playbook.diff
- logmech.play_log(json.dumps(pb_info, indent=4))
+ pb_info['extra_vars'] = play._variable_manager.extra_vars
+ pb_info['inventory'] = play._variable_manager._inventory._sources
+ pb_info['playbook_checksum'] = secure_hash(path)
+ if hasattr(self, "play_context"):
+ pb_info['check'] = self.play_context.check_mode
+ pb_info['diff'] = self.play_context.diff
+ self.logmech.play_log(json.dumps(pb_info, indent=4))
self._play_count += 1
# then write per-play info that doesn't duplcate the playbook info
info = {}
info['play'] = play.name
info['hosts'] = play.hosts
- info['transport'] = play.transport
info['number'] = self._play_count
- info['check'] = play.playbook.check
- info['diff'] = play.playbook.diff
- logmech.play_info = info
- logmech.play_log(json.dumps(info, indent=4))
-
+ if hasattr(self, "play_context"):
+ info['transport'] = str(self.play_context.connection)
+ info['check'] = self.play_context.check_mode
+ info['diff'] = self.play_context.diff
+ self.logmech.play_info = info
+ try:
+ self.logmech.play_log(json.dumps(info, indent=4))
+ except TypeError:
+ print(("Failed to conver to JSON:", info))
- def playbook_on_stats(self, stats):
+ def v2_playbook_on_stats(self, stats):
results = {}
- for host in stats.processed.keys():
+ for host in list(stats.processed.keys()):
results[host] = stats.summarize(host)
- logmech.log(host, 'STATS', results[host])
- logmech.play_log(json.dumps({'stats': results}, indent=4))
- logmech.play_log(json.dumps({'playbook_end': time.time()}, indent=4))
- print 'logs written to: %s' % logmech.logpath_play
-
-
+ self.logmech.log(host, 'STATS', results[host])
+ self.logmech.play_log(json.dumps({'stats': results}, indent=4))
+ self.logmech.play_log(json.dumps({'playbook_end': time.time()}, indent=4))
+ print(('logs written to: %s' % self.logmech.logpath_play))
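
Note: the new DOCUMENTATION block above makes the log destination configurable instead of the removed LOG_PATH constant. A minimal sketch of how it could be set, assuming the callback is loaded from this repo's callback_plugins/ directory (paths shown are only examples):

    # ansible.cfg
    [callback_logdetail]
    log_path = /var/log/ansible

    # or via the environment
    export ANSIBLE_LOGDETAIL_PATH=/var/log/ansible
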
diff --git a/filter_plugins/fedmsg.py b/filter_plugins/fedmsg.py
index 60beaf8..16773ce 100644
--- a/filter_plugins/fedmsg.py
+++ b/filter_plugins/fedmsg.py
@@ -9,7 +9,7 @@ def invert_fedmsg_policy(groups, vars, env):
"""
if env == 'staging':
- hosts = groups['staging'] + groups['fedmsg-qa-network-stg']
+ hosts = groups['staging'] + groups['staging_friendly']
else:
hosts = [h for h in groups['all'] if h not in groups['staging']]
@@ -25,7 +25,7 @@ def invert_fedmsg_policy(groups, vars, env):
inverted[key] = inverted.get(key, [])
inverted[key].append(cert['service'] + '-' + fqdn)
- result = inverted.items()
+ result = list(inverted.items())
# Sort things so they come out in a reliable order (idempotence)
[inverted[key].sort() for key in inverted]
result.sort(key=operator.itemgetter(0))
diff --git a/inventory/group_vars/all b/inventory/group_vars/all
index 6f086cb..0f43cb1 100644
--- a/inventory/group_vars/all
+++ b/inventory/group_vars/all
@@ -432,6 +432,7 @@ sshd_sftp: false
# Autodetect python version
#
ansible_python_interpreter: auto
+
#
# datacenter with active certbot in it
#
diff --git a/inventory/host_vars/hv01.online.rpmfusion.net b/inventory/host_vars/hv01.online.rpmfusion.net
index f28d410..dcfcea4 100644
--- a/inventory/host_vars/hv01.online.rpmfusion.net
+++ b/inventory/host_vars/hv01.online.rpmfusion.net
@@ -15,4 +15,5 @@ udp_ports: ['53', '1194']
custom_rules: [ '-A FORWARD -d 192.168.181.0/24 -o br1 -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT' , '-A FORWARD -s 192.168.181.0/24 -i br1 -j ACCEPT', '-A INPUT -i br1 -p tcp -m tcp --dport 111 -j ACCEPT', '-A INPUT -i tun0 -p tcp -m tcp --dport 111 -j ACCEPT' , '-A INPUT -i br1 -p udp -m udp --dport 514 -j ACCEPT', '-A INPUT -i tun0 -p udp -m udp --dport 514 -j ACCEPT' , '-A INPUT -i br1 -p tcp -m tcp --dport 514 -j ACCEPT', '-A INPUT -i tun0 -p tcp -m tcp --dport 514 -j ACCEPT' ,'-A INPUT -i br1 -p tcp -m tcp --dport 662 -j ACCEPT' , '-A INPUT -i tun0 -p tcp -m tcp --dport 662 -j ACCEPT', '-A INPUT -i br1 -p tcp -m tcp --dport 892 -j ACCEPT' , '-A INPUT -i tun0 -p tcp -m tcp --dport 892 -j ACCEPT', '-A INPUT -i br1 -p tcp -m tcp --dport 2049 -j ACCEPT', '-A INPUT -i tun0 -p tcp -m tcp --dport 2049 -j ACCEPT', '-A INPUT -i br1 -p udp -m udp --dport 2049 -j ACCEPT', '-A INPUT -i tun0 -p udp -m udp --dport 2049 -j ACCEPT', '-A INPUT -i br1 -p tcp -m tcp --dport 5000 -j ACCEPT', '-A INPUT -i tun0 -p tcp -m tcp --dport 5000 -j ACCEPT' ,'-A INPUT -i br1 -p tcp -m tcp --dport 32803 -j ACCEPT', '-A INPUT -i tun0 -p tcp -m tcp --dport 32803 -j ACCEPT' , '-A INPUT -i br1 -p udp -m udp --dport 32769 -j ACCEPT', '-A INPUT -i tun0 -p udp -m udp --dport 32769 -j ACCEPT' , '-A INPUT -i br1 -p tcp -m tcp --dport 3128 -j ACCEPT', '-A INPUT -i tun0 -p tcp -m tcp --dport 3128 -j ACCEPT']
custom_nat_rules: ['-A POSTROUTING -o br0 -j MASQUERADE' ]
+ansible_python
diff --git a/library/virt_boot b/library/virt_boot
index 6591e56..6280297 100755
--- a/library/virt_boot
+++ b/library/virt_boot
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
# -*- coding: utf-8 -*-
# (c) 2012, Jeroen Hoekx <jeroen(a)hoekx.be>
@@ -100,12 +100,12 @@ except ImportError:
import elementtree.ElementTree as ET
from elementtree.ElementTree import SubElement
except ImportError:
- print "failed=True msg='ElementTree python module
unavailable'"
+ print("failed=True msg='ElementTree python module
unavailable'")
try:
import libvirt
except ImportError:
- print "failed=True msg='libvirt python module unavailable'"
+ print("failed=True msg='libvirt python module unavailable'")
sys.exit(1)
from ansible.module_utils.basic import AnsibleModule
@@ -138,7 +138,7 @@ def detach_disk(domain, doc, device):
source = disk.find('source')
if source is not None and 'file' in source.attrib:
del source.attrib['file']
- domain.updateDeviceFlags(ET.tostring(disk), libvirt.VIR_DOMAIN_AFFECT_CONFIG)
+ domain.updateDeviceFlags(ET.tostring(disk).decode('utf-8'), libvirt.VIR_DOMAIN_AFFECT_CONFIG)
return True
return False
@@ -287,7 +287,7 @@ def main():
changed = True
### save back
- conn.defineXML( ET.tostring(doc) )
+ conn.defineXML( ET.tostring(doc).decode('utf-8') )
if start and not domain.isActive():
changed = True
diff --git a/scripts/auth-keys-from-fas b/scripts/auth-keys-from-fas
index 9ec9577..3c7505c 100755
--- a/scripts/auth-keys-from-fas
+++ b/scripts/auth-keys-from-fas
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/python3
#
# Copyright 2012 Red Hat, Inc.
# License: GPLv2+
@@ -111,11 +111,11 @@ def read_config_files(cfg_files):
if results != True:
for (section_list, key, unused_) in flatten_errors(options, results):
if key is not None:
- print 'The "%s" key in the section "%s" failed
validation' % (
- key, ', '.join(section_list))
+ print('The "%s" key in the section "%s" failed
validation' % (
+ key, ', '.join(section_list)))
else:
- print 'The following section was missing:%s ' % ', '.join(
- section_list)
+ print('The following section was missing:%s ' % ', '.join(
+ section_list))
sys.exit(1)
return options
@@ -159,8 +159,8 @@ def retry_fas(function, *args, **kwargs):
return function(*args, **kwargs)
except AuthError:
retries += 1
- password = getpass('FAS Password for %s:' % function.im_self.username)
- function.im_self.password = password
+ password = getpass('FAS Password for %s:' % function.__self__.username)
+ function.__self__.password = password
if retries >= MAX_RETRIES:
raise
@@ -203,4 +203,4 @@ if __name__ == '__main__':
for user in sorted(ssh_keys.keys()):
for key in ssh_keys[user]:
- print '%s%s' % (from_string, key)
+ print('%s%s' % (from_string, key))
diff --git a/scripts/freezelist b/scripts/freezelist
index 2690a54..89856ea 100755
--- a/scripts/freezelist
+++ b/scripts/freezelist
@@ -25,23 +25,22 @@ variable_manager = VariableManager(loader=loader, inventory=inv)
frozen = []
unfrozen = []
-for host in sorted(inv.get_hosts()):
+for host in sorted(inv.get_hosts(), key=lambda host: host.name):
vars = variable_manager.get_vars(host=host)
freezes = vars.get('freezes', None)
if freezes:
frozen.append(host.get_name())
elif freezes is None:
- print 'Error: missing freezes: %s' % host.get_name()
+ print('Error: missing freezes: %s' % host.get_name())
else:
unfrozen.append(host.get_name())
-print 'freeze:'
+print('freeze:')
for host in sorted(frozen):
- print 'F: ' + host
+ print('F: ' + host)
-print 'do not freeze:'
+print('do not freeze:')
for host in sorted(unfrozen):
- print 'NF: ' + host
-
+ print('NF: ' + host)
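
Note: the freezelist output format itself is unchanged by the print() conversion; it still reads like this (hostnames below are made up):

    freeze:
    F: host01.example.org
    do not freeze:
    NF: host02.example.org
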
diff --git a/scripts/generate-oidc-token b/scripts/generate-oidc-token
new file mode 100755
index 0000000..28e2272
--- /dev/null
+++ b/scripts/generate-oidc-token
@@ -0,0 +1,97 @@
+#!/usr/bin/python3
+# Copyright (c) 2018 Red Hat, Inc.
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""
+This script will accept some parameters and will print out some SQL you can run against the Ipsilon
+database, and a token you can give to an application to authenticate against a service.
+"""
+import base64
+import json
+import os
+import time
+import uuid
+
+import click
+
+
+secret = base64.urlsafe_b64encode(os.urandom(64))[:64].decode()
+
+
+template = """
+Run this SQL against Ipsilon's database:
+
+--------START CUTTING HERE--------
+BEGIN;
+insert into token values ('{uuid}','username','{service_name}@service');
+insert into token values ('{uuid}','security_check','{secret}');
+insert into token values ('{uuid}','client_id','{service_name}');
+insert into token values ('{uuid}','expires_at','{expiration}');
+insert into token values ('{uuid}','type','Bearer');
+insert into token values ('{uuid}','issued_at','{now}');
+insert into token values ('{uuid}','scope','{scope}');
+COMMIT;
+-------- END CUTTING HERE --------
+
+"""
+
+
+def validate_scopes(ctx, param, scopes):
+ """
+ Ensure that the user provided at least one scope.
+
+ Args:
+ ctx(click.core.Context): Unused.
+ param (click.core.Option): Unused.
+ scopes (tuple): The scopes provided by the user that we are validating.
+ Raises:
+ click.BadParameter: If the length of the scopes tuple is less than 1.
+ """
+ if len(scopes) < 1:
+ raise click.BadParameter('At least one scope must be provided.')
+
+ return scopes
+
+
+@click.command()
+@click.argument('service_name')
+@click.option('--expiration', '-e', prompt='Number of days until expiration', type=int,
+ help='The number of days from now until this token expires.')
+@click.option('--scope', '-s', multiple=True, callback=validate_scopes,
+ help='A scope to include for this token. May be supplied multiple times.')
+@click.option('--no-openid', is_flag=True, help='Do not use "openid" as the first item in scope.')
+def generate_token(service_name, expiration, scope, no_openid):
+ """
+ Print out SQL to insert a token in the Ipsilon database, and the token itself.
+
+ SERVICE_NAME is the name of the service that the token will be used by, (e.g., bodhi).
+ """
+ identifier = uuid.uuid4()
+
+ now = int(time.time())
+ expiration = now + (expiration * 24 * 3600)
+
+ scope = list(scope)
+ if not no_openid:
+ scope.insert(0, 'openid')
+ scope = json.dumps(scope)
+
+ print(template.format(uuid=identifier, service_name=service_name, secret=secret,
+ expiration=expiration, scope=scope, now=now))
+
+ print("Token: {}_{}\n".format(identifier, secret))
+
+
+if __name__ == '__main__':
+ generate_token()
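
Note: a hypothetical invocation of the new script (service name and scope URL are only examples; "openid" is prepended to the scope list automatically unless --no-openid is passed):

    scripts/generate-oidc-token bodhi -e 365 -s https://example.org/oidc/scope

The script then prints the SQL block to run against the Ipsilon database and the bearer token to hand to the service.
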
diff --git a/scripts/hosts_with_var_set b/scripts/hosts_with_var_set
index ec35858..1bba9b2 100755
--- a/scripts/hosts_with_var_set
+++ b/scripts/hosts_with_var_set
@@ -3,7 +3,7 @@
# doteast porting to ansible 2.0
# list hosts with ansible var[=value], Or
# list all hosts with their corresponding vars
-# Note that the script will attempt to "match" the supplied value of the var against the values if it the var is multivalued
+# Note that the script will attempt to "match" the supplied value of the var against the values if it the var is multivalued
from ansible import constants as C
from ansible.parsing.dataloader import DataLoader
@@ -23,46 +23,43 @@ parser.add_option('-a', action="store_true",
dest='all_vars', default=None,
opts, args = parser.parse_args(sys.argv[1:])
 if ((opts.variable == None and opts.all_vars == None) or (opts.variable != None and opts.all_vars != None)):
- print "Usage: hosts_with_var_set -o varname[=value] | -a"
- sys.exit(-1)
+ print("Usage: hosts_with_var_set -o varname[=value] | -a")
+ sys.exit(-1)
loader = DataLoader()
inv = InventoryManager(loader=loader, sources=opts.inventory)
variable_manager = VariableManager(loader=loader, inventory=inv)
-matching=True
+matching = True
if opts.variable != None:
- if opts.variable.find("=") == -1:
- matching=False
- var_name=opts.variable
- else:
- var_name,value = opts.variable.split('=')
- if value == "":
- value="None"
+ if opts.variable.find("=") == -1:
+ matching = False
+ var_name = opts.variable
+ else:
+ var_name, value = opts.variable.split('=')
+ if value == "":
+ value = "None"
var_set = []
-
-for host in sorted(inv.get_hosts()):
+for host in inv.get_hosts():
vars = variable_manager.get_vars(host=host)
if opts.variable == None:
- # remove expanded 'all' groups
- vars.pop('groups')
- vars['groups']=host.get_groups()
- print "%s\n%s\n" % (host.get_name(),vars)
+ # remove expanded 'all' groups
+ vars.pop('groups')
+ vars['groups'] = host.get_groups()
+ print("%s\n%s\n" % (host.get_name(), vars))
else:
- if vars.has_key(var_name):
- if not matching:
- var_set.append(host.get_name())
- else:
- if str(vars.get(var_name)).find(value) != -1:
- var_set.append(host.get_name())
-
-if opts.variable != None:
- if not matching:
- print 'hosts with variable %s:' % var_name
- else:
- print 'hosts with variable %s matching %s value' % (var_name,value)
- for host in sorted(var_set):
- print host
-
+ if var_name in vars:
+ if not matching:
+ var_set.append(host.get_name())
+ else:
+ if str(vars.get(var_name)).find(value) != -1:
+ var_set.append(host.get_name())
+if opts.variable != None:
+ if not matching:
+ print('hosts with variable %s:' % var_name)
+ else:
+ print('hosts with variable %s matching %s value' % (var_name, value))
+ for host in sorted(var_set):
+ print(host)
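
Note: usage is unchanged by the port; for instance (variable name and value below are only examples):

    scripts/hosts_with_var_set -o datacenter=online
    scripts/hosts_with_var_set -a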