Skip to content

Instantly share code, notes, and snippets.

@jctanner
Last active September 17, 2018 16:27
Show Gist options
  • Select an option

  • Save jctanner/210c5519014d2e400a09573439b50df1 to your computer and use it in GitHub Desktop.

Select an option

Save jctanner/210c5519014d2e400a09573439b50df1 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
import argparse
import glob
import os
import shutil
import subprocess
import sys
import tempfile
from logzero import logger
from pprint import pprint
from jinja2 import Environment
# Actions handled by molecule itself vs. actions delegated to ansible-test.
MOLECULE_ACTIONS = ['init', 'lint', 'syntax', 'test', 'verify']
ANSIBLE_TEST_ACTIONS = ['sanity', 'units', 'integration']
ACTIONS = MOLECULE_ACTIONS + ANSIBLE_TEST_ACTIONS

# Jinja2 template for a new module stub. The generated module wraps a pair of
# shell commands: check_command decides whether a change is needed and
# change_command performs it.
MODULE_TEMPLATE = """#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}

DOCUMENTATION = '''
module: {{ name }}
short_description: {{ short_description|default('no description') }}
description: {{ long_description|default('no description') }}
version_added: None
author: {{ author|default('no author') }}
requirements: []
options: {}
notes:
'''

EXAMPLES = '''
'''

from ansible.module_utils.basic import AnsibleModule


class ModuleCaller(object):
    '''Run the configured check/change commands through the module.'''

    def __init__(self, module):
        self.module = module
        self.check_command = "{{ check_command }}"
        self.change_command = "{{ change_command }}"

    def check(self):
        '''Return True when the system is already in the desired state.'''
        (rc, so, se) = self.module.run_command(self.check_command)
        return rc == 0

    def change(self):
        '''Return True when the change command succeeded.'''
        (rc, so, se) = self.module.run_command(self.change_command)
        return rc == 0


def main():
    module = AnsibleModule(
        argument_spec=dict(
        ),
        supports_check_mode=True,
    )
    caller = ModuleCaller(module)
    if not caller.check():
        if module.check_mode:
            module.exit_json(changed=True)
        else:
            # NOTE: change() returns True on success, so a successful change
            # exits changed=True and a failed change fails the module.
            if caller.change():
                module.exit_json(changed=True)
            else:
                module.fail_json(msg="command '%s' failed" % caller.change_command)
    else:
        module.exit_json(changed=False)


if __name__ == "__main__":
    main()
"""

# Stub unit test rendered into test/units/modules/test_<name>.py
MODULE_UNIT_TEST = """import pytest

from ansible.modules.{{ name }} import ModuleCaller
"""

# Stub integration tasks rendered into test/integration/targets/<name>/tasks/main.yml
MODULE_INTEGRATION_TEST = """# check idempotency
- {{ name }}:
  register: run1
- {{ name }}:
  register: run2
- assert:
    that:
      - 'run1 is changed'
      - 'run2 is not changed'
"""
class BaseCommand(object):
    '''Shared plumbing for every molecule / ansible-test subcommand.'''

    # subclasses override this with the cli action they implement
    action = 'base'

    def add_arguments(self, subparser):
        '''Hook for subclasses to register action-specific cli arguments.'''
        pass

    def find_plugins(self):
        '''Map plugin type -> absolute paths of plugin files in this collection.

        modules/ and module_utils/ are keyed by their own name; anything under
        plugins/<type>/ is keyed by the <type> subdirectory.
        '''
        plugins = {}
        for topdir in ['modules', 'module_utils', 'plugins']:
            for dirName, subdirList, fileList in os.walk(topdir):
                if topdir != 'plugins':
                    plugins[topdir] = [
                        os.path.join(os.getcwd(), topdir, x)
                        for x in fileList
                    ]
                else:
                    # plugins/<type>/<file> -- only descend one level
                    path_parts = dirName.split(os.path.sep)
                    if len(path_parts) == 2:
                        plugins[path_parts[-1]] = [
                            os.path.join(os.getcwd(), dirName, x)
                            for x in fileList
                        ]
        return plugins

    def find_roles(self):
        '''Find all roles in the collection and note if they have molecule configs'''
        if not os.path.exists('roles'):
            return {}
        roles = {}
        for dirName, subdirList, fileList in os.walk('roles'):
            if dirName == 'roles':
                for dn in subdirList:
                    roles[dn] = {
                        'name': dn,
                        'molecule': False,
                        'path': os.path.join(os.getcwd(), 'roles', dn)
                    }
            else:
                # roles/<name>/... -- flag roles that carry a molecule/ dir
                parts = dirName.split(os.path.sep)
                if len(parts) == 2:
                    if 'molecule' in subdirList:
                        roles[parts[1]]['molecule'] = True
        return roles

    def run_command(self, cmd, live=False):
        '''Run a shell command and return (rc, stdout, stderr) as text.

        When live=True, stderr is merged into stdout and the combined stream
        is echoed to this process' stdout while the command runs.
        '''
        if live:
            # merge stderr into stdout so one stream can be tailed
            cmd += ' 2>&1'
        p = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            # text mode: readline() returns str and the '' EOF sentinel below
            # actually matches on python3 (bytes would never equal '')
            universal_newlines=True
        )
        if not live:
            (so, se) = p.communicate()
            rc = p.returncode
        else:
            so = ''
            se = ''
            while True:
                _so = p.stdout.readline()
                if _so == '' and p.poll() is not None:
                    break
                if _so:
                    so += _so
                    sys.stdout.write(_so)
            rc = p.poll()
        return (rc, so, se)

    def run(self, args):
        '''Execute this action for each role (or once for ansible-test actions).'''
        result = {}
        if self.action in ANSIBLE_TEST_ACTIONS:
            res = self.run_ansible_test(args, {})
            roles = {self.action: res}
        else:
            roles = self.find_roles()
        for rn in sorted(roles.keys()):
            logger.info('## ' + rn + ' [' + self.action + ']')
            pprint(roles[rn])
            if self.action in MOLECULE_ACTIONS and roles[rn]['molecule']:
                cmd = 'cd %s' % roles[rn]['path']
                cmd += '; '
                cmd += 'molecule %s' % self.action
                (rc, so, se) = self.run_command(cmd, live=True)
                roles[rn]['tests'] = {self.action: {'rc': rc, 'so': so, 'se': se}}
            else:
                # FIXME - what to do with non-moleculed roles?
                pass
        result['roles'] = roles
        return result

    def copy_plugins_into_checkout(self, plugins, checkout):
        '''Transfer the plugin files into the checkout so ansible-test can see them'''
        toplevel = ['modules', 'module_utils']
        for k, v in plugins.items():
            if k in toplevel:
                dst = os.path.join(checkout, 'lib', 'ansible', k)
            else:
                dst = os.path.join(checkout, 'lib', 'ansible', 'plugins', k)
            for fn in v:
                logger.debug('copy %s -> %s' % (fn, dst))
                shutil.copy(fn, dst)

    def copy_tests_into_checkout(self, srcdir, checkout):
        '''Transfer the test files into the checkout so ansible-test can see them'''
        DIRS = ['test/units', 'test/integration']
        for DIR in DIRS:
            dst = os.path.join(checkout, DIR)
            if os.path.exists(dst):
                if os.path.basename(dst) == 'units':
                    # keep ansible.cfg, clear everything else, then copy ours in
                    WHITELIST = ['ansible.cfg']
                    fns = glob.glob(dst + '/*')
                    for fn in fns:
                        if os.path.basename(fn) not in WHITELIST:
                            if os.path.isdir(fn):
                                logger.debug('rm -rf %s' % fn)
                                shutil.rmtree(fn)
                            else:
                                os.remove(fn)
                    src_fns = glob.glob(os.path.join(srcdir, DIR) + '/*')
                    for srcfn in src_fns:
                        dst2 = os.path.join(dst, os.path.basename(srcfn))
                        if os.path.isdir(srcfn):
                            shutil.copytree(srcfn, dst2)
                        else:
                            shutil.copy(srcfn, dst2)
                else:
                    # integration: replace the whole tree
                    logger.debug('rm -rf %s' % dst)
                    shutil.rmtree(dst)
                    shutil.copytree(os.path.join(srcdir, DIR), dst)

        # ansible-test expects the targets dir to exist
        if not os.path.isdir(os.path.join(checkout, 'test', 'integration', 'targets')):
            os.makedirs(os.path.join(checkout, 'test', 'integration', 'targets'))

        # mirror the local unit test tree into the checkout, file by file
        (rc, so, se) = self.run_command('find test/units', live=False)
        ufiles = sorted([x.strip() for x in so.split('\n') if x.strip()])
        for ufile in ufiles:
            if os.path.basename(ufile) == 'units':
                continue
            src = ufile
            dst = os.path.join(checkout, ufile)
            if os.path.isdir(src):
                if not os.path.exists(dst):
                    logger.debug('mkdir %s' % dst)
                    os.makedirs(dst)
                continue
            logger.debug('%s -> %s' % (src, dst))
            shutil.copy(src, dst)

    def trim_checkout(self, checkout):
        '''Clean out any files that distract from what needs to be tested'''
        # module_utils kept because basic.py (or its imports) needs them
        MU_WHITELIST = [
            '_text.py',
            'basic.py',
            'common',
            'compat',
            'facts',
            'parsing',
            'pycompat24.py',
            'six'
        ]
        MU_BLACKLIST = []

        # drop every module except the package marker
        modules = glob.glob(os.path.join(checkout, 'lib', 'ansible', 'modules') + '/*')
        modules = [x for x in modules if not os.path.basename(x) == '__init__.py']
        for module in modules:
            shutil.rmtree(module)

        # drop module_utils not on the whitelist (plus anything blacklisted)
        module_utils = glob.glob(os.path.join(checkout, 'lib', 'ansible', 'module_utils') + '/*')
        module_utils = [x for x in module_utils if not os.path.basename(x) == '__init__.py']
        module_utils = [x for x in module_utils if os.path.basename(x) not in MU_WHITELIST]
        (rc, so, se) = self.run_command('find %s/lib/ansible/module_utils -type f' % checkout)
        found_utils = [x.strip() for x in so.split('\n') if x.strip()]
        found_utils = [x for x in found_utils if os.path.basename(x) in MU_BLACKLIST]
        module_utils = sorted(set(module_utils + found_utils))
        for mu in module_utils:
            if os.path.isdir(mu):
                shutil.rmtree(mu)
            else:
                os.remove(mu)

        # remove integration targets
        targets = glob.glob(os.path.join(checkout, 'test', 'integration') + '/*')
        for target in targets:
            if os.path.isdir(target):
                shutil.rmtree(target)
            else:
                os.remove(target)

        # remove trimmed files from the sanity ignore/skip lists
        self.trim_pylint_ignore(checkout)

    def trim_pylint_ignore(self, checkout):
        '''Clean out any files that don't exist so pslint+pylint doesn't complain'''
        for testdir in ['pslint', 'pylint', 'validate-modules']:
            for fn in ['skip.txt', 'ignore.txt']:
                ignore_file = os.path.join(checkout, 'test', 'sanity', testdir, fn)
                # not every sanity test ships both files
                if not os.path.exists(ignore_file):
                    continue
                logger.debug('clean %s' % ignore_file)
                with open(ignore_file, 'r') as f:
                    ignore_data = f.read()
                # first column of each non-empty line is the filename
                ignored_filenames = ignore_data.split('\n')
                ignored_filenames = [x.split()[0].strip() for x in ignored_filenames if x.split()]
                to_delete = []
                for igfn in ignored_filenames:
                    cf = os.path.join(checkout, igfn)
                    if not os.path.exists(cf):
                        to_delete.append(igfn)
                ignore_lines = ignore_data.split('\n')
                ignore_lines = [x for x in ignore_lines if x.split() and x.split()[0] not in to_delete]
                with open(ignore_file, 'w') as f:
                    f.write('\n'.join(ignore_lines))

    def run_ansible_test(self, args, role):
        '''Run "ansible-test <action>" for this collection inside a temp checkout.

        Results are recorded under role['tests'][action] as rc/so/se.
        '''
        TRIMMABLE = ['sanity', 'units', 'integration']
        workdir = tempfile.mkdtemp()
        if not os.path.isdir(workdir):
            os.makedirs(workdir)

        # what plugins are in this repo?
        logger.info('find testable plugins')
        plugins = self.find_plugins()

        # make a checkout (a /tmp clone is kept as a cache between runs)
        logger.info('create checkout for ansible-test')
        repo_dir = os.path.join(workdir, 'ansible.checkout')
        if not os.path.exists('/tmp/ansible.checkout'):
            cmd = 'git clone https://github.com/ansible/ansible /tmp/ansible.checkout'
            logger.info(cmd)
            (rc, so, se) = self.run_command(cmd)
        logger.info('cp /tmp/ansible.checkout -> %s' % repo_dir)
        self.run_command('cp -Rp /tmp/ansible.checkout %s' % repo_dir)

        # reduce the amount of files analyzed
        if self.action in TRIMMABLE:
            logger.debug('trim checkout')
            self.trim_checkout(repo_dir)

        logger.debug('copy plugins and tests to checkout')
        self.copy_plugins_into_checkout(plugins, repo_dir)
        self.copy_tests_into_checkout(os.getcwd(), repo_dir)

        # run the ansible-test command
        cmd = 'cd %s' % repo_dir
        cmd += '; '
        cmd += 'source hacking/env-setup'
        cmd += '; '
        cmd += 'ansible-test %s' % self.action
        if args.python:
            cmd += ' '
            cmd += '--python=%s' % args.python
        if args.docker:
            cmd += ' '
            cmd += '--docker=%s' % args.docker
        if self.action == 'sanity':
            # these sanity tests can not pass against a trimmed checkout
            toskip = ['ansible-doc', 'botmeta', 'integration-aliases']
            for ts in toskip:
                cmd += ' --skip-test=%s' % ts
        logger.info(cmd)
        (rc, so, se) = self.run_command(cmd, live=True)

        if 'tests' not in role:
            role['tests'] = {}
        role['tests'][self.action] = {'rc': rc, 'so': so, 'se': se}

        shutil.rmtree(workdir)
        return role
class MoleculeInit(BaseCommand):
    '''Scaffold new collections, roles and modules.'''

    action = 'init'

    def add_arguments(self, subparser):
        '''Register the init sub-subcommands (collection/role/module).'''
        _subparsers = subparser.add_subparsers(dest='subcommand')
        init_actions = ['collection', 'role', 'module']
        for action in init_actions:
            sp = _subparsers.add_parser(action)
            sp.add_argument('name')
            if action == 'module':
                sp.add_argument('--wizard', action='store_true')
                sp.add_argument('--check_command')
                sp.add_argument('--change_command')

    def init_collection(self, args):
        '''Create the directory skeleton for a new <user>.<name> collection.'''
        namespace = args.name.split('.')
        assert len(namespace) == 2, "Collection names must be <user>.<name>"
        paths = [
            os.path.join(args.name, 'modules'),
            os.path.join(args.name, 'module_utils'),
            os.path.join(args.name, 'plugins'),
            os.path.join(args.name, 'plugins', 'action'),
            os.path.join(args.name, 'plugins', 'filter'),
            os.path.join(args.name, 'plugins', 'lookup'),
            os.path.join(args.name, 'test'),
            os.path.join(args.name, 'test', 'units'),
            os.path.join(args.name, 'test', 'integration'),
            os.path.join(args.name, 'test', 'integration', 'targets'),
            os.path.join(args.name, 'roles'),
        ]
        for path in paths:
            if not os.path.exists(path):
                os.makedirs(path)
        # marker file so later commands can tell they run inside a collection
        metafile = os.path.join(args.name, '.collection')
        with open(metafile, 'w') as f:
            f.write('')
        with open(os.path.join(args.name, 'test', 'integration', 'integration.cfg'), 'w') as f:
            f.write('')

    def init_role(self, args):
        ''' Make a new stub role '''
        # make sure this is done inside a collection
        col_files = [
            os.path.join(os.getcwd(), '.collection'),
            os.path.join(os.path.dirname(os.getcwd()), '.collection')
        ]
        assert [x for x in col_files if os.path.exists(x)], "init role must be executed inside a collection"
        if os.path.dirname(os.getcwd()) != 'roles':
            prefix = 'roles'
        else:
            prefix = ''
        paths = [
            os.path.join(prefix, args.name, 'defaults'),
            os.path.join(prefix, args.name, 'tasks'),
            os.path.join(prefix, args.name, 'handlers'),
        ]
        for path in paths:
            logger.debug(path)
            if not os.path.exists(path):
                os.makedirs(path)
        tasks_file = os.path.join(prefix, args.name, 'tasks', 'main.yml')
        with open(tasks_file, 'w') as f:
            f.write('# %s tasks\n' % args.name)

    def init_module(self, args):
        '''Render module, unit test and integration test stubs from templates.'''
        # make sure this is done inside a collection
        col_files = [
            os.path.join(os.getcwd(), '.collection'),
        ]
        assert [x for x in col_files if os.path.exists(x)], "init module must be executed inside a collection"
        paths = [
            os.path.join('modules'),
            os.path.join('test', 'units', 'modules'),
            os.path.join('test', 'integration', 'targets', args.name, 'tasks'),
        ]
        for path in paths:
            logger.debug(path)
            if not os.path.exists(path):
                os.makedirs(path)

        env = Environment()

        # the module itself
        mfile = os.path.join('modules', '%s.py' % args.name)
        rendered = env.from_string(MODULE_TEMPLATE).render(
            name=args.name,
            check_command=args.check_command,
            change_command=args.change_command
        )
        with open(mfile, 'w') as f:
            f.write(rendered + '\n')

        # the unit test stub
        mufile = os.path.join('test', 'units', 'modules', 'test_%s.py' % args.name)
        rendered = env.from_string(MODULE_UNIT_TEST).render(name=args.name)
        with open(mufile, 'w') as f:
            f.write(rendered)

        # the integration test stub
        mifile = os.path.join('test', 'integration', 'targets', args.name, 'tasks', 'main.yml')
        rendered = env.from_string(MODULE_INTEGRATION_TEST).render(name=args.name)
        with open(mifile, 'w') as f:
            f.write(rendered)

    def run(self, args):
        '''Dispatch to the requested init subcommand.'''
        logger.info('subcommand: %s' % args.subcommand)
        if args.subcommand == 'collection':
            self.init_collection(args)
        elif args.subcommand == 'role':
            self.init_role(args)
        elif args.subcommand == 'module':
            self.init_module(args)
        else:
            pass
class MoleculeLint(BaseCommand):
    action = 'lint'


class MoleculeTest(BaseCommand):
    action = 'test'


class MoleculeSyntax(BaseCommand):
    action = 'syntax'


class MoleculeVerify(BaseCommand):
    action = 'verify'


class AnsibleTestBaseCommand(BaseCommand):
    '''Base for actions that shell out to ansible-test.'''

    action = None

    def add_arguments(self, subparser):
        subparser.add_argument('--docker')
        subparser.add_argument('--python')


class MoleculeSanity(AnsibleTestBaseCommand):
    action = 'sanity'
    skiptests = []


class MoleculeUnits(AnsibleTestBaseCommand):
    action = 'units'
    skiptests = []


class MoleculeIntegration(AnsibleTestBaseCommand):
    action = 'integration'
    skiptests = []
def main():
    '''Build the cli, dispatch to the requested action class and report results.'''
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='action')
    callers = {}
    for action in ACTIONS:
        logger.debug('build %s parser' % action)
        sp = subparsers.add_parser(action)
        # look up the handler class by naming convention, e.g. MoleculeSanity
        classname = 'Molecule' + action.title()
        try:
            classref = getattr(sys.modules[__name__], classname)
            classobj = classref()
            classobj.add_arguments(sp)
            callers[action] = classobj
        except AttributeError as e:
            logger.error(e)
            callers[action] = None
    args = parser.parse_args()
    results = callers[args.action].run(args)
    pprint(results)
    if args.action != 'init' and 'roles' in results:
        logger.info('')
        logger.info('#######################################')
        # sum the return codes so any single failure yields a non-zero exit
        rc = 0
        for k, v in results['roles'].items():
            for k2, v2 in v.get('tests', {}).items():
                rc += v2['rc']
                if v2['rc'] != 0:
                    logger.error('%s %s failed' % (k, k2))
        if rc == 0:
            logger.info('RC: %s' % rc)
        else:
            logger.error('RC: %s' % rc)
        sys.exit(rc)


if __name__ == "__main__":
    main()
#!/usr/bin/env python
import argparse
import logging
import os
import shutil
import subprocess
import sys
import tempfile
def run_command(cmd):
    '''Run a shell command and return (returncode, stdout, stderr) as text.'''
    p = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
        # text mode so callers get str (not bytes) on python3
        universal_newlines=True
    )
    (so, se) = p.communicate()
    return (p.returncode, so, se)
class GenericInstaller(object):
    '''Base interface for content installers.'''

    def download_to_cache(self):
        '''Hook: fetch content into a local cache. No-op in the base class.'''
        pass

    def fetch(self):
        '''Hook: materialize the content and return its path. No-op here.'''
        pass
class GalaxyRole(GenericInstaller):
    '''A role sourced from github, or from a local checkout when local=True.'''

    def __init__(self, content_id, local=False):
        # content_id is "<namespace>.<repo>" (or a local path when local=True
        # -- NOTE(review): a local path without a '.' would raise IndexError
        # here; confirm local content ids always contain a dot)
        self.local = local
        self.content_id = content_id
        self.namespace = self.content_id.split('.', 1)[0]
        self.repo = self.content_id.split('.', 1)[1]

    def fetch(self):
        '''Clone the role and return the path of the working copy.'''
        if self.local:
            # already on disk; use it in place
            return self.content_id
        url = 'https://github.com/%s/%s' % (self.namespace, self.repo)
        tdir = tempfile.mkdtemp()
        cmd = 'git clone %s %s' % (url, tdir)
        (rc, so, se) = run_command(cmd)
        if rc != 0:
            raise Exception(se)
        return tdir
class Mazer(object):
    ''' MAGIC!!! Base class for mazer-style content management commands. '''

    # where installed content lives on disk
    COLLECTION_PATH = os.path.expanduser('~/.ansible/content')

    def add_arguments(self, subparser):
        '''Hook for subclasses to register cli arguments.'''
        pass

    def run(self, args):
        '''Hook for subclasses to implement the action.'''
        pass

    def content_id_to_spec(self, cid):
        '''Convert a content name/id into an installable object'''
        if os.path.exists(cid):
            # a local role
            print('# DETECTED LOCAL CONTENT')
            return GalaxyRole(cid, local=True)
        elif not cid.startswith('http') and not cid.startswith('git'):
            # a remote role
            return GalaxyRole(cid)
        else:
            raise Exception('Not implemented!')

    def get_spec_version(self, content_path):
        '''Is this oldstyle (1) or newstyle (2)? Returns None when neither.'''
        if os.path.exists(os.path.join(content_path, 'roles')):
            return 2
        if os.path.exists(os.path.join(content_path, 'tasks')):
            return 1
        if os.path.exists(os.path.join(content_path, 'defaults')):
            return 1
        return None

    def get_v2_roles(self, content_path):
        '''list nested roles in roles/'''
        roles = []
        roledir = os.path.join(content_path, 'roles')
        # only the first os.walk() hit matters: the immediate subdirectories
        for root, dirs, files in os.walk(roledir):
            roles = dirs[:]
            break
        return [x for x in roles if x != '.git']

    def get_v2_non_roles(self, content_path):
        '''list directories not in roles/'''
        others = []
        for root, dirs, files in os.walk(content_path):
            others = dirs[:]
            break
        return [x for x in others if x not in ('roles', '.git')]
class MazerInstall(Mazer):
    '''Install content from github into COLLECTION_PATH.'''

    action = 'install'

    def add_arguments(self, subparser):
        subparser.add_argument('content_id')
        subparser.add_argument('-e', '--editable', action='store_true')

    def run(self, args):
        '''Fetch the content and copy (or symlink, for -e) it into place.'''
        cid = args.content_id
        spec = self.content_id_to_spec(cid)
        tmpdir = spec.fetch()

        # create the top level content path
        cp = os.path.join(self.COLLECTION_PATH, spec.namespace, spec.repo)

        # no reinstalls
        if os.path.exists(cp):
            return
        os.makedirs(cp)

        rd = os.path.join(cp, 'roles')
        if not os.path.exists(rd):
            os.makedirs(rd)

        specver = self.get_spec_version(tmpdir)
        if specver == 1:
            # install all the nested roles
            raise Exception('Not implemented!')
        else:
            # install all the nested roles
            roles = self.get_v2_roles(tmpdir)
            for role in roles:
                src = os.path.join(tmpdir, 'roles', role)
                dst = os.path.join(rd, role)
                if not args.editable:
                    shutil.copytree(src, dst)
                else:
                    os.symlink(os.path.abspath(src), os.path.abspath(dst))
            # install the subdirs
            others = self.get_v2_non_roles(tmpdir)
            for other in others:
                src = os.path.join(tmpdir, other)
                if other == 'library':
                    # oldstyle module dir maps to the new 'modules' name
                    dst = os.path.join(cp, 'modules')
                else:
                    dst = os.path.join(cp, other)
                if not args.editable:
                    shutil.copytree(src, dst)
                else:
                    os.symlink(os.path.abspath(src), os.path.abspath(dst))

        # cleanup -- keep the source tree for editable or local installs
        if not args.editable and not spec.local:
            shutil.rmtree(tmpdir)
class MazerRemove(MazerInstall):
    '''Remove installed content from COLLECTION_PATH.'''

    action = 'remove'

    def run(self, args):
        '''Clean out the content'''
        cid = args.content_id
        spec = self.content_id_to_spec(cid)
        cp = os.path.join(self.COLLECTION_PATH, spec.namespace, spec.repo)
        shutil.rmtree(cp)
def main():
    '''Build the mazer-style cli and dispatch to the requested action class.'''
    callers = {}
    actions = ['build', 'info', 'init', 'install', 'list', 'remove', 'version']
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='action')
    for action in actions:
        sp = subparsers.add_parser(action)
        # look up the handler class by naming convention, e.g. MazerInstall
        classname = 'Mazer' + action.title()
        try:
            classref = getattr(sys.modules[__name__], classname)
            classobj = classref()
            classobj.add_arguments(sp)
            callers[action] = classobj
        except AttributeError:
            # action not implemented yet
            callers[action] = None
    parser.add_argument('-c', '--ignore-certs')
    parser.add_argument('-s', '--server')
    parser.add_argument('-v', action='append')
    args = parser.parse_args()
    callers[args.action].run(args)


if __name__ == "__main__":
    main()
diff --git a/lib/ansible/config/base.yml b/lib/ansible/config/base.yml
index 15659739f1..433d1278f6 100644
--- a/lib/ansible/config/base.yml
+++ b/lib/ansible/config/base.yml
@@ -343,7 +343,7 @@ LOCALHOST_WARNING:
version_added: "2.6"
DEFAULT_ACTION_PLUGIN_PATH:
name: Action plugins path
- default: ~/.ansible/plugins/action:/usr/share/ansible/plugins/action
+ default: ~/.ansible/content/*/*/action_plugins:~/.ansible/plugins/action:/usr/share/ansible/plugins/action
description: Colon separated paths in which Ansible will search for Action Plugins.
env: [{name: ANSIBLE_ACTION_PLUGINS}]
ini:
@@ -475,7 +475,7 @@ DEFAULT_CALLABLE_WHITELIST:
type: list
DEFAULT_CALLBACK_PLUGIN_PATH:
name: Callback Plugins Path
- default: ~/.ansible/plugins/callback:/usr/share/ansible/plugins/callback
+ default: ~/.ansible/content/*/*/callback_plugins:~/.ansible/plugins/callback:/usr/share/ansible/plugins/callback
description: Colon separated paths in which Ansible will search for Callback Plugins.
env: [{name: ANSIBLE_CALLBACK_PLUGINS}]
ini:
@@ -495,7 +495,7 @@ DEFAULT_CALLBACK_WHITELIST:
yaml: {key: plugins.callback.whitelist}
DEFAULT_CLICONF_PLUGIN_PATH:
name: Cliconf Plugins Path
- default: ~/.ansible/plugins/cliconf:/usr/share/ansible/plugins/cliconf
+ default: ~/.ansible/content/*/*/cliconf_plugins:~/.ansible/plugins/cliconf:/usr/share/ansible/plugins/cliconf
description: Colon separated paths in which Ansible will search for Cliconf Plugins.
env: [{name: ANSIBLE_CLICONF_PLUGINS}]
ini:
@@ -503,7 +503,7 @@ DEFAULT_CLICONF_PLUGIN_PATH:
type: pathspec
DEFAULT_CONNECTION_PLUGIN_PATH:
name: Connection Plugins Path
- default: ~/.ansible/plugins/connection:/usr/share/ansible/plugins/connection
+ default: ~/.ansible/content/*/*/connection_plugins:~/.ansible/plugins/connection:/usr/share/ansible/plugins/connection
description: Colon separated paths in which Ansible will search for Connection Plugins.
env: [{name: ANSIBLE_CONNECTION_PLUGINS}]
ini:
@@ -553,7 +553,7 @@ DEFAULT_FACT_PATH:
yaml: {key: facts.gathering.fact_path}
DEFAULT_FILTER_PLUGIN_PATH:
name: Jinja2 Filter Plugins Path
- default: ~/.ansible/plugins/filter:/usr/share/ansible/plugins/filter
+ default: ~/.ansible/content/*/*/filter_plugins:~/.ansible/content/*/*/plugins/filter:~/.ansible/plugins/filter:/usr/share/ansible/plugins/filter
description: Colon separated paths in which Ansible will search for Jinja2 Filter Plugins.
env: [{name: ANSIBLE_FILTER_PLUGINS}]
ini:
@@ -673,7 +673,7 @@ DEFAULT_HOST_LIST:
yaml: {key: defaults.inventory}
DEFAULT_HTTPAPI_PLUGIN_PATH:
name: HttpApi Plugins Path
- default: ~/.ansible/plugins/httpapi:/usr/share/ansible/plugins/httpapi
+ default: ~/.ansible/content/*/*/httpapi_plugins:~/.ansible/plugins/httpapi:/usr/share/ansible/plugins/httpapi
description: Colon separated paths in which Ansible will search for HttpApi Plugins.
env: [{name: ANSIBLE_HTTPAPI_PLUGINS}]
ini:
@@ -695,7 +695,7 @@ DEFAULT_INTERNAL_POLL_INTERVAL:
- "The default corresponds to the value hardcoded in Ansible <= 2.1"
DEFAULT_INVENTORY_PLUGIN_PATH:
name: Inventory Plugins Path
- default: ~/.ansible/plugins/inventory:/usr/share/ansible/plugins/inventory
+ default: ~/.ansible/content/*/*/inventory_plugins:~/.ansible/plugins/inventory:/usr/share/ansible/plugins/inventory
description: Colon separated paths in which Ansible will search for Inventory Plugins.
env: [{name: ANSIBLE_INVENTORY_PLUGINS}]
ini:
@@ -779,7 +779,7 @@ DEFAULT_LOG_FILTER:
DEFAULT_LOOKUP_PLUGIN_PATH:
name: Lookup Plugins Path
description: Colon separated paths in which Ansible will search for Lookup Plugins.
- default: ~/.ansible/plugins/lookup:/usr/share/ansible/plugins/lookup
+ default: ~/.ansible/content/*/*/lookup_plugins:~/.ansible/plugins/lookup:/usr/share/ansible/plugins/lookup
env: [{name: ANSIBLE_LOOKUP_PLUGINS}]
ini:
- {key: lookup_plugins, section: defaults}
@@ -833,7 +833,7 @@ DEFAULT_MODULE_NAME:
DEFAULT_MODULE_PATH:
name: Modules Path
description: Colon separated paths in which Ansible will search for Modules.
- default: ~/.ansible/plugins/modules:/usr/share/ansible/plugins/modules
+ default: ~/.ansible/content/*/*/modules:~/.ansible/plugins/modules:/usr/share/ansible/plugins/modules
env: [{name: ANSIBLE_LIBRARY}]
ini:
- {key: library, section: defaults}
@@ -853,14 +853,14 @@ DEFAULT_MODULE_SET_LOCALE:
DEFAULT_MODULE_UTILS_PATH:
name: Module Utils Path
description: Colon separated paths in which Ansible will search for Module utils files, which are shared by modules.
- default: ~/.ansible/plugins/module_utils:/usr/share/ansible/plugins/module_utils
+ default: ~/.ansible/content/*/*/module_utils:~/.ansible/plugins/module_utils:/usr/share/ansible/plugins/module_utils
env: [{name: ANSIBLE_MODULE_UTILS}]
ini:
- {key: module_utils, section: defaults}
type: pathspec
DEFAULT_NETCONF_PLUGIN_PATH:
name: Netconf Plugins Path
- default: ~/.ansible/plugins/netconf:/usr/share/ansible/plugins/netconf
+ default: ~/.ansible/content/*/*/netconf_plugins:~/.ansible/plugins/netconf:/usr/share/ansible/plugins/netconf
description: Colon separated paths in which Ansible will search for Netconf Plugins.
env: [{name: ANSIBLE_NETCONF_PLUGINS}]
ini:
@@ -1054,7 +1054,7 @@ DEFAULT_STRATEGY:
DEFAULT_STRATEGY_PLUGIN_PATH:
name: Strategy Plugins Path
description: Colon separated paths in which Ansible will search for Strategy Plugins.
- default: ~/.ansible/plugins/strategy:/usr/share/ansible/plugins/strategy
+ default: ~/.ansible/content/*/*/strategy_plugins:~/.ansible/plugins/strategy:/usr/share/ansible/plugins/strategy
env: [{name: ANSIBLE_STRATEGY_PLUGINS}]
ini:
- {key: strategy_plugins, section: defaults}
@@ -1167,7 +1167,7 @@ DEFAULT_TASK_INCLUDES_STATIC:
alternatives: None, as its already built into the decision between include_tasks and import_tasks
DEFAULT_TERMINAL_PLUGIN_PATH:
name: Terminal Plugins Path
- default: ~/.ansible/plugins/terminal:/usr/share/ansible/plugins/terminal
+ default: ~/.ansible/content/*/*/terminal_plugins:~/.ansible/plugins/terminal:/usr/share/ansible/plugins/terminal
description: Colon separated paths in which Ansible will search for Terminal Plugins.
env: [{name: ANSIBLE_TERMINAL_PLUGINS}]
ini:
@@ -1176,7 +1176,7 @@ DEFAULT_TERMINAL_PLUGIN_PATH:
DEFAULT_TEST_PLUGIN_PATH:
name: Jinja2 Test Plugins Path
description: Colon separated paths in which Ansible will search for Jinja2 Test Plugins.
- default: ~/.ansible/plugins/test:/usr/share/ansible/plugins/test
+ default: ~/.ansible/content/*/*/test_plugins:~/.ansible/plugins/test:/usr/share/ansible/plugins/test
env: [{name: ANSIBLE_TEST_PLUGINS}]
ini:
- {key: test_plugins, section: defaults}
@@ -1209,7 +1209,7 @@ DEFAULT_UNDEFINED_VAR_BEHAVIOR:
type: boolean
DEFAULT_VARS_PLUGIN_PATH:
name: Vars Plugins Path
- default: ~/.ansible/plugins/vars:/usr/share/ansible/plugins/vars
+ default: ~/.ansible/content/*/*/vars_plugins:~/.ansible/plugins/vars:/usr/share/ansible/plugins/vars
description: Colon separated paths in which Ansible will search for Vars Plugins.
env: [{name: ANSIBLE_VARS_PLUGINS}]
ini:
diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py
index 1c22b634d2..8dd3ed3b96 100644
--- a/lib/ansible/plugins/loader.py
+++ b/lib/ansible/plugins/loader.py
@@ -58,7 +58,7 @@ class PluginLoader:
elif not config:
config = []
- self.config = config
+ self.config = self.expand_path_globs(config)
if class_name not in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
@@ -111,6 +111,19 @@ class PluginLoader:
PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
)
+ def expand_path_globs(self, paths):
+ generated = []
+ for path in paths:
+ if 'content/*' not in path:
+ generated.append(path)
+ continue
+
+ dirs = glob.glob(path)
+ if dirs:
+ generated += dirs
+
+ return generated
+
def format_paths(self, paths):
''' Returns a string suitable for printing of the search path '''
@@ -301,6 +314,21 @@ class PluginLoader:
if full_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][full_name] = full_path
+ # MAZER
+ if '/content/' in path:
+ path_elements = path.split(os.path.sep)
+ content_index = path_elements.index('content')
+ user_namespace = path_elements[content_index + 1]
+ repository_name = path_elements[content_index + 2]
+
+ pyfqn = (user_namespace + '.' + repository_name + '.' + base_name).replace('-', '_')
+ fqn = user_namespace + '.' + repository_name + '.' + base_name
+ rqn = repository_name + '.' + base_name
+
+ for ext in ['', extension]:
+ for qn in [pyfqn, fqn, rqn]:
+ self._plugin_path_cache[ext][qn] = full_path
+
self._searched_paths.add(path)
try:
return pull_cache[name]
@@ -355,6 +383,8 @@ class PluginLoader:
warnings.simplefilter("ignore", RuntimeWarning)
with open(path, 'rb') as module_file:
module = imp.load_source(full_name, path, module_file)
+ #if 'newfilter' in path:
+ # import epdb; epdb.st()
return module
def _update_object(self, obj, name, path):
@@ -469,6 +499,9 @@ class PluginLoader:
for i in self._get_paths():
all_matches.extend(glob.glob(os.path.join(i, "*.py")))
+ #if 'filter' in i:
+ # import epdb; epdb.st()
+
loaded_modules = set()
for path in sorted(all_matches, key=os.path.basename):
name = os.path.splitext(path)[0]
@@ -486,6 +519,7 @@ class PluginLoader:
continue
if path not in self._module_cache:
+ import q; q('_load_module_source', name, path)
try:
module = self._load_module_source(name, path)
except Exception as e:
@@ -493,6 +527,9 @@ class PluginLoader:
self._module_cache[path] = module
found_in_cache = False
+ #if 'newfilter' in path:
+ # import epdb; epdb.st()
+
try:
obj = getattr(self._module_cache[path], self.class_name)
except AttributeError as e:
@@ -523,6 +560,29 @@ class PluginLoader:
self._load_config_defs(basename, path)
self._update_object(obj, basename, path)
+
+ # MAZER
+ if hasattr(obj, 'filters') and '/content/' in path:
+ path_elements = path.split(os.path.sep)
+ content_index = path_elements.index('content')
+ namespace = path_elements[content_index + 1]
+ repository = path_elements[content_index + 2]
+
+ filter_dict = obj.filters()
+ for key in filter_dict.keys():
+ newkeys = [
+ namespace + '.' + repository + '.' + key,
+ repository + '.' + key
+ ]
+ for nk in newkeys:
+ nk = nk.replace('-', '_')
+ filter_dict[nk] = filter_dict[key]
+
+ def patch_filters():
+ return filter_dict
+
+ obj.filters = patch_filters
+
yield obj
@@ -620,6 +680,7 @@ def _load_plugin_filter():
raise AnsibleError('The stat module was specified in the module blacklist file, {0}, but'
' Ansible will not function without the stat module. Please remove stat'
' from the blacklist.'.format(filter_cfg))
+
return filters
# Smoke-test playbook: exercise mazer-installed content by every name form
# (pythonic FQN, FQN, repo-qualified name, short name).
- hosts: localhost
  connection: local
  gather_facts: False
  tasks:
    #- shell: whoami

    ################# MODULES ##################
    - name: validate calling module with the pythonic FQN
      jctanner.ansible_testing_content.newmodule:

    - name: validate calling module with the FQN
      jctanner.ansible-testing-content.newmodule:

    - name: validate calling module with the RQN
      ansible-testing-content.newmodule:

    - name: validate calling short name
      newmodule:

    ################# FILTERS ##################
    - name: call filter plugin by pythonic FQN
      debug:
        msg: "{{ 'cat' | jctanner.ansible_testing_content.newfilter }}"

    - name: call filter plugin by pythonic RQN
      debug:
        msg: "{{ 'cat' | ansible_testing_content.newfilter }}"

    - name: call filter plugin by short name
      debug:
        msg: "{{ 'cat' | newfilter }}"

    ################# LOOKUPS ##################
    - name: call lookup plugin by pythonic FQN
      debug:
        msg: "{{ lookup('jctanner.ansible_testing_content.newlookup') }}"

    - name: call lookup plugin by pythonic RQN
      debug:
        msg: "{{ lookup('ansible_testing_content.newlookup') }}"

    - name: call lookup plugin by short name
      debug:
        msg: "{{ lookup('newlookup') }}"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment