
Dev guide reorg continues (#17732)

scottb 2016-09-23 13:48:37 -07:00 committed by Brian Coca
parent 0d94d39689
commit f46de631af
16 changed files with 3068 additions and 1 deletion


@@ -0,0 +1,225 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " epub3 to make an epub3"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/dev_guide.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/dev_guide.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/dev_guide"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/dev_guide"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: epub3
epub3:
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
@echo
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.PHONY: dummy
dummy:
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
@echo
@echo "Build finished. Dummy builder generates no files."


@@ -0,0 +1,417 @@
# -*- coding: utf-8 -*-
#
# dev_guide documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 15 13:24:02 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index2'
# General information about the project.
project = u'dev_guide'
copyright = u'2016, Ansible'
author = u'Ansible'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'dev_guide v1.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'dev_guidedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dev_guide.tex', u'dev\\_guide Documentation',
u'Ansible', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dev_guide', u'dev_guide Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dev_guide', u'dev_guide Documentation',
author, 'dev_guide', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = project
# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#
# epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#
# epub_tocdepth = 3
# Allow duplicate toc entries.
#
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#
# epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#
# epub_fix_images = False
# Scale large images.
#
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# epub_show_urls = 'inline'
# If false, no index is generated.
#
# epub_use_index = True


@@ -0,0 +1,18 @@
Developer Information
`````````````````````
Learn how to build modules of your own in any language, and also how to extend Ansible through several kinds of plugins. Explore Ansible's Python API and write Python plugins to integrate with other solutions in your environment.
.. toctree::
:maxdepth: 1
developing_api
developing_inventory
developing_modules
developing_plugins
developing_core
developing_test_pr
developing_releases
Developers will also likely be interested in the fully-discoverable REST API that comes with :doc:`tower`. It's great for embedding Ansible in all manner of applications.


@@ -0,0 +1,194 @@
Python API
==========
.. contents:: Topics
Please note that while we make this API available, it is not intended for direct consumption; it is here
to support the Ansible command line tools. We try not to make breaking changes, but we reserve the
right to do so at any time if it makes sense for the Ansible toolset.
The following documentation is provided for those that still want to use the API directly, but be mindful this is not something the Ansible team supports.
There are several interesting ways to use Ansible from an API perspective. You can use
the Ansible python API to control nodes, you can extend Ansible to respond to various python events, you can
write various plugins, and you can plug in inventory data from external data sources. This document
covers the execution and Playbook API at a basic level.
If you are looking to use Ansible programmatically from something other than Python, trigger events asynchronously,
or have access control and logging demands, take a look at :doc:`tower`
as it has a very nice REST API that provides all of these things at a higher level.
Ansible is built around its own API, so you have a considerable amount of power across the board.
This chapter discusses the Python API.
.. _python_api:
The Python API is very powerful, and it is how all the Ansible CLI tools are implemented.
In version 2.0 the Ansible core engine was largely rewritten, and the API changed along with it.
.. note:: Ansible relies on forking processes, as such the API is not thread safe.
.. _python_api_20:
Python API 2.0
--------------
In 2.0 things get a bit more complicated to start, but you end up with much more discrete and readable classes::
    #!/usr/bin/env python

    import json
    from collections import namedtuple
    from ansible.parsing.dataloader import DataLoader
    from ansible.vars import VariableManager
    from ansible.inventory import Inventory
    from ansible.playbook.play import Play
    from ansible.executor.task_queue_manager import TaskQueueManager
    from ansible.plugins.callback import CallbackBase

    class ResultCallback(CallbackBase):
        """A sample callback plugin used for performing an action as results come in

        If you want to collect all results into a single object for processing at
        the end of the execution, look into utilizing the ``json`` callback plugin
        or writing your own custom callback plugin
        """
        def v2_runner_on_ok(self, result, **kwargs):
            """Print a json representation of the result

            This method could store the result in an instance attribute for retrieval later
            """
            host = result._host
            print(json.dumps({host.name: result._result}, indent=4))

    Options = namedtuple('Options', ['connection', 'module_path', 'forks', 'become', 'become_method', 'become_user', 'check'])

    # initialize needed objects
    variable_manager = VariableManager()
    loader = DataLoader()
    options = Options(connection='local', module_path='/path/to/mymodules', forks=100, become=None, become_method=None, become_user=None, check=False)
    passwords = dict(vault_pass='secret')

    # Instantiate our ResultCallback for handling results as they come in
    results_callback = ResultCallback()

    # create inventory and pass to var manager
    inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list='localhost')
    variable_manager.set_inventory(inventory)

    # create play with tasks
    play_source = dict(
        name = "Ansible Play",
        hosts = 'localhost',
        gather_facts = 'no',
        tasks = [
            dict(action=dict(module='shell', args='ls'), register='shell_out'),
            dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}')))
        ]
    )
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # actually run it
    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            stdout_callback=results_callback,  # Use our custom callback instead of the ``default`` callback plugin
        )
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()
.. _python_api_old:
Python API pre 2.0
------------------
It's pretty simple::
    import ansible.runner

    runner = ansible.runner.Runner(
        module_name='ping',
        module_args='',
        pattern='web*',
        forks=10
    )
    datastructure = runner.run()
The run method returns results per host, grouped by whether they
could be contacted or not. Return types are module specific, as
expressed in the :doc:`modules` documentation::
    {
        "dark" : {
            "web1.example.com" : "failure message"
        },
        "contacted" : {
            "web2.example.com" : 1
        }
    }
A module can return any type of JSON data it wants, so Ansible can
be used as a framework to rapidly build powerful applications and scripts.
.. _detailed_api_old_example:
Detailed API Example
````````````````````
The following script prints out the uptime information for all hosts::
    #!/usr/bin/python

    import ansible.runner
    import sys

    # construct the ansible runner and execute on all hosts
    results = ansible.runner.Runner(
        pattern='*', forks=10,
        module_name='command', module_args='/usr/bin/uptime',
    ).run()

    if results is None:
        print("No hosts found")
        sys.exit(1)

    print("UP ***********")
    for (hostname, result) in results['contacted'].items():
        if 'failed' not in result:
            print("%s >>> %s" % (hostname, result['stdout']))

    print("FAILED *******")
    for (hostname, result) in results['contacted'].items():
        if 'failed' in result:
            print("%s >>> %s" % (hostname, result['msg']))

    print("DOWN *********")
    for (hostname, result) in results['dark'].items():
        print("%s >>> %s" % (hostname, result))
Advanced programmers may also wish to read the source of Ansible itself,
for it uses the API (with all available options) to implement the ``ansible``
command line tools (``lib/ansible/cli/``).
.. seealso::
:doc:`developing_inventory`
Developing dynamic inventory integrations
:doc:`developing_modules`
How to develop modules
:doc:`developing_plugins`
How to develop plugins
`Development Mailing List <http://groups.google.com/group/ansible-devel>`_
Mailing list for development topics
`irc.freenode.net <http://irc.freenode.net>`_
#ansible IRC chat channel


@@ -0,0 +1,24 @@
Developing the Ansible Core Engine
==================================
Although many of the pieces of the Ansible Core Engine are plugins that can be
swapped out via playbook directives or configuration, there are still pieces
of the Engine that are not modular. The documents here give insight into how
those pieces work together.
.. toctree::
:maxdepth: 1
developing_program_flow_modules
.. seealso::
:doc:`developing_api`
Learn about the Python API for task execution
:doc:`developing_plugins`
Learn about developing plugins
`Mailing List <http://groups.google.com/group/ansible-devel>`_
The development mailing list
`irc.freenode.net <http://irc.freenode.net>`_
#ansible-devel IRC chat channel


@@ -0,0 +1,99 @@
Developing Dynamic Inventory Sources
====================================
.. contents:: Topics
:local:
As described in :doc:`intro_dynamic_inventory`, Ansible can pull inventory information from dynamic sources, including cloud sources.
How do we write a new one?
Simple! We just create a script or program that can print JSON in the right format when fed the proper arguments.
You can do this in any language.
.. _inventory_script_conventions:
Script Conventions
``````````````````
When the external node script is called with the single argument ``--list``, the script must output a JSON encoded hash/dictionary of all the groups to be managed to stdout. Each group's value should be either a hash/dictionary containing a list of each host/IP, potential child groups, and potential group variables, or simply a list of host/IP addresses, like so::
    {
        "databases" : {
            "hosts" : [ "host1.example.com", "host2.example.com" ],
            "vars" : {
                "a" : true
            }
        },
        "webservers" : [ "host2.example.com", "host3.example.com" ],
        "atlanta" : {
            "hosts" : [ "host1.example.com", "host4.example.com", "host5.example.com" ],
            "vars" : {
                "b" : false
            },
            "children": [ "marietta", "5points" ]
        },
        "marietta" : [ "host6.example.com" ],
        "5points" : [ "host7.example.com" ]
    }
.. versionadded:: 1.0
Before version 1.0, each group could only have a list of hostnames/IP addresses, like the webservers, marietta, and 5points groups above.
When called with the arguments ``--host <hostname>`` (where <hostname> is a host from above), the script must print either an empty JSON
hash/dictionary, or a hash/dictionary of variables to make available to templates and playbooks. Printing variables is optional;
if the script does not wish to do this, it should print an empty hash/dictionary. A variables response looks like this::
    {
        "favcolor" : "red",
        "ntpserver" : "wolf.example.com",
        "monitoring" : "pack.example.com"
    }
.. _inventory_script_tuning:
Tuning the External Inventory Script
````````````````````````````````````
.. versionadded:: 1.3
The stock inventory script system detailed above works for all versions of Ansible, but calling
``--host`` for every host can be rather expensive, especially if it involves expensive API calls to
a remote subsystem. In Ansible
1.3 or later, if the inventory script returns a top level element called "_meta", it is possible
to return all of the host variables in one inventory script call. When this meta element contains
a value for "hostvars", the inventory script will not be invoked with ``--host`` for each host. This
results in a significant performance increase for large numbers of hosts, and also makes client
side caching easier to implement for the inventory script.
The data to be added to the top level JSON dictionary looks like this::
    {

        # results of inventory script as above go here
        # ...

        "_meta" : {
            "hostvars" : {
                "moocow.example.com" : { "asdf" : 1234 },
                "llama.example.com" : { "asdf" : 5678 }
            }
        }
    }
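Putting these pieces together, here is a hedged sketch of a complete inventory script (the group layout, hostnames, and variables are invented for illustration; because ``--list`` returns ``_meta``, Ansible should never need to call ``--host``)::

    #!/usr/bin/env python
    # Minimal dynamic inventory sketch: implements --list (with _meta) and --host.
    import json
    import sys

    INVENTORY = {
        "webservers" : {
            "hosts" : [ "host2.example.com", "host3.example.com" ],
            "vars" : { "http_port" : 80 }
        },
        "_meta" : {
            "hostvars" : {
                "host2.example.com" : { "asdf" : 1234 },
                "host3.example.com" : { "asdf" : 5678 }
            }
        }
    }

    if len(sys.argv) == 2 and sys.argv[1] == "--list":
        print(json.dumps(INVENTORY))
    elif len(sys.argv) == 3 and sys.argv[1] == "--host":
        # Because --list already returned _meta, Ansible will not normally
        # call --host; answer with an empty hash to stay well behaved.
        print(json.dumps({}))
    else:
        sys.stderr.write("Usage: %s --list | --host <hostname>\n" % sys.argv[0])
        sys.exit(1)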
.. seealso::
:doc:`developing_api`
Python API to Playbooks and Ad Hoc Task Execution
:doc:`developing_modules`
How to develop modules
:doc:`developing_plugins`
How to develop plugins
`Ansible Tower <http://ansible.com/ansible-tower>`_
REST API endpoint and GUI for Ansible, syncs with dynamic inventory
`Development Mailing List <http://groups.google.com/group/ansible-devel>`_
Mailing list for development topics
`irc.freenode.net <http://irc.freenode.net>`_
#ansible IRC chat channel


@@ -0,0 +1,48 @@
Appendix: Module Utilities
``````````````````````````
Ansible provides a number of module utilities that provide helper functions that you can use when developing your own modules. The `basic.py` module utility provides the main entry point for accessing the Ansible library, and all Ansible modules must, at minimum, import from basic.py::
    from ansible.module_utils.basic import *
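Other module utilities are imported the same way. As a hedged example (the ``url`` parameter is invented for illustration), a module that needs HTTP access might use ``fetch_url`` from the urls utility, which honours the module's proxy and certificate settings::

    from ansible.module_utils.basic import AnsibleModule
    from ansible.module_utils.urls import fetch_url

    def main():
        module = AnsibleModule(argument_spec=dict(
            url=dict(required=True)
        ))
        # fetch_url returns the response object and an info dict whose
        # 'status' and 'msg' keys describe the outcome
        response, info = fetch_url(module, module.params['url'])
        if info['status'] != 200:
            module.fail_json(msg="request failed: %s" % info.get('msg', 'unknown error'))
        module.exit_json(changed=False, status=info['status'], body=response.read())

    if __name__ == '__main__':
        main()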
The following is a list of module_utils files and a general description. The module utility source code lives in the :file:`./lib/ansible/module_utils` directory under your main Ansible path - for more details on any specific module utility, please see the source code.
- a10.py - Utilities used by the a10_server module to manage A10 Networks devices.
- api.py - Adds shared support for generic API modules.
- asa.py - Module support utilities for managing Cisco ASA network devices.
- azure_rm_common.py - Definitions and utilities for Microsoft Azure Resource Manager template deployments.
- basic.py - General definitions and helper utilities for Ansible modules.
- cloudstack.py - Utilities for CloudStack modules.
- database.py - Miscellaneous helper functions for PostgreSQL and MySQL.
- docker_common.py - Definitions and helper utilities for modules working with Docker.
- ec2.py - Definitions and utilities for modules working with Amazon EC2.
- eos.py - Helper functions for modules working with EOS networking devices.
- f5.py - Helper functions for modules working with F5 networking devices.
- facts.py - Helper functions for modules that return facts.
- gce.py - Definitions and helper functions for modules that work with Google Compute Engine resources.
- ios.py - Definitions and helper functions for modules that manage Cisco IOS networking devices.
- iosxr.py - Definitions and helper functions for modules that manage Cisco IOS-XR networking devices.
- ismount.py - Contains a single helper function that fixes os.path.ismount.
- junos.py - Definitions and helper functions for modules that manage Junos networking devices.
- known_hosts.py - Utilities for working with the known_hosts file.
- mysql.py - Allows modules to connect to a MySQL instance.
- netcfg.py - Configuration utility functions for use by networking modules.
- netcmd.py - Defines commands and comparison operators for use in networking modules.
- network.py - Functions for running commands on networking devices.
- nxos.py - Contains definitions and helper functions specific to Cisco NXOS networking devices.
- openstack.py - Utilities for modules that work with OpenStack instances.
- openswitch.py - Definitions and helper functions for modules that manage OpenSwitch devices.
- powershell.ps1 - Utilities for working with Microsoft Windows clients.
- pycompat24.py - Exception workaround for Python 2.4.
- rax.py - Definitions and helper functions for modules that work with Rackspace resources.
- redhat.py - Functions for modules that manage Red Hat Network registration and subscriptions.
- service.py - Contains utilities to enable modules to work with Linux services (placeholder, not in use).
- shell.py - Functions to allow modules to create shells and work with shell commands.
- six.py - Module utils for working with the six Python 2 and 3 compatibility library.
- splitter.py - String splitting and manipulation utilities for working with Jinja2 templates.
- urls.py - Utilities for working with HTTP and HTTPS requests.
- vca.py - Contains utilities for modules that work with VMware vCloud Air.
- vmware.py - Contains utilities for modules that work with VMware vSphere VMs.
- vyos.py - Definitions and functions for working with VyOS networking.


@@ -0,0 +1,815 @@
Developing Modules
==================
.. contents:: Topics
Ansible modules are reusable, standalone scripts that can be used by the Ansible API,
or by the :command:`ansible` or :command:`ansible-playbook` programs. They
return information to Ansible by printing a JSON string to stdout before
exiting. They take arguments in one of several ways, which we'll go into
as we work through this tutorial.
See :doc:`modules` for a list of various ones developed in core.
Modules can be written in any language and are found in the path specified
by :envvar:`ANSIBLE_LIBRARY` or the ``--module-path`` command line option.
By default, everything that ships with Ansible is pulled from its source tree, but
additional paths can be added.
The directory :file:`./library`, alongside your top level :term:`playbooks`, is also automatically
added as a search directory.
Should you develop an interesting Ansible module, consider sending a pull request to the
`modules-extras project <https://github.com/ansible/ansible-modules-extras>`_. There's also a core
repo for more established and widely used modules. "Extras" modules may be promoted to core periodically,
but there's no fundamental difference in the end - both ship with Ansible, all in one package, regardless
of how you acquire Ansible.
.. _module_dev_tutorial:
Tutorial
````````
Let's build a very basic module to get and set the system time. For starters, let's build
a module that just outputs the current time.
We are going to use Python here but any language is possible. Only File I/O and outputting to standard
out are required. So, bash, C++, clojure, Python, Ruby, whatever you want
is fine.
Now Python Ansible modules contain some extremely powerful shortcuts (that all the core modules use)
but first we are going to build a module the very hard way. The reason we do this is because modules
written in any language OTHER than Python are going to have to do exactly this. We'll show the easy
way later.
So, here's an example. You would never really need to build a module to set the system time;
the 'command' module could already be used to do this.
Reading the modules that come with Ansible (linked above) is a great way to learn how to write
modules. Keep in mind, though, that some modules in Ansible's source tree are internalisms,
so look at :ref:`service` or :ref:`yum`, and don't stare too closely into things like ``async_wrapper`` or
you'll turn to stone. Nobody ever executes ``async_wrapper`` directly.
Ok, let's get going with an example. We'll use Python. For starters, save this as a file named :file:`timetest.py`::
    #!/usr/bin/python

    import datetime
    import json

    date = str(datetime.datetime.now())
    print(json.dumps({
        "time" : date
    }))
.. _module_testing:
Testing Modules
````````````````
There's a useful test script in the source checkout for Ansible::
git clone git://github.com/ansible/ansible.git --recursive
source ansible/hacking/env-setup
For instructions on setting up Ansible from source, please see
:doc:`intro_installation`.
Let's run the script you just wrote with that::
ansible/hacking/test-module -m ./timetest.py
You should see output that looks something like this::
{'time': '2012-03-14 22:13:48.539183'}
If you did not, you might have a typo in your module, so recheck it and try again.
.. _reading_input:
Reading Input
`````````````
Let's modify the module to allow setting the current time. We'll do this by seeing
if a key value pair in the form `time=<string>` is passed in to the module.
Ansible internally saves arguments to an arguments file. So we must read the file
and parse it. The arguments file is just a string, so any form of arguments is legal.
Here we'll do some basic parsing to treat the input as key=value.
The example usage we are trying to achieve to set the time is::
time time="March 14 22:10"
If no time parameter is set, we'll just leave the time as is and return the current time.
.. note::
This is obviously an unrealistic idea for a module. You'd most likely just
use the command module. However, it makes for a decent tutorial.
Let's look at the code. Read the comments as we'll explain as we go. Note that this
is highly verbose because it's intended as an educational example. You can write modules
a lot shorter than this::
    #!/usr/bin/python

    # import some python modules that we'll use. These are all
    # available in Python's core

    import datetime
    import sys
    import json
    import os
    import shlex

    # read the argument string from the arguments file
    args_file = sys.argv[1]
    args_data = open(args_file).read()

    # For this module, we're going to do key=value style arguments.
    # Modules can choose to receive json instead by adding the string:
    #     WANT_JSON
    # somewhere in the file.
    # Modules can also take free-form arguments instead of key-value or json
    # but this is not recommended.

    arguments = shlex.split(args_data)
    for arg in arguments:

        # ignore any arguments without an equals in it
        if "=" in arg:

            (key, value) = arg.split("=", 1)

            # if setting the time, the key 'time'
            # will contain the value we want to set the time to

            if key == "time":

                # now we'll effect the change. Many modules
                # will strive to be 'idempotent', meaning they
                # will only make changes when the desired state
                # expressed to the module does not match
                # the current state. Look at 'service'
                # or 'yum' in the main git tree for an example
                # of how that might look.

                rc = os.system("date -s \"%s\"" % value)

                # always handle all possible errors
                #
                # when returning a failure, include 'failed'
                # in the return data, and explain the failure
                # in 'msg'. Both of these conventions are
                # required; however, additional keys and values
                # can be added.

                if rc != 0:
                    print(json.dumps({
                        "failed" : True,
                        "msg"    : "failed setting the time"
                    }))
                    sys.exit(1)

                # when things do not fail, we do not
                # have any restrictions on what kinds of
                # data are returned, but it's always a
                # good idea to include whether or not
                # a change was made, as that will allow
                # notifiers to be used in playbooks.

                date = str(datetime.datetime.now())
                print(json.dumps({
                    "time"    : date,
                    "changed" : True
                }))
                sys.exit(0)

    # if no parameters are sent, the module may or
    # may not error out, this one will just
    # return the time

    date = str(datetime.datetime.now())
    print(json.dumps({
        "time" : date
    }))
Let's test that module::
ansible/hacking/test-module -m ./timetest.py -a "time=\"March 14 12:23\""
This should return something like::
{"changed": true, "time": "2012-03-14 12:23:00.000307"}
.. _binary_module_reading_input:
Binary Modules Input
++++++++++++++++++++
Support for binary modules was added in Ansible 2.2. When Ansible detects a binary module, it will proceed to
supply the argument input as a file on ``argv[1]`` that is formatted as JSON. The JSON contents of that file
would resemble something similar to the following payload for a module accepting the same arguments as the
``ping`` module::
    {
        "data": "pong",
        "_ansible_verbosity": 4,
        "_ansible_diff": false,
        "_ansible_debug": false,
        "_ansible_check_mode": false,
        "_ansible_no_log": false
    }
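A Python module can opt into this same JSON arguments file by including the string ``WANT_JSON`` anywhere in its source. The following sketch (with invented echo behaviour) handles the arguments file exactly the way a binary module would::

    #!/usr/bin/python
    # WANT_JSON
    # With WANT_JSON present, Ansible writes the module arguments to a file
    # as JSON and passes the file path as argv[1] -- the same contract that
    # binary modules use.
    import json
    import sys

    with open(sys.argv[1]) as f:
        args = json.load(f)

    # Keys beginning with _ansible_ are internal settings; everything else
    # is a module parameter.
    data = args.get('data', 'pong')
    print(json.dumps({ "ping" : data, "changed" : False }))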
.. _module_provided_facts:
Module Provided 'Facts'
````````````````````````
The :ref:`setup` module that ships with Ansible provides many variables about a system that can be used in playbooks
and templates. However, it's possible to also add your own facts without modifying the system module. To do
this, just have the module return an ``ansible_facts`` key, like so, along with other return data::
    {
        "changed" : True,
        "rc" : 5,
        "ansible_facts" : {
            "leptons" : 5000,
            "colors" : {
                "red" : "FF0000",
                "white" : "FFFFFF"
            }
        }
    }
These 'facts' will be available to all statements called after that module (but not before) in the playbook.
A good idea might be to make a module called 'site_facts' and always call it at the top of each playbook, though
we're always open to improving the selection of core facts in Ansible as well.
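A hedged sketch of what such a 'site_facts' module might look like (the fact names and values are purely illustrative)::

    #!/usr/bin/python

    from ansible.module_utils.basic import AnsibleModule

    def main():
        module = AnsibleModule(argument_spec=dict())
        # Anything returned under ansible_facts is merged into the host's
        # variables for the remainder of the play.
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                site_datacenter='dc1',
                site_environment='staging'
            )
        )

    if __name__ == '__main__':
        main()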
.. _common_module_boilerplate:
Common Module Boilerplate
`````````````````````````
As mentioned, if you are writing a module in Python, there are some very powerful shortcuts you can use.
Modules are still transferred as one file, but an arguments file is no longer needed, so these are not
only shorter in terms of code, they are actually FASTER in terms of execution time.
Rather than mention these here, the best way to learn is to read some of the `source of the modules <https://github.com/ansible/ansible-modules-core>`_ that come with Ansible.
The 'group' and 'user' modules are reasonably non-trivial and showcase what this looks like.
Key parts include always importing the boilerplate code from
:mod:`ansible.module_utils.basic` like this::
    from ansible.module_utils.basic import AnsibleModule

    if __name__ == '__main__':
        main()
.. note::
Prior to Ansible-2.1.0, importing only what you used from
:mod:`ansible.module_utils.basic` did not work. You needed to use
a wildcard import like this::
    from ansible.module_utils.basic import *
And instantiating the module class like::
    def main():
        module = AnsibleModule(
            argument_spec = dict(
                state     = dict(default='present', choices=['present', 'absent']),
                name      = dict(required=True),
                enabled   = dict(required=True, type='bool'),
                something = dict(aliases=['whatever'])
            )
        )
The :class:`AnsibleModule` class provides lots of common code for handling returns, parses your arguments
for you, and allows you to check inputs.
Successful returns are made like this::
module.exit_json(changed=True, something_else=12345)
And failures are just as simple (where `msg` is a required parameter to explain the error)::
module.fail_json(msg="Something fatal happened")
There are also other useful functions in the module class, such as :func:`module.sha1(path)`. See
:file:`lib/ansible/module_utils/basic.py` in the source checkout for implementation details.
Again, modules developed this way are best tested with the :file:`hacking/test-module` script in the git
source checkout. Because of the magic involved, this is really the only way the scripts
can function outside of Ansible.
If submitting a module to Ansible's core code, which we encourage, use of
:class:`AnsibleModule` is required.
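Putting the boilerplate together, a minimal complete module might look like the sketch below (the ``command`` parameter and its uptime default are invented for illustration; ``run_command`` is one of the helpers :class:`AnsibleModule` provides)::

    #!/usr/bin/python

    from ansible.module_utils.basic import AnsibleModule

    def main():
        module = AnsibleModule(
            argument_spec = dict(
                command = dict(default='/usr/bin/uptime')
            )
        )
        # run_command executes the command and captures rc, stdout and stderr
        rc, out, err = module.run_command(module.params['command'])
        if rc != 0:
            module.fail_json(msg="command failed", rc=rc, stderr=err)
        module.exit_json(changed=False, stdout=out)

    if __name__ == '__main__':
        main()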
.. _developing_for_check_mode:
Check Mode
``````````
.. versionadded:: 1.1
Modules may optionally support check mode. If the user runs Ansible in check
mode, the module should try to predict whether changes will occur.
For your module to support check mode, you must pass ``supports_check_mode=True``
when instantiating the AnsibleModule object. The AnsibleModule.check_mode attribute
will evaluate to True when check mode is enabled. For example::
    module = AnsibleModule(
        argument_spec = dict(...),
        supports_check_mode=True
    )

    if module.check_mode:
        # Check if any changes would be made but don't actually make those changes
        module.exit_json(changed=check_if_system_state_would_be_changed())
Remember that, as a module developer, you are responsible for ensuring that no
system state is altered when the user enables check mode.
If your module does not support check mode, when the user runs Ansible in check
mode, your module will simply be skipped.
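For example, here is a sketch of a module that manages an invented marker file and honours check mode by predicting the change rather than making it::

    #!/usr/bin/python

    import os
    from ansible.module_utils.basic import AnsibleModule

    def main():
        module = AnsibleModule(
            argument_spec = dict(
                path = dict(required=True)
            ),
            supports_check_mode=True
        )
        path = module.params['path']
        would_change = not os.path.exists(path)
        if module.check_mode:
            # Report what would happen without touching the system
            module.exit_json(changed=would_change)
        if would_change:
            open(path, 'w').close()
        module.exit_json(changed=would_change)

    if __name__ == '__main__':
        main()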
.. _module_dev_pitfalls:
Common Pitfalls
```````````````
You should also never do this in a module::

    print("some status message")

This is because the output is supposed to be valid JSON.
Modules must not output anything on standard error, because the system will merge
standard out with standard error and prevent the JSON from parsing. Capturing standard
error and returning it as a variable in the JSON on standard out is fine, and is, in fact,
how the command module is implemented.
If a module returns stderr or otherwise fails to produce valid JSON, the actual output
will still be shown in Ansible, but the command will not succeed.
Always use the hacking/test-module script when developing modules; it will warn
you about these kinds of things.
.. _module_dev_conventions:
Conventions/Recommendations
```````````````````````````
As a reminder from the example code above, here are some basic conventions
and guidelines:
* If the module is addressing an object, the parameter for that object should be called 'name' whenever possible, or accept 'name' as an alias.
* If you have a company module that returns facts specific to your installations, a good name for this module is `site_facts`.
* Modules accepting boolean status should generally accept 'yes', 'no', 'true', 'false', or anything else a user may likely throw at them. The AnsibleModule common code supports this with "type='bool'".
* Include a minimum of dependencies if possible. If there are dependencies, document them at the top of the module file, and have the module raise JSON error messages when the import fails.
* Modules must be self-contained in one file to be auto-transferred by ansible.
* If packaging modules in an RPM, they only need to be installed on the control machine and should be dropped into /usr/share/ansible. This is entirely optional and up to you.
* Modules must output valid JSON only. The toplevel return type must be a hash (dictionary) although they can be nested. Lists or simple scalar values are not supported, though they can be trivially contained inside a dictionary.
* In the event of failure, a key of 'failed' should be included, along with a string explanation in 'msg'. Modules that raise tracebacks (stacktraces) are generally considered 'poor' modules, though Ansible can deal with these returns and will automatically convert anything unparseable into a failed result. If you are using the AnsibleModule common Python code, the 'failed' element will be included for you automatically when you call 'fail_json'.
* Return codes from modules are actually not significant, but continue on with 0=success and non-zero=failure for reasons of future-proofing.
* As results from many hosts will be aggregated at once, modules should return only relevant output. Returning the entire contents of a log file is generally bad form.
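An argument_spec that follows these conventions might look like the sketch below (the parameters are invented for illustration)::

    from ansible.module_utils.basic import AnsibleModule

    module = AnsibleModule(
        argument_spec = dict(
            # address the object the module manages as 'name'
            name     = dict(required=True, aliases=['hostname']),
            # type='bool' accepts yes/no/true/false and similar values
            enabled  = dict(type='bool', default=True),
            # keep secrets out of logs
            password = dict(no_log=True)
        )
    )
    module.exit_json(changed=False)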
.. _module_documenting:
Documenting Your Module
```````````````````````
All modules included in the CORE distribution must have a
``DOCUMENTATION`` string. This string MUST be a valid YAML document
which conforms to the schema defined below. You may find it easier to
start writing your ``DOCUMENTATION`` string in an editor with YAML
syntax highlighting before you include it in your Python file.
.. _module_doc_example:
Example
+++++++
See an example documentation string in the checkout under `examples/DOCUMENTATION.yml <https://github.com/ansible/ansible/blob/devel/examples/DOCUMENTATION.yml>`_.
Include it in your module file like this::
    #!/usr/bin/python
    # Copyright header....

    DOCUMENTATION = '''
    ---
    module: modulename
    short_description: This is a sentence describing the module
    # ... snip ...
    '''
If an argument takes both C(True)/C(False) and C(Yes)/C(No), the documentation should use C(True) and C(False).
The ``description`` and ``notes`` fields
support formatting with some special macros.
These formatting functions are ``U()``, ``M()``, ``I()``, and ``C()``
for URL, module, italic, and constant-width respectively. It is suggested
to use ``C()`` for file and option names, and ``I()`` when referencing
parameters; module names should be specified as ``M(module)``.
Examples (which typically contain colons, quotes, etc.) are difficult
to format with YAML, so these must be
written in plain text in an ``EXAMPLES`` string within the module
like this::
    EXAMPLES = '''
    - action: modulename opt1=arg1 opt2=arg2
    '''
The EXAMPLES section, just like the documentation section, is required in
all module pull requests for new modules.
The RETURN section documents what the module returns. For each value returned,
provide a ``description``, in what circumstances the value is ``returned``,
the ``type`` of the value and a ``sample``. For example, from
the ``copy`` module::
    RETURN = '''
    dest:
        description: destination file/path
        returned: success
        type: string
        sample: "/path/to/file.txt"
    src:
        description: source file used for the copy on the target machine
        returned: changed
        type: string
        sample: "/home/httpd/.ansible/tmp/ansible-tmp-1423796390.97-147729857856000/source"
    md5sum:
        description: md5 checksum of the file after running copy
        returned: when supported
        type: string
        sample: "2a5aeecc61dc98c4d780b14b330e3282"
    ...
    '''
.. _module_dev_testing:
Building & Testing
++++++++++++++++++
Put your completed module file into the 'library' directory and then
run the command: ``make webdocs``. The new 'modules.html' file will be
built and appear in the 'docsite/' directory.
.. tip::
If you're having a problem with the syntax of your YAML you can
validate it on the `YAML Lint <http://www.yamllint.com/>`_ website.
.. tip::
You can set the environment variable ANSIBLE_KEEP_REMOTE_FILES=1 on the controlling host to prevent ansible from
deleting the remote files so you can debug your module.
.. _debugging_ansiblemodule_based_modules:
Debugging AnsibleModule-based modules
`````````````````````````````````````
.. tip::
If you're using the :file:`hacking/test-module` script then most of this
is taken care of for you. If you need to do some debugging of the module
on the remote machine that the module will actually run on or when the
module is used in a playbook then you may need to use this information
instead of relying on test-module.
Starting with Ansible-2.1.0, AnsibleModule-based modules are put together as
a zip file consisting of the module file and the various python module
boilerplate inside of a wrapper script instead of as a single file with all of
the code concatenated together. Without some help, this can be harder to
debug as the file needs to be extracted from the wrapper in order to see
what's actually going on in the module. Luckily the wrapper script provides
some helper methods to do just that.
If you are using Ansible with the :envvar:`ANSIBLE_KEEP_REMOTE_FILES`
environment variable to keep the remote module file, here's a sample of how
your debugging session will start::
$ ANSIBLE_KEEP_REMOTE_FILES=1 ansible localhost -m ping -a 'data=debugging_session' -vvv
<127.0.0.1> ESTABLISH LOCAL CONNECTION FOR USER: badger
<127.0.0.1> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo $HOME/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 `" && echo "` echo $HOME/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 `" )'
<127.0.0.1> PUT /var/tmp/tmpjdbJ1w TO /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/ping
<127.0.0.1> EXEC /bin/sh -c 'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/ping'
localhost | SUCCESS => {
"changed": false,
"invocation": {
"module_args": {
"data": "debugging_session"
},
"module_name": "ping"
},
"ping": "debugging_session"
}
Setting :envvar:`ANSIBLE_KEEP_REMOTE_FILES` to ``1`` tells Ansible to keep the
remote module files instead of deleting them after the module finishes
executing. Giving Ansible the ``-vvv`` option makes Ansible more verbose.
That way it prints the file name of the temporary module file for you to see.
If you want to examine the wrapper file you can. It will show a small python
script with a large, base64 encoded string. The string contains the module
that is going to be executed. Run the wrapper's explode command to turn the
string into some python files that you can work with::
$ python /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/ping explode
Module expanded into:
/home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/debug_dir
When you look into the debug_dir you'll see a directory structure like this::
    ├── ansible_module_ping.py
    ├── args
    └── ansible
        ├── __init__.py
        └── module_utils
            ├── basic.py
            └── __init__.py
* :file:`ansible_module_ping.py` is the code for the module itself. The name
is based on the name of the module with a prefix so that we don't clash with
any other python module names. You can modify this code to see what effect
it would have on your module.
* The :file:`args` file contains a JSON string. The string is a dictionary
containing the module arguments and other variables that Ansible passes into
the module to change its behaviour. If you want to modify the parameters
that are passed to the module, this is the file to do it in.
* The :file:`ansible` directory contains code from
:mod:`ansible.module_utils` that is used by the module. Ansible includes
files for any :mod:`ansible.module_utils` imports in the module, but
no files from any other module. So if your module uses
:mod:`ansible.module_utils.urls`, Ansible will include it for you, but if
your module includes :mod:`requests` then you'll have to make sure that
the python requests library is installed on the system before running the
module. You can modify files in this directory if you suspect that the
module is having a problem in some of this boilerplate code rather than in
the module code you have written.
Once you edit the code or arguments in the exploded tree you need some way to
run it. There's a separate wrapper subcommand for this::
$ python /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/ping execute
{"invocation": {"module_args": {"data": "debugging_session"}}, "changed": false, "ping": "debugging_session"}
This subcommand takes care of setting the PYTHONPATH to use the exploded
:file:`debug_dir/ansible/module_utils` directory and invoking the script using
the arguments in the :file:`args` file. You can continue to run it like this
until you understand the problem. Then you can copy it back into your real
module file and test that the real module works via :command:`ansible` or
:command:`ansible-playbook`.
.. note::
The wrapper provides one more subcommand, ``excommunicate``. This
subcommand is very similar to ``execute`` in that it invokes the exploded
module on the arguments in the :file:`args` file. The way it does this is
different, however. ``excommunicate`` imports the :func:`main`
function from the module and then calls that. This makes excommunicate
execute the module in the wrapper's process. This may be useful for
running the module under some graphical debuggers but it is very different
from the way the module is executed by Ansible itself. Some modules may
not work with ``excommunicate`` or may behave differently than when used
with Ansible normally. Those are not bugs in the module; they're
limitations of ``excommunicate``. Use at your own risk.
.. _module_paths:
Module Paths
````````````
If you are having trouble getting your module "found" by ansible, be
sure it is in the :envvar:`ANSIBLE_LIBRARY` environment variable.
If you have a fork of one of the ansible module projects, do something like this::
ANSIBLE_LIBRARY=~/ansible-modules-core:~/ansible-modules-extras
This will make the items in your fork load ahead of what ships with Ansible. Just be sure
you're not reporting bugs on versions from your fork!
To be safe, if you're working on a variant on something in Ansible's normal distribution, it's not
a bad idea to give it a new name while you are working on it, to be sure you know you're pulling
your version.
.. _module_contribution:
Getting Your Module Into Ansible
````````````````````````````````
High-quality modules with minimal dependencies
can be included in Ansible, but modules (just due to the programming
preferences of the developers) will need to be implemented in Python, use
the AnsibleModule common code, and generally use arguments consistent with the rest of
the program. Stop by the mailing list to inquire about requirements if you like, and submit
a GitHub pull request to the `extras <https://github.com/ansible/ansible-modules-extras>`_ project.
Included modules will ship with Ansible, and also have a chance to be promoted to 'core' status, which
gives them slightly higher development priority (though they'll work in exactly the same way).
Module checklist
````````````````
The following checklist items are important guidelines for people who want to contribute modules to Ansible on GitHub. Please read the guidelines before you submit your PR/proposal. A minimal module skeleton illustrating several of these points appears after the checklist.
* The shebang should always be ``#!/usr/bin/python``; this allows ``ansible_python_interpreter`` to work
* Modules must be written to support Python 2.4. If this is not possible, the required minimum Python version and the rationale should be explained in the requirements section in DOCUMENTATION.
* Modules must be written to use proper Python-3 syntax. At some point in the future we'll come up with rules for running on Python-3 but we're not there yet. See :doc:`developing_modules_python3` for help on how to do this.
* Documentation: Make sure it exists
* Module documentation should briefly and accurately define what each module and option does, and how it works with others in the underlying system. Documentation should be written for a broad audience--readable both by experts and non-experts. This documentation is not meant to teach a total novice, but it also should not be reserved for the Illuminati (hard balance).
* If an argument takes both C(True)/C(False) and C(Yes)/C(No), the documentation should use C(True) and C(False).
* Descriptions should always start with a capital letter and end with a full stop. Consistency always helps.
* The `required` setting is only required when true, otherwise it is assumed to be false.
* If `required` is false/missing, `default` may be specified (assumed 'null' if missing). Ensure that the default parameter in docs matches default parameter in code.
* Documenting `default` is not needed for `required: true`.
* Remove unnecessary doc like `aliases: []` or `choices: []`.
* Do not use Boolean values in a choice list. For example, in the list `choices: ['no', 'verify', 'always']`, 'no' will be interpreted as a Boolean value (you can check basic.py for BOOLEANS_* constants to see the full list of Boolean keywords). If your option actually is a boolean, just use `type=bool`; there is no need to populate 'choices'.
* For new modules or options in a module add version_added. The version should match the value of the current development version and is a string (not a float), so be sure to enclose it in quotes.
* Verify that arguments in doc and module spec dict are identical.
* For password / secret arguments no_log=True should be set.
* Requirements should be documented, using the `requirements=[]` field.
* Author should be set, with their name and their github id, at the least.
* Ensure that you make use of U() for urls, C() for files and options, I() for params, M() for modules.
* If an optional parameter is sometimes required, this needs to be reflected in the documentation, e.g. "Required when C(state=present)."
* Verify that a GPL 3 License header is included.
* Does the module use check_mode? Could it be modified to use it? Document it. Documentation is everyone's friend.
* Examples--include them whenever possible and make sure they are reproducible.
* Document the return structure of the module. Refer to :ref:`common_return_values` and :ref:`module_documenting` for additional information.
* Predictable user interface: This is a particularly important section as it is also an area where we need significant improvements.
* Name consistency across modules (we've gotten better at this, but we still have many deviations).
* Declarative operation (not CRUD)--this makes it easy for a user not to care what the existing state is, just about the final state. ``started/stopped``, ``present/absent``--don't overload options too much. It is preferable to add a new, simple option than to add choices/states that don't fit with existing ones.
* Keep options small; having them take large data structures might save us a few tasks, but it adds a complex requirement that we cannot easily validate before passing on to the module.
* Allow an "expert mode". This may sound like the absolute opposite of the previous one, but it is always best to let expert users deal with complex data. This requires different modules in some cases, so that you end up having one (1) expert module and several 'piecemeal' ones (ec2_vpc_net?). The reason for this is not, as many users express, because it allows a single task and keeps plays small (which just moves the data complexity into vars files, leaving you with a slightly different structure in another YAML file). It does, however, allow for a more 'atomic' operation against the underlying APIs and services.
* Informative responses: Please note that for Ansible >= 2.0, return data must be documented.
* Always return useful data, even when there is no change.
* Be consistent about returns (some modules are too random), unless it is detrimental to the state/action.
* Make returns reusable--most of the time you don't want to read it, but you do want to process it and re-purpose it.
* Return diff if in diff mode. This is not required for all modules, as it won't make sense for certain ones, but please attempt to include this when applicable.
* Code: This applies to all code in general, but often seems to be missing from modules, so please keep the following in mind as you work.
* Validate upfront--fail fast and return useful and clear error messages.
* Defensive programming--modules should be designed simply enough that this should be easy. Modules should always handle errors gracefully and avoid direct stacktraces. Ansible deals with this better in 2.0 and returns them in the results.
* Fail predictably--if we must fail, do it in a way that is the most expected. Either mimic the underlying tool or the general way the system works.
* Modules should not do the job of other modules; that is what roles are for. Less magic is more.
* Don't reinvent the wheel. Part of the problem is that code sharing is not that easy nor well documented; we also need to expand our base functions to provide common patterns (retry, throttling, etc.).
* Support check mode. This is not required for all modules, as it won't make sense for certain ones, but please attempt to include this when applicable. For more information, refer to :ref:`check_mode_drift` and :ref:`check_mode_dry`.
* Exceptions: The module must handle them. (exceptions are bugs)
* Give out useful messages about what you were doing, and add the exception message to that.
* Avoid catchall exceptions, they are not very useful unless the underlying API gives very good error messages pertaining to the attempted action.
* Module-dependent guidelines: Additional module guidelines may exist for certain families of modules.
* Be sure to check out the modules themselves for additional information.
* Amazon: https://github.com/ansible/ansible-modules-extras/blob/devel/cloud/amazon/GUIDELINES.md
* Modules should make use of the "extends_documentation_fragment" to ensure documentation is available. For example, the AWS module should include::
extends_documentation_fragment:
- aws
- ec2
* The module must not use sys.exit() --> use fail_json() from the module object.
* Import custom packages in a try/except block and handle missing libraries with fail_json() in main(), e.g.::
    try:
        import foo
        HAS_LIB = True
    except ImportError:
        HAS_LIB = False
* The return structure should be consistent, even if NA/None are used for keys normally returned under other options.
* Are module actions idempotent? If not, document it in the description or the notes.
* Import module snippets `from ansible.module_utils.basic import *` at the bottom; this conserves line numbers for debugging.
* The module must have a `main` function that wraps the normal execution.
* Call your :func:`main` from a conditional so that it is possible to
  import the module into unit tests in the future, for example::
if __name__ == '__main__':
main()
* Try to normalize parameters with other modules; you can have aliases for cases where the user is more familiar with the underlying API's name for the option
* Being pep8 compliant is nice, but not a requirement. Specifically, the 80 column limit now hinders readability more than it improves it
* Avoid `action`/`command` options; they are imperative and not declarative, and there are other ways to express the same thing
* Do not add `list` or `info` state options to an existing module - create a new `_facts` module.
* If you are asking 'how can I have a module execute other modules' ... you want to write a role
* Return values must be able to be serialized as JSON via the Python stdlib
  json library. Basic Python types (strings, ints, dicts, lists, etc.) are
  serializable. A common pitfall is to try returning an object via
  exit_json(). Instead, convert the fields you need from the object into the
  fields of a dictionary and return the dictionary.
* When fetching URLs, please use either fetch_url or open_url from ansible.module_utils.urls
rather than urllib2; urllib2 does not natively verify TLS certificates and so is insecure for https.
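
As a rough, hypothetical sketch of how several of these points fit together
(the module name, its options, and the ``foo`` library are all made up, and
the GPL header and ``DOCUMENTATION``/``EXAMPLES``/``RETURN`` strings are
omitted for brevity)::

    #!/usr/bin/python
    # (GPL v3 license header and DOCUMENTATION/EXAMPLES/RETURN strings go here)

    try:
        import foo  # hypothetical third-party dependency
        HAS_FOO = True
    except ImportError:
        HAS_FOO = False


    def main():
        module = AnsibleModule(
            argument_spec=dict(
                name=dict(required=True),
                state=dict(default='present', choices=['present', 'absent']),
                password=dict(no_log=True),
            ),
            supports_check_mode=True,
        )
        if not HAS_FOO:
            module.fail_json(msg='the foo python library is required for this module')

        # Do the real work here; when module.check_mode is True, only report
        # what would change without changing anything.
        module.exit_json(changed=False, name=module.params['name'])

    # import module snippets
    from ansible.module_utils.basic import *

    if __name__ == '__main__':
        main()
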
Windows modules checklist
`````````````````````````
* Favour native PowerShell and .NET ways of doing things over calls to COM libraries or calls to native executables, which may or may not be present in all versions of Windows
* Modules are written in PowerShell (.ps1 files) but the documentation resides in a Python file of the same name (.py)
* Look at ansible/lib/ansible/module_utils/powershell.ps1 for common code and avoid duplication
* Ansible uses strict mode version 2.0, so be sure to test with that enabled
* start with::
#!powershell
then::
<GPL header>
then::
# WANT_JSON
# POWERSHELL_COMMON
then, to parse all arguments into a variable, modules generally use::
$params = Parse-Args $args
* Arguments:
* Try to use state present and state absent like other modules
* You need to check that all your mandatory args are present. You can do this using the builtin Get-AnsibleParam function.
* Required arguments::
$package = Get-AnsibleParam -obj $params -name name -failifempty $true
* Required arguments with name validation::
$state = Get-AnsibleParam -obj $params -name "State" -ValidateSet "Present","Absent" -resultobj $resultobj -failifempty $true
* Optional arguments with name validation::
$state = Get-AnsibleParam -obj $params -name "State" -default "Present" -ValidateSet "Present","Absent"
* the If "FailIfEmpty" is true, the resultobj parameter is used to specify the object returned to fail-json. You can also override the default message
using $emptyattributefailmessage (for missing required attributes) and $ValidateSetErrorMessage (for attribute validation errors)
* Look at existing modules for more examples of argument checking.
* Results
* The result object should always contain an attribute called changed set to either $true or $false
* Create your result object like this::
$result = New-Object psobject @{
changed = $false
other_result_attribute = $some_value
};
  If all is well, exit with::

      Exit-Json $result

* Ensure anything you return, including errors, can be converted to JSON.
* Be aware that exception messages could contain almost anything.
* ConvertTo-Json will fail if it encounters a trailing \ in a string.
* If all is not well, use Fail-Json to exit.
* Have you tested for PowerShell 3.0 and 4.0 compliance?
Deprecating and making module aliases
``````````````````````````````````````
Starting in 1.8, you can deprecate modules by renaming them with a preceding _, for example, old_cloud.py to
_old_cloud.py. This keeps the module available, but hides it from the primary docs and listing.
You can also rename modules and keep an alias to the old name by using a symlink that starts with _.
This example allows the stat module to be called with fileinfo, making the following examples equivalent::
    ln -s stat.py _fileinfo.py

    ansible -m stat -a "path=/tmp" localhost
    ansible -m fileinfo -a "path=/tmp" localhost
.. seealso::
:doc:`modules`
Learn about available modules
:doc:`developing_plugins`
Learn about developing plugins
:doc:`developing_api`
Learn about the Python API for playbook and task execution
`GitHub Core modules directory <https://github.com/ansible/ansible-modules-core/tree/devel>`_
Browse source of core modules
`GitHub Extras modules directory <https://github.com/ansible/ansible-modules-extras/tree/devel>`_
Browse source of extras modules.
`Mailing List <http://groups.google.com/group/ansible-devel>`_
Development mailing list
`irc.freenode.net <http://irc.freenode.net>`_
#ansible IRC chat channel
.. include:: ./developing_module_utilities.rst

View file

@ -0,0 +1,159 @@
===========================
Porting Modules to Python 3
===========================
Ansible modules are not the usual Python-3 porting exercise. There are two
factors that make it harder to port them than most code:
1. Many modules need to run on Python-2.4 in addition to Python-3.
2. A lot of mocking has to go into unittesting a Python-3 module. So it's
harder to test that your porting has fixed everything or to make sure that
later commits haven't regressed.
Which version of Python-3.x and which version of Python-2.x are our minimums?
=============================================================================
The short answer is Python-3.4 and Python-2.4 but please read on for more
information.
For Python-3 we are currently using Python-3.4 as a minimum. However, no long
term supported Linux distributions currently ship with Python-3. When one
does, we will probably take its Python-3 version as our minimum rather than
Python-3.4. Thus far, newer versions of Python-3 have been adding small
changes that make it more compatible with Python-2 (for instance, Python-3.5
added the ability to use percent-formatting on byte strings), so it should be
more pleasant to use a newer version of Python-3 if it's available. At some
point this will change but we'll just have to cross that bridge when we get to it.
For Python-2 the default is for modules to run on Python-2.4. This allows
users with older distributions that are stuck on Python-2.4 to manage their
machines. Modules are allowed to drop support for Python-2.4 when one of
their dependent libraries require a higher version of python. This is not an
invitation to add unnecessary dependent libraries in order to force your
module to be usable only with a newer version of Python. Instead it is an
acknowledgment that some libraries (for instance, boto3 and docker-py) will
only function with newer Python.
.. note:: When will we drop support for Python-2.4?
The only long term supported distro that we know of with Python-2.4 is
RHEL5 (and its rebuilds like CentOS5) which is supported until April of
2017. We will likely end our support for Python-2.4 in modules in an
Ansible release around that time. We know of no long term supported
distributions with Python-2.5 so the new minimum Python-2 version will
likely be Python-2.6. This will let us take advantage of the
   forwards-compat features of Python-2.6 so porting and maintenance of
Python-2/Python-3 code will be easier after that.
Supporting only Python-2 or only Python-3
=========================================
Sometimes a module's dependent libraries only run on Python-2 or only run on
Python-3. We do not yet have a strategy for these modules but we'll need to
come up with one. I see three possibilities:
1. We treat these libraries like any other libraries that may not be installed
on the system. When we import them we check if the import was successful.
If so, then we continue. If not we return an error about the library being
missing. Users will have to find out that the library is unavailable on
their version of Python either by searching for the library on their own or
reading the requirements section in :command:`ansible-doc`.
2. The shebang line is the only metadata that Ansible extracts from a module
so we may end up using that to specify what we mean. Something like
``#!/usr/bin/python`` means the module will run on both Python-2 and
Python-3, ``#!/usr/bin/python2`` means the module will only run on
Python-2, and ``#!/usr/bin/python3`` means the module will only run on
Python-3. Ansible's code will need to be modified to accommodate this.
   For :command:`python2`, if ``ansible_python2_interpreter`` is not set, it
   will have to fall back to ``ansible_python_interpreter`` and, if that's not
   set, fall back to ``/usr/bin/python``. For :command:`python3`, Ansible
   will have to first try ``ansible_python3_interpreter`` and then fall back to
   ``/usr/bin/python3`` as normal.
3. We add a way for Ansible to retrieve metadata about modules. The metadata
will include the version of Python that is required.
Methods 2 and 3 will both require that we modify modules or otherwise add this
additional information somewhere. Method 2 needs only small code changes in
executor/module_common.py to parse. Method 3 will require a lot of work. This
is probably not worthwhile if this is the only change, but could be worthwhile
if there are other things as well. Method 1 requires that we port all modules
to work with Python-3 syntax, but only the code path up to the attempted
library import and the subsequent fail_json() call needs to actually work.
Tips, tricks, and idioms to adopt
=================================
Exceptions
----------
In code which already needs Python-2.6+ (for instance, because a library it
depends on only runs on Python >= 2.6) it is okay to port directly to the new
exception catching syntax::
    try:
        a = 2/0
    except ZeroDivisionError as e:
        module.fail_json(msg="Tried to divide by zero!")
For modules which also run on Python-2.4, we have to use an uglier
construction to make this work under both Python-2.4 and Python-3::
from ansible.module_utils.pycompat24 import get_exception
[...]
    try:
        a = 2/0
    except ZeroDivisionError:
        e = get_exception()
        module.fail_json(msg="Tried to divide by zero!")
Octal numbers
-------------
In Python-2.4, octal literals are specified as ``0755``. In Python-3, that is
invalid and octals must be specified as ``0o755``. To bridge this gap,
modules should create their octals like this::
# Can't use 0755 on Python-3 and can't use 0o755 on Python-2.4
EXECUTABLE_PERMS = int('0755', 8)
Bundled six
-----------
The third-party python-six library exists to help projects create code that
runs on both Python-2 and Python-3. Ansible includes version 1.4.1 in
module_utils so that other modules can use it without requiring that it is
installed on the remote system. To make use of it, import it like this::
from ansible.module_utils import six
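
For instance, the bundled six lets a module iterate over a dictionary's items
in a way that works on both Python-2 and Python-3; a small sketch::

    from ansible.module_utils import six

    def format_items(data):
        # six.iteritems behaves like dict.iteritems() on Python-2 and
        # like dict.items() on Python-3
        return ['%s=%s' % (k, v) for k, v in six.iteritems(data)]
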
.. note:: Why version 1.4.1?
six-1.4.1 is the last version of python-six to support Python-2.4. As
long as Ansible modules need to run on Python-2.4 we won't be able to
update the bundled copy of six.
Compile Test
------------
We have Travis compiling all modules with various versions of Python to check
that the modules conform to the syntax at those versions. When you've
ported a module so that its syntax works with Python-3, we need to modify
.travis.yml so that the module is included in the syntax check. Here's the
relevant section of .travis.yml::
env:
global:
- PY3_EXCLUDE_LIST="cloud/amazon/cloudformation.py
cloud/amazon/ec2_ami.py
[...]
utilities/logic/wait_for.py"
The :envvar:`PY3_EXCLUDE_LIST` environment variable is a blacklist of modules
which should not be tested (because we know that they are older modules which
have not yet been ported to pass the Python-3 syntax checks). To get another
old module to compile with Python-3, remove its entry from the list. The goal
is for this list to be empty.

View file

@ -0,0 +1,204 @@
Developing Plugins
==================
.. contents:: Topics
Plugins are pieces of code that augment Ansible's core functionality. Ansible ships with a number of handy plugins, and you can easily write your own.
The following types of plugins are available:
- *Callback* plugins enable you to hook into Ansible events for display or logging purposes.
- *Connection* plugins define how to communicate with inventory hosts.
- *Lookup* plugins are used to pull data from an external source.
- *Vars* plugins inject additional variable data into Ansible runs that did not come from an inventory, playbook, or the command line.
This section describes the various types of plugins and how to implement them.
.. _developing_callbacks:
Callback Plugins
----------------
Callback plugins enable adding new behaviors to Ansible when responding to events.
.. _callback_examples:
Example Callback Plugins
++++++++++++++++++++++++
Ansible comes with a number of callback plugins that you can look at for examples. These can be found in `lib/ansible/plugins/callback <https://github.com/ansible/ansible/tree/devel/lib/ansible/plugins/callback>`_.
The `log_plays
<https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/callback/log_plays.py>`_
callback is an example of how to record playbook events in a log
file, and the `mail
<https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/callback/mail.py>`_
callback sends email when playbooks complete.
The `osx_say
<https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/callback/osx_say.py>`_
callback provided is particularly entertaining -- it will respond with
computer synthesized speech on OS X in relation to playbook events,
and is guaranteed to entertain and/or annoy coworkers.
.. _configuring_callbacks:
Configuring Callback Plugins
++++++++++++++++++++++++++++
To activate a callback, drop it in a callback directory as configured in `ansible.cfg`.
Plugins are loaded in alphanumeric order; for example, a plugin implemented in a file named `1_first.py` would run before a plugin file named `2_second.py`.
Callbacks need to be whitelisted in your `ansible.cfg` file in order to function. For example::
#callback_whitelist = timer, mail, myplugin
.. _callback_development:
Developing Callback Plugins
+++++++++++++++++++++++++++
Callback plugins are created by creating a new class with ``CallbackBase`` as the parent::
from ansible.plugins.callback import CallbackBase
from ansible import constants as C
class CallbackModule(CallbackBase):
From there, override the specific methods from the CallbackBase that you want to provide a callback for. For plugins intended for use with Ansible version 2.0 and later, you should only override methods that start with `v2`. For a complete list of methods that you can override, please see ``__init__.py`` in the `lib/ansible/plugins/callback <https://github.com/ansible/ansible/tree/devel/lib/ansible/plugins/callback>`_ directory.
The following example shows how Ansible's timer plugin is implemented::
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from datetime import datetime
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
This callback module tells you how long your plays ran for.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate'
CALLBACK_NAME = 'timer'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self):
super(CallbackModule, self).__init__()
self.start_time = datetime.now()
def days_hours_minutes_seconds(self, runtime):
minutes = (runtime.seconds // 60) % 60
r_seconds = runtime.seconds - (minutes * 60)
return runtime.days, runtime.seconds // 3600, minutes, r_seconds
def playbook_on_stats(self, stats):
self.v2_playbook_on_stats(stats)
def v2_playbook_on_stats(self, stats):
end_time = datetime.now()
runtime = end_time - self.start_time
self._display.display("Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(runtime)))
Note that the CALLBACK_VERSION and CALLBACK_NAME definitions are required. If your callback plugin needs to write to stdout, you should define ``CALLBACK_TYPE = 'stdout'`` in the subclass, and then the stdout plugin needs to be configured in `ansible.cfg` to override the default. For example::
#stdout_callback = mycallbackplugin
.. _developing_connection_type_plugins:
Connection Type Plugins
-----------------------
By default, Ansible ships with a 'paramiko' SSH, native ssh (just called 'ssh'), and 'local' connection type, and there are also some minor players like 'chroot' and 'jail'. All of these can be used
in playbooks and with /usr/bin/ansible to decide how you want to talk to remote machines. The basics of these connection types
are covered in the :doc:`intro_getting_started` section. Should you want to extend Ansible to support other transports (SNMP? Message bus?
Carrier Pigeon?) it's as simple as copying the format of one of the existing plugins and dropping it into the connection plugins
directory. The value of 'smart' for a connection allows selection of paramiko or openssh based on system capabilities; in Ansible 1.2.1 and later it chooses
'ssh' if OpenSSH supports ControlPersist. Previous versions did not support 'smart'.
More documentation on writing connection plugins is pending, though you can jump into `lib/ansible/plugins/connection <https://github.com/ansible/ansible/tree/devel/lib/ansible/plugins/connection>`_ and figure things out pretty easily.
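
In the meantime, here is a rough sketch of the general shape of a connection
plugin under the 2.x plugin API; the transport name is made up and the method
bodies are stubs, so treat the existing plugins as the authoritative
reference::

    from ansible.plugins.connection import ConnectionBase

    class Connection(ConnectionBase):
        ''' A hypothetical carrier-pigeon transport (sketch only). '''

        transport = 'pigeon'

        def _connect(self):
            # Establish the connection to self._play_context.remote_addr here.
            self._connected = True
            return self

        def exec_command(self, cmd, in_data=None, sudoable=True):
            super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
            # Run cmd on the remote host; return (returncode, stdout, stderr).
            return (0, b'', b'')

        def put_file(self, in_path, out_path):
            pass  # transfer a local file to the remote host

        def fetch_file(self, in_path, out_path):
            pass  # transfer a remote file back to the controller

        def close(self):
            self._connected = False
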
.. _developing_lookup_plugins:
Lookup Plugins
--------------
Language constructs like "with_fileglob" and "with_items" are implemented via lookup plugins. Just like other plugin types, you can write your own.
More documentation on writing lookup plugins is pending, though you can jump into `lib/ansible/plugins/lookup <https://github.com/ansible/ansible/tree/devel/lib/ansible/plugins/lookup>`_ and figure
things out pretty easily.
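
As a starting point, a minimal lookup plugin is a class named ``LookupModule``
deriving from ``LookupBase`` whose ``run`` method returns a list; the behavior
below is a trivial placeholder::

    from ansible.plugins.lookup import LookupBase

    class LookupModule(LookupBase):

        def run(self, terms, variables=None, **kwargs):
            # Lookup plugins always return a list of values.
            return [term.upper() for term in terms]

Saved as :file:`upper.py` in a lookup plugin directory, this could be invoked
as ``lookup('upper', 'hello')`` in a template or play.
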
.. _developing_vars_plugins:
Vars Plugins
------------
Playbook constructs like 'host_vars' and 'group_vars' work via 'vars' plugins. They inject additional variable
data into ansible runs that did not come from an inventory, playbook, or command line. Note that variables
can also be returned from inventory, so in most cases, you won't need to write or understand vars_plugins.
More documentation on writing vars plugins is pending, though you can jump into `lib/ansible/inventory/vars_plugins <https://github.com/ansible/ansible/tree/devel/lib/ansible/inventory/vars_plugins>`_ and figure
things out pretty easily.
If you find yourself wanting to write a vars_plugin, it's more likely you should write an inventory script instead.
.. _developing_filter_plugins:
Filter Plugins
--------------
If you want more Jinja2 filters available in a Jinja2 template (filters like to_yaml and to_json are provided by default), you can add more by writing a filter plugin. Most of the time, when someone comes up with an idea for a new filter they would like to make available in a playbook, we'll just include them in 'core.py' instead.
Jump into `lib/ansible/plugins/filter <https://github.com/ansible/ansible/tree/devel/lib/ansible/plugins/filter>`_ for details.
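
A filter plugin is a class named ``FilterModule`` whose ``filters`` method
returns a mapping of filter names to callables. A trivial, hypothetical
example::

    def reverse_string(value):
        # Reverse the characters of a string.
        return value[::-1]

    class FilterModule(object):
        ''' Custom Jinja2 filters. '''

        def filters(self):
            return {
                'reverse_string': reverse_string,
            }

Dropped into a filter plugin directory, this makes
``{{ 'hello' | reverse_string }}`` render as ``olleh``.
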
.. _distributing_plugins:
Distributing Plugins
--------------------
Plugins are loaded from both Python's site_packages (those that ship with ansible) and a configured plugins directory, which defaults
to /usr/share/ansible/plugins, in a subfolder for each plugin type::
* action
* lookup
* callback
* connection
* filter
* strategy
* cache
* test
* shell
To change this path, edit the ansible configuration file.
In addition, plugins can be shipped in a subdirectory relative to a top-level playbook, in folders named the same as indicated above.
They can also be shipped as part of a role, in a subdirectory named as indicated above. The plugin will be available as soon as the role
is called.
.. seealso::
:doc:`modules`
List of built-in modules
:doc:`developing_api`
Learn about the Python API for task execution
:doc:`developing_inventory`
Learn about how to develop dynamic inventory sources
:doc:`developing_modules`
Learn about how to write Ansible modules
`Mailing List <http://groups.google.com/group/ansible-devel>`_
The development mailing list
`irc.freenode.net <http://irc.freenode.net>`_
#ansible IRC chat channel

View file

@ -0,0 +1,523 @@
.. _flow_modules:
=======
Modules
=======
This in-depth dive helps you understand Ansible's program flow to execute
modules. It is written for people working on the portions of the Core Ansible
Engine that execute a module. Those writing Ansible Modules may also find this
in-depth dive to be of interest, but individuals simply using Ansible Modules
will not likely find this to be helpful.
.. _flow_types_of_modules:
Types of Modules
================
Ansible supports several different types of modules in its code base. Some of
these are for backwards compatibility and others are to enable flexibility.
.. _flow_action_plugins:
Action Plugins
--------------
Action Plugins look like modules to end users who are writing :term:`playbooks` but
they're distinct entities for the purposes of this document. Action Plugins
always execute on the controller and are sometimes able to do all work there
(for instance, the ``debug`` Action Plugin which prints some text for the user to
see or the ``assert`` Action Plugin which can test whether several values in
a playbook satisfy certain criteria.)
More often, Action Plugins set up some values on the controller, then invoke an
actual module on the managed node that does something with these values. An
easy to understand version of this is the :ref:`template Action Plugin
<template>`. The :ref:`template Action Plugin <template>` takes values from
the user to construct a file in a temporary location on the controller using
variables from the playbook environment. It then transfers the temporary file
to a temporary file on the remote system. After that, it invokes the
:ref:`copy module <copy>` which operates on the remote system to move the file
into its final location, sets file permissions, and so on.
.. _flow_new_style_modules:
New-style Modules
-----------------
All of the modules that ship with Ansible fall into this category.
New-style modules have the arguments to the module embedded inside of them in
some manner. Non-new-style modules must copy a separate file over to the
managed node, which is less efficient as it requires two over-the-wire
connections instead of only one.
.. _flow_python_modules:
Python
^^^^^^
New-style Python modules use the :ref:`Ansiballz` framework for constructing
modules. All official modules (shipped with Ansible) use either this or the
:ref:`powershell module framework <flow_powershell_modules>`.
These modules use imports from :code:`ansible.module_utils` in order to pull in
boilerplate module code, such as argument parsing, formatting of return
values as :term:`JSON`, and various file operations.
.. note:: In Ansible, up to version 2.0.x, the official Python modules used the
:ref:`module_replacer` framework. For module authors, :ref:`Ansiballz` is
largely a superset of :ref:`module_replacer` functionality, so you usually
do not need to know about one versus the other.
.. _flow_powershell_modules:
Powershell
^^^^^^^^^^
New-style powershell modules use the :ref:`module_replacer` framework for
constructing modules. These modules get a library of powershell code embedded
in them before being sent to the managed node.
.. _flow_jsonargs_modules:
JSONARGS
^^^^^^^^
Scripts can arrange for an argument string to be placed within them by placing
the string ``<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>`` somewhere inside of the
file. The module typically sets a variable to that value like this::
json_arguments = """<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"""
Which is expanded as::
json_arguments = """{"param1": "test's quotes", "param2": "\"To be or not to be\" - Hamlet"}"""
.. note:: Ansible outputs a :term:`JSON` string with bare quotes. Double quotes are
used to quote string values, double quotes inside of string values are
backslash escaped, and single quotes may appear unescaped inside of
a string value. To use JSONARGS, your scripting language must have a way
to handle this type of string. The example uses Python's triple quoted
strings to do this. Other scripting languages may have a similar quote
character that won't be confused by any quotes in the JSON or it may
allow you to define your own start-of-quote and end-of-quote characters.
If the language doesn't give you any of these then you'll need to write
a :ref:`non-native JSON module <flow_want_json_modules>` or
:ref:`Old-style module <flow_old_style_modules>` instead.
The module typically parses the contents of ``json_arguments`` using a JSON
library and then uses them as native variables throughout the rest of its code.
.. _flow_want_json_modules:
Non-native want JSON modules
----------------------------
If a module has the string ``WANT_JSON`` in it anywhere, Ansible treats
it as a non-native module that accepts a filename as its only command line
parameter. The filename is for a temporary file containing a :term:`JSON`
string containing the module's parameters. The module needs to open the file,
read and parse the parameters, operate on the data, and print its return data
as a JSON encoded dictionary to stdout before exiting.
These types of modules are self-contained entities. As of Ansible 2.1, Ansible
only modifies them to change a shebang line if present.
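
To make the contract concrete, here is a minimal sketch of a want-JSON module
written in Python (purely illustrative; a real Python module would normally
use :ref:`Ansiballz` instead)::

    #!/usr/bin/python
    # WANT_JSON
    import json
    import sys

    # The only command line parameter is a file containing the JSON parameters.
    with open(sys.argv[1]) as f:
        params = json.load(f)

    # Print the return data as a JSON encoded dictionary to stdout, then exit.
    print(json.dumps({'changed': False, 'msg': 'got %s' % params.get('data')}))
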
.. seealso:: Examples of Non-native modules written in ruby are in the `Ansible
for Rubyists <https://github.com/ansible/ansible-for-rubyists>`_ repository.
.. _flow_binary_modules:
Binary Modules
--------------
From Ansible 2.2 onwards, modules may also be small binary programs. Ansible
doesn't perform any magic to make these portable to different systems so they
may be specific to the system on which they were compiled or require other
binary runtime dependencies. Despite these drawbacks, a site may sometimes
have no choice but to compile a custom module against a specific binary
library if that's the only way they have to get access to certain resources.
Binary modules take their arguments and will return data to Ansible in the same
way as :ref:`want JSON modules <flow_want_json_modules>`.
.. seealso:: One example of a `binary module
<https://github.com/ansible/ansible/blob/devel/test/integration/library/helloworld.go>`_
written in go.
.. _flow_old_style_modules:
Old-style Modules
-----------------
Old-style modules are similar to
:ref:`want JSON modules <flow_want_json_modules>`, except that the file that
they take contains ``key=value`` pairs for their parameters instead of
:term:`JSON`.
Ansible decides that a module is old-style when it doesn't have any of the
markers that would show that it is one of the other types.
.. _flow_how_modules_are_executed:
How modules are executed
========================
When a user uses :program:`ansible` or :program:`ansible-playbook`, they
specify a task to execute. The task is usually the name of a module along
with several parameters to be passed to the module. Ansible takes these
values and processes them in various ways before they are finally executed on
the remote machine.
.. _flow_executor_task_executor:
executor/task_executor
----------------------
The TaskExecutor receives the module name and parameters that were parsed from
the :term:`playbook <playbooks>` (or from the command line in the case of
:command:`/usr/bin/ansible`). It uses the name to decide whether it's looking
at a module or an :ref:`Action Plugin <flow_action_plugins>`. If it's
a module, it loads the :ref:`Normal Action Plugin <flow_normal_action_plugin>`
and passes the name, variables, and other information about the task and play
to that Action Plugin for further processing.
.. _flow_normal_action_plugin:
Normal Action Plugin
--------------------
The ``normal`` Action Plugin executes the module on the remote host. It is
the primary coordinator of much of the work to actually execute the module on
the managed machine.
* It takes care of creating a connection to the managed machine by
instantiating a ``Connection`` class according to the inventory
configuration for that host.
* It adds any internal Ansible variables to the module's parameters (for
instance, the ones that pass along ``no_log`` to the module).
* It takes care of creating any temporary files on the remote machine and
cleans up afterwards.
* It does the actual work of pushing the module and module parameters to the
remote host, although the :ref:`module_common <flow_executor_module_common>`
code described in the next section does the work of deciding which format
those will take.
* It handles any special cases regarding modules (for instance, various
complications around Windows modules that must have the same names as Python
modules, so that internal calling of modules from other Action Plugins work.)
Much of this functionality comes from the :class:`ActionBase` class,
which lives in :file:`plugins/action/__init__.py`. It makes use of
``Connection`` and ``Shell`` objects to do its work.
.. note::
When :term:`tasks <tasks>` are run with the ``async:`` parameter, Ansible
uses the ``async`` Action Plugin instead of the ``normal`` Action Plugin
to invoke it. That program flow is currently not documented. Read the
source for information on how that works.
.. _flow_executor_module_common:
executor/module_common.py
-------------------------
Code in :file:`executor/module_common.py` takes care of assembling the module
to be shipped to the managed node. The module is first read in, then examined
to determine its type. :ref:`PowerShell <flow_powershell_modules>` and
:ref:`JSON-args modules <flow_jsonargs_modules>` are passed through
:ref:`Module Replacer <module_replacer>`. New-style
:ref:`Python modules <flow_python_modules>` are assembled by :ref:`Ansiballz`.
:ref:`Non-native-want-JSON <flow_want_json_modules>`,
:ref:`Binary modules <flow_binary_modules>`, and
:ref:`Old-Style modules <flow_old_style_modules>` aren't touched by either of
these and pass through unchanged. After the assembling step, one final
modification is made to all modules that have a shebang line. Ansible checks
whether the interpreter in the shebang line has a specific path configured via
an ``ansible_$X_interpreter`` inventory variable. If it does, Ansible
substitutes that path for the interpreter path given in the module. After
this, Ansible returns the complete module data and the module type to the
:ref:`Normal Action <flow_normal_action_plugin>` which continues execution of
the module.
Next we'll go into some details of the two assembler frameworks.
.. _module_replacer:
Module Replacer
^^^^^^^^^^^^^^^
The Module Replacer framework is the original framework implementing new-style
modules. It is essentially a preprocessor (like the C Preprocessor for those
familiar with that programming language). It does straight substitutions of
specific substring patterns in the module file. There are two types of
substitutions:
* Replacements that only happen in the module file. These are public
replacement strings that modules can utilize to get helpful boilerplate or
access to arguments.
- :code:`from ansible.module_utils.MOD_LIB_NAME import *` is replaced with the
  contents of the :file:`ansible/module_utils/MOD_LIB_NAME.py`. These should
only be used with :ref:`new-style Python modules <flow_python_modules>`.
- :code:`#<<INCLUDE_ANSIBLE_MODULE_COMMON>>` is equivalent to
:code:`from ansible.module_utils.basic import *` and should also only apply
to new-style Python modules.
- :code:`# POWERSHELL_COMMON` substitutes the contents of
:file:`ansible/module_utils/powershell.ps1`. It should only be used with
:ref:`new-style Powershell modules <flow_powershell_modules>`.
* Replacements that are used by ``ansible.module_utils`` code. These are internal
replacement patterns. They may be used internally, in the above public
replacements, but shouldn't be used directly by modules.
- :code:`"<<ANSIBLE_VERSION>>"` is substituted with the Ansible version. In
:ref:`new-style Python modules <flow_python_modules>` under the
  :ref:`Ansiballz` framework, the proper way is to instead instantiate an
  :class:`AnsibleModule` and then access the version from
  :attr:`AnsibleModule.ansible_version`.
- :code:`"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"` is substituted with
a string which is the Python ``repr`` of the :term:`JSON` encoded module
parameters. Using ``repr`` on the JSON string makes it safe to embed in
a Python file. In new-style Python modules under the Ansiballz framework
this is better accessed by instantiating an :class:`AnsibleModule` and
then using :attr:`AnsibleModule.params`.
- :code:`<<SELINUX_SPECIAL_FILESYSTEMS>>` substitutes a string which is
a comma separated list of file systems which have a file system dependent
security context in SELinux. In new-style Python modules, if you really
need this you should instantiate an :class:`AnsibleModule` and then use
:attr:`AnsibleModule._selinux_special_fs`. The variable has also changed
from a comma separated string of file system names to an actual python
list of filesystem names.
- :code:`<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>` substitutes the module
parameters as a JSON string. Care must be taken to properly quote the
string as JSON data may contain quotes. This pattern is not substituted
in new-style Python modules as they can get the module parameters another
way.
- The string :code:`syslog.LOG_USER` is replaced wherever it occurs with the
``syslog_facility`` which was named in :file:`ansible.cfg` or any
``ansible_syslog_facility`` inventory variable that applies to this host. In
new-style Python modules this has changed slightly. If you really need to
access it, you should instantiate an :class:`AnsibleModule` and then use
:attr:`AnsibleModule._syslog_facility` to access it. It is no longer the
actual syslog facility and is now the name of the syslog facility. See
the :ref:`documentation on internal arguments <flow_internal_arguments>`
for details.
.. _Ansiballz:
Ansiballz
^^^^^^^^^
Ansible 2.1 switched from the :ref:`module_replacer` framework to the
Ansiballz framework for assembling modules. The Ansiballz framework differs
from module replacer in that it uses real Python imports of things in
:file:`ansible/module_utils` instead of merely preprocessing the module. It
does this by constructing a zipfile -- which includes the module file, files
in :file:`ansible/module_utils` that are imported by the module, and some
boilerplate to pass in the module's parameters. The zipfile is then Base64
encoded and wrapped in a small Python script which decodes the Base64 encoding
and places the zipfile into a temp directory on the managed node. It then
extracts just the ansible module script from the zip file and places that in
the temporary directory as well. Then it sets the PYTHONPATH to find python
modules inside of the zip file and invokes :command:`python` on the extracted
ansible module.
.. note::
Ansible wraps the zipfile in the Python script for two reasons:
* for compatibility with Python-2.4 and Python-2.6 which have less
featureful versions of Python's ``-m`` command line switch.
* so that pipelining will function properly. Pipelining needs to pipe the
Python module into the Python interpreter on the remote node. Python
understands scripts on stdin but does not understand zip files.
In Ansiballz, any imports of Python modules from the
:py:mod:`ansible.module_utils` package trigger inclusion of that Python file
into the zipfile. Instances of :code:`#<<INCLUDE_ANSIBLE_MODULE_COMMON>>` in
the module are turned into :code:`from ansible.module_utils.basic import *`
and :file:`ansible/module_utils/basic.py` is then included in the zipfile.
Files that are included from :file:`module_utils` are themselves scanned for
imports of other Python modules from :file:`module_utils` to be included in
the zipfile as well.
.. warning::
At present, the Ansiballz Framework cannot determine whether an import
should be included if it is a relative import. Always use an absolute
import that has :py:mod:`ansible.module_utils` in it to allow Ansiballz to
determine that the file should be included.
.. _flow_passing_module_args:
Passing args
~~~~~~~~~~~~
In :ref:`module_replacer`, module arguments are turned into a JSON-ified
string and substituted into the combined module file. In :ref:`Ansiballz`,
the JSON-ified string is passed into the module via stdin. When
a :class:`ansible.module_utils.basic.AnsibleModule` is instantiated,
it parses this string and places the args into
:attr:`AnsibleModule.params` where it can be accessed by the module's
other code.
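
A module itself never needs to deal with this mechanism directly;
instantiating an :class:`AnsibleModule` is enough. A minimal sketch::

    from ansible.module_utils.basic import AnsibleModule

    module = AnsibleModule(argument_spec=dict(
        data=dict(required=False, default='pong'),
    ))
    # The JSON string passed on stdin has already been parsed into params.
    module.exit_json(changed=False, ping=module.params['data'])
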
.. note::
Internally, the :class:`AnsibleModule` uses the helper function,
:py:func:`ansible.module_utils.basic._load_params`, to load the parameters
from stdin and save them into an internal global variable. Very dynamic
custom modules which need to parse the parameters prior to instantiating
an ``AnsibleModule`` may use ``_load_params`` to retrieve the
parameters. Be aware that ``_load_params`` is an internal function and
may change in breaking ways if necessary to support changes in the code.
However, we'll do our best not to break it gratuitously, which is not
something that can be said for either the way parameters are passed or
the internal global variable.
.. _flow_internal_arguments:
Internal arguments
^^^^^^^^^^^^^^^^^^
Both :ref:`module_replacer` and :ref:`Ansiballz` send additional arguments to
the module beyond those which the user specified in the playbook. These
additional arguments are internal parameters that help implement global
Ansible features. Modules often do not need to know about these explicitly as
the features are implemented in :py:mod:`ansible.module_utils.basic` but certain
features need support from the module so it's good to know about them.
_ansible_no_log
~~~~~~~~~~~~~~~
This is a boolean. If it's True then the playbook specified ``no_log`` (in
a task's parameters or as a play parameter). This automatically affects calls
to :py:meth:`AnsibleModule.log`. If a module implements its own logging then
it needs to check this value. The best way to look at this is for the module
to instantiate an :class:`AnsibleModule` and then check the value of
:attr:`AnsibleModule.no_log`.
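
For example, a module that performs its own syslog logging might honor the
flag like this (a sketch)::

    import syslog

    from ansible.module_utils.basic import AnsibleModule

    module = AnsibleModule(argument_spec=dict())
    if not module.no_log:
        # Skip custom logging entirely when the task specified no_log.
        syslog.syslog(syslog.LOG_INFO, 'performing custom action')
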
.. note::
   ``no_log`` settings specified in a module's argument_spec are handled by a different mechanism.
_ansible_debug
~~~~~~~~~~~~~~
This is a boolean that turns on more verbose logging. If a module uses
:py:meth:`AnsibleModule.debug` rather than :py:meth:`AnsibleModule.log` then
the messages are only logged if this is True. This also turns on logging of
external commands that the module executes. This can be changed via
the ``debug`` setting in :file:`ansible.cfg` or the environment variable
:envvar:`ANSIBLE_DEBUG`. If, for some reason, a module must access this, it
should do so by instantiating an :class:`AnsibleModule` and accessing
:attr:`AnsibleModule._debug`.
_ansible_diff
~~~~~~~~~~~~~
This boolean is turned on via the ``--diff`` command line option. If a module
supports it, this tells the module to show a unified diff of changes to be
made to templated files. The proper way for a module to access this is by
instantiating an :class:`AnsibleModule` and accessing
:attr:`AnsibleModule._diff`.
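
A module that supports diff mode typically attaches a ``diff`` dictionary with
``before`` and ``after`` keys to its result; a minimal sketch, where the
contents are stand-ins for real data::

    from ansible.module_utils.basic import AnsibleModule

    module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
    old_text, new_text = 'a=1\n', 'a=2\n'  # stand-ins for real file contents
    result = dict(changed=True)
    if module._diff:
        result['diff'] = dict(before=old_text, after=new_text)
    module.exit_json(**result)
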
_ansible_verbosity
~~~~~~~~~~~~~~~~~~
This value could be used for finer grained control over logging. However, it
is currently unused.
_ansible_selinux_special_fs
~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a list of names of filesystems which should have a special selinux
context. They are used by the :class:`AnsibleModule` methods which operate on
files (changing attributes, moving, and copying). The list of names is set
via a comma separated string of filesystem names from :file:`ansible.cfg`::
# ansible.cfg
[selinux]
special_context_filesystems=nfs,vboxsf,fuse,ramfs
If a module cannot use the builtin ``AnsibleModule`` methods to manipulate
files and needs to know about these special context filesystems, it should
instantiate an ``AnsibleModule`` and then examine the list in
:attr:`AnsibleModule._selinux_special_fs`.
This replaces :attr:`ansible.module_utils.basic.SELINUX_SPECIAL_FS` from
:ref:`module_replacer`. In module replacer it was a comma separated string of
filesystem names. Under Ansiballz it's an actual list.
.. versionadded:: 2.1
_ansible_syslog_facility
~~~~~~~~~~~~~~~~~~~~~~~~
This parameter controls which syslog facility the Ansible module logs to. It may
be set by changing the ``syslog_facility`` value in :file:`ansible.cfg`. Most
modules should just use :meth:`AnsibleModule.log` which will then make use of
this. If a module has to use this on its own, it should instantiate an
:class:`AnsibleModule` and then retrieve the name of the syslog facility from
:attr:`AnsibleModule._syslog_facility`. The code will look slightly different
than it did under :ref:`module_replacer` due to how hacky the old way was::
# Old way
import syslog
syslog.openlog(NAME, 0, syslog.LOG_USER)
# New way
import syslog
facility_name = module._syslog_facility
facility = getattr(syslog, facility_name, syslog.LOG_USER)
syslog.openlog(NAME, 0, facility)
.. versionadded:: 2.1
_ansible_version
~~~~~~~~~~~~~~~~
This parameter passes the version of ansible that runs the module. To access
it, a module should instantiate an :class:`AnsibleModule` and then retrieve it
from :attr:`AnsibleModule.ansible_version`. This replaces
:attr:`ansible.module_utils.basic.ANSIBLE_VERSION` from
:ref:`module_replacer`.
.. versionadded:: 2.1
.. _flow_special_considerations:
Special Considerations
----------------------
.. _flow_pipelining:
Pipelining
^^^^^^^^^^
Ansible can transfer a module to a remote machine in one of two ways:
* it can write out the module to a temporary file on the remote host and then
use a second connection to the remote host to execute it with the
interpreter that the module needs
* or it can use what's known as pipelining to execute the module by piping it
into the remote interpreter's stdin.
Pipelining only works with modules written in Python at this time because
Ansible only knows that Python supports this mode of operation. Supporting
pipelining means that whatever format the module payload takes before being
sent over the wire must be executable by Python via stdin.
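
Pipelining is configured on the controller; for the ssh connection plugin it
can be enabled in :file:`ansible.cfg` (note that pipelining also requires
``requiretty`` to be disabled in sudoers on the managed nodes)::

    # ansible.cfg
    [ssh_connection]
    pipelining = True
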
.. _flow_args_over_stdin:
Why pass args over stdin?
^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments via stdin was chosen for the following reasons:
* When combined with :ref:`pipelining`, this keeps the module's arguments from
temporarily being saved onto disk on the remote machine. This makes it
harder (but not impossible) for a malicious user on the remote machine to
steal any sensitive information that may be present in the arguments.
* Command line arguments would be insecure as most systems allow unprivileged
users to read the full commandline of a process.
* Environment variables are usually more secure than the commandline but some
systems limit the total size of the environment. This could lead to
truncation of the parameters if we hit that limit.

View file

@ -0,0 +1,48 @@
Releases
========
.. contents:: Topics
:local:
.. _schedule:
Release Schedule
````````````````
Ansible is on a 'flexible' 4 month release schedule; sometimes this can be extended if there is a major change that requires a longer cycle (for example, the 2.0 core rewrite).
Currently modules get released at the same time as the main Ansible repo, even though they are separated into ansible-modules-core and ansible-modules-extras.
The major features and bugs fixed in a release should be reflected in the CHANGELOG.md; minor ones will be in the commit history (FIXME: add git example to list).
When a fix/feature gets added to the `devel` branch it will be part of the next release; some bugfixes can be backported to previous releases and might be part of a minor point release if it is deemed necessary.
Sometimes an RC can be extended by a few days if a bugfix makes a change that can have far reaching consequences, so users have enough time to find any new issues that may stem from this.
.. _methods:
Release methods
````````````````
Ansible normally goes through a 'release candidate' process, issuing an RC1 for a release; if no major bugs are discovered in it after 5 business days, we'll get a final release.
Otherwise fixes will be applied and an RC2 will be provided for testing; if no bugs appear after 2 days, the final release will be made, iterating this last step and incrementing the candidate number as we find major bugs.
.. _freezing:
Release feature freeze
``````````````````````
During the release candidate process, the focus will be on bugfixes that affect the RC; new features will be delayed while we try to produce a final version. Some bugfixes that are minor or don't affect the RC will also be postponed until after the release is finalized.
.. seealso::
:doc:`developing_api`
Python API to Playbooks and Ad Hoc Task Execution
:doc:`developing_modules`
How to develop modules
:doc:`developing_plugins`
How to develop plugins
`Ansible Tower <http://ansible.com/ansible-tower>`_
REST API endpoint and GUI for Ansible, syncs with dynamic inventory
`Development Mailing List <http://groups.google.com/group/ansible-devel>`_
Mailing list for development topics
`irc.freenode.net <http://irc.freenode.net>`_
#ansible IRC chat channel

View file

@ -0,0 +1,193 @@
Helping Testing PRs
```````````````````
If you're a developer, one of the most valuable things you can do is look at the github issues list and help fix bugs. We almost always prioritize bug fixing over
feature development, so clearing bugs out of the way is one of the best things you can do.
Even if you're not a developer, helping test pull requests for bug fixes and features is still immensely valuable.
This goes for testing new features as well as testing bugfixes.
In many cases, code should add tests that prove it works, but that's not ALWAYS possible and tests are not always comprehensive, especially when a user doesn't have access
to a wide variety of platforms, or when an API or web service is involved.
In these cases, live testing against real equipment can be more valuable than automation that runs against simulated interfaces.
In any case, things should always be tested manually the first time too.
Thankfully, helping test Ansible is pretty straightforward, assuming you are already used to how Ansible works.
Get Started with A Source Checkout
++++++++++++++++++++++++++++++++++
You can do this by checking out ansible, making a test branch off the main one, merging a GitHub pull request, testing,
and then commenting on that particular issue on GitHub. Here's how:
.. note::
Testing source code from GitHub pull requests sent to us does have some inherent risk, as the source code
sent may have mistakes or malicious code that could have a negative impact on your system. We recommend
doing all testing on a virtual machine, whether a cloud instance, or locally. Some users like Vagrant
or Docker for this, but they are optional. It is also useful to have virtual machines of different Linux or
other flavors, since some features (apt vs. yum, for example) are specific to those OS versions.
First, you will need to configure your testing environment with the necessary tools required to run our test
suites. You will need at least::
git
python-nosetests (sometimes named python-nose)
python-passlib
python-mock
If you want to run the full integration test suite you'll also need the following packages installed::
svn
hg
python-pip
gem
Second, if you haven't already, clone the Ansible source code from GitHub::
git clone https://github.com/ansible/ansible.git --recursive
cd ansible/
.. note::
If you have previously forked the repository on GitHub, you could also clone it from there.
.. note::
If updating your repo for testing something module related, use "git rebase origin/devel" and then "git submodule update" to fetch
the latest development versions of modules. Skipping the "git submodule update" step will result in versions that will be stale.
Activating The Source Checkout
++++++++++++++++++++++++++++++
The Ansible source includes a script that allows you to use Ansible directly from source without requiring a
full installation; it is frequently used by developers on Ansible.
Simply source it (to use the Linux/Unix terminology) to begin using it immediately::
source ./hacking/env-setup
This script modifies the PYTHONPATH environment variable (along with a few other things), which will be temporarily
set as long as your shell session is open.
If you'd like your testing environment to always use the latest source, you could call the command from startup scripts (for example,
`.bash_profile`).
Finding A Pull Request and Checking It Out On A Branch
++++++++++++++++++++++++++++++++++++++++++++++++++++++
Next, find the pull request you'd like to test and make note of the line at the top which describes the source
and destination repositories. It will look something like this::
Someuser wants to merge 1 commit into ansible:devel from someuser:feature_branch_name
.. note::
It is important that the pull request target be ansible:devel, as we do not accept pull requests into any other branch. Dot releases are cherry-picked manually by Ansible staff.
The username and branch at the end are the important parts, which will be turned into git commands as follows::
git checkout -b testing_PRXXXX devel
git pull https://github.com/someuser/ansible.git feature_branch_name
The first command creates and switches to a new branch named testing_PRXXXX, where the XXXX is the actual issue number associated with the pull request (for example, 1234). This branch is based on the devel branch. The second command pulls the new code from the user's feature branch into the newly created branch.
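For example, for a hypothetical pull request numbered 1234 from someuser's feature_branch_name, the commands would be::

    git checkout -b testing_PR1234 devel
    git pull https://github.com/someuser/ansible.git feature_branch_name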
.. note::
If the GitHub user interface shows that the pull request will not merge cleanly, we do not recommend proceeding if you are not somewhat familiar with git and coding, as you will have to resolve a merge conflict. This is the responsibility of the original pull request contributor.
.. note::
Some users do not create feature branches, which can cause problems when they have multiple, unrelated commits in their version of `devel`. If the source looks like `someuser:devel`, make sure there is only one commit listed on the pull request.
Finding a Pull Request for Ansible Modules
++++++++++++++++++++++++++++++++++++++++++
Ansible modules are in separate repositories, which are managed as Git submodules. Here's a step-by-step process for checking out a PR for an Ansible extras module, for instance:
1. git clone https://github.com/ansible/ansible.git
2. cd ansible
3. git submodule init
4. git submodule update --recursive [ fetches the submodules ]
5. cd lib/ansible/modules/extras
6. git fetch origin pull/1234/head:pr/1234 [ fetches the specific PR ]
7. git checkout pr/1234 [ do your testing here ]
8. cd /path/to/ansible/clone
9. git submodule update --recursive
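When you are finished testing in the submodule, a sketch of the cleanup (reusing the hypothetical PR number 1234 from above) might be::

    cd lib/ansible/modules/extras
    git checkout devel
    git branch -D pr/1234
    cd /path/to/ansible/clone
    git submodule update --recursive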
For Those About To Test, We Salute You
++++++++++++++++++++++++++++++++++++++
At this point, you should be ready to begin testing!
If the PR is a bug-fix pull request, the first thing to do is run the suite of unit and integration tests to ensure
the pull request does not break current functionality::
# Unit Tests
make tests
# Integration Tests
cd test/integration
make
.. note::
Ansible does provide integration tests for cloud-based modules as well; however, we do not recommend them for most users
due to the associated costs from the cloud providers. As such, it's typically better to run specific parts of the integration battery
and skip those tests.
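As a sketch, the integration Makefile exposes narrower targets you can run individually (target names may vary by checkout; `non_destructive` is an assumption based on this era's Makefile)::

    cd test/integration
    make non_destructive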
Integration tests aren't the end-all, be-all - in many cases what is fixed might not *HAVE* a test, so determining if it works means
checking the functionality of the system and making sure it does what it said it would do.
Pull requests for bug-fixes should reference the bug issue number they are fixing.
We encourage users to provide playbook examples for bugs that show how to reproduce the error, and if such a playbook is available, it should be used to verify that the bugfix
resolves the issue. You may wish to do your own review as well, to poke at the corners of the change.
Since some reproducers can be quite involved, you might wish to create a testing directory with the issue number as a subdirectory
to keep things organized::
mkdir -p testing/XXXX # where XXXX is again the issue # for the original issue or PR
cd testing/XXXX
<create files or git clone example playbook repo>
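A reproducer playbook is often no more than a host pattern and a handful of tasks. As a sketch, a hypothetical minimal reproducer (the file name and task are invented for illustration) might look like::

    # testing/1234/reproduce.yml
    - hosts: localhost
      connection: local
      tasks:
        - name: exercise the behavior reported in the issue
          ping: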
While it should go without saying, be sure to read any playbooks before you run them. VMs greatly help with running untrusted content,
though a playbook could still do something to your computing resources that you'd rather avoid.
Once the files are in place, you can run the provided playbook (if there is one) to test the functionality::
ansible-playbook -vvv playbook_name.yml
If there's no playbook, you may have to copy and paste playbook snippets or run an ad-hoc command that was pasted in.
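For example, a hypothetical ad-hoc check pulled from an issue might look like this (the module and host pattern here are made up for illustration)::

    ansible localhost -m ping -vvv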
Our issue template also includes sections for "Expected Output" and "Actual Output", which should be used to gauge the output
from the provided examples.
If the pull request resolves the issue, please leave a comment on the pull request, showing the following information:
* "Works for me!"
* The output from `ansible --version`.
In some cases, you may wish to share playbook output from the test run as well.
Example!::
Works for me! Tested on `Ansible 1.7.1`. I verified this on CentOS 6.5 and also Ubuntu 14.04.
If the PR does not resolve the issue, or if you see any failures from the unit/integration tests, just include that output instead::
This doesn't work for me.
When I ran this my toaster started making loud noises!
Output from the toaster looked like this:
```
BLARG
StrackTrace
RRRARRGGG
```
When you are done testing a feature branch, you can remove it with the following command::
git branch -D someuser-feature_branch_name
We understand some users may be inexperienced with git or other aspects of the above procedure, so feel free to stop by the ansible-devel
mailing list with questions, and we'd be happy to help answer them.
@ -0,0 +1,29 @@
Developer Information
=====================
Ansible Developer Guide
```````````````````````
Welcome to the Ansible Developer Guide!
The purpose of this guide is to document all of the paths available to you for interacting and shaping Ansible with code, ranging from developing modules and plugins to helping to develop the Ansible Core Engine via pull requests.
To get started, select one of the following topics.
.. toctree::
:maxdepth: 1
overview_architecture
overview_components
overview_contributing
developing_modules
developing_plugins
developing_inventory
developing_api
developing_core
developing_test_pr
developing_releases
@ -0,0 +1,70 @@
Ansible Architecture
====================
Ansible is a radically simple IT automation engine that automates cloud provisioning, configuration management, application deployment, intra-service orchestration, and many other IT needs.
Being designed for multi-tier deployments since day one, Ansible models your IT infrastructure by describing how all of your systems inter-relate, rather than just managing one system at a time.
It uses no agents and no additional custom security infrastructure, so it's easy to deploy - and most importantly, it uses a very simple language (YAML, in the form of Ansible Playbooks) that allows you to describe your automation jobs in a way that approaches plain English.
In this section, we'll give you a really quick overview of how Ansible works so you can see how the pieces fit together.
Modules
`````````````````
Ansible works by connecting to your nodes and pushing out small programs, called "Ansible Modules" to them. These programs are written to be resource models of the desired state of the system. Ansible then executes these modules (over SSH by default), and removes them when finished.
Your library of modules can reside on any machine, and there are no servers, daemons, or databases required. Typically you'll work with your favorite terminal program, a text editor, and probably a version control system to keep track of changes to your content.
Inventory
````````````````````
By default, Ansible represents what machines it manages using a very simple INI file that puts all of your managed machines in groups of your own choosing.
To add new machines, there is no additional SSL signing server involved, so there's never any hassle deciding why a particular machine didn't get linked up due to obscure NTP or DNS issues.
If there's another source of truth in your infrastructure, Ansible can also plug in to that, such as drawing inventory, group, and variable information from sources like EC2, Rackspace, OpenStack, and more.
Here's what a plain text inventory file looks like::
[webservers]
www1.example.com
www2.example.com
[dbservers]
db0.example.com
db1.example.com
Once inventory hosts are listed, variables can be assigned to them in simple text files (in a subdirectory called 'group_vars/' or 'host_vars/') or directly in the inventory file.
Or, as already mentioned, use a dynamic inventory to pull your inventory from data sources like EC2, Rackspace, or OpenStack.
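As a sketch, a hypothetical variables file for the 'webservers' group above might look like this (the variable names are invented for illustration)::

    # group_vars/webservers
    ntp_server: ntp.example.com
    http_port: 80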
Playbooks
````````````````````
Playbooks can finely orchestrate multiple slices of your infrastructure topology, with very detailed control over how many machines to tackle at a time. This is where Ansible starts to get most interesting.
Ansible's approach to orchestration is one of finely-tuned simplicity, as we believe your automation code should make perfect sense to you years down the road and there should be very little to remember about special syntax or features.
Here's what a simple playbook looks like::
---
- hosts: webservers
serial: 5 # update 5 machines at a time
roles:
- common
- webapp
- hosts: content_servers
roles:
- common
- content
Extending Ansible with Plug-ins and the API
````````````````````````````````````````````
Should you want to write your own, Ansible modules can be written in any language that can return JSON (Ruby, Python, bash, etc). Inventory can also plug in to any datasource by writing a program that speaks to that datasource and returns JSON. There are also various Python APIs for extending Ansible's connection types (SSH is not the only transport possible), callbacks (how Ansible logs, etc), and even for adding new server-side behaviors.
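As an illustration, a minimal module can be a simple shell script that prints JSON; this hypothetical example merely reports the current date and declares that nothing changed::

    #!/bin/sh
    # A minimal, hypothetical Ansible module: gather the current date
    # and report it back as JSON without changing anything on the host.
    DATE=$(date)
    echo "{\"changed\": false, \"date\": \"$DATE\"}"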
@ -4,11 +4,12 @@ Developer Information
Learn how to build modules of your own in any language, and also how to extend Ansible through several kinds of plugins. Explore Ansible's Python API and write Python plugins to integrate with other solutions in your environment.
.. toctree::
:maxdepth: 1
:maxdepth: 2
developing_api
developing_inventory
developing_modules
developing_module_utilities
developing_plugins
developing_core
developing_test_pr