2014-10-02 17:07:05 +00:00
|
|
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
|
|
|
#
|
|
|
|
# This file is part of Ansible
|
|
|
|
#
|
|
|
|
# Ansible is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# Ansible is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
2014-10-15 22:53:43 +00:00
|
|
|
|
2014-10-15 23:18:12 +00:00
|
|
|
# Make coding more python3-ish
|
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
|
|
|
|
2015-04-27 19:43:25 +00:00
|
|
|
import json
import os
import pipes
import subprocess
import sys
import time

from six import iteritems

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unicode import to_unicode

from ansible.utils.debug import debug
|
|
|
|
|
|
|
|
__all__ = ['TaskExecutor']
|
|
|
|
|
2014-10-15 23:37:29 +00:00
|
|
|
class TaskExecutor:

    '''
    The main worker class of the executor pipeline.  Given a host and a
    task, it loads the appropriate action plugin and dispatches the task
    (handling loops, retries, async polling, and delegation along the
    way).  This class is roughly the v2 equivalent of the old Runner()
    class.
    '''

    # Actions whose loop items may be collapsed ("squashed") into a single
    # module invocation -- typically package managers that accept a
    # comma-separated list of names.
    SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)
|
2015-06-11 15:54:25 +00:00
|
|
|
|
2015-07-21 16:12:22 +00:00
|
|
|
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
|
2015-05-02 04:48:11 +00:00
|
|
|
self._host = host
|
|
|
|
self._task = task
|
|
|
|
self._job_vars = job_vars
|
2015-07-21 16:12:22 +00:00
|
|
|
self._play_context = play_context
|
2015-05-02 04:48:11 +00:00
|
|
|
self._new_stdin = new_stdin
|
|
|
|
self._loader = loader
|
|
|
|
self._shared_loader_obj = shared_loader_obj
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-07-24 14:31:14 +00:00
|
|
|
try:
|
|
|
|
from __main__ import display
|
|
|
|
self._display = display
|
|
|
|
except ImportError:
|
|
|
|
from ansible.utils.display import Display
|
|
|
|
self._display = Display()
|
|
|
|
|
2014-11-14 22:14:08 +00:00
|
|
|
def run(self):
|
|
|
|
'''
|
|
|
|
The main executor entrypoint, where we determine if the specified
|
|
|
|
task requires looping and either runs the task with
|
|
|
|
'''
|
|
|
|
|
|
|
|
debug("in run()")
|
2015-01-08 16:51:54 +00:00
|
|
|
|
|
|
|
try:
|
2015-01-15 07:13:45 +00:00
|
|
|
# lookup plugins need to know if this task is executing from
|
|
|
|
# a role, so that it can properly find files/templates/etc.
|
|
|
|
roledir = None
|
|
|
|
if self._task._role:
|
|
|
|
roledir = self._task._role._role_path
|
|
|
|
self._job_vars['roledir'] = roledir
|
|
|
|
|
2015-01-08 16:51:54 +00:00
|
|
|
items = self._get_loop_items()
|
2015-01-09 15:37:31 +00:00
|
|
|
if items is not None:
|
2015-01-08 16:51:54 +00:00
|
|
|
if len(items) > 0:
|
|
|
|
item_results = self._run_loop(items)
|
2015-04-02 16:54:45 +00:00
|
|
|
|
|
|
|
# loop through the item results, and remember the changed/failed
|
|
|
|
# result flags based on any item there.
|
|
|
|
changed = False
|
|
|
|
failed = False
|
|
|
|
for item in item_results:
|
2015-06-02 14:41:46 +00:00
|
|
|
if 'changed' in item and item['changed']:
|
2015-04-02 16:54:45 +00:00
|
|
|
changed = True
|
2015-06-02 14:41:46 +00:00
|
|
|
if 'failed' in item and item['failed']:
|
2015-04-02 16:54:45 +00:00
|
|
|
failed = True
|
|
|
|
|
|
|
|
# create the overall result item, and set the changed/failed
|
|
|
|
# flags there to reflect the overall result of the loop
|
2015-01-08 16:51:54 +00:00
|
|
|
res = dict(results=item_results)
|
2015-04-02 16:54:45 +00:00
|
|
|
|
|
|
|
if changed:
|
|
|
|
res['changed'] = True
|
|
|
|
|
|
|
|
if failed:
|
|
|
|
res['failed'] = True
|
|
|
|
res['msg'] = 'One or more items failed'
|
|
|
|
else:
|
|
|
|
res['msg'] = 'All items completed'
|
2015-01-08 16:51:54 +00:00
|
|
|
else:
|
|
|
|
res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
|
2014-11-14 22:14:08 +00:00
|
|
|
else:
|
2015-01-08 16:51:54 +00:00
|
|
|
debug("calling self._execute()")
|
|
|
|
res = self._execute()
|
|
|
|
debug("_execute() done")
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-01-15 22:56:54 +00:00
|
|
|
# make sure changed is set in the result, if it's not present
|
|
|
|
if 'changed' not in res:
|
|
|
|
res['changed'] = False
|
|
|
|
|
2015-01-08 16:51:54 +00:00
|
|
|
debug("dumping result to json")
|
|
|
|
result = json.dumps(res)
|
|
|
|
debug("done dumping result, returning")
|
|
|
|
return result
|
2015-08-27 06:16:11 +00:00
|
|
|
except AnsibleError as e:
|
2015-03-25 20:56:46 +00:00
|
|
|
return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
|
2014-11-14 22:14:08 +00:00
|
|
|
|
|
|
|
def _get_loop_items(self):
|
|
|
|
'''
|
|
|
|
Loads a lookup plugin to handle the with_* portion of a task (if specified),
|
|
|
|
and returns the items result.
|
|
|
|
'''
|
|
|
|
|
2015-08-03 16:12:45 +00:00
|
|
|
# create a copy of the job vars here so that we can modify
|
|
|
|
# them temporarily without changing them too early for other
|
|
|
|
# parts of the code that might still need a pristine version
|
|
|
|
vars_copy = self._job_vars.copy()
|
|
|
|
|
|
|
|
# now we update them with the play context vars
|
|
|
|
self._play_context.update_vars(vars_copy)
|
|
|
|
|
2015-08-04 16:10:23 +00:00
|
|
|
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)
|
2014-11-14 22:14:08 +00:00
|
|
|
items = None
|
2015-08-04 16:10:23 +00:00
|
|
|
if self._task.loop:
|
|
|
|
if self._task.loop in self._shared_loader_obj.lookup_loader:
|
2015-08-11 20:38:42 +00:00
|
|
|
#TODO: remove convert_bare true and deprecate this in with_
|
2015-08-16 14:16:02 +00:00
|
|
|
try:
|
|
|
|
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
|
|
|
|
except AnsibleUndefinedVariable as e:
|
|
|
|
if 'has no attribute' in str(e):
|
|
|
|
loop_terms = []
|
|
|
|
self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
|
|
|
|
else:
|
|
|
|
raise
|
2015-08-10 21:37:31 +00:00
|
|
|
items = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
|
2015-08-04 16:10:23 +00:00
|
|
|
else:
|
|
|
|
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
|
2014-11-14 22:14:08 +00:00
|
|
|
|
|
|
|
return items
|
|
|
|
|
|
|
|
def _run_loop(self, items):
|
|
|
|
'''
|
|
|
|
Runs the task with the loop items specified and collates the result
|
|
|
|
into an array named 'results' which is inserted into the final result
|
|
|
|
along with the item for which the loop ran.
|
|
|
|
'''
|
|
|
|
|
|
|
|
results = []
|
|
|
|
|
2015-01-20 07:16:19 +00:00
|
|
|
# make copies of the job vars and task so we can add the item to
|
|
|
|
# the variables and re-validate the task with the item variable
|
|
|
|
task_vars = self._job_vars.copy()
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-01-20 07:16:19 +00:00
|
|
|
items = self._squash_items(items, task_vars)
|
2014-11-14 22:14:08 +00:00
|
|
|
for item in items:
|
2015-01-08 16:51:54 +00:00
|
|
|
task_vars['item'] = item
|
|
|
|
|
|
|
|
try:
|
|
|
|
tmp_task = self._task.copy()
|
2015-09-03 12:11:30 +00:00
|
|
|
tmp_play_context = self._play_context.copy()
|
2015-08-27 06:16:11 +00:00
|
|
|
except AnsibleParserError as e:
|
2015-01-08 16:51:54 +00:00
|
|
|
results.append(dict(failed=True, msg=str(e)))
|
|
|
|
continue
|
|
|
|
|
2015-09-03 12:11:30 +00:00
|
|
|
# now we swap the internal task and play context with their copies,
|
|
|
|
# execute, and swap them back so we can do the next iteration cleanly
|
2015-01-08 16:51:54 +00:00
|
|
|
(self._task, tmp_task) = (tmp_task, self._task)
|
2015-09-03 12:11:30 +00:00
|
|
|
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
|
2015-01-12 22:04:56 +00:00
|
|
|
res = self._execute(variables=task_vars)
|
2015-01-08 16:51:54 +00:00
|
|
|
(self._task, tmp_task) = (tmp_task, self._task)
|
2015-09-03 12:11:30 +00:00
|
|
|
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
|
2015-01-08 16:51:54 +00:00
|
|
|
|
|
|
|
# now update the result with the item info, and append the result
|
|
|
|
# to the list of results
|
2014-11-14 22:14:08 +00:00
|
|
|
res['item'] = item
|
|
|
|
results.append(res)
|
|
|
|
|
|
|
|
return results
|
|
|
|
|
2015-01-20 07:16:19 +00:00
|
|
|
def _squash_items(self, items, variables):
|
|
|
|
'''
|
|
|
|
Squash items down to a comma-separated list for certain modules which support it
|
|
|
|
(typically package management modules).
|
|
|
|
'''
|
2015-06-11 15:54:25 +00:00
|
|
|
if len(items) > 0 and self._task.action in self.SQUASH_ACTIONS:
|
2015-01-20 07:16:19 +00:00
|
|
|
final_items = []
|
2015-07-01 04:32:54 +00:00
|
|
|
name = self._task.args.pop('name', None) or self._task.args.pop('pkg', None)
|
2015-01-20 07:16:19 +00:00
|
|
|
for item in items:
|
|
|
|
variables['item'] = item
|
2015-05-04 06:33:10 +00:00
|
|
|
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
|
|
|
|
if self._task.evaluate_conditional(templar, variables):
|
2015-06-30 15:13:40 +00:00
|
|
|
if templar._contains_vars(name):
|
|
|
|
new_item = templar.template(name)
|
2015-06-26 14:54:38 +00:00
|
|
|
final_items.append(new_item)
|
|
|
|
else:
|
|
|
|
final_items.append(item)
|
|
|
|
joined_items = ",".join(final_items)
|
|
|
|
self._task.args['name'] = joined_items
|
|
|
|
return [joined_items]
|
2015-01-20 07:16:19 +00:00
|
|
|
else:
|
|
|
|
return items
|
|
|
|
|
2015-01-12 22:04:56 +00:00
|
|
|
def _execute(self, variables=None):
|
2014-11-14 22:14:08 +00:00
|
|
|
'''
|
|
|
|
The primary workhorse of the executor system, this runs the task
|
|
|
|
on the specified host (which may be the delegated_to host) and handles
|
|
|
|
the retry/until and block rescue/always execution
|
|
|
|
'''
|
|
|
|
|
2015-01-12 22:04:56 +00:00
|
|
|
if variables is None:
|
|
|
|
variables = self._job_vars
|
|
|
|
|
2015-05-02 04:48:11 +00:00
|
|
|
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
|
|
|
|
|
2015-01-20 20:03:26 +00:00
|
|
|
# fields set from the play/task may be based on variables, so we have to
|
2015-04-15 02:10:17 +00:00
|
|
|
# do the same kind of post validation step on it here before we use it.
|
2015-07-21 16:12:22 +00:00
|
|
|
self._play_context.post_validate(templar=templar)
|
2015-01-20 20:03:26 +00:00
|
|
|
|
2015-07-21 16:12:22 +00:00
|
|
|
# now that the play context is finalized, we can add 'magic'
|
2015-04-15 02:10:17 +00:00
|
|
|
# variables to the variable dictionary
|
2015-07-21 16:12:22 +00:00
|
|
|
self._play_context.update_vars(variables)
|
2015-04-15 02:10:17 +00:00
|
|
|
|
2015-01-20 20:03:26 +00:00
|
|
|
# Evaluate the conditional (if any) for this task, which we do before running
|
|
|
|
# the final task post-validation. We do this before the post validation due to
|
|
|
|
# the fact that the conditional may specify that the task be skipped due to a
|
|
|
|
# variable not being present which would otherwise cause validation to fail
|
2015-05-04 06:33:10 +00:00
|
|
|
if not self._task.evaluate_conditional(templar, variables):
|
2014-11-14 22:14:08 +00:00
|
|
|
debug("when evaulation failed, skipping this task")
|
2015-01-15 22:56:54 +00:00
|
|
|
return dict(changed=False, skipped=True, skip_reason='Conditional check failed')
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-07-05 05:06:54 +00:00
|
|
|
# Now we do final validation on the task, which sets all fields to their final values.
|
|
|
|
# In the case of debug tasks, we save any 'var' params and restore them after validating
|
|
|
|
# so that variables are not replaced too early.
|
|
|
|
prev_var = None
|
|
|
|
if self._task.action == 'debug' and 'var' in self._task.args:
|
|
|
|
prev_var = self._task.args.pop('var')
|
|
|
|
|
2015-05-02 04:48:11 +00:00
|
|
|
self._task.post_validate(templar=templar)
|
2015-07-24 14:31:14 +00:00
|
|
|
if '_variable_params' in self._task.args:
|
|
|
|
variable_params = self._task.args.pop('_variable_params')
|
|
|
|
if isinstance(variable_params, dict):
|
|
|
|
self._display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
|
|
|
|
variable_params.update(self._task.args)
|
|
|
|
self._task.args = variable_params
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-07-05 05:06:54 +00:00
|
|
|
if prev_var is not None:
|
|
|
|
self._task.args['var'] = prev_var
|
|
|
|
|
2015-02-12 18:11:08 +00:00
|
|
|
# if this task is a TaskInclude, we just return now with a success code so the
|
|
|
|
# main thread can expand the task list for the given host
|
|
|
|
if self._task.action == 'include':
|
|
|
|
include_variables = self._task.args.copy()
|
|
|
|
include_file = include_variables.get('_raw_params')
|
|
|
|
del include_variables['_raw_params']
|
2015-07-15 19:11:46 +00:00
|
|
|
return dict(include=include_file, include_variables=include_variables)
|
2015-02-12 18:11:08 +00:00
|
|
|
|
2015-07-09 12:23:43 +00:00
|
|
|
# get the connection and the handler for this execution
|
|
|
|
self._connection = self._get_connection(variables)
|
|
|
|
self._connection.set_host_overrides(host=self._host)
|
|
|
|
|
|
|
|
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
|
|
|
|
|
2015-01-23 03:45:25 +00:00
|
|
|
# And filter out any fields which were set to default(omit), and got the omit token value
|
|
|
|
omit_token = variables.get('omit')
|
|
|
|
if omit_token is not None:
|
2015-09-03 06:47:11 +00:00
|
|
|
self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)
|
2015-01-23 03:45:25 +00:00
|
|
|
|
2015-01-20 20:03:26 +00:00
|
|
|
# Read some values from the task, so that we can modify them if need be
|
2014-11-14 22:14:08 +00:00
|
|
|
retries = self._task.retries
|
|
|
|
if retries <= 0:
|
|
|
|
retries = 1
|
|
|
|
|
|
|
|
delay = self._task.delay
|
|
|
|
if delay < 0:
|
2015-01-07 17:44:52 +00:00
|
|
|
delay = 1
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-01-15 22:56:54 +00:00
|
|
|
# make a copy of the job vars here, in case we need to update them
|
|
|
|
# with the registered variable value later on when testing conditions
|
|
|
|
vars_copy = variables.copy()
|
|
|
|
|
2014-11-14 22:14:08 +00:00
|
|
|
debug("starting attempt loop")
|
|
|
|
result = None
|
|
|
|
for attempt in range(retries):
|
|
|
|
if attempt > 0:
|
2015-01-20 20:03:26 +00:00
|
|
|
# FIXME: this should use the callback/message passing mechanism
|
2015-07-02 14:33:22 +00:00
|
|
|
print("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result))
|
2014-11-14 22:14:08 +00:00
|
|
|
result['attempts'] = attempt + 1
|
|
|
|
|
|
|
|
debug("running the handler")
|
2015-01-12 22:04:56 +00:00
|
|
|
result = self._handler.run(task_vars=variables)
|
2014-11-14 22:14:08 +00:00
|
|
|
debug("handler run complete")
|
2015-01-02 13:51:15 +00:00
|
|
|
|
2015-01-07 17:44:52 +00:00
|
|
|
if self._task.async > 0:
|
|
|
|
# the async_wrapper module returns dumped JSON via its stdout
|
|
|
|
# response, so we parse it here and replace the result
|
|
|
|
try:
|
|
|
|
result = json.loads(result.get('stdout'))
|
2015-07-24 22:42:53 +00:00
|
|
|
except (TypeError, ValueError) as e:
|
2015-01-07 17:44:52 +00:00
|
|
|
return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))
|
|
|
|
|
|
|
|
if self._task.poll > 0:
|
2015-05-04 06:33:10 +00:00
|
|
|
result = self._poll_async_result(result=result, templar=templar)
|
2015-01-02 13:51:15 +00:00
|
|
|
|
2015-03-25 18:51:40 +00:00
|
|
|
# update the local copy of vars with the registered value, if specified,
|
|
|
|
# or any facts which may have been generated by the module execution
|
2015-01-15 22:56:54 +00:00
|
|
|
if self._task.register:
|
|
|
|
vars_copy[self._task.register] = result
|
|
|
|
|
2015-03-25 18:51:40 +00:00
|
|
|
if 'ansible_facts' in result:
|
|
|
|
vars_copy.update(result['ansible_facts'])
|
|
|
|
|
2015-01-15 22:56:54 +00:00
|
|
|
# create a conditional object to evaluate task conditions
|
|
|
|
cond = Conditional(loader=self._loader)
|
|
|
|
|
|
|
|
# FIXME: make sure until is mutually exclusive with changed_when/failed_when
|
2014-11-14 22:14:08 +00:00
|
|
|
if self._task.until:
|
2015-01-15 07:13:45 +00:00
|
|
|
cond.when = self._task.until
|
2015-05-04 06:33:10 +00:00
|
|
|
if cond.evaluate_conditional(templar, vars_copy):
|
2015-01-07 17:44:52 +00:00
|
|
|
break
|
2015-01-15 22:56:54 +00:00
|
|
|
elif (self._task.changed_when or self._task.failed_when) and 'skipped' not in result:
|
|
|
|
if self._task.changed_when:
|
|
|
|
cond.when = [ self._task.changed_when ]
|
2015-05-04 06:33:10 +00:00
|
|
|
result['changed'] = cond.evaluate_conditional(templar, vars_copy)
|
2015-01-15 22:56:54 +00:00
|
|
|
if self._task.failed_when:
|
|
|
|
cond.when = [ self._task.failed_when ]
|
2015-05-04 06:33:10 +00:00
|
|
|
failed_when_result = cond.evaluate_conditional(templar, vars_copy)
|
2015-01-15 22:56:54 +00:00
|
|
|
result['failed_when_result'] = result['failed'] = failed_when_result
|
|
|
|
if failed_when_result:
|
|
|
|
break
|
2015-07-28 14:14:48 +00:00
|
|
|
elif 'failed' not in result:
|
|
|
|
if result.get('rc', 0) != 0:
|
|
|
|
result['failed'] = True
|
|
|
|
else:
|
|
|
|
# if the result is not failed, stop trying
|
|
|
|
break
|
2014-11-14 22:14:08 +00:00
|
|
|
|
|
|
|
if attempt < retries - 1:
|
|
|
|
time.sleep(delay)
|
|
|
|
|
2015-03-25 18:51:40 +00:00
|
|
|
# do the final update of the local variables here, for both registered
|
|
|
|
# values and any facts which may have been created
|
|
|
|
if self._task.register:
|
|
|
|
variables[self._task.register] = result
|
|
|
|
|
|
|
|
if 'ansible_facts' in result:
|
|
|
|
variables.update(result['ansible_facts'])
|
|
|
|
|
2015-07-17 16:02:26 +00:00
|
|
|
# save the notification target in the result, if it was specified, as
|
|
|
|
# this task may be running in a loop in which case the notification
|
|
|
|
# may be item-specific, ie. "notify: service {{item}}"
|
2015-07-17 18:44:05 +00:00
|
|
|
if self._task.notify is not None:
|
2015-07-27 18:01:02 +00:00
|
|
|
result['_ansible_notify'] = self._task.notify
|
2015-07-17 16:02:26 +00:00
|
|
|
|
2015-03-25 18:51:40 +00:00
|
|
|
# and return
|
2014-11-14 22:14:08 +00:00
|
|
|
debug("attempt loop complete, returning result")
|
|
|
|
return result
|
|
|
|
|
2015-05-04 06:33:10 +00:00
|
|
|
def _poll_async_result(self, result, templar):
|
2015-01-02 13:51:15 +00:00
|
|
|
'''
|
|
|
|
Polls for the specified JID to be complete
|
|
|
|
'''
|
|
|
|
|
2015-01-07 17:44:52 +00:00
|
|
|
async_jid = result.get('ansible_job_id')
|
2015-01-02 13:51:15 +00:00
|
|
|
if async_jid is None:
|
|
|
|
return dict(failed=True, msg="No job id was returned by the async task")
|
|
|
|
|
|
|
|
# Create a new psuedo-task to run the async_status module, and run
|
|
|
|
# that (with a sleep for "poll" seconds between each retry) until the
|
|
|
|
# async time limit is exceeded.
|
|
|
|
|
|
|
|
async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
|
|
|
|
|
|
|
|
# Because this is an async task, the action handler is async. However,
|
|
|
|
# we need the 'normal' action handler for the status check, so get it
|
|
|
|
# now via the action_loader
|
2015-08-28 20:32:09 +00:00
|
|
|
normal_handler = self._shared_loader_obj.action_loader.get(
|
2015-01-02 13:51:15 +00:00
|
|
|
'normal',
|
|
|
|
task=async_task,
|
|
|
|
connection=self._connection,
|
2015-07-21 16:12:22 +00:00
|
|
|
play_context=self._play_context,
|
2015-01-15 07:13:45 +00:00
|
|
|
loader=self._loader,
|
2015-05-04 06:33:10 +00:00
|
|
|
templar=templar,
|
2015-05-02 04:48:11 +00:00
|
|
|
shared_loader_obj=self._shared_loader_obj,
|
2015-01-02 13:51:15 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
time_left = self._task.async
|
|
|
|
while time_left > 0:
|
|
|
|
time.sleep(self._task.poll)
|
|
|
|
|
|
|
|
async_result = normal_handler.run()
|
|
|
|
if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result:
|
|
|
|
break
|
|
|
|
|
|
|
|
time_left -= self._task.poll
|
|
|
|
|
|
|
|
if int(async_result.get('finished', 0)) != 1:
|
|
|
|
return dict(failed=True, msg="async task did not complete within the requested time")
|
|
|
|
else:
|
|
|
|
return async_result
|
|
|
|
|
2015-02-09 22:54:44 +00:00
|
|
|
def _get_connection(self, variables):
|
2014-11-14 22:14:08 +00:00
|
|
|
'''
|
|
|
|
Reads the connection property for the host, and returns the
|
|
|
|
correct connection object from the list of connection plugins
|
|
|
|
'''
|
|
|
|
|
|
|
|
# FIXME: calculation of connection params/auth stuff should be done here
|
|
|
|
|
2015-07-21 16:12:22 +00:00
|
|
|
if not self._play_context.remote_addr:
|
|
|
|
self._play_context.remote_addr = self._host.ipv4_address
|
2015-06-02 19:56:32 +00:00
|
|
|
|
2015-02-09 22:54:44 +00:00
|
|
|
if self._task.delegate_to is not None:
|
|
|
|
self._compute_delegate(variables)
|
|
|
|
|
2015-07-21 16:12:22 +00:00
|
|
|
conn_type = self._play_context.connection
|
2014-11-14 22:14:08 +00:00
|
|
|
if conn_type == 'smart':
|
|
|
|
conn_type = 'ssh'
|
2015-07-21 16:12:22 +00:00
|
|
|
if sys.platform.startswith('darwin') and self._play_context.password:
|
2015-04-27 19:43:25 +00:00
|
|
|
# due to a current bug in sshpass on OSX, which can trigger
|
|
|
|
# a kernel panic even for non-privileged users, we revert to
|
|
|
|
# paramiko on that OS when a SSH password is specified
|
|
|
|
conn_type = "paramiko"
|
|
|
|
else:
|
|
|
|
# see if SSH can support ControlPersist if not use paramiko
|
2015-07-22 20:14:17 +00:00
|
|
|
try:
|
|
|
|
cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
|
|
(out, err) = cmd.communicate()
|
2015-08-26 15:00:45 +00:00
|
|
|
if "Bad configuration option" in err or "Usage:" in err:
|
2015-07-22 20:14:17 +00:00
|
|
|
conn_type = "paramiko"
|
|
|
|
except OSError:
|
2015-04-27 19:43:25 +00:00
|
|
|
conn_type = "paramiko"
|
2014-11-14 22:14:08 +00:00
|
|
|
|
2015-08-28 20:32:09 +00:00
|
|
|
connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin)
|
2014-11-14 22:14:08 +00:00
|
|
|
if not connection:
|
|
|
|
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
|
|
|
|
|
|
|
|
return connection
|
|
|
|
|
2015-05-04 06:33:10 +00:00
|
|
|
def _get_action_handler(self, connection, templar):
|
2014-11-14 22:14:08 +00:00
|
|
|
'''
|
|
|
|
Returns the correct action plugin to handle the requestion task action
|
|
|
|
'''
|
2014-10-15 22:53:43 +00:00
|
|
|
|
2015-08-28 20:32:09 +00:00
|
|
|
if self._task.action in self._shared_loader_obj.action_loader:
|
2014-11-14 22:14:08 +00:00
|
|
|
if self._task.async != 0:
|
|
|
|
raise AnsibleError("async mode is not supported with the %s module" % module_name)
|
|
|
|
handler_name = self._task.action
|
|
|
|
elif self._task.async == 0:
|
|
|
|
handler_name = 'normal'
|
|
|
|
else:
|
|
|
|
handler_name = 'async'
|
2014-10-15 22:53:43 +00:00
|
|
|
|
2015-08-28 20:32:09 +00:00
|
|
|
handler = self._shared_loader_obj.action_loader.get(
|
2014-11-14 22:14:08 +00:00
|
|
|
handler_name,
|
|
|
|
task=self._task,
|
|
|
|
connection=connection,
|
2015-07-21 16:12:22 +00:00
|
|
|
play_context=self._play_context,
|
2015-01-15 07:13:45 +00:00
|
|
|
loader=self._loader,
|
2015-05-04 06:33:10 +00:00
|
|
|
templar=templar,
|
2015-05-02 04:48:11 +00:00
|
|
|
shared_loader_obj=self._shared_loader_obj,
|
2014-11-14 22:14:08 +00:00
|
|
|
)
|
2015-03-02 18:38:45 +00:00
|
|
|
|
2014-11-14 22:14:08 +00:00
|
|
|
if not handler:
|
|
|
|
raise AnsibleError("the handler '%s' was not found" % handler_name)
|
2014-10-15 22:53:43 +00:00
|
|
|
|
2014-11-14 22:14:08 +00:00
|
|
|
return handler
|
2015-02-09 22:54:44 +00:00
|
|
|
|
|
|
|
def _compute_delegate(self, variables):
|
|
|
|
|
|
|
|
# get the vars for the delegate by its name
|
|
|
|
try:
|
2015-08-19 16:08:26 +00:00
|
|
|
self._display.debug("Delegating to %s" % self._task.delegate_to)
|
2015-02-09 22:54:44 +00:00
|
|
|
this_info = variables['hostvars'][self._task.delegate_to]
|
2015-07-23 00:12:00 +00:00
|
|
|
|
|
|
|
# get the real ssh_address for the delegate and allow ansible_ssh_host to be templated
|
|
|
|
self._play_context.remote_addr = this_info.get('ansible_ssh_host', self._task.delegate_to)
|
2015-08-19 16:08:26 +00:00
|
|
|
self._play_context.remote_user = this_info.get('ansible_remote_user', self._task.remote_user)
|
2015-07-23 00:12:00 +00:00
|
|
|
self._play_context.port = this_info.get('ansible_ssh_port', self._play_context.port)
|
|
|
|
self._play_context.password = this_info.get('ansible_ssh_pass', self._play_context.password)
|
|
|
|
self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', self._play_context.private_key_file)
|
|
|
|
self._play_context.become_pass = this_info.get('ansible_sudo_pass', self._play_context.become_pass)
|
2015-08-20 00:36:08 +00:00
|
|
|
|
|
|
|
conn = this_info.get('ansible_connection', self._task.connection)
|
|
|
|
if conn:
|
|
|
|
self._play_context.connection = conn
|
|
|
|
|
2015-08-19 16:08:26 +00:00
|
|
|
except Exception as e:
|
2015-02-09 22:54:44 +00:00
|
|
|
# make sure the inject is empty for non-inventory hosts
|
|
|
|
this_info = {}
|
2015-08-19 16:08:26 +00:00
|
|
|
self._display.debug("Delegate due to: %s" % str(e))
|
2015-02-09 22:54:44 +00:00
|
|
|
|
|
|
|
# Last chance to get private_key_file from global variables.
|
|
|
|
# this is useful if delegated host is not defined in the inventory
|
2015-08-19 16:08:26 +00:00
|
|
|
if self._play_context.private_key_file is None:
|
|
|
|
self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', None)
|
2015-02-09 22:54:44 +00:00
|
|
|
|
2015-08-19 16:08:26 +00:00
|
|
|
if self._play_context.private_key_file is None:
|
|
|
|
key = this_info.get('private_key_file', None)
|
|
|
|
if key:
|
|
|
|
self._play_context.private_key_file = os.path.expanduser(key)
|
2015-02-09 22:54:44 +00:00
|
|
|
|
|
|
|
for i in this_info:
|
|
|
|
if i.startswith("ansible_") and i.endswith("_interpreter"):
|
|
|
|
variables[i] = this_info[i]
|
|
|
|
|