# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

import multiprocessing
import signal
import os
import pwd
import Queue
import random
import traceback
import tempfile
import time
import collections
import socket

import ansible.constants as C
import ansible.inventory
from ansible import utils
from ansible import errors
from ansible import module_common
import poller
import connection
from ansible.callbacks import DefaultRunnerCallbacks, vv

HAS_ATFORK=True
try:
    from Crypto.Random import atfork
except ImportError:
    HAS_ATFORK=False

################################################

def _executor_hook(job_queue, result_queue):
    ''' callback used by multiprocessing pool '''

    # attempt workaround of https://github.com/newsapps/beeswithmachineguns/issues/17
    # this function also not present in CentOS 6
    if HAS_ATFORK:
        atfork()

    signal.signal(signal.SIGINT, signal.SIG_IGN)
    while not job_queue.empty():
        try:
            job = job_queue.get(block=False)
            runner, host = job
            result_queue.put(runner._executor(host))
        except Queue.Empty:
            pass
        except:
            traceback.print_exc()

################################################

class ReturnData(object):
    ''' internal return class for execute methods, not part of API signature '''
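    # The 'result' dict is whatever the module returned (parsed from JSON when
    # a string is passed in).  By loose convention modules may set 'failed',
    # 'changed', 'rc' and 'msg' keys, which is_successful() and the runner
    # callbacks interpret below.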

    __slots__ = [ 'result', 'comm_ok', 'host' ]

    def __init__(self, host=None, result=None, comm_ok=True):
        self.host = host
        self.result = result
        self.comm_ok = comm_ok

        if type(self.result) in [ str, unicode ]:
            self.result = utils.parse_json(self.result)

        if host is None:
            raise Exception("host not set")
        if type(self.result) != dict:
            raise Exception("dictionary result expected")

    def communicated_ok(self):
        return self.comm_ok

    def is_successful(self):
        return self.comm_ok and ('failed' not in self.result) and (self.result.get('rc',0) == 0)

    def daisychain(self, module_name):
        ''' request a module call follow this one '''
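        # Used by the copy/template/assemble handlers: they tag a successful
        # result with daisychain='file' so that _executor_internal_inner
        # re-invokes the 'file' module next (optionally with 'daisychain_args')
        # and merges the two results.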
        if self.is_successful():
            self.result['daisychain'] = module_name
        return self

class Runner(object):
    ''' core API interface to ansible '''

    # see bin/ansible for how this is used...
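    #
    # A rough usage sketch (illustrative only -- the host pattern and options
    # below are made up, and the keyword defaults come from ansible.constants):
    #
    #   runner = Runner(module_name='ping', module_args='', pattern='webservers', forks=10)
    #   results = runner.run()
    #
    # run() returns a dict with 'contacted' and 'dark' keys, each mapping host
    # names to per-host result dicts (see _partition_results below).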

    def __init__(self,
        host_list=C.DEFAULT_HOST_LIST,      # ex: /etc/ansible/hosts, legacy usage
        module_path=C.DEFAULT_MODULE_PATH,  # ex: /usr/share/ansible
        module_name=C.DEFAULT_MODULE_NAME,  # ex: copy
        module_args=C.DEFAULT_MODULE_ARGS,  # ex: "src=/tmp/a dest=/tmp/b"
        forks=C.DEFAULT_FORKS,              # parallelism level
        timeout=C.DEFAULT_TIMEOUT,          # SSH timeout
        pattern=C.DEFAULT_PATTERN,          # which hosts?  ex: 'all', 'acme.example.org'
        remote_user=C.DEFAULT_REMOTE_USER,  # ex: 'username'
        remote_pass=C.DEFAULT_REMOTE_PASS,  # ex: 'password123' or None if using key
        remote_port=C.DEFAULT_REMOTE_PORT,  # if SSH on different ports
        private_key_file=C.DEFAULT_PRIVATE_KEY_FILE, # if not using keys/passwords
        sudo_pass=C.DEFAULT_SUDO_PASS,      # ex: 'password123' or None
        background=0,                       # async poll every X seconds, else 0 for non-async
        basedir=None,                       # directory of playbook, if applicable
        setup_cache=None,                   # used to share fact data w/ other tasks
        transport=C.DEFAULT_TRANSPORT,      # 'ssh', 'paramiko', 'local'
        conditional='True',                 # run only if this fact expression evals to true
        callbacks=None,                     # used for output
        sudo=False,                         # whether to run sudo or not
        sudo_user=C.DEFAULT_SUDO_USER,      # ex: 'root'
        module_vars=None,                   # a playbook internals thing
        is_playbook=False,                  # running from playbook or not?
        inventory=None                      # reference to Inventory object
        ):

        # storage & defaults
        self.setup_cache      = utils.default(setup_cache, lambda: collections.defaultdict(dict))
        self.basedir          = utils.default(basedir, lambda: os.getcwd())
        self.callbacks        = utils.default(callbacks, lambda: DefaultRunnerCallbacks())
        self.generated_jid    = str(random.randint(0, 999999999999))
        self.transport        = transport
        self.inventory        = utils.default(inventory, lambda: ansible.inventory.Inventory(host_list))
        self.module_vars      = utils.default(module_vars, lambda: {})
        self.sudo_user        = sudo_user
        self.connector        = connection.Connection(self)
        self.conditional      = conditional
        self.module_path      = module_path
        self.module_name      = module_name
        self.forks            = int(forks)
        self.pattern          = pattern
        self.module_args      = module_args
        self.timeout          = timeout
        self.remote_user      = remote_user
        self.remote_pass      = remote_pass
        self.remote_port      = remote_port
        self.private_key_file = private_key_file
        self.background       = background
        self.sudo             = sudo
        self.sudo_pass        = sudo_pass
        self.is_playbook      = is_playbook

        # misc housekeeping
        if self.transport == 'ssh' and remote_pass:
            raise errors.AnsibleError("SSH transport does not support passwords, only keys or agents")
        if self.transport == 'local':
            self.remote_user = pwd.getpwuid(os.geteuid())[0]

        # ensure we are using unique tmp paths
        random.seed()

    # *****************************************************

    def _delete_remote_files(self, conn, files):
        ''' deletes one or more remote files '''

        if type(files) == str:
            files = [ files ]
        for filename in files:
            if filename.find('/tmp/') == -1:
                raise Exception("not going to happen")
            self._low_level_exec_command(conn, "rm -rf %s" % filename, None)

    # *****************************************************

    def _transfer_str(self, conn, tmp, name, data):
        ''' transfer string to remote file '''

        if type(data) == dict:
            data = utils.jsonify(data)

        afd, afile = tempfile.mkstemp()
        afo = os.fdopen(afd, 'w')
        afo.write(data.encode('utf8'))
        afo.flush()
        afo.close()

        remote = os.path.join(tmp, name)
        try:
            conn.put_file(afile, remote)
        finally:
            os.unlink(afile)
        return remote

    # *****************************************************

    def _execute_module(self, conn, tmp, module_name, args,
        async_jid=None, async_module=None, async_limit=None, inject=None):

        ''' runs a module that has already been transferred '''
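        # Note: "new style" modules (see _copy_module) have the shared
        # module_common boilerplate and their arguments substituted directly
        # into the module file, so they are run bare; older modules instead
        # get a separate 'arguments' file transferred and passed on the
        # command line below.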

        if type(args) == dict:
            args = utils.jsonify(args,format=True)

        (remote_module_path, is_new_style) = self._copy_module(conn, tmp, module_name, inject)
        cmd = "chmod u+x %s" % remote_module_path
        if self.sudo and self.sudo_user != 'root':
            # deal with possible umask issues once sudo'ed to other user
            cmd = "chmod a+rx %s" % remote_module_path
        self._low_level_exec_command(conn, cmd, tmp)

        cmd = ""
        if not is_new_style:
            args = utils.template(args, inject)
            argsfile = self._transfer_str(conn, tmp, 'arguments', args)
            if async_jid is None:
                cmd = "%s %s" % (remote_module_path, argsfile)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
        else:
            if async_jid is None:
                cmd = "%s" % (remote_module_path)
            else:
                cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module]])

        res = self._low_level_exec_command(conn, cmd, tmp, sudoable=True)
        return ReturnData(host=conn.host, result=res)

    # *****************************************************

    def _execute_raw(self, conn, tmp, inject=None):
        ''' execute a non-module command for bootstrapping, or if there's no python on a device '''
        return ReturnData(host=conn.host, result=dict(
            stdout=self._low_level_exec_command(conn, self.module_args, tmp, sudoable=True)
        ))

    # ***************************************************

    def _execute_normal_module(self, conn, tmp, module_name, inject=None):
        ''' transfer & execute a module that is not 'copy' or 'template' '''

        # shell and command are the same module
        if module_name == 'shell':
            module_name = 'command'
            self.module_args += " #USE_SHELL"

        vv("REMOTE_MODULE %s %s" % (module_name, self.module_args), host=conn.host)
        exec_rc = self._execute_module(conn, tmp, module_name, self.module_args, inject=inject)
        return exec_rc

    # *****************************************************

    def _execute_async_module(self, conn, tmp, module_name, inject=None):
        ''' transfer the given module name, plus the async module, then run it '''

        # shell and command module are the same
        module_args = self.module_args
        if module_name == 'shell':
            module_name = 'command'
            module_args += " #USE_SHELL"

        (module_path, is_new_style) = self._copy_module(conn, tmp, module_name, inject)
        self._low_level_exec_command(conn, "chmod a+rx %s" % module_path, tmp)

        return self._execute_module(conn, tmp, 'async_wrapper', module_args,
            async_module=module_path,
            async_jid=self.generated_jid,
            async_limit=self.background,
            inject=inject
        )

    # *****************************************************

    def _execute_copy(self, conn, tmp, inject=None):
        ''' handler for file transfer operations '''

        # load up options
        options = utils.parse_kv(self.module_args)
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        if (source is None and 'first_available_file' not in inject) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, result=result)

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn = utils.template(fn, inject)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                result = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(host=conn.host, result=result)

        source = utils.template(source, inject)
        source = utils.path_dwim(self.basedir, source)

        local_md5 = utils.md5(source)
        if local_md5 is None:
            result = dict(failed=True, msg="could not find src=%s" % source)
            return ReturnData(host=conn.host, result=result)

        remote_md5 = self._remote_md5(conn, tmp, dest)

        exec_rc = None
        if local_md5 != remote_md5:
            # transfer the file to a remote tmp location
            tmp_src = tmp + os.path.basename(source)
            conn.put_file(source, tmp_src)
            # fix file permissions when the copy is done as a different user
            if self.sudo and self.sudo_user != 'root':
                self._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)

            # run the copy module
            self.module_args = "%s src=%s" % (self.module_args, tmp_src)
            return self._execute_module(conn, tmp, 'copy', self.module_args, inject=inject).daisychain('file')

        else:
            # no need to transfer the file, already correct md5
            result = dict(changed=False, md5sum=remote_md5, transferred=False)
            return ReturnData(host=conn.host, result=result).daisychain('file')

    # *****************************************************

    def _execute_fetch(self, conn, tmp, inject=None):
        ''' handler for fetch operations '''

        # load up options
        options = utils.parse_kv(self.module_args)
        source = options.get('src', None)
        dest = options.get('dest', None)
        if source is None or dest is None:
            results = dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, result=results)

        # apply templating to source argument
        source = utils.template(source, inject)
        # apply templating to dest argument
        dest = utils.template(dest, inject)

        # files are saved in dest dir, with a subdir for each host, then the filename
        dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), conn.host, source)
        dest = dest.replace("//","/")

        # calculate md5 sum for the remote file
        remote_md5 = self._remote_md5(conn, tmp, source)

        # these don't fail because you may want to transfer a log file that possibly MAY exist
        # but keep going to fetch other log files
        if remote_md5 == '0':
            result = dict(msg="unable to calculate the md5 sum of the remote file", file=source, changed=False)
            return ReturnData(host=conn.host, result=result)
        if remote_md5 == '1':
            result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
            return ReturnData(host=conn.host, result=result)
        if remote_md5 == '2':
            result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
            return ReturnData(host=conn.host, result=result)

        # calculate md5 sum for the local file
        local_md5 = utils.md5(dest)

        if remote_md5 != local_md5:
            # create the containing directories, if needed
            if not os.path.isdir(os.path.dirname(dest)):
                os.makedirs(os.path.dirname(dest))

            # fetch the file and check for changes
            conn.fetch_file(source, dest)
            new_md5 = utils.md5(dest)
            if new_md5 != remote_md5:
                result = dict(failed=True, md5sum=new_md5, msg="md5 mismatch", file=source)
                return ReturnData(host=conn.host, result=result)
            result = dict(changed=True, md5sum=new_md5)
            return ReturnData(host=conn.host, result=result)
        else:
            result = dict(changed=False, md5sum=local_md5, file=source)
            return ReturnData(host=conn.host, result=result)

    # *****************************************************

    def _execute_template(self, conn, tmp, inject=None):
        ''' handler for template operations '''

        if not self.is_playbook:
            raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks")

        # load up options
        options = utils.parse_kv(self.module_args)
        source  = options.get('src', None)
        dest    = options.get('dest', None)
        if (source is None and 'first_available_file' not in inject) or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(host=conn.host, comm_ok=False, result=result)

        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        if 'first_available_file' in inject:
            found = False
            for fn in self.module_vars.get('first_available_file'):
                fn = utils.template(fn, inject)
                if os.path.exists(fn):
                    source = fn
                    found = True
                    break
            if not found:
                result = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(host=conn.host, comm_ok=False, result=result)

        source = utils.template(source, inject)

        # template the source data locally & transfer
        try:
            resultant = utils.template_from_file(self.basedir, source, inject)
        except Exception, e:
            result = dict(failed=True, msg=str(e))
            return ReturnData(host=conn.host, comm_ok=False, result=result)
        xfered = self._transfer_str(conn, tmp, 'source', resultant)

        # run the copy module, queue the file module
        self.module_args = "%s src=%s dest=%s" % (self.module_args, xfered, dest)
        return self._execute_module(conn, tmp, 'copy', self.module_args, inject=inject).daisychain('file')

    # *****************************************************

    def _execute_assemble(self, conn, tmp, inject=None):
        ''' handler for assemble operations '''

        # FIXME: once assemble is ported over to use the new common logic, this method
        # will be unnecessary as it can decide to daisychain via its own module returns,
        # and this function can be deleted.

        return self._execute_module(conn, tmp, 'assemble', self.module_args, inject=inject).daisychain('file')

    # *****************************************************

    def _executor(self, host):
        ''' handler for multiprocessing library '''

        try:
            exec_rc = self._executor_internal(host)
            if type(exec_rc) != ReturnData:
                raise Exception("unexpected return type: %s" % type(exec_rc))
            # redundant, right?
            if not exec_rc.comm_ok:
                self.callbacks.on_unreachable(host, exec_rc.result)
            return exec_rc
        except errors.AnsibleError, ae:
            msg = str(ae)
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
        except Exception:
            msg = traceback.format_exc()
            self.callbacks.on_unreachable(host, msg)
            return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))

    # *****************************************************

    def _executor_internal(self, host):
        ''' executes any module one or more times '''

        host_variables = self.inventory.get_variables(host)
        port = host_variables.get('ansible_ssh_port', self.remote_port)

        inject = self.setup_cache[host].copy()
        inject.update(host_variables)
        inject.update(self.module_vars)
        inject['hostvars'] = self.setup_cache

        items = self.module_vars.get('items', [])
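
        # 'items' drives with_items: when present, the module is invoked once
        # per item below with inject['item'] set for each pass.  A value like
        # '$packages' (illustrative name) is first resolved against inject.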
        if isinstance(items, basestring) and items.startswith("$"):
            items = items.replace("$","")
            if items in inject:
                items = inject[items]
            else:
                raise errors.AnsibleError("unbound variable in with_items: %s" % items)
        if type(items) != list:
            raise errors.AnsibleError("with_items only takes a list: %s" % items)

        if len(items) and self.module_name in [ 'apt', 'yum' ]:
            # hack for apt and soon yum, with_items maps back into a single module call
            inject['item'] = ",".join(items)
            items = []

        if len(items) == 0:
            return self._executor_internal_inner(host, inject, port)
        else:
            # executing using with_items, so make multiple calls
            # TODO: refactor
            aggregate = {}
            all_comm_ok = True
            all_changed = False
            all_failed = False
            results = []
            # Save module name and args since daisy-chaining can overwrite them
            module_name = self.module_name
            module_args = self.module_args
            for x in items:
                self.module_name = module_name
                self.module_args = module_args
                inject['item'] = x
                result = self._executor_internal_inner(host, inject, port)
                results.append(result.result)
                if result.comm_ok == False:
                    all_comm_ok = False
                    break
            for x in results:
                if x.get('changed') == True:
                    all_changed = True
                if (x.get('failed') == True) or (('rc' in x) and (x['rc'] != 0)):
                    all_failed = True
                    break
            msg = 'All items succeeded'
            if all_failed:
                msg = "One or more items failed."
            rd_result = dict(failed=all_failed, changed=all_changed, results=results, msg=msg)
            if not all_failed:
                del rd_result['failed']
            return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)

    # *****************************************************

    def _executor_internal_inner(self, host, inject, port, is_chained=False):
        ''' decides how to invoke a module '''

        # special non-user/non-fact variables:
        # 'groups' variable is a list of host names in each group
        # 'hostvars' variable contains variables for each host name
        #  ... and is set elsewhere
        # 'inventory_hostname' is also set elsewhere
        group_hosts = {}
        for g in self.inventory.groups:
            group_hosts[g.name] = [ h.name for h in g.hosts ]
        inject['groups'] = group_hosts
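        # e.g. inject['groups'] might now look like (hypothetical inventory):
        #   {'webservers': ['web1.example.com', 'web2.example.com'],
        #    'dbservers':  ['db1.example.com']}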

        # allow module args to work as a dictionary
        # though it is usually a string
        new_args = ""
        if type(self.module_args) == dict:
            for (k,v) in self.module_args.iteritems():
                new_args = new_args + "%s='%s' " % (k,v)
            self.module_args = new_args
        self.module_args = utils.template(self.module_args, inject)

        conditional = utils.template(self.conditional, inject)
        if not eval(conditional):
            result = utils.jsonify(dict(skipped=True))
            self.callbacks.on_skipped(host, inject.get('item',None))
            return ReturnData(host=host, result=result)

        conn = None
        try:
            conn = self.connector.connect(host, port)
        except errors.AnsibleConnectionFailed, e:
            result = dict(failed=True, msg="FAILED: %s" % str(e))
            return ReturnData(host=host, comm_ok=False, result=result)

        module_name = utils.template(self.module_name, inject)

        tmp = ''
        if self.module_name != 'raw':
            tmp = self._make_tmp_path(conn)
        result = None

        handler = getattr(self, "_execute_%s" % self.module_name, None)
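        # modules with special client-side handling (copy, fetch, template,
        # assemble, raw) dispatch to their _execute_<name> method; everything
        # else goes through the generic normal/async path below.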
        if handler:
            result = handler(conn, tmp, inject=inject)
        else:
            if self.background == 0:
                result = self._execute_normal_module(conn, tmp, module_name, inject=inject)
            else:
                result = self._execute_async_module(conn, tmp, module_name, inject=inject)

        result.result['module'] = self.module_name
        if result.is_successful() and 'daisychain' in result.result:
            self.module_name = result.result['daisychain']
            if 'daisychain_args' in result.result:
                self.module_args = result.result['daisychain_args']
            result2 = self._executor_internal_inner(host, inject, port, is_chained=True)
            result2.result['module'] = self.module_name
            changed = False
            if result.result.get('changed',False) or result2.result.get('changed',False):
                changed = True
            result.result.update(result2.result)
            result.result['changed'] = changed

            del result.result['daisychain']

        if self.module_name != 'raw':
            self._delete_remote_files(conn, tmp)
        conn.close()

        if not result.comm_ok:
            # connection or parsing errors...
            self.callbacks.on_unreachable(host, result.result)
        else:
            data = result.result
            if 'item' in inject:
                result.result['item'] = inject['item']
            if is_chained:
                # no callbacks
                return result
            if 'skipped' in data:
                self.callbacks.on_skipped(result.host)
            elif not result.is_successful():
                ignore_errors = self.module_vars.get('ignore_errors', False)
                self.callbacks.on_failed(result.host, data, ignore_errors)
                if ignore_errors:
                    if 'failed' in result.result:
                        result.result['failed'] = False
                    if 'rc' in result.result:
                        result.result['rc'] = 0
            else:
                self.callbacks.on_ok(result.host, data)
        return result

    # *****************************************************

    def _low_level_exec_command(self, conn, cmd, tmp, sudoable=False):
        ''' execute a command string over SSH, return the output '''

        sudo_user = self.sudo_user
        stdin, stdout, stderr = conn.exec_command(cmd, tmp, sudo_user, sudoable=sudoable)

        if type(stdout) != str:
            out = "\n".join(stdout.readlines())
        else:
            out = stdout

        if type(stderr) != str:
            err = "\n".join(stderr.readlines())
        else:
            err = stderr

        return out + err

    # *****************************************************

    def _remote_md5(self, conn, tmp, path):
        ''' takes a remote md5sum without requiring python; returns 1 if the
            file is missing, 2 if it is unreadable, and 0 if no md5 tool is available '''

        test = "rc=0; [ -r \"%s\" ] || rc=2; [ -f \"%s\" ] || rc=1" % (path,path)
        md5s = [
            "(/usr/bin/md5sum %s 2>/dev/null)" % path,
            "(/sbin/md5sum -q %s 2>/dev/null)" % path,
            "(/usr/bin/digest -a md5 -v %s 2>/dev/null)" % path
        ]

        cmd = " || ".join(md5s)
        cmd = "%s; %s || (echo \"${rc} %s\")" % (test, cmd, path)
        return utils.last_non_blank_line(self._low_level_exec_command(conn, cmd, tmp, sudoable=False)).split()[0]

    # *****************************************************

    def _make_tmp_path(self, conn):
        ''' make and return a temporary path on a remote box '''
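        # the returned path ends with a trailing '/', so callers can build
        # remote file names by simple concatenation (e.g. tmp +
        # os.path.basename(source) in _execute_copy).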

        basefile = 'ansible-%s-%s' % (time.time(), random.randint(0, 2**48))
        basetmp = os.path.join(C.DEFAULT_REMOTE_TMP, basefile)
        if self.remote_user == 'root':
            basetmp = os.path.join('/var/tmp', basefile)
        elif self.sudo and self.sudo_user != 'root':
            basetmp = os.path.join('/tmp', basefile)

        cmd = 'mkdir -p %s' % basetmp
        if self.remote_user != 'root':
            cmd += ' && chmod a+rx %s' % basetmp
        cmd += ' && echo %s' % basetmp

        result = self._low_level_exec_command(conn, cmd, None, sudoable=False)
        return utils.last_non_blank_line(result).strip() + '/'

    # *****************************************************

    def _copy_module(self, conn, tmp, module, inject):
        ''' transfer a module over SFTP, does not run it '''

        if module.startswith("/"):
            raise errors.AnsibleFileNotFound("%s is not a module" % module)

        # Search module path(s) for named module.
        for module_path in self.module_path.split(os.pathsep):
            in_path = os.path.expanduser(os.path.join(module_path, module))
            if os.path.exists(in_path):
                break
        else:
            raise errors.AnsibleFileNotFound("module %s not found in %s" % (module, self.module_path))

        out_path = os.path.join(tmp, module)

        module_data = ""
        is_new_style = False
        with open(in_path) as f:
            module_data = f.read()
            if module_common.REPLACER in module_data:
                is_new_style = True
            module_data = module_data.replace(module_common.REPLACER, module_common.MODULE_COMMON)
            encoded_args = "\"\"\"%s\"\"\"" % utils.template(self.module_args, inject).replace("\"","\\\"")
            module_data = module_data.replace(module_common.REPLACER_ARGS, encoded_args)

        # use the correct python interpreter for the host
        if 'ansible_python_interpreter' in inject:
            interpreter = inject['ansible_python_interpreter']
            module_lines = module_data.split('\n')
            if module_lines[0].startswith('#!') and 'python' in module_lines[0]:
                module_lines[0] = "#!%s" % interpreter
            module_data = "\n".join(module_lines)

        self._transfer_str(conn, tmp, module, module_data)
        return (out_path, is_new_style)

    # *****************************************************

    def _parallel_exec(self, hosts):
        ''' handles multiprocessing when more than 1 fork is required '''

        job_queue = multiprocessing.Manager().Queue()
        [job_queue.put(i) for i in hosts]
        result_queue = multiprocessing.Manager().Queue()

        workers = []
        for i in range(self.forks):
            prc = multiprocessing.Process(target=_executor_hook,
                args=(job_queue, result_queue))
            prc.start()
            workers.append(prc)

        try:
            for worker in workers:
                worker.join()
        except KeyboardInterrupt:
            for worker in workers:
                worker.terminate()
                worker.join()

        results = []
        try:
            while not result_queue.empty():
                results.append(result_queue.get(block=False))
        except socket.error:
            raise errors.AnsibleError("<interrupted>")
        return results

    # *****************************************************

    def _partition_results(self, results):
        ''' separate results by ones we contacted & ones we didn't '''

        if results is None:
            return None
        results2 = dict(contacted={}, dark={})

        for result in results:
            host = result.host
            if host is None:
                raise Exception("internal error, host not set")
            if result.communicated_ok():
                results2["contacted"][host] = result.result
            else:
                results2["dark"][host] = result.result

        # hosts which were contacted but never got a chance to return
        for host in self.inventory.list_hosts(self.pattern):
            if not (host in results2['dark'] or host in results2['contacted']):
                results2["dark"][host] = {}
        return results2

    # *****************************************************

    def run(self):
        ''' xfer & run module on all matched hosts '''

        # find hosts that match the pattern
        hosts = self.inventory.list_hosts(self.pattern)
        if len(hosts) == 0:
            self.callbacks.on_no_hosts()
            return dict(contacted={}, dark={})

        hosts = [ (self,x) for x in hosts ]
        results = None
        if self.forks > 1:
            results = self._parallel_exec(hosts)
        else:
            results = [ self._executor(h[1]) for h in hosts ]
        return self._partition_results(results)

    # *****************************************************

    def run_async(self, time_limit):
        ''' Run this module asynchronously and return a poller. '''

        self.background = time_limit
        results = self.run()
        return results, poller.AsyncPoller(results, self)