# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

try:
    import json
except ImportError:
    import simplejson as json

import fnmatch
import multiprocessing
import signal
import os
import traceback
import paramiko # non-core dependency
import ansible.constants as C
import Queue

def _executor_hook(job_queue, result_queue):
    ''' callback used by multiprocessing pool '''
    # workers ignore SIGINT; the parent process handles ctrl-c for them
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    while not job_queue.empty():
        try:
            job = job_queue.get(block=False)
            runner, host = job
            result_queue.put(runner._executor(host))
        except Queue.Empty:
            pass

class Runner(object):

    def __init__(self,
        host_list=C.DEFAULT_HOST_LIST,
        module_path=C.DEFAULT_MODULE_PATH,
        module_name=C.DEFAULT_MODULE_NAME,
        module_args=C.DEFAULT_MODULE_ARGS,
        forks=C.DEFAULT_FORKS,
        timeout=C.DEFAULT_TIMEOUT,
        pattern=C.DEFAULT_PATTERN,
        remote_user=C.DEFAULT_REMOTE_USER,
        remote_pass=C.DEFAULT_REMOTE_PASS,
        verbose=False):
        '''
        Constructor

        host_list   -- file on disk listing hosts to manage, or an array of hostnames
        pattern     -- an fnmatch pattern selecting some of the hosts in host_list
        module_path -- location of the ansible library on disk
        module_name -- which module to run
        module_args -- arguments to pass to the module
        forks       -- how parallel should we be? 1 is extra debuggable.
        timeout     -- SSH connection timeout, in seconds
        remote_user -- who to login as (default root)
        remote_pass -- provide only if you don't want to use keys or ssh-agent
        '''

        # save input values.  self.groups maps each group name in the
        # inventory file to the hosts it contains.
        self.host_list, self.groups = self.parse_hosts(host_list)
        self.module_path = module_path
        self.module_name = module_name
        self.forks       = forks
        self.pattern     = pattern
        self.module_args = module_args
        self.timeout     = timeout
        self.verbose     = verbose
        self.remote_user = remote_user
        self.remote_pass = remote_pass

        # per-connection cache of remote temporary directories
        self._tmp_paths  = {}

    @classmethod
    def parse_hosts(cls, host_list):
        '''
        parse the host inventory file, returns (hosts, groups).
        the file is a list of hostnames, optionally organized under
        bracketed group headers:

        [groupname]
        host1
        host2
        '''

        if type(host_list) == list:
            return (host_list, {})

        host_list = os.path.expanduser(host_list)
        lines = file(host_list).read().split("\n")
        # hosts listed before any [group] header fall into 'ungrouped'
        groups     = { 'ungrouped' : [] }
        group_name = 'ungrouped'
        results    = []
        for item in lines:
            item = item.strip()
            if item == "":
                # skip blank lines so they don't show up as empty hostnames
                continue
            if item.startswith("["):
                group_name = item.replace("[","").replace("]","").strip()
                groups[group_name] = []
            else:
                groups[group_name].append(item)
                results.append(item)

        return (results, groups)

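    # Illustrative sketch (not part of the original file): given a hypothetical
    # inventory file containing
    #
    #     web1.example.com
    #     [dbservers]
    #     db1.example.com
    #     db2.example.com
    #
    # parse_hosts would return roughly:
    #
    #     (['web1.example.com', 'db1.example.com', 'db2.example.com'],
    #      {'ungrouped': ['web1.example.com'],
    #       'dbservers': ['db1.example.com', 'db2.example.com']})
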
    def _matches(self, host_name, pattern=None):
        ''' returns True if a hostname is matched by the pattern '''

        # a pattern is in fnmatch format but more than one pattern
        # can be strung together with semicolons. ex:
        #     atlanta-web*.example.com;dc-web*.example.com

        if host_name == '':
            return False
        subpatterns = pattern.split(";")
        for subpattern in subpatterns:
            # the pattern could be a real glob
            if fnmatch.fnmatch(host_name, subpattern):
                return True
            # or it could be a literal group name instead
            if subpattern in self.groups:
                if host_name in self.groups[subpattern]:
                    return True
        return False

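    # Illustrative example (assumed hosts, not from the original file): if
    # self.groups is { 'dbservers': ['db1.example.com', 'db2.example.com'] }
    # and the host list also contains 'web1.example.com', then the pattern
    #     'web*.example.com;dbservers'
    # matches web1.example.com through the glob and both db hosts through
    # the literal group name.
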
    def _connect(self, host):
        '''
        obtains a paramiko connection to the host.
        on success, returns (True, connection)
        on failure, returns (False, error message string)
        '''
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            # try paramiko
            ssh.connect(host, username=self.remote_user, allow_agent=True,
                look_for_keys=True, password=self.remote_pass, timeout=self.timeout)
            return [ True, ssh ]
        except Exception, e:
            # it failed somehow, return the failure string
            return [ False, str(e) ]

    def _return_from_module(self, conn, host, result):
        ''' helper function to handle JSON parsing of results '''
        # disconnect from paramiko/SSH
        conn.close()
        try:
            # try to parse the JSON response
            return [ host, True, json.loads(result) ]
        except:
            # it failed, say so, but return the string anyway
            return [ host, False, result ]

    def _delete_remote_files(self, conn, files):
        ''' deletes one or more remote files '''
        for filename in files:
            self._exec_command(conn, "rm -f %s" % filename)

    def _transfer_file(self, conn, source, dest):
        ''' transfers a local file to a path on the remote host '''
        self.remote_log(conn, 'COPY local:%s remote:%s' % (source, dest))
        sftp = conn.open_sftp()
        sftp.put(source, dest)
        sftp.close()

    def _transfer_module(self, conn):
        '''
        transfers a module file to the remote side so it can be executed,
        but does not execute it yet
        '''
        outpath = self._copy_module(conn)
        self._exec_command(conn, "chmod +x %s" % outpath)
        return outpath

    def _execute_module(self, conn, outpath):
        '''
        runs a module that has already been transferred, then removes
        it from the remote side
        '''
        cmd = self._command(outpath)
        result = self._exec_command(conn, cmd)
        self._delete_remote_files(conn, [ outpath ])
        return result

    def _execute_normal_module(self, conn, host):
        '''
        transfer & execute a module that is not 'copy' or 'template',
        because those require extra work.
        '''
        module = self._transfer_module(conn)
        result = self._execute_module(conn, module)
        return self._return_from_module(conn, host, result)

    def _parse_kv(self, args):
        ''' helper function to convert a list of key=value items to a dict '''
        options = {}
        for x in args:
            if x.find("=") != -1:
                # split on the first '=' only, so values may contain '='
                k, v = x.split("=", 1)
                options[k] = v
        return options

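    # Illustrative example (not from the original file): for module_args of
    #     [ "src=/etc/motd", "dest=/tmp/motd" ]
    # _parse_kv returns { 'src': '/etc/motd', 'dest': '/tmp/motd' }.
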
    def _execute_copy(self, conn, host):
        ''' handler for file transfer operations '''

        # load up options
        options = self._parse_kv(self.module_args)
        source  = options['src']
        dest    = options['dest']

        # transfer the file to a remote tmp location
        tmp_path = self._get_tmp_path(conn)
        tmp_src  = tmp_path + source.split('/')[-1]
        self._transfer_file(conn, source, tmp_src)

        # install the copy module
        self.module_name = 'copy'
        module = self._transfer_module(conn)

        # run the copy module
        self.module_args = [ "src=%s" % tmp_src, "dest=%s" % dest ]
        result = self._execute_module(conn, module)
        # pass a list here; passing the bare string would delete one
        # "file" per character of the path
        self._delete_remote_files(conn, [ tmp_src ])
        return self._return_from_module(conn, host, result)

    def _execute_template(self, conn, host):
        ''' handler for template operations '''

        # load up options
        options  = self._parse_kv(self.module_args)
        source   = options['src']
        dest     = options['dest']
        metadata = options.get('metadata', '/etc/ansible/setup')

        # first copy the source template over
        tempname = os.path.split(source)[-1]
        temppath = self._get_tmp_path(conn) + tempname
        self._transfer_file(conn, source, temppath)

        # install the template module
        self.module_name = 'template'
        module = self._transfer_module(conn)

        # run the template module
        self.module_args = [ "src=%s" % temppath, "dest=%s" % dest, "metadata=%s" % metadata ]
        result = self._execute_module(conn, module)
        self._delete_remote_files(conn, [ temppath ])
        return self._return_from_module(conn, host, result)

    def _executor(self, host):
        '''
        callback executed in parallel for each host.
        returns (hostname, ok, extra), where extra is the parsed module
        result on success, or an error / raw output string on failure
        '''

        # depending on whether it's a normal module,
        # or a request to use the copy or template
        # module, call the appropriate executor function

        ok, conn = self._connect(host)
        if not ok:
            return [ host, False, conn ]
        if self.module_name not in [ 'copy', 'template' ]:
            return self._execute_normal_module(conn, host)
        elif self.module_name == 'copy':
            return self._execute_copy(conn, host)
        elif self.module_name == 'template':
            return self._execute_template(conn, host)
        else:
            # unreachable: this would be a coding error in THIS module
            raise Exception("unhandled module name: %s" % self.module_name)

    def _command(self, outpath):
        ''' form up a command string for running over SSH '''
        cmd = "%s %s" % (outpath, " ".join(self.module_args))
        return cmd

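    # Illustrative example (assumed paths/module, not from the original file):
    # for module_name 'command', module_args [ '/bin/date' ], and a remote
    # temp directory of /tmp/ansible.abc123/, the formed command would be
    #     /tmp/ansible.abc123/command /bin/date
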
    def remote_log(self, conn, msg):
        ''' this is the function we use to log things '''
        stdin, stdout, stderr = conn.exec_command('/usr/bin/logger -t ansible -p auth.info %r' % msg)
        # TODO: maybe make that optional

    def _exec_command(self, conn, cmd):
        ''' execute a command string over SSH, return the output '''
        msg = '%s: %s' % (self.module_name, cmd)
        self.remote_log(conn, msg)
        stdin, stdout, stderr = conn.exec_command(cmd)
        # readlines() keeps trailing newlines, so join with '' to avoid
        # doubling them
        results = "".join(stdout.readlines())
        return results

    def _get_tmp_path(self, conn):
        ''' gets a temporary path on a remote box '''
        if conn not in self._tmp_paths:
            output = self._exec_command(conn, "mktemp -d /tmp/ansible.XXXXXX")
            self._tmp_paths[conn] = output.split("\n")[0] + '/'
        return self._tmp_paths[conn]

    def _copy_module(self, conn):
        ''' transfer a module over SFTP, does not run it '''
        in_path = os.path.expanduser(
            os.path.join(self.module_path, self.module_name)
        )
        out_path = self._get_tmp_path(conn) + self.module_name
        sftp = conn.open_sftp()
        sftp.put(in_path, out_path)
        sftp.close()
        return out_path

    def match_hosts(self, pattern):
        ''' return all matched hosts fitting a pattern '''
        return [ h for h in self.host_list if self._matches(h, pattern) ]

    def run(self):
        ''' xfer & run module on all matched hosts '''

        # find hosts that match the pattern
        hosts = self.match_hosts(self.pattern)
        if len(hosts) == 0:
            return None

        # attack pool of hosts in N forks
        # _executor_hook does all of the work
        hosts = [ (self,x) for x in hosts ]
        if self.forks > 1:
            job_queue = multiprocessing.Queue()
            result_queue = multiprocessing.Queue()

            for i in hosts:
                job_queue.put(i)

            workers = []
            for i in range(self.forks):
                tmp = multiprocessing.Process(target=_executor_hook,
                    args=(job_queue, result_queue))
                tmp.start()
                workers.append(tmp)

            try:
                for worker in workers:
                    worker.join()
            except KeyboardInterrupt:
                print 'parent received ctrl-c'
                for worker in workers:
                    worker.terminate()
                    worker.join()

            results = []
            while not result_queue.empty():
                results.append(result_queue.get(block=False))

        else:
            results = [ x._executor(h) for (x,h) in hosts ]

        # partition results into hosts we successfully contacted and
        # hosts we did not, so that we can return a dictionary
        # containing results of everything

        results2 = {
            "contacted" : {},
            "dark"      : {}
        }
        hosts_with_results = []
        for x in results:
            (host, is_ok, result) = x
            hosts_with_results.append(host)
            if not is_ok:
                results2["dark"][host] = result
            else:
                results2["contacted"][host] = result

        # hosts which never got a chance to return a result before
        # we exited/ctrl-c'd; perhaps these shouldn't be 'dark' but
        # I'm not sure where else they fit.
        for host in self.match_hosts(self.pattern):
            if host not in hosts_with_results:
                results2["dark"][host] = {}

        return results2
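
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file).  It assumes a
# 'ping' module exists under module_path and that inventory and SSH
# credentials are already configured; adjust to taste.

if __name__ == '__main__':
    # run the assumed 'ping' module against all matched hosts, 10 at a time
    runner = Runner(
        module_name='ping',
        module_args=[],
        pattern='*',
        forks=10
    )
    results = runner.run()
    if results is None:
        print "no hosts matched"
    else:
        # run() returns {'contacted': {host: result}, 'dark': {host: error}}
        for (hostname, result) in results['contacted'].items():
            print "%s => %s" % (hostname, result)
        for (hostname, error) in results['dark'].items():
            print "DARK: %s => %s" % (hostname, error)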