# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

import errno
import sys
import re
import os
import shlex
import yaml
import copy
import optparse
import operator

from ansible import errors
from ansible import __version__
from ansible.utils.display_functions import *
from ansible.utils.plugins import *
from ansible.utils.su_prompts import *
from ansible.callbacks import display
from ansible.module_utils.splitter import split_args, unquote
import ansible.constants as C
import ast
import time
import StringIO
import stat
import termios
import tty
import pipes
import random
import difflib
import warnings
import traceback
import getpass
import subprocess
import contextlib
import jinja2.exceptions

from vault import VaultLib

VERBOSITY = 0

MAX_FILE_SIZE_FOR_DIFF = 1 * 1024 * 1024

# caching the compilation of the regex used
# to check for lookup calls within data
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
PRINT_CODE_REGEX = re.compile(r'(?:{[{%]|[%}]})')
CODE_REGEX = re.compile(r'(?:{%|%})')

# use the stdlib json module when available, falling
# back to simplejson on older Pythons
try:
    import json
except ImportError:
    import simplejson as json

try:
    from hashlib import md5 as _md5
except ImportError:
    from md5 import md5 as _md5

PASSLIB_AVAILABLE = False
try:
    import passlib.hash
    PASSLIB_AVAILABLE = True
except:
    pass

# 'builtins' is the Python 3 name; fall back to '__builtin__' on Python 2
try:
    import builtins as builtin
except ImportError:
    import __builtin__ as builtin

KEYCZAR_AVAILABLE = False
try:
    try:
        # some versions of pycrypto may not have this?
        from Crypto.pct_warnings import PowmInsecureWarning
    except ImportError:
        PowmInsecureWarning = RuntimeWarning

    with warnings.catch_warnings(record=True) as warning_handler:
        warnings.simplefilter("error", PowmInsecureWarning)
        try:
            import keyczar.errors as key_errors
            from keyczar.keys import AesKey
        except PowmInsecureWarning:
            system_warning(
                "The version of gmp you have installed has a known issue regarding " + \
                "timing vulnerabilities when used with pycrypto. " + \
                "If possible, you should update it (i.e. yum update gmp)."
            )
            warnings.resetwarnings()
            warnings.simplefilter("ignore")
            import keyczar.errors as key_errors
            from keyczar.keys import AesKey
    KEYCZAR_AVAILABLE = True
except ImportError:
    pass

###############################################################
# Abstractions around keyczar
###############################################################

def key_for_hostname(hostname):
    # fireball mode is an implementation of ansible firing up zeromq via SSH
    # to use no persistent daemons or key management

    if not KEYCZAR_AVAILABLE:
        raise errors.AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")

    key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
    if not os.path.exists(key_path):
        os.makedirs(key_path, mode=0700)
        os.chmod(key_path, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
    elif not os.path.isdir(key_path):
        raise errors.AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')

    if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
        raise errors.AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))

    key_path = os.path.join(key_path, hostname)

    # use new AES keys every 2 hours, which means fireball must not allow running for longer either
    if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60 * 60 * 2):
        key = AesKey.Generate()
        fd = os.open(key_path, os.O_WRONLY | os.O_CREAT, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
        fh = os.fdopen(fd, 'w')
        fh.write(str(key))
        fh.close()
        return key
    else:
        if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
            raise errors.AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
        fh = open(key_path)
        key = AesKey.Read(fh.read())
        fh.close()
        return key

def encrypt(key, msg):
    return key.Encrypt(msg)

def decrypt(key, msg):
    try:
        return key.Decrypt(msg)
    except key_errors.InvalidSignatureError:
        raise errors.AnsibleError("decryption failed")
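
# Illustrative round-trip (comments only, not executed; assumes python-keyczar is installed):
#
#   key = key_for_hostname('host.example.com')
#   ciphertext = encrypt(key, 'some payload')
#   assert decrypt(key, ciphertext) == 'some payload'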

###############################################################
# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
###############################################################

def read_vault_file(vault_password_file):
    """Read a vault password from a file or, if executable, execute the script and
    retrieve the password from STDOUT
    """
    if vault_password_file:
        this_path = os.path.realpath(os.path.expanduser(vault_password_file))
        if is_executable(this_path):
            try:
                # STDERR not captured to make it easier for users to prompt for input in their scripts
                p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
            except OSError, e:
                raise errors.AnsibleError("problem running %s (%s)" % (this_path, e))
            stdout, stderr = p.communicate()
            vault_pass = stdout.strip('\r\n')
        else:
            try:
                f = open(this_path, "rb")
                vault_pass = f.read().strip()
                f.close()
            except (OSError, IOError), e:
                raise errors.AnsibleError("Could not read %s: %s" % (this_path, e))
        return vault_pass
    else:
        return None
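
# Illustrative usage (comments only, not executed; ~/.vault_pass.txt is a hypothetical path):
#
#   read_vault_file('~/.vault_pass.txt')
#   # plain file  -> its contents, whitespace-stripped
#   # executable  -> its STDOUT, with trailing CR/LF stripped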

def err(msg):
    ''' print an error message to stderr '''

    print >>sys.stderr, msg

def exit(msg, rc=1):
    ''' quit with an error to stdout and a failure code '''

    err(msg)
    sys.exit(rc)

def jsonify(result, format=False):
    ''' format JSON output (compact or pretty-printed) '''

    if result is None:
        return "{}"
    result2 = result.copy()
    for key, value in result2.items():
        if type(value) is str:
            result2[key] = value.decode('utf-8', 'ignore')
    indent = None
    if format:
        indent = 4
    try:
        return json.dumps(result2, sort_keys=True, indent=indent, ensure_ascii=False)
    except UnicodeDecodeError:
        return json.dumps(result2, sort_keys=True, indent=indent)
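
# Illustrative behavior (comments only, not executed):
#
#   jsonify({'changed': False})               # -> '{"changed": false}'
#   jsonify({'changed': False}, format=True)  # -> same data, indented 4 spaces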

def write_tree_file(tree, hostname, buf):
    ''' write something into treedir/hostname '''

    # TODO: might be nice to append playbook runs per host in a similar way
    # in which case, we'd want append mode.
    path = os.path.join(tree, hostname)
    fd = open(path, "w+")
    fd.write(buf)
    fd.close()

def is_failed(result):
    ''' is a given JSON result a failed result? '''

    return ((result.get('rc', 0) != 0) or (result.get('failed', False) in [True, 'True', 'true']))

def is_changed(result):
    ''' is a given JSON result a changed result? '''

    return (result.get('changed', False) in [True, 'True', 'true'])

def check_conditional(conditional, basedir, inject, fail_on_undefined=False):
    from ansible.utils import template

    if conditional is None or conditional == '':
        return True

    if isinstance(conditional, list):
        for x in conditional:
            if not check_conditional(x, basedir, inject, fail_on_undefined=fail_on_undefined):
                return False
        return True

    if not isinstance(conditional, basestring):
        return conditional

    conditional = conditional.replace("jinja2_compare ", "")

    # allow variable names
    if conditional in inject and '-' not in str(inject[conditional]):
        conditional = inject[conditional]
    conditional = template.template(basedir, conditional, inject, fail_on_undefined=fail_on_undefined)
    original = str(conditional).replace("jinja2_compare ", "")
    # a Jinja2 evaluation that results in something Python can eval!
    presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
    conditional = template.template(basedir, presented, inject)
    val = conditional.strip()
    if val == presented:
        # the templating failed, meaning most likely a
        # variable was undefined. If we happened to be
        # looking for an undefined variable, return True,
        # otherwise fail
        if "is undefined" in conditional:
            return True
        elif "is defined" in conditional:
            return False
        else:
            raise errors.AnsibleError("error while evaluating conditional: %s" % original)
    elif val == "True":
        return True
    elif val == "False":
        return False
    else:
        raise errors.AnsibleError("unable to evaluate conditional: %s" % original)
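
# Illustrative behavior (comments only, not executed; assumes templating succeeds):
#
#   check_conditional("foo == 'bar'", '.', dict(foo='bar'))  # -> True
#   check_conditional("foo is defined", '.', dict())         # -> False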

def is_executable(path):
    ''' is the given path executable? '''

    return (stat.S_IXUSR & os.stat(path)[stat.ST_MODE]
            or stat.S_IXGRP & os.stat(path)[stat.ST_MODE]
            or stat.S_IXOTH & os.stat(path)[stat.ST_MODE])

def unfrackpath(path):
    '''
    returns a path that is free of symlinks, environment
    variables, relative path traversals and symbols (~)
    example:
    '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''

    return os.path.normpath(os.path.realpath(os.path.expandvars(os.path.expanduser(path))))

def prepare_writeable_dir(tree, mode=0777):
    ''' make sure a directory exists and is writeable '''

    # modify the mode to ensure the owner at least
    # has read/write access to this directory
    mode |= 0700

    # make sure the tree path is always expanded
    # and normalized and free of symlinks
    tree = unfrackpath(tree)

    if not os.path.exists(tree):
        try:
            os.makedirs(tree, mode)
        except (IOError, OSError), e:
            raise errors.AnsibleError("Could not make dir %s: %s" % (tree, e))
    if not os.access(tree, os.W_OK):
        raise errors.AnsibleError("Cannot write to path %s" % tree)
    return tree

def path_dwim(basedir, given):
    '''
    make relative paths work like folks expect.
    '''

    if given.startswith("'"):
        given = given[1:-1]

    if given.startswith("/"):
        return os.path.abspath(given)
    elif given.startswith("~"):
        return os.path.abspath(os.path.expanduser(given))
    else:
        if basedir is None:
            basedir = "."
        return os.path.abspath(os.path.join(basedir, given))
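
# Illustrative behavior (comments only, not executed):
#
#   path_dwim('/srv/playbooks', 'files/foo.txt')  # -> '/srv/playbooks/files/foo.txt'
#   path_dwim('/srv/playbooks', '/etc/hosts')     # -> '/etc/hosts' (absolute paths win)
#   path_dwim(None, 'foo.txt')                    # -> resolved against '.'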

def path_dwim_relative(original, dirname, source, playbook_base, check=True):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''
    # (used by roles code)

    from ansible.utils import template

    basedir = os.path.dirname(original)
    if os.path.islink(basedir):
        basedir = unfrackpath(basedir)
        template2 = os.path.join(basedir, dirname, source)
    else:
        template2 = os.path.join(basedir, '..', dirname, source)
    source2 = path_dwim(basedir, template2)
    if os.path.exists(source2):
        return source2
    obvious_local_path = path_dwim(playbook_base, source)
    if os.path.exists(obvious_local_path):
        return obvious_local_path
    if check:
        raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
    return source2  # which does not exist

def repo_url_to_role_name(repo_url):
    # gets the role name out of a repo, like
    # "http://git.example.com/repos/repo.git" => "repo"

    if '://' not in repo_url and '@' not in repo_url:
        return repo_url
    trailing_path = repo_url.split('/')[-1]
    if trailing_path.endswith('.git'):
        trailing_path = trailing_path[:-4]
    if trailing_path.endswith('.tar.gz'):
        trailing_path = trailing_path[:-7]
    if ',' in trailing_path:
        trailing_path = trailing_path.split(',')[0]
    return trailing_path
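
# Illustrative behavior (comments only, not executed):
#
#   repo_url_to_role_name('http://git.example.com/repos/repo.git')  # -> 'repo'
#   repo_url_to_role_name('just_a_role_name')                       # -> 'just_a_role_name'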

def role_spec_parse(role_spec):
    # takes a repo and a version like
    # git+http://git.example.com/repos/repo.git,v1.0
    # and returns a dict of properties such as:
    # {
    #   'scm': 'git',
    #   'src': 'http://git.example.com/repos/repo.git',
    #   'version': 'v1.0',
    #   'name': 'repo'
    # }

    role_spec = role_spec.strip()
    role_version = ''
    default_role_versions = dict(git='master', hg='tip')
    if role_spec == "" or role_spec.startswith("#"):
        return (None, None, None, None)

    tokens = [s.strip() for s in role_spec.split(',')]

    # assume https://github.com URLs are git+https:// URLs and not
    # tarballs unless they end in '.tar.gz'
    if 'github.com/' in tokens[0] and not tokens[0].startswith("git+") and not tokens[0].endswith('.tar.gz'):
        tokens[0] = 'git+' + tokens[0]

    if '+' in tokens[0]:
        (scm, role_url) = tokens[0].split('+')
    else:
        scm = None
        role_url = tokens[0]
    if len(tokens) >= 2:
        role_version = tokens[1]
    if len(tokens) == 3:
        role_name = tokens[2]
    else:
        role_name = repo_url_to_role_name(tokens[0])
    if scm and not role_version:
        role_version = default_role_versions.get(scm, '')
    return dict(scm=scm, src=role_url, version=role_version, name=role_name)
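
# Illustrative behavior (comments only, not executed):
#
#   role_spec_parse('git+http://git.example.com/repos/repo.git,v1.0')
#   # -> {'scm': 'git', 'src': 'http://git.example.com/repos/repo.git',
#   #     'version': 'v1.0', 'name': 'repo'}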

def role_yaml_parse(role):
    if 'github.com' in role["src"] and 'http' in role["src"] and '+' not in role["src"] and not role["src"].endswith('.tar.gz'):
        role["src"] = "git+" + role["src"]
    if '+' in role["src"]:
        (scm, src) = role["src"].split('+')
        role["scm"] = scm
        role["src"] = src
    if 'name' not in role:
        role["name"] = repo_url_to_role_name(role["src"])
    return role

def json_loads(data):
    ''' parse a JSON string and return a data structure '''

    return json.loads(data)

def _clean_data(orig_data, from_remote=False, from_inventory=False):
    ''' remove jinja2 template tags from a string '''

    if not isinstance(orig_data, basestring):
        return orig_data

    # when the data is marked as having come from a remote, we always
    # replace any print blocks (ie. {{var}}), however when marked as coming
    # from inventory we only replace print blocks that contain a call to
    # a lookup plugin (ie. {{lookup('foo','bar')}})
    replace_prints = from_remote or (from_inventory and '{{' in orig_data and LOOKUP_REGEX.search(orig_data) is not None)

    regex = PRINT_CODE_REGEX if replace_prints else CODE_REGEX

    with contextlib.closing(StringIO.StringIO(orig_data)) as data:
        # these variables keep track of opening block locations, as we only
        # want to replace matched pairs of print/block tags
        print_openings = []
        block_openings = []
        for mo in regex.finditer(orig_data):
            token = mo.group(0)
            token_start = mo.start(0)

            if token[0] == '{':
                if token == '{%':
                    block_openings.append(token_start)
                elif token == '{{':
                    print_openings.append(token_start)

            elif token[1] == '}':
                prev_idx = None
                if token == '%}' and block_openings:
                    prev_idx = block_openings.pop()
                elif token == '}}' and print_openings:
                    prev_idx = print_openings.pop()

                if prev_idx is not None:
                    # replace the opening
                    data.seek(prev_idx, os.SEEK_SET)
                    data.write('{#')
                    # replace the closing
                    data.seek(token_start, os.SEEK_SET)
                    data.write('#}')

            else:
                assert False, 'Unhandled regex match'

        return data.getvalue()

def _clean_data_struct(orig_data, from_remote=False, from_inventory=False):
    '''
    walk a complex data structure, and use _clean_data() to
    remove any template tags that may exist
    '''

    if not from_remote and not from_inventory:
        raise errors.AnsibleError("when cleaning data, you must specify either from_remote or from_inventory")

    if isinstance(orig_data, dict):
        data = orig_data.copy()
        for key in data:
            new_key = _clean_data_struct(key, from_remote, from_inventory)
            new_val = _clean_data_struct(data[key], from_remote, from_inventory)
            if key != new_key:
                del data[key]
            data[new_key] = new_val
    elif isinstance(orig_data, list):
        data = orig_data[:]
        for i in range(0, len(data)):
            data[i] = _clean_data_struct(data[i], from_remote, from_inventory)
    elif isinstance(orig_data, basestring):
        data = _clean_data(orig_data, from_remote, from_inventory)
    else:
        data = orig_data

    return data
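
# Illustrative behavior (comments only, not executed):
#
#   _clean_data('ran {{ command }}', from_remote=True)
#   # -> 'ran {# command #}'  (matched tag pairs are neutralized into comments)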

def parse_json(raw_data, from_remote=False, from_inventory=False, no_exceptions=False):
    ''' this version for module return data only '''

    orig_data = raw_data

    # ignore stuff like tcgetattr spewage or other warnings
    data = filter_leading_non_json_lines(raw_data)

    try:
        results = json.loads(data)
    except:
        if no_exceptions:
            return dict(failed=True, parsed=False, msg=raw_data)
        else:
            raise

    if from_remote:
        results = _clean_data_struct(results, from_remote, from_inventory)

    return results

def serialize_args(args):
    '''
    Flattens a dictionary of args to a k=v string
    '''
    module_args = ""
    for (k, v) in args.iteritems():
        if isinstance(v, basestring):
            module_args = "%s=%s %s" % (k, pipes.quote(v), module_args)
        elif isinstance(v, bool):
            module_args = "%s=%s %s" % (k, str(v), module_args)
    return module_args.strip()

def merge_module_args(current_args, new_args):
    '''
    merges either a dictionary or string of k=v pairs with another string of k=v pairs,
    and returns a new k=v string without duplicates.
    '''
    if not isinstance(current_args, basestring):
        raise errors.AnsibleError("expected current_args to be a basestring")
    # we use parse_kv to split up the current args into a dictionary
    final_args = parse_kv(current_args)
    if isinstance(new_args, dict):
        final_args.update(new_args)
    elif isinstance(new_args, basestring):
        new_args_kv = parse_kv(new_args)
        final_args.update(new_args_kv)
    return serialize_args(final_args)
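
# Illustrative behavior (comments only, not executed; key order in the result is not stable):
#
#   merge_module_args('src=a dest=b', dict(dest='c'))
#   # -> 'dest=c src=a'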

def parse_yaml(data, path_hint=None):
    ''' convert a yaml string to a data structure.  Also supports JSON, ssssssh!!! '''

    stripped_data = data.lstrip()
    loaded = None
    if stripped_data.startswith("{") or stripped_data.startswith("["):
        # since the line starts with { or [ we can infer this is a JSON document.
        try:
            loaded = json.loads(data)
        except ValueError, ve:
            if path_hint:
                raise errors.AnsibleError(path_hint + ": " + str(ve))
            else:
                raise errors.AnsibleError(str(ve))
    else:
        # else this is pretty sure to be a YAML document
        loaded = yaml.safe_load(data)

    return loaded
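
# Illustrative behavior (comments only, not executed):
#
#   parse_yaml('{"a": 1}')  # parsed as JSON -> {'a': 1}
#   parse_yaml('a: 1')      # parsed as YAML -> {'a': 1}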

def process_common_errors(msg, probline, column):
    replaced = probline.replace(" ", "")

    if ":{{" in replaced and "}}" in replaced:
        msg = msg + """
This one looks easy to fix.  YAML thought it was looking for the start of a
hash/dictionary and was confused to see a second "{".  Most likely this was
meant to be an ansible template evaluation instead, so we have to give the
parser a small hint that we wanted a string instead.  The solution here is to
just quote the entire value.

For instance, if the original line was:

    app_path: {{ base_path }}/foo

It should be written as:

    app_path: "{{ base_path }}/foo"
"""
        return msg

    elif len(probline) and len(probline) > 1 and len(probline) > column and probline[column] == ":" and probline.count(':') > 1:

        msg = msg + """
This one looks easy to fix.  There seems to be an extra unquoted colon in the line
and this is confusing the parser.  It was only expecting to find one free
colon.  The solution is just add some quotes around the colon, or quote the
entire line after the first colon.

For instance, if the original line was:

    copy: src=file.txt dest=/path/filename:with_colon.txt

It can be written as:

    copy: src=file.txt dest='/path/filename:with_colon.txt'

Or:

    copy: 'src=file.txt dest=/path/filename:with_colon.txt'
"""
        return msg
    else:
        parts = probline.split(":")
        if len(parts) > 1:
            middle = parts[1].strip()
            match = False
            unbalanced = False
            if middle.startswith("'") and not middle.endswith("'"):
                match = True
            elif middle.startswith('"') and not middle.endswith('"'):
                match = True

            if len(middle) > 0 and middle[0] in ['"', "'"] and middle[-1] in ['"', "'"] and (probline.count("'") > 2 or probline.count('"') > 2):
                unbalanced = True

            if match:
                msg = msg + """
This one looks easy to fix.  It seems that there is a value started
with a quote, and the YAML parser is expecting to see the line ended
with the same kind of quote.  For instance:

    when: "ok" in result.stdout

Could be written as:

   when: '"ok" in result.stdout'

or equivalently:

   when: "'ok' in result.stdout"
"""
                return msg

            if unbalanced:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
unbalanced quotes.  If starting a value with a quote, make sure the
line ends with the same set of quotes.  For instance this arbitrary
example:

    foo: "bad" "wolf"

Could be written as:

    foo: '"bad" "wolf"'
"""
                return msg

    return msg

def process_yaml_error(exc, data, path=None, show_content=True):
    if hasattr(exc, 'problem_mark'):
        mark = exc.problem_mark
        if show_content:
            if mark.line - 1 >= 0:
                before_probline = data.split("\n")[mark.line - 1]
            else:
                before_probline = ''
            probline = data.split("\n")[mark.line]
            arrow = " " * mark.column + "^"
            msg = """Syntax Error while loading YAML script, %s
Note: The error may actually appear before this position: line %s, column %s

%s
%s
%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)

            unquoted_var = None
            if '{{' in probline and '}}' in probline:
                if '"{{' not in probline and "'{{" not in probline:
                    unquoted_var = True

            if not unquoted_var:
                msg = process_common_errors(msg, probline, mark.column)
            else:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
missing quotes.  Always quote template expression brackets when they
start a value.  For instance:

    with_items:
      - {{ foo }}

Should be written as:

    with_items:
      - "{{ foo }}"
"""
        else:
            # most likely displaying a file with sensitive content,
            # so don't show any of the actual lines of yaml just the
            # line number itself
            msg = """Syntax error while loading YAML script, %s
The error appears to have been on line %s, column %s, but may actually
be before there depending on the exact syntax problem.
""" % (path, mark.line + 1, mark.column + 1)

    else:
        # No problem markers means we have to throw a generic
        # "stuff messed up" type message. Sry bud.
        if path:
            msg = "Could not parse YAML. Check over %s again." % path
        else:
            msg = "Could not parse YAML."
    raise errors.AnsibleYAMLValidationFailed(msg)

def parse_yaml_from_file(path, vault_password=None):
    ''' convert a yaml file to a data structure '''

    data = None
    show_content = True

    try:
        data = open(path).read()
    except IOError:
        raise errors.AnsibleError("file could not be read: %s" % path)

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        # if the file is encrypted and no password was specified,
        # the decrypt call would throw an error, but we check first
        # since the decrypt function doesn't know the file name
        if vault_password is None:
            raise errors.AnsibleError("A vault password must be specified to decrypt %s" % path)
        data = vault.decrypt(data)
        show_content = False

    try:
        return parse_yaml(data, path_hint=path)
    except yaml.YAMLError, exc:
        process_yaml_error(exc, data, path, show_content)

def parse_kv(args):
    ''' convert a string of key/value items to a dict '''

    options = {}
    if args is not None:
        try:
            vargs = split_args(args)
        except ValueError, ve:
            if 'no closing quotation' in str(ve).lower():
                raise errors.AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        for x in vargs:
            if "=" in x:
                k, v = x.split("=", 1)
                options[k.strip()] = unquote(v.strip())
    return options
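
# Illustrative behavior (comments only, not executed):
#
#   parse_kv('src=foo.txt dest="/tmp/a b.txt"')
#   # -> {'src': 'foo.txt', 'dest': '/tmp/a b.txt'}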

def _validate_both_dicts(a, b):
    if not (isinstance(a, dict) and isinstance(b, dict)):
        raise errors.AnsibleError(
            "failed to combine variables, expected dicts but got a '%s' and a '%s'" % (type(a).__name__, type(b).__name__)
        )

def merge_hash(a, b):
    ''' recursively merges hash b into a
    keys from b take precedence over keys from a '''

    result = {}

    # we check here as well as in combine_vars() since this
    # function can work recursively with nested dicts
    _validate_both_dicts(a, b)

    for dicts in a, b:
        # iterate over both dicts' keys and values
        for k, v in dicts.iteritems():
            # if there's already such a key in the result
            # and that key contains a dict
            if k in result and isinstance(result[k], dict):
                # merge those dicts recursively
                result[k] = merge_hash(a[k], v)
            else:
                # otherwise, just copy the value over
                result[k] = v

    return result
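
# Illustrative behavior (comments only, not executed):
#
#   merge_hash({'a': {'x': 1}}, {'a': {'y': 2}, 'b': 3})
#   # -> {'a': {'x': 1, 'y': 2}, 'b': 3}  (b's keys win on conflict)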

def md5s(data):
    ''' Return MD5 hex digest of data. '''

    digest = _md5()
    try:
        digest.update(data)
    except UnicodeEncodeError:
        digest.update(data.encode('utf-8'))
    return digest.hexdigest()

def md5(filename):
    ''' Return MD5 hex digest of local file, None if file is not present or a directory. '''

    if not os.path.exists(filename) or os.path.isdir(filename):
        return None
    digest = _md5()
    blocksize = 64 * 1024
    try:
        infile = open(filename, 'rb')
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
        infile.close()
    except IOError, e:
        raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
    return digest.hexdigest()

def default(value, function):
    ''' syntactic sugar around lazy evaluation of defaults '''

    if value is None:
        return function()
    return value

def _git_repo_info(repo_path):
    ''' returns a string containing git branch, commit id and commit date '''

    result = None
    if os.path.exists(repo_path):
        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                # There is a possibility the .git file may have an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path[:-4], gitdir)
            except (IOError, AttributeError):
                return ''
        f = open(os.path.join(repo_path, "HEAD"))
        branch = f.readline().split('/')[-1].rstrip("\n")
        f.close()
        branch_path = os.path.join(repo_path, "refs", "heads", branch)
        if os.path.exists(branch_path):
            f = open(branch_path)
            commit = f.readline()[:10]
            f.close()
        else:
            # detached HEAD
            commit = branch[:10]
            branch = 'detached HEAD'
            branch_path = os.path.join(repo_path, "HEAD")

        date = time.localtime(os.stat(branch_path).st_mtime)
        if time.daylight == 0:
            offset = time.timezone
        else:
            offset = time.altzone
        result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
            time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
    else:
        result = ''
    return result

def _gitinfo():
    basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
    repo_path = os.path.join(basedir, '.git')
    result = _git_repo_info(repo_path)

    submodules = os.path.join(basedir, '.gitmodules')
    if not os.path.exists(submodules):
        return result

    f = open(submodules)
    for line in f:
        tokens = line.strip().split(' ')
        if tokens[0] == 'path':
            submodule_path = tokens[2]
            submodule_info = _git_repo_info(os.path.join(basedir, submodule_path, '.git'))
            if not submodule_info:
                submodule_info = ' not found - use git submodule update --init ' + submodule_path
            result += "\n  {0}: {1}".format(submodule_path, submodule_info)
    f.close()
    return result

def version(prog):
    result = "{0} {1}".format(prog, __version__)
    gitinfo = _gitinfo()
    if gitinfo:
        result = result + " {0}".format(gitinfo)
    result = result + "\n  configured module search path = %s" % C.DEFAULT_MODULE_PATH
    return result

def version_info(gitinfo=False):
    if gitinfo:
        # expensive call, use with care
        ansible_version_string = version('')
    else:
        ansible_version_string = __version__
    ansible_version = ansible_version_string.split()[0]
    ansible_versions = ansible_version.split('.')
    for counter in range(len(ansible_versions)):
        if ansible_versions[counter] == "":
            ansible_versions[counter] = 0
        try:
            ansible_versions[counter] = int(ansible_versions[counter])
        except:
            pass
    if len(ansible_versions) < 3:
        for counter in range(len(ansible_versions), 3):
            ansible_versions.append(0)
    return {'string':   ansible_version_string.strip(),
            'full':     ansible_version,
            'major':    ansible_versions[0],
            'minor':    ansible_versions[1],
            'revision': ansible_versions[2]}
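
# Illustrative shape of the result (comments only, not executed; values depend on the install):
#
#   version_info()
#   # -> {'string': '1.8', 'full': '1.8', 'major': 1, 'minor': 8, 'revision': 0}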

def getch():
    ''' read in a single character '''

    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    try:
        tty.setraw(sys.stdin.fileno())
        ch = sys.stdin.read(1)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch

def sanitize_output(str):
    ''' strips private info out of a string '''

    private_keys = ['password', 'login_password']

    filter_re = [
        # filter out things like user:pass@foo/whatever
        # and http://username:pass@wherever/foo
        re.compile('^(?P<before>.*:)(?P<password>.*)(?P<after>\@.*)$'),
    ]

    parts = str.split()
    output = ''
    for part in parts:
        try:
            (k, v) = part.split('=', 1)
            if k in private_keys:
                output += " %s=VALUE_HIDDEN" % k
            else:
                found = False
                for filter in filter_re:
                    m = filter.match(v)
                    if m:
                        d = m.groupdict()
                        output += " %s=%s" % (k, d['before'] + "********" + d['after'])
                        found = True
                        break
                if not found:
                    output += " %s" % part
        except:
            output += " %s" % part

    return output.strip()
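
# Illustrative behavior (comments only, not executed):
#
#   sanitize_output('user=bob password=secret url=http://u:pw@host/x')
#   # -> 'user=bob password=VALUE_HIDDEN url=http://u:********@host/x'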

####################################################################
# option handling code for /usr/bin/ansible and ansible-playbook
# below this line

class SortedOptParser(optparse.OptionParser):
    ''' Optparser which sorts the options by opt before outputting --help '''

    def format_help(self, formatter=None):
        self.option_list.sort(key=operator.methodcaller('get_opt_string'))
        return optparse.OptionParser.format_help(self, formatter=None)

def increment_debug(option, opt, value, parser):
    global VERBOSITY
    VERBOSITY += 1

def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
    async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False):
    ''' create an options parser for any ansible script '''

    parser = SortedOptParser(usage, version=version("%prog"))
    parser.add_option('-v', '--verbose', default=False, action="callback",
        callback=increment_debug, help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")

    parser.add_option('-f', '--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
        help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
    parser.add_option('-i', '--inventory-file', dest='inventory',
        help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
        default=constants.DEFAULT_HOST_LIST)
    parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
        help='ask for SSH password')
    parser.add_option('--private-key', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
        help='use this file to authenticate the connection')
    parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
        help='ask for sudo password')
    parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
        help='ask for su password')
    parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
        help='ask for vault password')
    parser.add_option('--vault-password-file', default=constants.DEFAULT_VAULT_PASSWORD_FILE,
        dest='vault_password_file', help="vault password file")
    parser.add_option('--list-hosts', dest='listhosts', action='store_true',
        help='outputs a list of matching hosts; does not execute anything else')
    parser.add_option('-M', '--module-path', dest='module_path',
        help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
        default=None)

    if subset_opts:
        parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
            help='further limit selected hosts to an additional pattern')

    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
        dest='timeout',
        help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)

    if output_opts:
        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
            help='condense output')
        parser.add_option('-t', '--tree', dest='tree', default=None,
            help='log output to this directory')

    if runas_opts:
        parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true",
            dest='sudo', help="run operations with sudo (nopasswd)")
        parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
            help='desired sudo user (default=root)')  # Can't default to root because we need to detect when this option was given
        parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER,
            dest='remote_user', help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)
        parser.add_option('-S', '--su', default=constants.DEFAULT_SU,
            action='store_true', help='run operations with su')
        parser.add_option('-R', '--su-user', help='run operations with su as this '
                                                  'user (default=%s)' % constants.DEFAULT_SU_USER)

    if connect_opts:
        parser.add_option('-c', '--connection', dest='connection',
            default=C.DEFAULT_TRANSPORT,
            help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)

    if async_opts:
        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
            dest='poll_interval',
            help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
            help='run asynchronously, failing after X seconds (default=N/A)')

    if check_opts:
        parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
            help="don't make any changes; instead, try to predict some of the changes that may occur")

    if diff_opts:
        parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
            help="when changing (small) files and templates, show the differences in those files; works great with --check")

    return parser
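
# Illustrative usage (comments only, not executed):
#
#   parser = base_parser(usage="%prog <host-pattern> [options]", runas_opts=True)
#   (options, args) = parser.parse_args(['-u', 'deploy', '--sudo', 'webservers'])
#   # options.remote_user == 'deploy', options.sudo == True, args == ['webservers']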

def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):

    vault_pass = None
    new_vault_pass = None

    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")

    if ask_vault_pass and confirm_vault:
        vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
        if vault_pass != vault_pass2:
            raise errors.AnsibleError("Passwords do not match")

    if ask_new_vault_pass:
        new_vault_pass = getpass.getpass(prompt="New Vault password: ")

    if ask_new_vault_pass and confirm_new:
        new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
        if new_vault_pass != new_vault_pass2:
            raise errors.AnsibleError("Passwords do not match")

    # enforce no newline chars at the end of passwords
    if vault_pass:
        vault_pass = vault_pass.strip()
    if new_vault_pass:
        new_vault_pass = new_vault_pass.strip()

    return vault_pass, new_vault_pass

def ask_passwords(ask_pass=False, ask_sudo_pass=False, ask_su_pass=False, ask_vault_pass=False):
    sshpass = None
    sudopass = None
    su_pass = None
    vault_pass = None
    sudo_prompt = "sudo password: "
    su_prompt = "su password: "

    if ask_pass:
        sshpass = getpass.getpass(prompt="SSH password: ")
        sudo_prompt = "sudo password [defaults to SSH password]: "

    if ask_sudo_pass:
        sudopass = getpass.getpass(prompt=sudo_prompt)
        if ask_pass and sudopass == '':
            sudopass = sshpass

    if ask_su_pass:
        su_pass = getpass.getpass(prompt=su_prompt)

    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")

    return (sshpass, sudopass, su_pass, vault_pass)

def do_encrypt(result, encrypt, salt_size=None, salt=None):
    if PASSLIB_AVAILABLE:
        try:
            crypt = getattr(passlib.hash, encrypt)
        except:
            raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)

        if salt_size:
            result = crypt.encrypt(result, salt_size=salt_size)
        elif salt:
            result = crypt.encrypt(result, salt=salt)
        else:
            result = crypt.encrypt(result)
    else:
        raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")

    return result
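
# Illustrative usage (comments only, not executed; requires passlib):
#
#   do_encrypt('secret', 'sha512_crypt')
#   # -> a crypt-format string such as '$6$<salt>$<hash>'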

def last_non_blank_line(buf):

    all_lines = buf.splitlines()
    all_lines.reverse()
    for line in all_lines:
        if (len(line) > 0):
            return line
    # shouldn't occur unless there's no output
    return ""

def filter_leading_non_json_lines(buf):
    '''
    used to avoid random output from SSH at the top of JSON output, like messages from
    tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).

    need to filter anything which does not start with '{' or '[', and empty lines.
    filter only leading lines since multiline JSON is valid.
    '''

    filtered_lines = StringIO.StringIO()
    stop_filtering = False
    for line in buf.splitlines():
        if stop_filtering or line.startswith('{') or line.startswith('['):
            stop_filtering = True
            filtered_lines.write(line + '\n')
    return filtered_lines.getvalue()
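
# Illustrative behavior (comments only, not executed):
#
#   filter_leading_non_json_lines('MOTD banner\n{"rc": 0}\n')
#   # -> '{"rc": 0}\n'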

def boolean(value):
    val = str(value)
    if val.lower() in ["true", "t", "y", "1", "yes"]:
        return True
    else:
        return False
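
# Illustrative behavior (comments only, not executed):
#
#   boolean('yes')  # -> True
#   boolean(0)      # -> False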

def make_sudo_cmd(sudo_exe, sudo_user, executable, cmd):
    """
    helper function for connection plugins to create sudo commands
    """
    # Rather than detect if sudo wants a password this time, -k makes
    # sudo always ask for a password if one is required.
    # Passing a quoted compound command to sudo (or sudo -s)
    # directly doesn't work, so we shellquote it with pipes.quote()
    # and pass the quoted string to the user's shell.  We loop reading
    # output until we see the randomly-generated sudo prompt set with
    # the -p option.
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    prompt = '[sudo via ansible, key=%s] password: ' % randbits
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % (
        sudo_exe, sudo_exe, C.DEFAULT_SUDO_FLAGS,
        prompt, sudo_user, executable or '$SHELL', pipes.quote('echo %s; %s' % (success_key, cmd)))
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
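
# Illustrative shape of the result (comments only, not executed; the key is random):
#
#   make_sudo_cmd('sudo', 'root', None, 'whoami')
#   # -> ("/bin/sh -c 'sudo -k && sudo ... -u root $SHELL -c ...'",
#   #     '[sudo via ansible, key=...] password: ', 'SUDO-SUCCESS-...')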

def make_su_cmd(su_user, executable, cmd):
    """
    Helper function for connection plugins to create direct su commands
    """
    # TODO: work on this function
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s %s %s -c "%s -c %s"' % (
        C.DEFAULT_SU_EXE, C.DEFAULT_SU_FLAGS, su_user, executable or '$SHELL',
        pipes.quote('echo %s; %s' % (success_key, cmd))
    )
    return ('/bin/sh -c ' + pipes.quote(sudocmd), None, success_key)

_TO_UNICODE_TYPES = (unicode, type(None))

def to_unicode(value):
    if isinstance(value, _TO_UNICODE_TYPES):
        return value
    return value.decode("utf-8")

def get_diff(diff):
    # called by --diff usage in playbook and runner via callbacks
    # include names in diffs 'before' and 'after' and do diff -U 10
    try:
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            ret = []
            if 'dst_binary' in diff:
                ret.append("diff skipped: destination file appears to be binary\n")
            if 'src_binary' in diff:
                ret.append("diff skipped: source file appears to be binary\n")
            if 'dst_larger' in diff:
                ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
            if 'src_larger' in diff:
                ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
            if 'before' in diff and 'after' in diff:
                if 'before_header' in diff:
                    before_header = "before: %s" % diff['before_header']
                else:
                    before_header = 'before'
                if 'after_header' in diff:
                    after_header = "after: %s" % diff['after_header']
                else:
                    after_header = 'after'
                differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
                for line in list(differ):
                    ret.append(line)
            return u"".join(ret)
    except UnicodeDecodeError:
        return ">> the files are different, but the diff library cannot compare unicode strings"

def is_list_of_strings(items):
    for x in items:
        if not isinstance(x, basestring):
            return False
    return True

def list_union(a, b):
    result = []
    for x in a:
        if x not in result:
            result.append(x)
    for x in b:
        if x not in result:
            result.append(x)
    return result

def list_intersection(a, b):
    result = []
    for x in a:
        if x in b and x not in result:
            result.append(x)
    return result

def list_difference(a, b):
    result = []
    for x in a:
        if x not in b and x not in result:
            result.append(x)
    for x in b:
        if x not in a and x not in result:
            result.append(x)
    return result
2014-09-04 21:00:02 +00:00
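# Quick reference (doctest-style sketch; all three preserve first-seen
# order and drop duplicates, and list_difference is symmetric):
#
#   >>> list_union([1, 2, 2], [2, 3])
#   [1, 2, 3]
#   >>> list_intersection([1, 2, 3], [2, 3, 4])
#   [2, 3]
#   >>> list_difference([1, 2], [2, 3])
#   [1, 3]
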
def contains_vars(data):
    '''
    returns True if the data contains a variable pattern
    '''
    return "$" in data or "{{" in data

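# e.g. contains_vars("{{ foo }}") and contains_vars("$foo") are True,
# while contains_vars("plain text") is False.
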
def safe_eval(expr, locals={}, include_exceptions=False):
    '''
    This is intended for allowing things like:
    with_items: a_list_variable

    Where Jinja2 would return a string but we do not want to allow it to
    call functions (outside of Jinja2, where the env is constrained). If
    the input data to this function came from an untrusted (remote) source,
    it should first be run through _clean_data_struct() to ensure the data
    is further sanitized prior to evaluation.

    Based on:
    http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
    '''

    # this is the whitelist of AST nodes we are going to
    # allow in the evaluation. Any node type other than
    # those listed here will raise an exception in our custom
    # visitor class defined below.
    SAFE_NODES = set(
        (
            ast.Add,
            ast.BinOp,
            ast.Call,
            ast.Compare,
            ast.Dict,
            ast.Div,
            ast.Expression,
            ast.List,
            ast.Load,
            ast.Mult,
            ast.Num,
            ast.Name,
            ast.Str,
            ast.Sub,
            ast.Tuple,
            ast.UnaryOp,
        )
    )

    # AST node types were expanded after 2.6
    if not sys.version.startswith('2.6'):
        # set.union() returns a new set rather than updating in place,
        # so the result must be assigned back
        SAFE_NODES = SAFE_NODES.union(
            set(
                (ast.Set,)
            )
        )

    filter_list = []
    for filter in filter_loader.all():
        filter_list.extend(filter.filters().keys())

    CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list

    class CleansingNodeVisitor(ast.NodeVisitor):
        def generic_visit(self, node, inside_call=False):
            if type(node) not in SAFE_NODES:
                raise Exception("invalid expression (%s)" % expr)
            elif isinstance(node, ast.Call):
                inside_call = True
            elif isinstance(node, ast.Name) and inside_call:
                # a name used as a callable must not shadow a builtin
                # unless it is explicitly whitelisted
                if hasattr(builtin, node.id) and node.id not in CALL_WHITELIST:
                    raise Exception("invalid function: %s" % node.id)
            # iterate over all child nodes
            for child_node in ast.iter_child_nodes(node):
                self.generic_visit(child_node, inside_call)

    if not isinstance(expr, basestring):
        # already templated to a datastructure, perhaps?
        if include_exceptions:
            return (expr, None)
        return expr

    cnv = CleansingNodeVisitor()
    try:
        parsed_tree = ast.parse(expr, mode='eval')
        cnv.visit(parsed_tree)
        compiled = compile(parsed_tree, expr, 'eval')
        result = eval(compiled, {}, locals)

        if include_exceptions:
            return (result, None)
        else:
            return result
    except SyntaxError, e:
        # special handling for syntax errors, we just return
        # the expression string back as-is
        if include_exceptions:
            return (expr, None)
        return expr
    except Exception, e:
        if include_exceptions:
            return (expr, e)
        return expr

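# Illustrative behaviour (a sketch; the callable whitelist depends on which
# filter plugins are loadable in this environment):
#
#   safe_eval("[1, 2] + [3]")                      # -> [1, 2, 3]
#   safe_eval("__import__('os').system('true')")   # -> the string, unevaluated
#
# Disallowed expressions are returned as the original string rather than
# raising; pass include_exceptions=True to get an (expr, exception) tuple.
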
def listify_lookup_plugin_terms(terms, basedir, inject):

    from ansible.utils import template

    if isinstance(terms, basestring):
        # someone did:
        #    with_items: alist
        # OR
        #    with_items: {{ alist }}

        stripped = terms.strip()
        if not (stripped.startswith('{') or stripped.startswith('[')) and \
           not stripped.startswith("/") and \
           not stripped.startswith('set([') and \
           not LOOKUP_REGEX.search(terms):
            # if not already a list, get ready to evaluate with Jinja2
            # not sure why the "/" is in above code :)
            try:
                new_terms = template.template(basedir, terms, inject, convert_bare=True, fail_on_undefined=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR)
                if isinstance(new_terms, basestring) and "{{" in new_terms:
                    pass
                else:
                    terms = new_terms
            except jinja2.exceptions.UndefinedError, e:
                raise errors.AnsibleUndefinedVariable('undefined variable in items: %s' % e)
            except:
                pass

        if '{' in terms or '[' in terms:
            # Jinja2 already evaluated a variable to a list.
            # Jinja2-ified list needs to be converted back to a real type
            # TODO: something a bit less heavy than eval
            return safe_eval(terms)

    if isinstance(terms, basestring):
        terms = [terms]

    return terms

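# Behaviour sketch (assuming `inject` maps 'alist' to a real list and
# `basedir` points at a playbook directory):
#
#   listify_lookup_plugin_terms('alist', basedir, {'alist': [1, 2]})  # -> [1, 2]
#   listify_lookup_plugin_terms('[1, 2]', basedir, {})                # -> [1, 2]
#
# A plain string that templates to nothing list-like is wrapped as a
# single-element list.
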
def combine_vars(a, b):

    _validate_both_dicts(a, b)

    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
        return merge_hash(a, b)
    else:
        return dict(a.items() + b.items())

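# With the default 'replace' hash behaviour, keys in b win outright:
#
#   combine_vars({'a': 1, 'b': {'x': 1}}, {'b': {'y': 2}})
#   # -> {'a': 1, 'b': {'y': 2}}
#
# With hash_behaviour=merge, merge_hash() recursively combines nested
# dictionaries instead of replacing them wholesale.
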
def random_password(length=20, chars=C.DEFAULT_PASSWORD_CHARS):
    '''Return a random password string of length containing only chars.'''

    password = []
    while len(password) < length:
        new_char = os.urandom(1)
        if new_char in chars:
            password.append(new_char)
    return ''.join(password)

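# Each os.urandom() byte is kept only if it appears in `chars` (rejection
# sampling), so characters stay uniformly distributed over `chars`:
#
#   random_password()           # 20 chars from C.DEFAULT_PASSWORD_CHARS
#   random_password(8, 'abcd')  # e.g. 'cabdabdc'
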
def before_comment(msg):
    '''what's the part of a string before a comment?'''
    msg = msg.replace("\#", "**NOT_A_COMMENT**")
    msg = msg.split("#")[0]
    msg = msg.replace("**NOT_A_COMMENT**", "#")
    return msg

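# Doctest-style sketch ('\#' escapes a literal hash):
#
#   >>> before_comment("nginx # web server")
#   'nginx '
#   >>> before_comment("color=\#fff # trailing comment")
#   'color=#fff '
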
def load_vars(basepath, results, vault_password=None):
    """
    Load variables from any potential yaml filename combinations of basepath,
    returning result.
    """

    paths_to_check = ["".join([basepath, ext])
                      for ext in C.YAML_FILENAME_EXTENSIONS]

    found_paths = []

    for path in paths_to_check:
        found, results = _load_vars_from_path(path, results, vault_password=vault_password)
        if found:
            found_paths.append(path)

    # disallow the potentially confusing situation that there are multiple
    # variable files for the same name. For example if both group_vars/all.yml
    # and group_vars/all.yaml
    if len(found_paths) > 1:
        raise errors.AnsibleError("Multiple variable files found. "
            "There should only be one. %s" % (found_paths,))

    return results

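# e.g. with group_vars/all.yml on disk, load_vars('group_vars/all', {})
# stats basepath joined with each C.YAML_FILENAME_EXTENSIONS suffix, loads
# the single match, and raises if both all.yml and all.yaml exist.
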
## load variables from yaml files/dirs
#  e.g. host/group_vars
#
def _load_vars_from_path(path, results, vault_password=None):
    """
    Robustly access the file at path and load variables, carefully reporting
    errors in a friendly/informative way.

    Return the tuple (found, new_results, )
    """
    try:
        # in the case of a symbolic link, we want the stat of the link itself,
        # not its target
        pathstat = os.lstat(path)
    except os.error, err:
        # most common case is that nothing exists at that path.
        if err.errno == errno.ENOENT:
            return False, results
        # otherwise this is a condition we should report to the user
        raise errors.AnsibleError(
            "%s is not accessible: %s."
            " Please check its permissions." % (path, err.strerror))

    # symbolic link
    if stat.S_ISLNK(pathstat.st_mode):
        try:
            target = os.path.realpath(path)
        except os.error, err2:
            raise errors.AnsibleError("The symbolic link at %s "
                "is not readable: %s. Please check its permissions."
                % (path, err2.strerror,))
        # follow symbolic link chains by recursing, so we repeat the same
        # permissions checks above and provide useful errors.
        return _load_vars_from_path(target, results, vault_password)

    # directory
    if stat.S_ISDIR(pathstat.st_mode):
        # support organizing variables across multiple files in a directory
        return True, _load_vars_from_folder(path, results, vault_password=vault_password)

    # regular file
    elif stat.S_ISREG(pathstat.st_mode):
        data = parse_yaml_from_file(path, vault_password=vault_password)
        if data and type(data) != dict:
            raise errors.AnsibleError(
                "%s must be stored as a dictionary/hash" % path)
        elif data is None:
            data = {}

        # combine vars overrides by default but can be configured to do a
        # hash merge in settings
        results = combine_vars(results, data)
        return True, results

    # something else? could be a fifo, socket, device, etc.
    else:
        raise errors.AnsibleError("Expected a variable file or directory "
            "but found a non-file object at path %s" % (path,))

def _load_vars_from_folder(folder_path, results, vault_password=None):
    """
    Load all variables within a folder recursively.
    """
    # this function and _load_vars_from_path are mutually recursive
    try:
        names = os.listdir(folder_path)
    except os.error, err:
        raise errors.AnsibleError(
            "This folder cannot be listed: %s: %s."
            % (folder_path, err.strerror))

    # evaluate files in a stable order rather than whatever order the
    # filesystem lists them.
    names.sort()

    # do not parse hidden files or dirs, e.g. .svn/
    paths = [os.path.join(folder_path, name) for name in names if not name.startswith('.')]
    for path in paths:
        _found, results = _load_vars_from_path(path, results, vault_password=vault_password)
    return results

def update_hash(hash, key, new_value):
    '''used to avoid nested .update calls on the parent'''

    value = hash.get(key, {})
    value.update(new_value)
    hash[key] = value

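# e.g. given h = {'web': {'a': 1}}:
#
#   update_hash(h, 'web', {'b': 2})   # h -> {'web': {'a': 1, 'b': 2}}
#   update_hash(h, 'db', {'x': 1})    # absent keys start from {}
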
def censor_unlogged_data(data):
    '''
    used when the no_log: True attribute is passed to a task to keep data from a callback.
    NOT intended to prevent variable registration, but only things from showing up on
    screen
    '''
    new_data = {}
    for (x, y) in data.iteritems():
        if x in ['skipped', 'changed', 'failed', 'rc']:
            new_data[x] = y
    new_data['censored'] = 'results hidden due to no_log parameter'
    return new_data
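
# e.g.:
#
#   censor_unlogged_data({'rc': 0, 'stdout': 's3cret', 'changed': True})
#   # -> {'rc': 0, 'changed': True,
#   #     'censored': 'results hidden due to no_log parameter'}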