2012-03-03 02:08:48 +00:00
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
2013-04-10 21:52:35 +00:00
import re
2012-03-03 03:10:51 +00:00
import os
2012-03-15 01:16:15 +00:00
import shlex
2012-03-20 02:42:31 +00:00
import yaml
2013-01-23 20:39:34 +00:00
import copy
2012-04-06 14:59:15 +00:00
import optparse
2012-07-15 12:46:58 +00:00
import operator
2013-10-30 14:50:16 +00:00
from ansible import errors
from ansible import __version__
from ansible . utils . plugins import *
from ansible . utils import template
from ansible . callbacks import display
import ansible . constants as C
2014-03-31 22:33:40 +00:00
import ast
2012-08-09 01:05:58 +00:00
import time
2012-08-11 15:59:14 +00:00
import StringIO
2012-09-24 18:47:59 +00:00
import stat
2012-09-22 06:07:49 +00:00
import termios
import tty
2013-01-10 05:50:56 +00:00
import pipes
import random
2013-02-08 03:51:33 +00:00
import difflib
2013-02-18 00:32:28 +00:00
import warnings
2013-04-10 21:52:35 +00:00
import traceback
2013-04-23 03:57:20 +00:00
import getpass
2013-10-11 22:37:39 +00:00
import sys
import textwrap
2014-03-07 02:42:01 +00:00
import json
2012-07-09 07:52:00 +00:00
2014-02-24 18:09:36 +00:00
#import vault
from vault import VaultLib
2014-02-11 17:03:11 +00:00
2012-08-09 01:09:14 +00:00
# global verbosity level, bumped by each -v flag (see increment_debug below)
VERBOSITY = 0

# list of all deprecation messages to prevent duplicate display
deprecations = {}
# same idea for one-shot warnings
warns = {}

# files larger than this (bytes) are not diffed
MAX_FILE_SIZE_FOR_DIFF = 1*1024*1024

# prefer the stdlib json; fall back to simplejson on older pythons
try:
    import json
except ImportError:
    import simplejson as json

# hashlib is the modern home of md5; the bare md5 module is the
# python 2.4 fallback
try:
    from hashlib import md5 as _md5
except ImportError:
    from md5 import md5 as _md5

# optional dependency: passlib enables password hashing filters
PASSLIB_AVAILABLE = False
try:
    import passlib.hash
    PASSLIB_AVAILABLE = True
except:
    pass

# optional dependency: keyczar enables accelerated/fireball modes
KEYCZAR_AVAILABLE = False
try:
    import keyczar.errors as key_errors
    from keyczar.keys import AesKey
    KEYCZAR_AVAILABLE = True
except ImportError:
    pass
2012-09-27 03:50:54 +00:00
###############################################################
2013-08-11 05:41:18 +00:00
# Abstractions around keyczar
2013-08-07 13:12:25 +00:00
###############################################################
2013-08-11 05:41:18 +00:00
def key_for_hostname(hostname):
    ''' Return the AES key used to talk to `hostname` in accelerated mode,
    generating and persisting a fresh one when none exists or the old one
    has expired. Raises AnsibleError if keyczar is missing or the key
    directory/file permissions are wrong. '''
    # fireball mode is an implementation of ansible firing up zeromq via SSH
    # to use no persistent daemons or key management
    if not KEYCZAR_AVAILABLE:
        raise errors.AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")

    key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
    if not os.path.exists(key_path):
        # create the directory restrictively, then apply the configured mode
        # (ACCELERATE_KEYS_DIR_PERMS is an octal string, hence int(..., 8))
        os.makedirs(key_path, mode=0700)
        os.chmod(key_path, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
    elif not os.path.isdir(key_path):
        raise errors.AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')

    # refuse to use a key directory whose permissions differ from the
    # configured ones -- another user could read the shared secrets
    if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
        raise errors.AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))

    key_path = os.path.join(key_path, hostname)

    # use new AES keys every 2 hours, which means fireball must not allow running for longer either
    if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
        key = AesKey.Generate()
        # open with O_CREAT + the configured mode so the file is never
        # world-readable, even transiently
        fd = os.open(key_path, os.O_WRONLY | os.O_CREAT, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
        fh = os.fdopen(fd, 'w')
        fh.write(str(key))
        fh.close()
        return key
    else:
        # reuse the existing key, but only if its permissions are still sane
        if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
            raise errors.AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
        fh = open(key_path)
        key = AesKey.Read(fh.read())
        fh.close()
        return key
def encrypt(key, msg):
    ''' Encrypt msg with the given keyczar AES key and return the ciphertext. '''
    ciphertext = key.Encrypt(msg)
    return ciphertext
def decrypt(key, msg):
    ''' Decrypt msg with the given keyczar AES key.

    A keyczar signature failure is translated into a generic AnsibleError
    so callers never see keyczar internals. '''
    try:
        plaintext = key.Decrypt(msg)
    except key_errors.InvalidSignatureError:
        raise errors.AnsibleError("decryption failed")
    return plaintext
2012-09-27 03:50:54 +00:00
2012-03-03 02:16:29 +00:00
###############################################################
# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
###############################################################
2012-03-03 02:08:48 +00:00
def err(msg):
    ''' print an error message to stderr '''

    # equivalent to `print >> sys.stderr, msg`: stringify and add a newline
    sys.stderr.write('%s\n' % msg)
def exit(msg, rc=1):
    ''' quit with an error to stdout and a failure code '''

    # NOTE(review): despite the docstring, err() writes to stderr, not stdout
    err(msg)
    sys.exit(rc)
2012-07-15 14:12:49 +00:00
def jsonify(result, format=False):
    ''' format JSON output (compressed or uncompressed) '''

    # render None as an empty JSON object so callers can always treat the
    # output as a dict-shaped document
    if result is None:
        return "{}"
    # shallow copy so the caller's dict is not mutated below
    result2 = result.copy()
    # coerce python 2 byte strings to unicode so json.dumps does not choke
    # on non-ascii module output; undecodable bytes are dropped ('ignore')
    for key, value in result2.items():
        if type(value) is str:
            result2[key] = value.decode('utf-8', 'ignore')
    if format:
        return json.dumps(result2, sort_keys=True, indent=4)
    else:
        return json.dumps(result2, sort_keys=True)
2012-03-03 02:08:48 +00:00
2012-03-03 03:10:51 +00:00
def write_tree_file(tree, hostname, buf):
    ''' write something into treedir/hostname '''

    # TODO: might be nice to append playbook runs per host in a similar way
    # in which case, we'd want append mode.
    dest = os.path.join(tree, hostname)
    with open(dest, "w+") as out:
        out.write(buf)
def is_failed(result):
    ''' is a given JSON result a failed result? '''

    # a nonzero return code always means failure
    if result.get('rc', 0) != 0:
        return True
    # modules may also report failure explicitly, as a bool or a string
    return result.get('failed', False) in [True, 'True', 'true']
2012-03-03 02:08:48 +00:00
2013-01-01 01:27:16 +00:00
def is_changed(result):
    ''' is a given JSON result a changed result? '''
    # 'changed' may arrive as a real bool or as a string from a module
    changed_flag = result.get('changed', False)
    return changed_flag in (True, 'True', 'true')
2014-01-04 00:13:21 +00:00
def check_conditional(conditional, basedir, inject, fail_on_undefined=False):
    ''' Evaluate a `when:` style conditional (string, list of strings, or
    bool) against the `inject` variable dictionary, returning True/False.
    Lists are AND-ed together. Raises AnsibleError when the expression
    cannot be evaluated. '''

    # absent/empty conditional always passes
    if conditional is None or conditional == '':
        return True

    # a list of conditionals: all must hold
    if isinstance(conditional, list):
        for x in conditional:
            if not check_conditional(x, basedir, inject, fail_on_undefined=fail_on_undefined):
                return False
        return True

    # already a bool (or other non-string): nothing to template, return as-is
    if not isinstance(conditional, basestring):
        return conditional

    # strip the legacy marker some callers prepend
    conditional = conditional.replace("jinja2_compare ","")
    # allow variable names
    # (the '-' check avoids misreading things like dates as variable lookups
    #  -- presumably; TODO confirm against callers)
    if conditional in inject and '-' not in str(inject[conditional]):
        conditional = inject[conditional]
    conditional = template.template(basedir, conditional, inject, fail_on_undefined=fail_on_undefined)
    original = str(conditional).replace("jinja2_compare ","")
    # a Jinja2 evaluation that results in something Python can eval!
    presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
    conditional = template.template(basedir, presented, inject)
    val = conditional.strip()
    if val == presented:
        # the templating failed, meaning most likely a
        # variable was undefined. If we happened to be
        # looking for an undefined variable, return True,
        # otherwise fail
        if "is undefined" in conditional:
            return True
        elif "is defined" in conditional:
            return False
        else:
            raise errors.AnsibleError("error while evaluating conditional: %s" % original)
    elif val == "True":
        return True
    elif val == "False":
        return False
    else:
        raise errors.AnsibleError("unable to evaluate conditional: %s" % original)
2012-09-24 19:06:34 +00:00
2012-09-24 18:47:59 +00:00
def is_executable(path):
    ''' is the given path executable? '''
    # stat once and test each of the three execute bits
    st_mode = os.stat(path)[stat.ST_MODE]
    return (stat.S_IXUSR & st_mode
            or stat.S_IXGRP & st_mode
            or stat.S_IXOTH & st_mode)
2013-08-20 17:03:50 +00:00
def unfrackpath(path):
    '''
    returns a path that is free of symlinks, environment
    variables, relative path traversals and symbols (~)
    example:
    '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    # expand ~user first, then $VARS, then resolve symlinks and normalize
    expanded = os.path.expanduser(path)
    expanded = os.path.expandvars(expanded)
    return os.path.normpath(os.path.realpath(expanded))
2013-08-20 17:03:50 +00:00
def prepare_writeable_dir(tree, mode=0777):
    ''' make sure a directory exists and is writeable '''

    # modify the mode to ensure the owner at least
    # has read/write access to this directory
    mode |= 0700

    # make sure the tree path is always expanded
    # and normalized and free of symlinks
    tree = unfrackpath(tree)

    if not os.path.exists(tree):
        try:
            os.makedirs(tree, mode)
        except (IOError, OSError), e:
            raise errors.AnsibleError("Could not make dir %s: %s" % (tree, e))
    # even a pre-existing directory must be writeable by us
    if not os.access(tree, os.W_OK):
        raise errors.AnsibleError("Cannot write to path %s" % tree)
    return tree
2012-03-03 02:08:48 +00:00
2012-03-03 15:53:15 +00:00
def path_dwim(basedir, given):
    '''
    make relative paths work like folks expect.
    '''
    # absolute paths pass straight through
    if given.startswith("/"):
        return os.path.abspath(given)
    # ~ and ~user expand to home directories
    if given.startswith("~"):
        return os.path.abspath(os.path.expanduser(given))
    # everything else is relative to basedir (or the cwd if none given)
    if basedir is None:
        basedir = "."
    return os.path.abspath(os.path.join(basedir, given))
2013-04-06 16:13:04 +00:00
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''
    # (used by roles code)
    basedir = os.path.dirname(original)
    if os.path.islink(basedir):
        # a symlinked role dir: resolve it and look for dirname directly inside
        basedir = unfrackpath(basedir)
        candidate = os.path.join(basedir, dirname, source)
    else:
        candidate = os.path.join(basedir, '..', dirname, source)
    source2 = path_dwim(basedir, candidate)
    if os.path.exists(source2):
        return source2
    # fall back to a path relative to the playbook itself
    obvious_local_path = path_dwim(playbook_base, source)
    if os.path.exists(obvious_local_path):
        return obvious_local_path
    if check:
        raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
    return source2  # which does not exist
2012-03-03 02:08:48 +00:00
2012-03-18 21:53:58 +00:00
def json_loads(data):
    ''' parse a JSON string and return a data structure '''

    parsed = json.loads(data)
    return parsed
2012-08-11 13:55:14 +00:00
def parse_json ( raw_data ) :
2012-03-18 21:53:58 +00:00
''' this version for module return data only '''
2013-01-20 14:05:07 +00:00
2012-10-20 16:12:07 +00:00
orig_data = raw_data
2012-07-15 16:29:53 +00:00
2012-08-11 15:59:14 +00:00
# ignore stuff like tcgetattr spewage or other warnings
2012-08-11 14:24:16 +00:00
data = filter_leading_non_json_lines ( raw_data )
2012-08-11 13:55:14 +00:00
2012-03-15 01:16:15 +00:00
try :
return json . loads ( data )
except :
# not JSON, but try "Baby JSON" which allows many of our modules to not
# require JSON and makes writing modules in bash much simpler
results = { }
2012-07-15 13:32:47 +00:00
try :
2012-06-14 18:17:38 +00:00
tokens = shlex . split ( data )
2012-08-07 00:07:02 +00:00
except :
2012-06-14 18:17:38 +00:00
print " failed to parse json: " + data
2012-08-07 00:07:02 +00:00
raise
2012-03-15 01:16:15 +00:00
for t in tokens :
2014-03-27 17:56:33 +00:00
if " = " not in t :
2012-10-20 16:12:07 +00:00
raise errors . AnsibleError ( " failed to parse: %s " % orig_data )
2012-03-15 01:16:15 +00:00
( key , value ) = t . split ( " = " , 1 )
if key == ' changed ' or ' failed ' :
2012-03-16 02:32:14 +00:00
if value . lower ( ) in [ ' true ' , ' 1 ' ] :
2012-03-15 01:16:15 +00:00
value = True
elif value . lower ( ) in [ ' false ' , ' 0 ' ] :
value = False
if key == ' rc ' :
2012-08-07 00:07:02 +00:00
value = int ( value )
2012-03-15 01:16:15 +00:00
results [ key ] = value
2012-03-16 01:53:14 +00:00
if len ( results . keys ( ) ) == 0 :
2012-10-20 16:12:07 +00:00
return { " failed " : True , " parsed " : False , " msg " : orig_data }
2012-03-15 01:16:15 +00:00
return results
2013-04-25 01:59:47 +00:00
def smush_braces(data):
    ''' smush Jinaj2 braces so unresolved templates like {{ foo }} don't get parsed weird by key=value code '''
    # FIX: the replace() calls had become self-identical (replacing a string
    # with itself), which made these loops non-terminating. The intent is to
    # strip the space after '{{' and before '}}' so an entire template
    # expression survives shlex splitting as one token.
    while '{{ ' in data:
        data = data.replace('{{ ', '{{')
    while ' }}' in data:
        data = data.replace(' }}', '}}')
    return data
def smush_ds(data):
    # things like key={{ foo }} are not handled by shlex.split well, so preprocess any YAML we load
    # so we do not have to call smush elsewhere
    if type(data) == list:
        # rebuild the list with each element smushed
        return [smush_ds(item) for item in data]
    if type(data) == dict:
        # smush values in place; the key set itself is untouched
        for key in data:
            data[key] = smush_ds(data[key])
        return data
    if isinstance(data, basestring):
        return smush_braces(data)
    return data
2014-03-07 03:07:35 +00:00
def parse_yaml ( data , path_hint = None ) :
2014-03-07 02:42:01 +00:00
''' convert a yaml string to a data structure. Also supports JSON, ssssssh!!! '''
2014-03-10 18:43:34 +00:00
stripped_data = data . lstrip ( )
2014-03-07 03:15:21 +00:00
loaded = None
2014-03-10 18:43:34 +00:00
if stripped_data . startswith ( " { " ) or stripped_data . startswith ( " [ " ) :
2014-03-07 02:42:01 +00:00
# since the line starts with { or [ we can infer this is a JSON document.
2014-03-07 03:07:35 +00:00
try :
loaded = json . loads ( data )
except ValueError , ve :
if path_hint :
raise errors . AnsibleError ( path_hint + " : " + str ( ve ) )
else :
raise errors . AnsibleError ( str ( ve ) )
2014-03-07 02:42:01 +00:00
else :
# else this is pretty sure to be a YAML document
loaded = yaml . safe_load ( data )
2014-03-07 03:15:21 +00:00
return smush_ds ( loaded )
2012-08-07 00:07:02 +00:00
2013-10-06 14:23:44 +00:00
def process_common_errors(msg, probline, column):
    ''' Append a human-friendly hint to `msg` for the most common YAML
    mistakes found on `probline` (unquoted templates, stray colons,
    mismatched or unbalanced quotes). Returns the possibly-extended msg. '''
    replaced = probline.replace(" ", "")

    # unquoted jinja2 template right after a colon
    if ":{{" in replaced and "}}" in replaced:
        msg = msg + """
This one looks easy to fix.  YAML thought it was looking for the start of a
hash/dictionary and was confused to see a second "{".  Most likely this was
meant to be an ansible template evaluation instead, so we have to give the
parser a small hint that we wanted a string instead. The solution here is to
just quote the entire value.

For instance, if the original line was:

    app_path: {{ base_path }}/foo

It should be written as:

    app_path: "{{ base_path }}/foo"
"""
        return msg

    # a second free colon on the line, right at the error column
    elif len(probline) and len(probline) > 1 and len(probline) > column and probline[column] == ":" and probline.count(':') > 1:
        msg = msg + """
This one looks easy to fix.  There seems to be an extra unquoted colon in the line
and this is confusing the parser. It was only expecting to find one free
colon. The solution is just add some quotes around the colon, or quote the
entire line after the first colon.

For instance, if the original line was:

    copy: src=file.txt dest=/path/filename:with_colon.txt

It can be written as:

    copy: src=file.txt dest='/path/filename:with_colon.txt'

Or:

    copy: 'src=file.txt dest=/path/filename:with_colon.txt'
"""
        return msg
    else:
        parts = probline.split(":")
        if len(parts) > 1:
            middle = parts[1].strip()
            match = False
            unbalanced = False
            # value opens with a quote but does not close with the same one
            if middle.startswith("'") and not middle.endswith("'"):
                match = True
            elif middle.startswith('"') and not middle.endswith('"'):
                match = True
            # FIX: the quote-count test used to read
            #   ... and probline.count("'") > 2 or probline.count('"') > 2
            # where the trailing `or` escaped the and-chain, so ANY line with
            # three or more double quotes was flagged as unbalanced even when
            # the value was not quoted at all. Parenthesize the disjunction.
            if len(middle) > 0 and middle[0] in ['"', "'"] and middle[-1] in ['"', "'"] and (probline.count("'") > 2 or probline.count('"') > 2):
                unbalanced = True
            if match:
                msg = msg + """
This one looks easy to fix.  It seems that there is a value started
with a quote, and the YAML parser is expecting to see the line ended
with the same kind of quote.  For instance:

    when: "ok" in result.stdout

Could be written as:

   when: '"ok" in result.stdout'

or equivalently:

   when: "'ok' in result.stdout"
"""
                return msg
            if unbalanced:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
unbalanced quotes.  If starting a value with a quote, make sure the
line ends with the same set of quotes.  For instance this arbitrary
example:

    foo: "bad" "wolf"

Could be written as:

    foo: '"bad" "wolf"'
"""
                return msg
    return msg
2013-02-09 19:30:19 +00:00
def process_yaml_error(exc, data, path=None):
    ''' Convert a PyYAML parser exception into a friendly
    AnsibleYAMLValidationFailed error, pointing at the offending line and
    appending hints for common mistakes. Always raises. '''
    if hasattr(exc, 'problem_mark'):
        mark = exc.problem_mark
        # show the line before the error too, since YAML errors often
        # actually originate one line earlier
        if mark.line - 1 >= 0:
            before_probline = data.split("\n")[mark.line-1]
        else:
            before_probline = ''
        probline = data.split("\n")[mark.line]
        arrow = " " * mark.column + "^"
        msg = """Syntax Error while loading YAML script, %s
Note: The error may actually appear before this position: line %s, column %s

%s
%s
%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)

        # detect a template expression that opens a value without quotes
        unquoted_var = None
        if '{{' in probline and '}}' in probline:
            if '"{{' not in probline or "'{{" not in probline:
                unquoted_var = True

        # FIX: process_common_errors() was previously also called
        # unconditionally before this branch, appending the same hint text
        # twice to the message. Call it exactly once per path.
        if not unquoted_var:
            msg = process_common_errors(msg, probline, mark.column)
        else:
            msg = msg + """
We could be wrong, but this one looks like it might be an issue with
missing quotes.  Always quote template expression brackets when they
start a value. For instance:

    with_items:
      - {{ foo }}

Should be written as:

    with_items:
      - "{{ foo }}"
"""
            msg = process_common_errors(msg, probline, mark.column)
    else:
        # No problem markers means we have to throw a generic
        # "stuff messed up" type message. Sry bud.
        if path:
            msg = "Could not parse YAML. Check over %s again." % path
        else:
            msg = "Could not parse YAML."
    raise errors.AnsibleYAMLValidationFailed(msg)
2014-02-11 17:03:11 +00:00
def parse_yaml_from_file ( path , vault_password = None ) :
2012-07-07 12:18:33 +00:00
''' convert a yaml file to a data structure '''
2012-07-15 16:29:53 +00:00
2014-02-11 17:03:11 +00:00
data = None
2014-02-24 18:09:36 +00:00
try :
data = open ( path ) . read ( )
except IOError :
raise errors . AnsibleError ( " file could not read: %s " % path )
vault = VaultLib ( password = vault_password )
if vault . is_encrypted ( data ) :
data = vault . decrypt ( data )
2014-02-11 17:03:11 +00:00
2012-03-21 02:29:21 +00:00
try :
2014-03-07 03:07:35 +00:00
return parse_yaml ( data , path_hint = path )
2012-08-07 22:24:22 +00:00
except yaml . YAMLError , exc :
2013-02-09 19:30:19 +00:00
process_yaml_error ( exc , data , path )
2012-03-19 23:23:14 +00:00
2012-03-31 02:52:38 +00:00
def parse_kv ( args ) :
2012-03-22 03:39:09 +00:00
''' convert a string of key/value items to a dict '''
options = { }
2012-04-27 00:42:20 +00:00
if args is not None :
2012-08-03 00:21:59 +00:00
# attempting to split a unicode here does bad things
2013-04-03 05:03:30 +00:00
args = args . encode ( ' utf-8 ' )
2014-03-17 21:14:32 +00:00
try :
vargs = shlex . split ( args , posix = True )
except ValueError , ve :
if ' no closing quotation ' in str ( ve ) . lower ( ) :
raise errors . AnsibleError ( " error parsing argument string, try quoting the entire line. " )
else :
raise
vargs = [ x . decode ( ' utf-8 ' ) for x in vargs ]
2012-04-27 01:25:43 +00:00
for x in vargs :
2014-01-23 15:02:17 +00:00
if " = " in x :
2012-08-03 00:21:59 +00:00
k , v = x . split ( " = " , 1 )
2012-04-27 01:25:43 +00:00
options [ k ] = v
2012-03-22 03:39:09 +00:00
return options
2012-03-31 01:56:10 +00:00
2013-01-23 20:39:34 +00:00
def merge_hash ( a , b ) :
2013-06-03 22:19:11 +00:00
''' recursively merges hash b into a
2013-08-11 04:54:05 +00:00
keys from b take precedence over keys from a '''
2013-01-23 20:39:34 +00:00
2013-06-03 22:19:11 +00:00
result = copy . deepcopy ( a )
# next, iterate over b keys and values
2013-01-23 20:39:34 +00:00
for k , v in b . iteritems ( ) :
2013-06-03 22:19:11 +00:00
# if there's already such key in a
# and that key contains dict
if k in result and isinstance ( result [ k ] , dict ) :
# merge those dicts recursively
result [ k ] = merge_hash ( a [ k ] , v )
2013-01-23 20:39:34 +00:00
else :
2013-06-03 22:19:11 +00:00
# otherwise, just copy a value from b to a
result [ k ] = v
return result
2013-01-23 20:39:34 +00:00
2012-11-18 16:51:01 +00:00
def md5s(data):
    ''' Return MD5 hex digest of data. '''
    checksum = _md5()
    try:
        checksum.update(data)
    except UnicodeEncodeError:
        # unicode input must be serialized to utf-8 bytes before hashing
        checksum.update(data.encode('utf-8'))
    return checksum.hexdigest()
2012-07-09 07:52:00 +00:00
def md5(filename):
    ''' Return MD5 hex digest of local file, or None if file is not present. '''

    if not os.path.exists(filename):
        return None

    digest = _md5()
    blocksize = 64 * 1024
    infile = open(filename, 'rb')
    try:
        # hash the file in fixed-size chunks so large files stay cheap
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
    finally:
        infile.close()
    return digest.hexdigest()
2012-07-21 20:51:31 +00:00
def default(value, function):
    ''' syntactic sugar around lazy evaluation of defaults '''
    # only call function() when no value was supplied
    if value is not None:
        return value
    return function()
2012-08-08 09:18:51 +00:00
def _gitinfo():
    ''' returns a string containing git branch, commit id and commit date '''
    result = None
    # the repo root is three directories above this module
    # (lib/ansible/utils/__init__.py -> checkout root)
    repo_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '.git')

    if os.path.exists(repo_path):
        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                # a submodule .git file is "gitdir: <path>"; it happens to
                # parse as YAML, which is how the path is extracted here
                gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
                # There is a posibility the .git file to have an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
            except (IOError, AttributeError):
                return ''
        # HEAD looks like "ref: refs/heads/<branch>"; keep only the last part
        f = open(os.path.join(repo_path, "HEAD"))
        branch = f.readline().split('/')[-1].rstrip("\n")
        f.close()
        branch_path = os.path.join(repo_path, "refs", "heads", branch)
        if os.path.exists(branch_path):
            # the ref file holds the commit sha; first 10 chars are enough
            f = open(branch_path)
            commit = f.readline()[:10]
            f.close()
            # the ref file's mtime approximates the last update time
            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            # offset is seconds west of UTC; /-36 converts to +-HHMM format
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
                time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
        else:
            result = ''
    return result
def version(prog):
    ''' Build the "<prog> <version> [<gitinfo>]" display string. '''
    banner = "{0} {1}".format(prog, __version__)
    gitinfo = _gitinfo()
    if gitinfo:
        # only append git details when running from a checkout
        banner = banner + " {0}".format(gitinfo)
    return banner
2012-09-22 06:07:49 +00:00
def getch():
    ''' read in a single character '''
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    try:
        # raw mode: return on one keypress without waiting for enter
        tty.setraw(fd)
        ch = sys.stdin.read(1)
    finally:
        # always restore the terminal settings
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch
2014-02-13 20:23:49 +00:00
def sanitize_output(str):
    ''' strips private info out of a string '''

    private_keys = ['password', 'login_password']

    filter_re = [
        # filter out things like user:pass@foo/whatever
        # and http://username:pass@wherever/foo
        re.compile('^(?P<before>.*:)(?P<password>.*)(?P<after>\@.*)$'),
    ]

    output = ''
    for part in str.split():
        try:
            (k, v) = part.split('=', 1)
        except:
            # not a key=value token; pass it through untouched
            output += " %s" % part
            continue
        if k in private_keys:
            # known-sensitive key: hide the value entirely
            output += " %s=VALUE_HIDDEN" % k
            continue
        found = False
        for filter in filter_re:
            m = filter.match(v)
            if m:
                # mask only the credential portion of a URL-ish value
                d = m.groupdict()
                output += " %s=%s" % (k, d['before'] + "********" + d['after'])
                found = True
                break
        if not found:
            output += " %s" % part
    return output.strip()
2012-07-07 12:45:06 +00:00
####################################################################
2012-08-07 00:07:02 +00:00
# option handling code for /usr/bin/ansible and ansible-playbook
2012-07-07 12:45:06 +00:00
# below this line
2012-04-10 17:51:58 +00:00
class SortedOptParser(optparse.OptionParser):
    ''' Optparser which sorts the options by opt before outputting --help '''

    def format_help(self, formatter=None):
        # present the registered options alphabetically by their opt string,
        # then delegate to the stock formatter
        opt_string_key = operator.methodcaller('get_opt_string')
        self.option_list.sort(key=opt_string_key)
        return optparse.OptionParser.format_help(self, formatter=None)
2012-08-09 01:09:14 +00:00
def increment_debug(option, opt, value, parser):
    ''' optparse callback: each -v on the command line bumps the global verbosity '''
    global VERBOSITY
    VERBOSITY = VERBOSITY + 1
2012-09-22 06:07:49 +00:00
def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
    async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False):
    ''' create an options parser for any ansible script

    Common flags (verbosity, forks, inventory, password prompts, module
    path, timeout) are always added; each boolean flag adds an optional
    group of options:

    :param constants: constants module supplying the defaults (normally C)
    :param usage: usage string for --help output
    :param output_opts: add -o/--one-line and -t/--tree
    :param runas_opts: add sudo/su and remote-user options
    :param async_opts: add -P/--poll and -B/--background
    :param connect_opts: add -c/--connection
    :param subset_opts: add -l/--limit
    :param check_opts: add -C/--check
    :param diff_opts: add -D/--diff
    :returns: a SortedOptParser ready for parse_args()
    '''

    parser = SortedOptParser(usage, version=version("%prog"))
    # -v is a counting flag: the callback bumps the global VERBOSITY
    parser.add_option('-v', '--verbose', default=False, action="callback",
        callback=increment_debug, help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")

    parser.add_option('-f', '--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
        help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
    parser.add_option('-i', '--inventory-file', dest='inventory',
        help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
        default=constants.DEFAULT_HOST_LIST)
    parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
        help='ask for SSH password')
    parser.add_option('--private-key', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
        help='use this file to authenticate the connection')
    parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
        help='ask for sudo password')
    parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
        help='ask for su password')
    parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
        help='ask for vault password')
    parser.add_option('--vault-password-file', default=None, dest='vault_password_file',
        help="vault password file")
    parser.add_option('--list-hosts', dest='listhosts', action='store_true',
        help='outputs a list of matching hosts; does not execute anything else')
    parser.add_option('-M', '--module-path', dest='module_path',
        help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
        default=None)

    if subset_opts:
        parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
            help='further limit selected hosts to an additional pattern')

    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
        dest='timeout',
        help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)

    if output_opts:
        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
            help='condense output')
        parser.add_option('-t', '--tree', dest='tree', default=None,
            help='log output to this directory')

    if runas_opts:
        parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true",
            dest='sudo', help="run operations with sudo (nopasswd)")
        parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
            help='desired sudo user (default=root)')  # Can't default to root because we need to detect when this option was given
        parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER,
            dest='remote_user', help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)
        parser.add_option('-S', '--su', default=constants.DEFAULT_SU,
            action='store_true', help='run operations with su')
        parser.add_option('-R', '--su-user', help='run operations with su as this '
                                                  'user (default=%s)' % constants.DEFAULT_SU_USER)

    if connect_opts:
        parser.add_option('-c', '--connection', dest='connection',
                          default=C.DEFAULT_TRANSPORT,
                          help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)

    if async_opts:
        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
            dest='poll_interval',
            help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
            help='run asynchronously, failing after X seconds (default=N/A)')

    if check_opts:
        parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
            help="don't make any changes; instead, try to predict some of the changes that may occur"
        )

    if diff_opts:
        parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
            help="when changing (small) files and templates, show the differences in those files; works great with --check"
        )

    return parser
2012-04-06 14:59:15 +00:00
2014-02-19 16:05:08 +00:00
def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
    '''
    Interactively prompt for the current and/or new vault passwords.

    :param ask_vault_pass: prompt for the current vault password
    :param ask_new_vault_pass: prompt for a new vault password (rekey)
    :param confirm_vault: re-prompt and verify the current password
    :param confirm_new: re-prompt and verify the new password
    :returns: (vault_pass, new_vault_pass); unrequested values are None
    :raises errors.AnsibleError: when a confirmation does not match
    '''
    vault_pass = None
    new_vault_pass = None

    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")
        if confirm_vault:
            confirmation = getpass.getpass(prompt="Confirm Vault password: ")
            if vault_pass != confirmation:
                raise errors.AnsibleError("Passwords do not match")

    if ask_new_vault_pass:
        new_vault_pass = getpass.getpass(prompt="New Vault password: ")
        if confirm_new:
            confirmation = getpass.getpass(prompt="Confirm New Vault password: ")
            if new_vault_pass != confirmation:
                raise errors.AnsibleError("Passwords do not match")

    # enforce no stray whitespace/newline chars at the ends of the passwords
    if vault_pass:
        vault_pass = vault_pass.strip()
    if new_vault_pass:
        new_vault_pass = new_vault_pass.strip()

    return vault_pass, new_vault_pass
def ask_passwords(ask_pass=False, ask_sudo_pass=False, ask_su_pass=False, ask_vault_pass=False):
    '''
    Interactively collect whichever of the SSH/sudo/su/vault passwords
    were requested; the others come back as None.

    :returns: tuple (sshpass, sudopass, su_pass, vault_pass)
    '''
    sshpass = None
    sudopass = None
    su_pass = None
    vault_pass = None

    if ask_pass:
        sshpass = getpass.getpass(prompt="SSH password: ")
        # when we already have an SSH password, let the user just hit
        # enter at the sudo prompt to reuse it
        sudo_prompt = "sudo password [defaults to SSH password]: "
    else:
        sudo_prompt = "sudo password: "

    if ask_sudo_pass:
        sudopass = getpass.getpass(prompt=sudo_prompt)
        if ask_pass and sudopass == '':
            sudopass = sshpass

    if ask_su_pass:
        su_pass = getpass.getpass(prompt="su password: ")

    if ask_vault_pass:
        vault_pass = getpass.getpass(prompt="Vault password: ")

    return (sshpass, sudopass, su_pass, vault_pass)
2013-04-23 03:57:20 +00:00
2012-08-09 14:56:40 +00:00
def do_encrypt(result, encrypt, salt_size=None, salt=None):
    '''
    Hash a plaintext string with the named passlib algorithm.

    :param result: the plaintext value to hash
    :param encrypt: name of a passlib.hash scheme (e.g. 'sha512_crypt')
    :param salt_size: optional salt size forwarded to passlib
    :param salt: optional explicit salt forwarded to passlib
    :returns: the hashed string
    :raises errors.AnsibleError: if passlib is not installed or the
        scheme name is not supported
    '''
    if PASSLIB_AVAILABLE:
        try:
            crypt = getattr(passlib.hash, encrypt)
        except AttributeError:
            # was a bare except; only an unknown scheme name can fail the
            # getattr, and a bare except would also swallow KeyboardInterrupt
            raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)

        if salt_size:
            result = crypt.encrypt(result, salt_size=salt_size)
        elif salt:
            result = crypt.encrypt(result, salt=salt)
        else:
            result = crypt.encrypt(result)
    else:
        raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")

    return result
2012-07-24 23:30:02 +00:00
2012-08-11 15:59:14 +00:00
def last_non_blank_line(buf):
    ''' Return the last line of buf that has any characters on it. '''
    for line in reversed(buf.splitlines()):
        if line:
            return line
    # shouldn't occur unless there's no output
    return ''
2012-08-11 15:59:14 +00:00
def filter_leading_non_json_lines(buf):
    '''
    Drop leading lines that cannot be the start of JSON output, such as
    chatter SSH prints before the module result (tcgetattr messages, or
    dropbear spewing the MOTD on every command).

    A line "starts" the JSON when it begins with '{' or '[' or contains
    '='; everything from that line onward is kept verbatim, since valid
    JSON may span multiple lines.
    '''
    kept = []
    started = False
    for line in buf.splitlines():
        if not started:
            started = "=" in line or line.startswith('{') or line.startswith('[')
        if started:
            kept.append(line + '\n')
    return ''.join(kept)
2012-08-11 13:55:14 +00:00
2012-10-27 20:46:33 +00:00
def boolean(value):
    ''' Loosely interpret value as a boolean: true/t/y/1/yes (any case)
    of its string form count as True, everything else as False. '''
    return str(value).lower() in ("true", "t", "y", "1", "yes")
2013-01-10 05:50:56 +00:00
def make_sudo_cmd(sudo_user, executable, cmd):
    """
    helper function for connection plugins to create sudo commands

    :param sudo_user: user to become via sudo (-u)
    :param executable: shell to run the command with; falls back to $SHELL
    :param cmd: the command string to execute under sudo
    :returns: tuple (full /bin/sh command string, the randomized password
        prompt to watch for, the success marker echoed just before cmd so
        the caller can tell authentication succeeded)
    """
    # Rather than detect if sudo wants a password this time, -k makes
    # sudo always ask for a password if one is required.
    # Passing a quoted compound command to sudo (or sudo -s)
    # directly doesn't work, so we shellquote it with pipes.quote()
    # and pass the quoted string to the user's shell. We loop reading
    # output until we see the randomly-generated sudo prompt set with
    # the -p option.
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    prompt = '[sudo via ansible, key=%s] password: ' % randbits
    success_key = 'SUDO-SUCCESS-%s' % randbits
    # -S reads the password from stdin; the random key in the prompt and
    # success marker prevents collisions with the command's own output
    sudocmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % (
        C.DEFAULT_SUDO_EXE, C.DEFAULT_SUDO_EXE, C.DEFAULT_SUDO_FLAGS,
        prompt, sudo_user, executable or '$SHELL', pipes.quote('echo %s; %s' % (success_key, cmd)))
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
2013-02-08 03:51:33 +00:00
2014-01-21 01:19:03 +00:00
def make_su_cmd(su_user, executable, cmd):
    """
    Helper function for connection plugins to create direct su commands

    :param su_user: user to become via su
    :param executable: shell to run the command with; falls back to $SHELL
    :param cmd: the command string to execute under su
    :returns: tuple (full /bin/sh command string, the password prompt
        fragment to watch for, the success marker echoed before cmd)
    """
    # TODO: work on this function
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    # 'assword: ' matches both "Password:" and "password:" variants of su
    prompt = 'assword: '
    # NOTE(review): the marker deliberately(?) reuses the SUDO-SUCCESS
    # prefix; callers appear to only compare against the returned value,
    # so the name should be harmless — confirm before renaming
    success_key = 'SUDO-SUCCESS-%s' % randbits
    sudocmd = '%s %s %s %s -c %s' % (
        C.DEFAULT_SU_EXE, C.DEFAULT_SU_FLAGS, su_user, executable or '$SHELL',
        pipes.quote('echo %s; %s' % (success_key, cmd))
    )
    return ('/bin/sh -c ' + pipes.quote(sudocmd), prompt, success_key)
2013-07-31 11:58:32 +00:00
# types returned unchanged by to_unicode: already-unicode text and None
_TO_UNICODE_TYPES = (unicode, type(None))

def to_unicode(value):
    ''' Coerce value to unicode; byte strings are decoded as UTF-8,
    while unicode and None pass through untouched. '''
    if isinstance(value, _TO_UNICODE_TYPES):
        return value
    return value.decode("utf-8")
2013-02-25 22:32:52 +00:00
def get_diff(diff):
    '''
    Render the --diff output for a file change; called by playbook and
    runner via callbacks.  Skip notices are emitted for binary or
    oversized files, otherwise a unified diff (10 lines of context) with
    'before'/'after' headers is produced.
    '''
    try:
        # silence any warnings difflib might raise while comparing
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            ret = []
            if 'dst_binary' in diff:
                ret.append("diff skipped: destination file appears to be binary\n")
            if 'src_binary' in diff:
                ret.append("diff skipped: source file appears to be binary\n")
            if 'dst_larger' in diff:
                ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
            if 'src_larger' in diff:
                ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
            if 'before' in diff and 'after' in diff:
                # default headers, overridden when the caller supplied names
                before_header = 'before'
                if 'before_header' in diff:
                    before_header = "before: %s" % diff['before_header']
                after_header = 'after'
                if 'after_header' in diff:
                    after_header = "after: %s" % diff['after_header']
                ret.extend(difflib.unified_diff(
                    to_unicode(diff['before']).splitlines(True),
                    to_unicode(diff['after']).splitlines(True),
                    before_header, after_header, '', '', 10))
            return u"".join(ret)
    except UnicodeDecodeError:
        return ">> the files are different, but the diff library cannot compare unicode strings"
2013-03-01 23:32:32 +00:00
def is_list_of_strings(items):
    ''' True when every element of items is a string (str or unicode);
    vacuously True for an empty iterable. '''
    return all(isinstance(element, basestring) for element in items)
2013-03-01 23:32:32 +00:00
2014-03-31 22:33:40 +00:00
def safe_eval(expr, locals={}, include_exceptions=False):
    '''
    this is intended for allowing things like:
    with_items: a_list_variable

    where Jinja2 would return a string
    but we do not want to allow it to call functions (outside of Jinja2, where
    the env is constrained)

    Based on:
    http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe

    :param expr: expression string to evaluate (non-strings pass through)
    :param locals: name bindings visible to the evaluated expression
        (only ever read, so the mutable default is safe here)
    :param include_exceptions: when True, return (result, exception) tuples
    :returns: the evaluated value, or expr unchanged when it is unsafe or
        does not parse; a 2-tuple when include_exceptions is set
    '''

    # this is the whitelist of AST nodes we are going to
    # allow in the evaluation. Any node type other than
    # those listed here will raise an exception in our custom
    # visitor class defined below.
    SAFE_NODES = set(
        (
            ast.Expression,
            ast.Compare,
            ast.Str,
            ast.List,
            ast.Tuple,
            ast.Dict,
            ast.Call,
            ast.Load,
            ast.BinOp,
            ast.UnaryOp,
            ast.Num,
            ast.Name,
            ast.Add,
            ast.Sub,
            ast.Mult,
            ast.Div,
        )
    )

    # AST node types were expanded after 2.6
    if not sys.version.startswith('2.6'):
        # BUGFIX: set.union() returns a new set; its result was previously
        # discarded, so ast.Set never actually made it into the whitelist
        SAFE_NODES = SAFE_NODES.union(set((ast.Set,)))

    # builtin functions that are not safe to call
    INVALID_CALLS = (
        'classmethod', 'compile', 'delattr', 'eval', 'execfile', 'file',
        'filter', 'help', 'input', 'object', 'open', 'raw_input', 'reduce',
        'reload', 'repr', 'setattr', 'staticmethod', 'super', 'type',
    )

    class CleansingNodeVisitor(ast.NodeVisitor):
        def generic_visit(self, node):
            # reject any node type outside the whitelist, then recurse
            if type(node) not in SAFE_NODES:
                raise Exception("invalid expression (%s)" % expr)
            super(CleansingNodeVisitor, self).generic_visit(node)
        def visit_Call(self, call):
            # an attribute call (foo.bar()) has no .id; the resulting
            # AttributeError is caught below and expr is returned as-is
            if call.func.id in INVALID_CALLS:
                raise Exception("invalid function: %s" % call.func.id)
            # BUGFIX: recurse into the call's children; previously the
            # arguments of a call were never checked against the whitelist
            self.generic_visit(call)

    if not isinstance(expr, basestring):
        # already templated to a datastructure, perhaps?
        if include_exceptions:
            return (expr, None)
        return expr

    try:
        parsed_tree = ast.parse(expr, mode='eval')
        cnv = CleansingNodeVisitor()
        cnv.visit(parsed_tree)
        compiled = compile(parsed_tree, expr, 'eval')
        # empty globals plus the caller-supplied locals keeps builtins out
        result = eval(compiled, {}, locals)

        if include_exceptions:
            return (result, None)
        else:
            return result

    except SyntaxError as e:
        # special handling for syntax errors, we just return
        # the expression string back as-is
        if include_exceptions:
            return (expr, None)
        return expr

    except Exception as e:
        if include_exceptions:
            return (expr, e)
        return expr
2013-04-10 22:42:54 +00:00
2013-04-16 22:50:00 +00:00
def listify_lookup_plugin_terms(terms, basedir, inject):
    ''' Normalize the terms handed to a lookup plugin into a real list.

    A bare string may name a variable (templated through Jinja2) or be a
    Jinja2-stringified list/dict that needs to be evaluated back into a
    datastructure; anything still a plain string afterwards is wrapped
    in a single-element list.

    :param terms: the raw with_items/lookup argument (string or list)
    :param basedir: playbook base directory for templating
    :param inject: variable dictionary for templating
    '''

    if isinstance(terms, basestring):
        # someone did:
        # with_items: alist
        # OR
        # with_items: {{ alist }}

        stripped = terms.strip()
        if not (stripped.startswith('{') or stripped.startswith('[')) and not stripped.startswith("/") and not stripped.startswith('set(['):
            # if not already a list, get ready to evaluate with Jinja2
            # not sure why the "/" is in above code :)

            try:
                new_terms = template.template(basedir, "{{ %s }}" % terms, inject)
                # if templating still left unresolved {{ }} markers, keep
                # the original string rather than a half-templated one
                if isinstance(new_terms, basestring) and "{{" in new_terms:
                    pass
                else:
                    terms = new_terms
            except:
                # best-effort templating: fall back to the raw string
                pass

        if '{' in terms or '[' in terms:
            # Jinja2 already evaluated a variable to a list.
            # Jinja2-ified list needs to be converted back to a real type
            # TODO: something a bit less heavy than eval
            return safe_eval(terms)

    if isinstance(terms, basestring):
        terms = [ terms ]

    return terms
2013-04-10 20:17:24 +00:00
2014-01-03 18:46:31 +00:00
def deprecated(msg, version, removed=False):
    ''' used to print out a deprecation message.

    :param msg: description of the deprecated feature
    :param version: release in which the feature will be removed; falsy
        means "a future release"
    :param removed: when True, the feature is already gone and an
        AnsibleError is raised instead of printing a warning
    '''

    # warnings are suppressed entirely when deprecation_warnings is off
    # in ansible.cfg (removal errors still fire)
    if not removed and not C.DEPRECATION_WARNINGS:
        return

    if not removed:
        if version:
            new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (msg, version)
        else:
            new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (msg)
        new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n"
    else:
        raise errors.AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg)

    wrapped = textwrap.wrap(new_msg, 79)
    new_msg = "\n".join(wrapped) + "\n"

    # the module-level deprecations dict ensures each unique message is
    # only displayed once per run
    if new_msg not in deprecations:
        display(new_msg, color='purple', stderr=True)
        deprecations[new_msg] = 1
2013-10-11 23:04:26 +00:00
def warning(msg):
    ''' Print a [WARNING] banner to stderr, at most once per unique
    message (tracked in the module-level warns dict). '''
    banner = "\n[WARNING]: %s" % msg
    banner = "\n".join(textwrap.wrap(banner, 79)) + "\n"
    if banner not in warns:
        display(banner, color='bright purple', stderr=True)
        warns[banner] = 1
2013-10-11 23:04:26 +00:00
2013-03-09 23:30:18 +00:00
def combine_vars(a, b):
    ''' Combine two variable dicts according to the configured hash
    behaviour: deep-merge when hash_behaviour=merge (via merge_hash,
    defined elsewhere in this module), otherwise a shallow combine in
    which keys from b replace keys from a. '''

    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
        return merge_hash(a, b)
    else:
        # shallow 'replace' semantics: b's items are listed last, so they win
        return dict(a.items() + b.items())
2013-04-10 20:17:24 +00:00
2013-11-01 14:51:35 +00:00
def random_password(length=20, chars=C.DEFAULT_PASSWORD_CHARS):
    ''' Return a random password string of length containing only chars. '''

    # rejection-sample single bytes from the OS CSPRNG, keeping only the
    # ones that fall inside the allowed character set
    chosen = []
    while len(chosen) < length:
        candidate = os.urandom(1)
        if candidate in chars:
            chosen.append(candidate)
    return ''.join(chosen)
2014-03-06 01:10:25 +00:00
def before_comment(msg):
    ''' what's the part of a string before a comment? '''
    # hide escaped hashes behind a sentinel, cut the string at the first
    # real comment marker, then restore the escaped hashes as literal '#'
    sentinel = "**NOT_A_COMMENT**"
    protected = msg.replace("\#", sentinel)
    head = protected.split("#")[0]
    return head.replace(sentinel, "#")
2014-01-23 15:02:17 +00:00