require "tempfile"
require "thread"

require_relative "base"

module VagrantPlugins
  module Ansible
    module Provisioner
      class Host < Base

        @@lock = Mutex.new

        def initialize(machine, config)
          super
          @logger = Log4r::Logger.new("vagrant::provisioners::ansible_host")
        end

        def provision
          # At this stage, the SSH access is guaranteed to be ready
          @ssh_info = @machine.ssh_info

          check_files_existence
          warn_for_unsupported_platform

          execute_ansible_galaxy_from_host if config.galaxy_role_file
          execute_ansible_playbook_from_host
        end

        protected
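
        # Placeholder token used to safely split the (user-configurable) galaxy
        # command template into separate subprocess arguments: spaces in the template
        # are first replaced by this token, the %{...} placeholders are interpolated,
        # and the rendered string is split back on the token (see
        # execute_ansible_galaxy_from_host). Interpolated values that contain spaces
        # therefore stay within a single argument.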
        VAGRANT_ARG_SEPARATOR = 'VAGRANT_ARG_SEP'

        def warn_for_unsupported_platform
          if Vagrant::Util::Platform.windows?
            @machine.env.ui.warn(I18n.t("vagrant.provisioners.ansible.windows_not_supported_for_control_machine"))
          end
        end

        def prepare_command_arguments
          # Connect with the native OpenSSH client.
          # Other modes (e.g. paramiko) are not officially supported,
          # but can be enabled via the raw_arguments option.
          @command_arguments << "--connection=ssh"

          # Increase the SSH connection timeout, as the Ansible default value (10 seconds)
          # is a bit demanding for some overloaded developer boxes. This is particularly
          # helpful when additional virtual networks are configured, as their availability
          # is not controlled during the vagrant boot process.
          @command_arguments << "--timeout=30"

          if !config.force_remote_user
            # Pass the vagrant ssh username as the Ansible default remote user, because
            # the ansible_ssh_user parameter won't be added to the auto-generated inventory.
            @command_arguments << "--user=#{@ssh_info[:username]}"
          elsif config.inventory_path
            # Using an extra variable is the only way to ensure that the Ansible remote user
            # is overridden (as the ansible inventory is not under vagrant control).
            @command_arguments << "--extra-vars=ansible_ssh_user='#{@ssh_info[:username]}'"
          end

          @command_arguments << "--ask-sudo-pass" if config.ask_sudo_pass
          @command_arguments << "--ask-vault-pass" if config.ask_vault_pass

          prepare_common_command_arguments
        end

        def prepare_environment_variables
          prepare_common_environment_variables

          # Some Ansible options must be passed as environment variables,
          # as there are no equivalent command line arguments.
          @environment_variables["ANSIBLE_HOST_KEY_CHECKING"] = "#{config.host_key_checking}"

          # ANSIBLE_SSH_ARGS is required for multiple SSH keys, SSH forwarding and custom SSH settings.
          @environment_variables["ANSIBLE_SSH_ARGS"] = ansible_ssh_args unless ansible_ssh_args.empty?
        end

        def execute_command_from_host(command)
          begin
            result = Vagrant::Util::Subprocess.execute(*command) do |type, data|
              if type == :stdout || type == :stderr
                @machine.env.ui.detail(data, new_line: false, prefix: false)
              end
            end
            raise Ansible::Errors::AnsibleCommandFailed if result.exit_code != 0
          rescue Vagrant::Errors::CommandUnavailable
            raise Ansible::Errors::AnsibleNotFoundOnHost
          end
        end
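
        # Runs the (user-configurable) `config.galaxy_command` on the host, so that
        # the roles listed in the galaxy role file are installed before the playbook
        # is executed.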
        def execute_ansible_galaxy_from_host
          command_values = {
            role_file: get_galaxy_role_file,
            roles_path: get_galaxy_roles_path
          }
          command_template = config.galaxy_command.gsub(' ', VAGRANT_ARG_SEPARATOR)
          str_command = command_template % command_values

          command = str_command.split(VAGRANT_ARG_SEPARATOR)
          command << {
            # Write stdout and stderr data, since it's the regular Ansible output
            notify: [:stdout, :stderr],
            workdir: @machine.env.root_path.to_s
          }

          # FIXME: role_file and roles_path arguments should be quoted in the console output
          ui_running_ansible_command "galaxy", str_command.gsub(VAGRANT_ARG_SEPARATOR, ' ')

          execute_command_from_host command
        end

        def execute_ansible_playbook_from_host
          prepare_command_arguments
          prepare_environment_variables

          # Assemble the full ansible-playbook command
          command = %w(ansible-playbook) << @command_arguments

          # Add the raw arguments at the end, to give them the highest precedence
          command << config.raw_arguments if config.raw_arguments

          command << config.playbook
          command = command.flatten

          command << {
            env: @environment_variables,
            # Write stdout and stderr data, since it's the regular Ansible output
            notify: [:stdout, :stderr],
            workdir: @machine.env.root_path.to_s
          }

          ui_running_ansible_command "playbook", ansible_playbook_command_for_shell_execution

          execute_command_from_host command
        end
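
        # Writes the auto-generated inventory to disk, but only when its content has
        # changed. The write goes through a temporary file in the same directory,
        # followed by a rename, so a concurrently running Ansible process never reads
        # a half-written inventory; the class-level mutex serializes writes when
        # several machines are provisioned at the same time.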
        def ship_generated_inventory(inventory_content)
          inventory_path = Pathname.new(File.join(@machine.env.local_data_path.join, %w(provisioners ansible inventory)))
          FileUtils.mkdir_p(inventory_path) unless File.directory?(inventory_path)

          inventory_file = Pathname.new(File.join(inventory_path, 'vagrant_ansible_inventory'))
          @@lock.synchronize do
            if !File.exists?(inventory_file) or inventory_content != File.read(inventory_file)
              begin
                # ansible dir inventory will ignore files starting with '.'
                inventory_tmpfile = Tempfile.new('.vagrant_ansible_inventory', inventory_path)
                inventory_tmpfile.write(inventory_content)
                inventory_tmpfile.close
                File.rename(inventory_tmpfile.path, inventory_file)
              ensure
                inventory_tmpfile.close
                inventory_tmpfile.unlink
              end
            end
          end

          return inventory_path
        end
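
        # Builds the machine list of the auto-generated inventory: one line per active
        # machine, based on WinRM connection details for WinRM-configured guests and
        # on SSH details otherwise. Machines without usable SSH information are flagged
        # with a "# MISSING" note, and machines that are no longer part of this
        # environment are skipped.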
        def generate_inventory_machines
          machines = ""

          @machine.env.active_machines.each do |am|
            begin
              m = @machine.env.machine(*am)

              # Compute the SSH and WinRM connection info only once per machine.
              # Note that machines configured with the WinRM communicator also have a "partial" ssh_info.
              m_ssh_info = m.ssh_info
              host_vars = get_inventory_host_vars_string(m.name)
              if m.config.vm.communicator == :winrm
                m_winrm_net_info = CommunicatorWinRM::Helper.winrm_info(m) # can raise a WinRMNotReady exception...
                machines += get_inventory_winrm_machine(m, m_winrm_net_info)
                machines.sub!(/\n$/, " #{host_vars}\n") if host_vars
                @inventory_machines[m.name] = m
              elsif !m_ssh_info.nil?
                machines += get_inventory_ssh_machine(m, m_ssh_info)
                machines.sub!(/\n$/, " #{host_vars}\n") if host_vars
                @inventory_machines[m.name] = m
              else
                @logger.error("Auto-generated inventory: Impossible to get SSH information for machine '#{m.name} (#{m.provider_name})'. This machine should be recreated.")
                # Leave a note about this missing machine
                machines += "# MISSING: '#{m.name}' machine was probably removed without using Vagrant. This machine should be recreated.\n"
              end
            rescue Vagrant::Errors::MachineNotFound, CommunicatorWinRM::Errors::WinRMNotReady => e
              @logger.info("Auto-generated inventory: Skip machine '#{am[0]} (#{am[1]})', which is not configured for this Vagrant environment.")
            end
          end

          return machines
        end
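
        # The provisioning working directory is the Vagrant project root (the
        # directory containing the Vagrantfile).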
        def get_provisioning_working_directory
          machine.env.root_path
        end
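
        # Each helper below renders a single inventory line for a machine, using
        # Ansible's `ansible_ssh_*` behavioral parameters (plus `ansible_connection`
        # for WinRM guests).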
        def get_inventory_ssh_machine(machine, ssh_info)
          forced_remote_user = ""
          if config.force_remote_user
            forced_remote_user = "ansible_ssh_user='#{ssh_info[:username]}' "
          end

          "#{machine.name} ansible_ssh_host=#{ssh_info[:host]} ansible_ssh_port=#{ssh_info[:port]} #{forced_remote_user}ansible_ssh_private_key_file='#{ssh_info[:private_key_path][0]}'\n"
        end

        def get_inventory_winrm_machine(machine, winrm_net_info)
          forced_remote_user = ""
          if config.force_remote_user
            forced_remote_user = "ansible_ssh_user='#{machine.config.winrm.username}' "
          end

          "#{machine.name} ansible_connection=winrm ansible_ssh_host=#{winrm_net_info[:host]} ansible_ssh_port=#{winrm_net_info[:port]} #{forced_remote_user}ansible_ssh_pass='#{machine.config.winrm.password}'\n"
        end
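
        # Memoize the computed ANSIBLE_SSH_ARGS value, as it is read more than once
        # during a single provisioning run (see prepare_environment_variables).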
        def ansible_ssh_args
          @ansible_ssh_args ||= prepare_ansible_ssh_args
        end

        def prepare_ansible_ssh_args
          ssh_options = []

          # Use an SSH ProxyCommand when using the Docker provider with the intermediate host
          if @machine.provider_name == :docker && machine.provider.host_vm?
            docker_host_ssh_info = machine.provider.host_vm.ssh_info

            proxy_cmd = "ssh #{docker_host_ssh_info[:username]}@#{docker_host_ssh_info[:host]}" +
              " -p #{docker_host_ssh_info[:port]} -i #{docker_host_ssh_info[:private_key_path][0]}"

            # Use the same options as plugins/providers/docker/communicator.rb
            # Note: this could be improved (DRY'ed) by sharing these settings.
            proxy_cmd += " -o Compression=yes -o ConnectTimeout=5 -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"

            proxy_cmd += " -o ForwardAgent=yes" if @ssh_info[:forward_agent]

            proxy_cmd += " exec nc %h %p 2>/dev/null"

            ssh_options << "-o ProxyCommand='#{proxy_cmd}'"
          end

          # Don't access the user's known_hosts file, except when host_key_checking is enabled.
          ssh_options << "-o UserKnownHostsFile=/dev/null" unless config.host_key_checking

          # Set IdentitiesOnly=yes to avoid authentication errors when the host has more than 5 ssh keys.
          # Notes:
          #  - Solaris/OpenSolaris/Illumos uses SunSSH, which doesn't support the IdentitiesOnly option.
          #  - this could be improved by sharing logic with lib/vagrant/util/ssh.rb
          ssh_options << "-o IdentitiesOnly=yes" unless Vagrant::Util::Platform.solaris?

          # Multiple private keys
          unless !config.inventory_path && @ssh_info[:private_key_path].size == 1
            @ssh_info[:private_key_path].each do |key|
              ssh_options << "-i '#{key}'"
            end
          end

          # SSH forwarding
          ssh_options << "-o ForwardAgent=yes" if @ssh_info[:forward_agent]

          # Unchecked SSH parameters
          ssh_options.concat(config.raw_ssh_args) if config.raw_ssh_args

          # Re-enable the ControlPersist Ansible defaults,
          # which are lost when ANSIBLE_SSH_ARGS is defined.
          unless ssh_options.empty?
            ssh_options << "-o ControlMaster=auto"
            ssh_options << "-o ControlPersist=60s"
            # Intentionally keep ControlPath undefined to let ansible-playbook
            # automatically set this option to the Ansible default value.
          end

          ssh_options.join(' ')
        end

        def check_path(path, path_test_method, option_name)
          # Checks for the existence of the given file (or directory) on the host system,
          # and raises an error if it doesn't exist.
          expanded_path = Pathname.new(path).expand_path(@machine.env.root_path)
          if !expanded_path.public_send(path_test_method)
            raise Ansible::Errors::AnsibleError,
              _key: :config_file_not_found,
              config_option: option_name,
              path: expanded_path,
              system: "host"
          end
        end

        def check_path_is_a_file(path, option_name)
          check_path(path, "file?", option_name)
        end

        def check_path_exists(path, option_name)
          check_path(path, "exist?", option_name)
        end

      end
    end
  end
end