Allow Ansible provisioner to run reliably in parallel
The Ansible Vagrant provisioner has a race condition: the generated inventory file is rewritten every time the provisioner runs (unless the user supplies their own inventory file). Therefore, if Vagrant provisions two nodes in parallel, the following race can occur: * System A writes the inventory file and invokes Ansible. * System B starts provisioning and truncates the file before writing a new one. * Ansible on system A then reads the inventory file, which is momentarily blank, and aborts with "ERROR: provided hosts list is empty". To fix this, Vagrant now only rewrites the inventory file when its contents have actually changed, and serializes the check-and-write with a lock.
This commit is contained in:
parent
04ed8d3d03
commit
eb6aa2ac8c
|
@ -1,9 +1,12 @@
|
||||||
require "vagrant/util/platform"
|
require "vagrant/util/platform"
|
||||||
|
require "thread"
|
||||||
|
|
||||||
module VagrantPlugins
|
module VagrantPlugins
|
||||||
module Ansible
|
module Ansible
|
||||||
class Provisioner < Vagrant.plugin("2", :provisioner)
|
class Provisioner < Vagrant.plugin("2", :provisioner)
|
||||||
|
|
||||||
|
@@lock = Mutex.new
|
||||||
|
|
||||||
def initialize(machine, config)
|
def initialize(machine, config)
|
||||||
super
|
super
|
||||||
|
|
||||||
|
@ -117,55 +120,63 @@ module VagrantPlugins
|
||||||
FileUtils.mkdir_p(generated_inventory_dir) unless File.directory?(generated_inventory_dir)
|
FileUtils.mkdir_p(generated_inventory_dir) unless File.directory?(generated_inventory_dir)
|
||||||
generated_inventory_file = generated_inventory_dir.join('vagrant_ansible_inventory')
|
generated_inventory_file = generated_inventory_dir.join('vagrant_ansible_inventory')
|
||||||
|
|
||||||
generated_inventory_file.open('w') do |file|
|
inventory = "# Generated by Vagrant\n\n"
|
||||||
file.write("# Generated by Vagrant\n\n")
|
|
||||||
|
|
||||||
@machine.env.active_machines.each do |am|
|
@machine.env.active_machines.each do |am|
|
||||||
begin
|
begin
|
||||||
m = @machine.env.machine(*am)
|
m = @machine.env.machine(*am)
|
||||||
m_ssh_info = m.ssh_info
|
m_ssh_info = m.ssh_info
|
||||||
if !m_ssh_info.nil?
|
if !m_ssh_info.nil?
|
||||||
file.write("#{m.name} ansible_ssh_host=#{m_ssh_info[:host]} ansible_ssh_port=#{m_ssh_info[:port]}\n")
|
inventory += "#{m.name} ansible_ssh_host=#{m_ssh_info[:host]} ansible_ssh_port=#{m_ssh_info[:port]}\n"
|
||||||
inventory_machines[m.name] = m
|
inventory_machines[m.name] = m
|
||||||
else
|
else
|
||||||
@logger.error("Auto-generated inventory: Impossible to get SSH information for machine '#{m.name} (#{m.provider_name})'. This machine should be recreated.")
|
@logger.error("Auto-generated inventory: Impossible to get SSH information for machine '#{m.name} (#{m.provider_name})'. This machine should be recreated.")
|
||||||
# Let a note about this missing machine
|
# Let a note about this missing machine
|
||||||
file.write("# MISSING: '#{m.name}' machine was probably removed without using Vagrant. This machine should be recreated.\n")
|
inventory += "# MISSING: '#{m.name}' machine was probably removed without using Vagrant. This machine should be recreated.\n"
|
||||||
end
|
|
||||||
rescue Vagrant::Errors::MachineNotFound => e
|
|
||||||
@logger.info("Auto-generated inventory: Skip machine '#{am[0]} (#{am[1]})', which is not configured for this Vagrant environment.")
|
|
||||||
end
|
end
|
||||||
|
rescue Vagrant::Errors::MachineNotFound => e
|
||||||
|
@logger.info("Auto-generated inventory: Skip machine '#{am[0]} (#{am[1]})', which is not configured for this Vagrant environment.")
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
|
||||||
# Write out groups information.
|
# Write out groups information.
|
||||||
# All defined groups will be included, but only supported
|
# All defined groups will be included, but only supported
|
||||||
# machines and defined child groups will be included.
|
# machines and defined child groups will be included.
|
||||||
# Group variables are intentionally skipped.
|
# Group variables are intentionally skipped.
|
||||||
groups_of_groups = {}
|
groups_of_groups = {}
|
||||||
defined_groups = []
|
defined_groups = []
|
||||||
|
|
||||||
config.groups.each_pair do |gname, gmembers|
|
config.groups.each_pair do |gname, gmembers|
|
||||||
# Require that gmembers be an array
|
# Require that gmembers be an array
|
||||||
# (easier to be tolerant and avoid error management of few value)
|
# (easier to be tolerant and avoid error management of few value)
|
||||||
gmembers = [gmembers] if !gmembers.is_a?(Array)
|
gmembers = [gmembers] if !gmembers.is_a?(Array)
|
||||||
|
|
||||||
if gname.end_with?(":children")
|
if gname.end_with?(":children")
|
||||||
groups_of_groups[gname] = gmembers
|
groups_of_groups[gname] = gmembers
|
||||||
defined_groups << gname.sub(/:children$/, '')
|
defined_groups << gname.sub(/:children$/, '')
|
||||||
elsif !gname.include?(':vars')
|
elsif !gname.include?(':vars')
|
||||||
defined_groups << gname
|
defined_groups << gname
|
||||||
file.write("\n[#{gname}]\n")
|
inventory += "\n[#{gname}]\n"
|
||||||
gmembers.each do |gm|
|
|
||||||
file.write("#{gm}\n") if inventory_machines.include?(gm.to_sym)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defined_groups.uniq!
|
|
||||||
groups_of_groups.each_pair do |gname, gmembers|
|
|
||||||
file.write("\n[#{gname}]\n")
|
|
||||||
gmembers.each do |gm|
|
gmembers.each do |gm|
|
||||||
file.write("#{gm}\n") if defined_groups.include?(gm)
|
inventory += "#{gm}\n" if inventory_machines.include?(gm.to_sym)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defined_groups.uniq!
|
||||||
|
groups_of_groups.each_pair do |gname, gmembers|
|
||||||
|
inventory += "\n[#{gname}]\n"
|
||||||
|
gmembers.each do |gm|
|
||||||
|
inventory += "#{gm}\n" if defined_groups.include?(gm)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@@lock.synchronize do
|
||||||
|
if ! File.exists?(generated_inventory_file) or
|
||||||
|
inventory != File.read(generated_inventory_file)
|
||||||
|
|
||||||
|
generated_inventory_file.open('w') do |file|
|
||||||
|
file.write(inventory)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
Loading…
Reference in New Issue