Properly accumulate and parse cURL output for progress

Mitchell Hashimoto 2013-03-17 11:59:18 -07:00
parent 05c6379abb
commit d1e2596ce5
1 changed file with 35 additions and 23 deletions


@@ -42,15 +42,26 @@ module Vagrant
         # tell us output so we can parse it out.
         options << { :notify => :stderr }

+        progress_data = ""
+        progress_regexp = /(\r(.+?))\r/
+
         # Setup the proc that'll receive the real-time data from
         # the downloader.
         data_proc = Proc.new do |type, data|
           # Type will always be "stderr" because that is the only
           # type of data we're subscribed for notifications.

-          # If the data doesn't start with a \r then it isn't a progress
-          # notification, so ignore it.
-          next if data[0] != "\r"
+          # Accumulate progress_data
+          progress_data << data
+
+          while true
+            # If we have a full amount of column data (two "\r") then
+            # we report new progress reports. Otherwise, just keep
+            # accumulating.
+            match = progress_regexp.match(progress_data)
+            break if !match
+            data = match[2]
+            progress_data.gsub!(match[1], "")

           # Ignore the first \r and split by whitespace to grab the columns
           columns = data[1..-1].split(/\s+/)
@@ -76,6 +87,7 @@ module Vagrant
             @ui.info(output, :new_line => false)
           end
         end
+        end

         # Create the callback that is called if we are interrupted
         interrupted = false
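
For context, curl redraws its progress meter on stderr with carriage returns rather than newlines, so the subprocess callback can receive a progress row split across several chunks. The change above buffers everything it receives and only parses a row once it is bounded by two "\r" characters. Below is a minimal standalone sketch of the same accumulate-then-parse idea; ProgressParser, feed, and the sample rows are illustrative only, not Vagrant's actual API.

# Sketch: buffer raw curl stderr and fire a progress callback only for
# complete rows, i.e. text bracketed by two "\r" characters.
class ProgressParser
  PROGRESS_REGEXP = /(\r(.+?))\r/

  def initialize(&on_progress)
    @buffer      = ""
    @on_progress = on_progress
  end

  # Feed each stderr chunk exactly as the subprocess notifier delivers it.
  def feed(chunk)
    @buffer << chunk

    # Keep extracting rows while a complete one is buffered.
    while (match = PROGRESS_REGEXP.match(@buffer))
      row = match[2]
      @buffer.sub!(match[1], "")  # drop the consumed portion; the trailing \r stays

      # curl's meter is whitespace-separated columns; the first column is the
      # overall percent transferred. Non-numeric rows (e.g. the header) are skipped.
      columns = row.strip.split(/\s+/)
      @on_progress.call(columns[0]) if columns[0] =~ /\A\d+\z/
    end
  end
end

# Usage with deliberately split chunks:
parser = ProgressParser.new { |percent| puts "Progress: #{percent}%" }
parser.feed("\r 42  9540k   42  4055k    0     0")
parser.feed("    512k      0  0:00:18  0:00:07  0:00:11   512k\r")
# => Progress: 42%

The key design point mirrors the commit: partial chunks are never parsed directly; a row is only split into columns after a second "\r" proves it is complete, and the leftover "\r" stays in the buffer as the start of the next row.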