Add CI scripts and configuration
Adds scripts for building, storing, and releasing Vagrant via the builders repository, updates the test setup, and removes deprecated CI configuration.
parent 7f09202571
commit ee302f3a9b
@@ -0,0 +1,20 @@
#!/usr/bin/env bash

csource="${BASH_SOURCE[0]}"
while [ -h "$csource" ] ; do csource="$(readlink "$csource")"; done
root="$( cd -P "$( dirname "$csource" )/../" && pwd )"

. "${root}/.ci/init.sh"

pushd "${root}" > "${output}"

# Build our gem
wrap gem build *.gemspec \
    "Failed to build Vagrant RubyGem"

# Get the path of our new gem
g=(vagrant*.gem)
gem=$(printf "%s" "${g}")

wrap aws s3 cp "${gem}" "${ASSETS_PRIVATE_BUCKET}/${repository}/vagrant-master.gem" \
    "Failed to store Vagrant RubyGem master build"
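For orientation, a rough sketch of running this build script by hand; the repository, SHA, bucket, and webhook values below are placeholders (in CI they are supplied by the GitHub Actions workflow added later in this commit):

# Hypothetical manual invocation with placeholder values
export GITHUB_REPOSITORY="hashicorp/vagrant"
export GITHUB_REF="refs/heads/master"
export GITHUB_SHA="0123456789abcdef0123456789abcdef01234567"   # placeholder SHA
export ASSETS_PRIVATE_BUCKET="s3://example-private-bucket"      # placeholder bucket
export SLACK_WEBHOOK="https://hooks.slack.com/services/EXAMPLE" # placeholder webhook
./.ci/build.sh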
@@ -0,0 +1,432 @@
#!/usr/bin/env bash
# last-modified: Tue Jan 14 20:37:58 UTC 2020

# Path to file used for output redirect
# and extracting messages for warning and
# failure information sent to slack
function output_file() {
    printf "/tmp/.ci-output"
}

# Write failure message, send error to configured
# slack, and exit with non-zero status. If an
# "$(output_file)" file exists, the last 5 lines will be
# included in the slack message.
#
# $1: Failure message
function fail() {
    (>&2 echo "ERROR: ${1}")
    if [ -f "$(output_file)" ]; then
        slack -s error -m "ERROR: ${1}" -f "$(output_file)" -T 5
    else
        slack -s error -m "ERROR: ${1}"
    fi
    exit 1
}

# Write warning message, send warning to configured
# slack
#
# $1: Warning message
function warn() {
    (>&2 echo "WARN: ${1}")
    if [ -f "$(output_file)" ]; then
        slack -s warn -m "WARNING: ${1}" -f "$(output_file)"
    else
        slack -s warn -m "WARNING: ${1}"
    fi
}

# Execute command while redirecting all output to
# a file (file is used within the fail message when
# the command is unsuccessful). Final argument is the
# error message used when the command fails.
#
# $@{1:$#-1}: Command to execute
# $@{$#}: Failure message
function wrap() {
    i=$(("${#}" - 1))
    wrap_raw "${@:1:$i}"
    if [ $? -ne 0 ]; then
        cat "$(output_file)"
        fail "${@:$#}"
    fi
    rm "$(output_file)"
}

# Execute command while redirecting all output to
# a file. Exit status is returned.
function wrap_raw() {
    rm -f "$(output_file)"
    "${@}" > "$(output_file)" 2>&1
    return $?
}

# Execute command while redirecting all output to
# a file (file is used within the fail message when
# the command is unsuccessful). Command output will be
# streamed during execution. Final argument is the
# error message used when the command fails.
#
# $@{1:$#-1}: Command to execute
# $@{$#}: Failure message
function wrap_stream() {
    i=$(("${#}" - 1))
    wrap_stream_raw "${@:1:$i}"
    if [ $? -ne 0 ]; then
        fail "${@:$#}"
    fi
    rm "$(output_file)"
}

# Execute command while redirecting all output
# to a file. Command output will be streamed
# during execution. Exit status is returned
function wrap_stream_raw() {
    rm -f "$(output_file)"
    "${@}" > "$(output_file)" 2>&1 &
    pid=$!
    until [ -f "$(output_file)" ]; do
        sleep 0.1
    done
    tail -f --quiet --pid "${pid}" "$(output_file)"
    wait "${pid}"
    return $?
}

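For illustration, a minimal sketch of how these helpers are meant to be called; the commands and messages below are made up, not from this commit. The final argument is always the failure message, everything before it is the command:

# Hypothetical usage of the wrap helpers defined above
wrap bundle install \
    "Failed to install gem dependencies"   # quiet: output surfaced only on failure
wrap_stream rake build \
    "Failed to build project"              # streams output while the command runs
if ! wrap_raw git fetch origin; then
    warn "Fetch failed, continuing with cached copy"   # raw variant just returns the exit status
fi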
# Send command to packet device and wrap
# execution
# $@{1:$#-1}: Command to execute
# $@{$#}: Failure message
function pkt_wrap() {
    wrap packet-exec run -quiet -- "${@}"
}

# Send command to packet device and wrap
# execution
# $@: Command to execute
function pkt_wrap_raw() {
    wrap_raw packet-exec run -quiet -- "${@}"
}

# Send command to packet device and wrap
# execution with output streaming
# $@{1:$#-1}: Command to execute
# $@{$#}: Failure message
function pkt_wrap_stream() {
    wrap_stream packet-exec run -quiet -- "${@}"
}

# Send command to packet device and wrap
# execution with output streaming
# $@: Command to execute
function pkt_wrap_stream_raw() {
    wrap_stream_raw packet-exec run -quiet -- "${@}"
}

# Generates location within the asset storage
# bucket to retain built assets.
function asset_location() {
    if [ "${tag}" = "" ]; then
        dst="${ASSETS_PRIVATE_LONGTERM}/${repository}/${ident_ref}/${short_sha}"
    else
        if [[ "${tag}" = *"+"* ]]; then
            dst="${ASSETS_PRIVATE_LONGTERM}/${repository}/${tag}"
        else
            dst="${ASSETS_PRIVATE_BUCKET}/${repository}/${tag}"
        fi
    fi
    echo -n "${dst}"
}

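As a sketch of the layout this produces (the repository name and SHA below are illustrative, the bucket values stay as environment variables): untagged builds land under the long-term prefix by ref and short SHA, prerelease tags containing "+" also go to long-term storage, and plain release tags go to the primary private bucket.

# Hypothetical destinations returned by asset_location
#   no tag:              ${ASSETS_PRIVATE_LONGTERM}/hashicorp/vagrant/master/01234567
#   tag v2.2.7+01234567: ${ASSETS_PRIVATE_LONGTERM}/hashicorp/vagrant/v2.2.7+01234567
#   tag v2.2.7:          ${ASSETS_PRIVATE_BUCKET}/hashicorp/vagrant/v2.2.7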
# Upload assets to the asset storage bucket.
#
# $1: Path to asset file or directory to upload
function upload_assets() {
    if [ "${1}" = "" ]; then
        fail "Parameter required for asset upload"
    fi
    if [ -d "${1}" ]; then
        wrap aws s3 cp --recursive "${1}" "$(asset_location)/" \
            "Upload to asset storage failed"
    else
        wrap aws s3 cp "${1}" "$(asset_location)/" \
            "Upload to asset storage failed"
    fi
}

# Download assets from the asset storage bucket. If
# destination is not provided, remote path will be
# used locally.
#
# $1: Path to asset or directory to download
# $2: Optional destination for downloaded assets
function download_assets() {
    if [ "${1}" = "" ]; then
        fail "At least one parameter required for asset download"
    fi
    if [ "${2}" = "" ]; then
        dst="${1#/}"
    else
        dst="${2}"
    fi
    mkdir -p "${dst}"
    src="$(asset_location)/${1#/}"
    remote=$(aws s3 ls "${src}")
    if [[ "${remote}" = *" PRE "* ]]; then
        mkdir -p "${dst}"
        wrap aws s3 cp --recursive "${src%/}/" "${dst}" \
            "Download from asset storage failed"
    else
        mkdir -p "$(dirname "${dst}")"
        wrap aws s3 cp "${src}" "${dst}" \
            "Download from asset storage failed"
    fi
}

# Upload assets to the cache storage bucket.
#
# $1: Path to asset file or directory to upload
function upload_cache() {
    if [ "${1}" = "" ]; then
        fail "Parameter required for cache upload"
    fi
    if [ -d "${1}" ]; then
        wrap aws s3 cp --recursive "${1}" "${asset_cache}/" \
            "Upload to cache failed"
    else
        wrap aws s3 cp "${1}" "${asset_cache}/" \
            "Upload to cache failed"
    fi
}

# Download assets from the cache storage bucket. If
# destination is not provided, remote path will be
# used locally.
#
# $1: Path to asset or directory to download
# $2: Optional destination for downloaded assets
function download_cache() {
    if [ "${1}" = "" ]; then
        fail "At least one parameter required for cache download"
    fi
    if [ "${2}" = "" ]; then
        dst="${1#/}"
    else
        dst="${2}"
    fi
    mkdir -p "${dst}"
    src="${asset_cache}/${1#/}"
    remote=$(aws s3 ls "${src}")
    if [[ "${remote}" = *" PRE "* ]]; then
        mkdir -p "${dst}"
        wrap aws s3 cp --recursive "${src%/}/" "${dst}" \
            "Download from cache storage failed"
    else
        mkdir -p "$(dirname "${dst}")"
        wrap aws s3 cp "${src}" "${dst}" \
            "Download from cache storage failed"
    fi
}

# Validate arguments for GitHub release. Checks for
# two arguments and that second argument is an existing
# file asset, or directory.
#
# $1: GitHub tag name
# $2: Asset file or directory of assets
function release_validate() {
    if [ "${1}" = "" ]; then
        fail "Missing required position 1 argument (TAG) for release"
    fi
    if [ "${2}" = "" ]; then
        fail "Missing required position 2 argument (PATH) for release"
    fi
    if [ ! -e "${2}" ]; then
        fail "Path provided for release (${2}) does not exist"
    fi
}

# Generate a GitHub release
#
# $1: GitHub tag name
# $2: Asset file or directory of assets
function release() {
    release_validate "${@}"
    wrap_raw ghr -u "${repo_owner}" -r "${repo_name}" -c "${full_sha}" -n "${1}" \
        -delete "${1}" "${2}"
    if [ $? -ne 0 ]; then
        wrap ghr -u "${repo_owner}" -r "${repo_name}" -c "${full_sha}" -n "${1}" \
            "${1}" "${2}" "Failed to create release for version ${1}"
    fi
}

# Generate a GitHub prerelease
#
# $1: GitHub tag name
# $2: Asset file or directory of assets
function prerelease() {
    release_validate "${@}"
    if [[ "${1}" != *"+"* ]]; then
        ptag="${1}+${short_sha}"
    else
        ptag="${1}"
    fi

    wrap_raw ghr -u "${repo_owner}" -r "${repo_name}" -c "${full_sha}" -n "${ptag}" \
        -delete -prerelease "${ptag}" "${2}"
    if [ $? -ne 0 ]; then
        wrap ghr -u "${repo_owner}" -r "${repo_name}" -c "${full_sha}" -n "${ptag}" \
            -prerelease "${ptag}" "${2}" \
            "Failed to create prerelease for version ${1}"
    fi
    echo -n "${ptag}"
}

# Check if version string is valid for release
#
# $1: Version
# Returns: 0 if valid, 1 if invalid
function valid_release_version() {
    if [[ "${1}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        return 0
    else
        return 1
    fi
}

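A few illustrative checks against this pattern; the sample version strings are made up, not taken from this commit:

# Hypothetical checks against the pattern above
valid_release_version "v2.2.7"          && echo "valid"    # matches ^v[0-9]+\.[0-9]+\.[0-9]+$
valid_release_version "v2.2.7+01234567" || echo "invalid"  # build metadata is rejected
valid_release_version "2.2.7"           || echo "invalid"  # leading 'v' is required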
# Validate arguments for HashiCorp release. Ensures asset
# directory exists, and checks that the SHASUMS and SHASUMS.sig
# files are present.
#
# $1: Asset directory
function hashicorp_release_validate() {
    directory="${1}"

    # Directory checks
    if [ "${directory}" = "" ]; then
        fail "No asset directory was provided for HashiCorp release"
    fi
    if [ ! -d "${directory}" ]; then
        fail "Asset directory for HashiCorp release does not exist"
    fi

    # SHASUMS checks
    if [ ! -e "${directory}/"*SHASUMS ]; then
        fail "Asset directory is missing SHASUMS file"
    fi
    if [ ! -e "${directory}/"*SHASUMS.sig ]; then
        fail "Asset directory is missing SHASUMS signature file"
    fi
}

# Verify release assets by validating that checksums properly match
# and that the signature file is valid
#
# $1: Asset directory
function hashicorp_release_verify() {
    directory="${1}"
    pushd "${directory}" > "${output}"

    # First do a checksum validation
    wrap shasum -a 256 -c *_SHA256SUMS \
        "Checksum validation of release assets failed"
    # Next check that the signature is valid
    gpghome=$(mktemp -qd)
    export GNUPGHOME="${gpghome}"
    wrap gpg --import "${HASHICORP_PUBLIC_GPG_KEY}" \
        "Failed to import HashiCorp public GPG key"
    wrap gpg --verify *SHA256SUMS.sig *SHA256SUMS \
        "Validation of SHA256SUMS signature failed"
    rm -rf "${gpghome}" > "${output}" 2>&1
    popd > "${output}"
}

# Generate a HashiCorp release
#
# $1: Asset directory
function hashicorp_release() {
    directory="${1}"

    hashicorp_release_validate "${directory}"
    hashicorp_release_verify "${directory}"

    oid="${AWS_ACCESS_KEY_ID}"
    okey="${AWS_SECRET_ACCESS_KEY}"
    export AWS_ACCESS_KEY_ID="${RELEASE_AWS_ACCESS_KEY_ID}"
    export AWS_SECRET_ACCESS_KEY="${RELEASE_AWS_SECRET_ACCESS_KEY}"

    wrap_stream hc-releases upload "${directory}" \
        "Failed to upload HashiCorp release assets"
    wrap_stream hc-releases publish \
        "Failed to publish HashiCorp release"

    export AWS_ACCESS_KEY_ID="${oid}"
    export AWS_SECRET_ACCESS_KEY="${okey}"
}

# Configures git for hashibot usage
function hashibot_git() {
    wrap git config user.name "${HASHIBOT_USERNAME}" \
        "Failed to setup git for hashibot usage (username)"
    wrap git config user.email "${HASHIBOT_EMAIL}" \
        "Failed to setup git for hashibot usage (email)"
    wrap git remote set-url origin "https://${HASHIBOT_USERNAME}:${HASHIBOT_TOKEN}@github.com/${repository}" \
        "Failed to setup git for hashibot usage (remote)"
}

# Stub cleanup method which can be redefined
# within the actual script
function cleanup() {
    (>&2 echo "** No cleanup tasks defined")
}

trap cleanup EXIT

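Since the trap fires on every exit, a consuming script can simply redefine cleanup after sourcing this file. A minimal sketch; the scratch directory variable here is hypothetical:

# Hypothetical override in a script that sources common.sh
function cleanup() {
    # remove scratch space created earlier in the script
    rm -rf "${scratch_dir}"
}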
# Enable debugging. This needs to be enabled with
# extreme caution when used on public repositories.
# Output with debugging enabled will likely include
# secret values which should not be publicly exposed.
#
# If the repository is public, the FORCE_PUBLIC_DEBUG
# environment variable must also be set.

is_private=$(curl -H "Authorization: token ${HASHIBOT_TOKEN}" -s "https://api.github.com/repos/${GITHUB_REPOSITORY}" | jq .private)

if [ "${DEBUG}" != "" ]; then
    if [ "${is_private}" = "false" ]; then
        if [ "${FORCE_PUBLIC_DEBUG}" != "" ]; then
            set -x
            output="/dev/stdout"
        else
            fail "Cannot enable debug mode on public repository unless forced"
        fi
    else
        set -x
        output="/dev/stdout"
    fi
else
    output="/dev/null"
fi

# Check if we are running a public repository on private runners
if [ "${VAGRANT_PRIVATE}" != "" ] && [ "${is_private}" = "false" ]; then
    fail "Cannot run public repositories on private Vagrant runners. Disable runners now!"
fi

# Common variables
full_sha="${GITHUB_SHA}"
short_sha="${full_sha:0:8}"
ident_ref="${GITHUB_REF#*/*/}"
if [[ "${GITHUB_REF}" == *"refs/tags/"* ]]; then
    tag="${GITHUB_REF##*tags/}"
    valid_release_version "${tag}"
    if [ $? -eq 0 ]; then
        release=1
    fi
fi
repository="${GITHUB_REPOSITORY}"
repo_owner="${repository%/*}"
repo_name="${repository#*/}"
asset_cache="${ASSETS_PRIVATE_SHORTTERM}/${repository}/${GITHUB_ACTION}"
job_id="${GITHUB_ACTION}"
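To make the derivations above concrete, a sketch of the values for one hypothetical tagged push; the SHA and ref are placeholders:

# Hypothetical inputs and the values derived by the block above
#   GITHUB_REPOSITORY=hashicorp/vagrant  GITHUB_SHA=0123456789abcdef...  GITHUB_REF=refs/tags/v2.2.7
#   full_sha=0123456789abcdef...   short_sha=01234567   ident_ref=v2.2.7
#   tag=v2.2.7   release=1 (tag matches the release pattern)
#   repo_owner=hashicorp   repo_name=vagrant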
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

. "${root}/.ci/common.sh"

export DEBIAN_FRONTEND="noninteractive"
export PATH="${PATH}:${root}/.ci"
@@ -0,0 +1,62 @@
#!/usr/bin/env bash

ghr_version="0.13.0"

# NOTE: This release will generate a new release on the installers
#       repository which in turn triggers a full package build
target_owner="hashicorp"
target_repository="vagrant-builders"

csource="${BASH_SOURCE[0]}"
while [ -h "$csource" ] ; do csource="$(readlink "$csource")"; done
root="$( cd -P "$( dirname "$csource" )/../" && pwd )"

. "${root}/.ci/init.sh"

pushd "${root}" > "${output}"

# Install ghr
wrap curl -Lso /tmp/ghr.tgz "https://github.com/tcnksm/ghr/releases/download/v${ghr_version}/ghr_v${ghr_version}_linux_amd64.tar.gz" \
    "Failed to download ghr utility"
wrap tar -C /tmp/ -xf /tmp/ghr.tgz \
    "Failed to unpack ghr archive"
wrap mv "/tmp/ghr_v${ghr_version}_linux_amd64/ghr" "${root}/.ci/" \
    "Failed to install ghr utility"

# Build our gem
wrap gem build *.gemspec \
    "Failed to build Vagrant RubyGem"

# Get the path of our new gem
g=(vagrant*.gem)
gem=$(printf "%s" "${g}")

# Determine the version of the release
vagrant_version="$(gem specification "${gem}" version)"
vagrant_version="${vagrant_version##*version: }"

# We want to release into the builders repository so
# update the repository variable with the desired destination
repo_owner="${target_owner}"
repo_name="${target_repository}"
full_sha="master"

export GITHUB_TOKEN="${HASHIBOT_TOKEN}"

if [ "${tag}" = "" ]; then
    echo "Generating Vagrant RubyGem pre-release... "
    version="v${vagrant_version}+${short_sha}"
    prerelease "${version}" "${gem}"
else
    # Validate this is a proper release version
    valid_release_version "${vagrant_version}"
    if [ $? -ne 0 ]; then
        fail "Invalid version format for Vagrant release: ${vagrant_version}"
    fi

    echo "Generating Vagrant RubyGem release... "
    version="v${vagrant_version}"
    release "${version}" "${gem}"
fi

slack -m "New Vagrant installers release triggered: *${version}*"
@@ -0,0 +1,176 @@
#!/usr/bin/env ruby

require "optparse"
require "net/https"
require "uri"
require "json"

OPTIONS = [:channel, :username, :icon, :state, :message,
  :message_file, :file, :title, :tail, :webhook].freeze

options = {}

OptionParser.new do |opts|
  opts.banner = "Usage: #{File.basename(__FILE__)} [options]"

  opts.on("-c", "--channel CHAN", "Send to channel") do |c|
    options[:channel] = c
  end

  opts.on("-u", "--username USER", "Send as username") do |u|
    options[:username] = u
  end

  opts.on("-i", "--icon URL", "User icon image") do |i|
    options[:icon] = i
  end

  opts.on("-s", "--state STATE", "Message state (success, warn, error, or color code)") do |s|
    options[:state] = s
  end

  opts.on("-m", "--message MESSAGE", "Message to send") do |m|
    options[:message] = m
  end

  opts.on("-M", "--message-file MESSAGE_FILE", "Use file contents as message") do |m|
    options[:message_file] = m
  end

  opts.on("-f", "--file MESSAGE_FILE", "Send raw contents of file in message") do |f|
    options[:file] = f
  end

  opts.on("-t", "--title TITLE", "Message title") do |t|
    options[:title] = t
  end

  opts.on("-T", "--tail N", "Send last N lines of content from raw message file") do |t|
    options[:tail] = t
  end

  opts.on("-w", "--webhook HOOK", "Slack webhook") do |w|
    options[:webhook] = w
  end

  opts.on("-h", "--help", "Print help") do
    puts opts
    exit
  end
end.parse!

OPTIONS.each do |key|
  if !options.key?(key)
    env_key = "SLACK_#{key.to_s.upcase}"
    if ENV[env_key]
      options[key] = ENV[env_key]
    end
  end
end

if !options[:webhook]
  $stderr.puts "ERROR: Webhook is required!"
  exit 1
end

if ENV["CIRCLECI"]
  options[:icon] = "https://emoji.slack-edge.com/TF1GCKJNM/circleci/054b58d488e65138.png" unless options[:icon]
  options[:username] = "circleci" unless options[:username]
  options[:footer] = "CircleCI - <#{ENV["CIRCLE_BUILD_URL"]}|#{ENV["CIRCLE_PROJECT_USERNAME"]}/#{ENV["CIRCLE_PROJECT_REPONAME"]}>"
  options[:footer_icon] = "https://emoji.slack-edge.com/TF1GCKJNM/circleci/054b58d488e65138.png"
end

if ENV["GITHUB_ACTIONS"]
  options[:icon] = "https://ca.slack-edge.com/T024UT03C-WG8NDATGT-f82ae03b9fca-48" unless options[:icon]
  options[:username] = "github" unless options[:username]
  options[:footer] = "Actions - <https://github.com/#{ENV["GITHUB_REPOSITORY"]}/commit/#{ENV["GITHUB_SHA"]}/checks|#{ENV["GITHUB_REPOSITORY"]}>"
  options[:footer_icon] = "https://ca.slack-edge.com/T024UT03C-WG8NDATGT-f82ae03b9fca-48"
end

options[:state] = "success" unless options[:state]

case options[:state]
when "success", "good"
  options[:state] = "good"
when "warn", "warning"
  options[:state] = "warning"
when "error", "danger"
  options[:state] = "danger"
else
  if !options[:state].start_with?("#")
    $stderr.puts "ERROR: Invalid value for `state` (#{options[:state]})"
    exit 1
  end
end

msg = options[:message]

# NOTE: Message provided from CLI argument will end up with
#       double escaped newlines so remove one
msg.gsub!("\\n", "\n") if msg

if options[:message_file]
  if !File.exist?(options[:message_file])
    $stderr.puts "ERROR: Message file does not exist `#{options[:message_file]}`"
    exit 1
  end
  msg_c = File.read(options[:message_file])
  msg = msg ? "#{msg}\n\n#{msg_c}" : msg_c
end

if options[:file]
  if !File.exist?(options[:file])
    $stderr.puts "ERROR: Message file does not exist `#{options[:file]}`"
    exit 1
  end
  if (tail = options[:tail].to_i) > 0
    content = ""
    buffer = 0
    File.open(options[:file], "r") do |f|
      until (content.split("\n").size > tail) || buffer >= f.size
        buffer += 1000
        buffer = f.size if buffer > f.size
        f.seek(f.size - buffer)
        content = f.read
      end
    end
    parts = content.split("\n")
    if parts.size > tail
      parts = parts.slice(-tail, tail)
    end
    fmsg = parts ? parts.join("\n") : ""
  else
    fmsg = File.read(options[:file])
  end
  fmsg = "```\n#{fmsg}\n```"
  if msg
    msg = msg << "\n\n" << fmsg
  end
end

if msg.to_s.empty?
  $stderr.puts "ERROR: No message content provided!"
  exit 1
end

attach = {text: msg, fallback: msg, color: options[:state], mrkdwn: true}
attach[:title] = options[:title] if options[:title]
attach[:footer] = options[:footer] if options[:footer]
attach[:footer_icon] = options[:footer_icon] if options[:footer_icon]
attach[:ts] = Time.now.to_i

payload = {}.tap do |pd|
  pd[:username] = options.fetch(:username, "packet-exec")
  pd[:channel] = options[:channel] if options[:channel]
  pd[:icon_url] = options[:icon] if options[:icon]
  pd[:attachments] = [attach]
end

result = Net::HTTP.post(URI(options[:webhook]), payload.to_json, "Content-Type" => "application/json")

if !result.code.start_with?("2")
  $stderr.puts "Failed to send slack message"
  exit 1
else
  $stdout.puts "ok"
end
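For reference, a sketch of how this helper ends up being invoked by the scripts above; the webhook URL is a placeholder, and in CI it is normally supplied through the SLACK_WEBHOOK environment variable picked up by the option fallback loop:

# Hypothetical invocations of the slack helper (placeholder webhook)
export SLACK_WEBHOOK="https://hooks.slack.com/services/EXAMPLE"
slack -m "Build finished"                                        # defaults to the "success" state
slack -s error -m "ERROR: build failed" -f /tmp/.ci-output -T 5  # attach last 5 lines of the output file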
@@ -0,0 +1,27 @@
#!/usr/bin/env bash

csource="${BASH_SOURCE[0]}"
while [ -h "$csource" ] ; do csource="$(readlink "$csource")"; done
root="$( cd -P "$( dirname "$csource" )/../" && pwd )"

pushd "${root}" > /dev/null

export DEBIAN_FRONTEND="noninteractive"

# Install required dependencies
sudo apt-get update || exit 1
sudo apt-get install -yq bsdtar || exit 1

# Ensure bundler is installed
gem install --no-document bundler || exit 1

# Install the bundle
bundle install || exit 1

# Run tests
bundle exec rake test:unit

result=$?
popd > /dev/null

exit $result
@@ -1,127 +1,6 @@
 version: 2
-reference:
-  environment: &ENVIRONMENT
-    SLACK_TITLE: Vagrant CI
-    RELEASE_TARGET_REPONAME: vagrant-installers
-  images:
-    ruby24: &ruby24
-      docker:
-        - image: circleci/ruby:2.4
-    ruby25: &ruby25
-      docker:
-        - image: circleci/ruby:2.5
-    ruby26: &ruby26
-      docker:
-        - image: circleci/ruby:2.6
-    builder: &builder
-      environment:
-        <<: *ENVIRONMENT
-      docker:
-        - image: $BUILDER_IMAGE
-          auth:
-            username: $BUILDER_USERNAME
-            password: $BUILDER_PASSWORD
-  workflows:
-    public: &PUBLIC_WORKFLOW
-      filters:
-        branches:
-          only: /^pull\/.*/
-    master: &MASTER_WORKFLOW
-      filters:
-        branches:
-          only: master
-    private_build: &PRIVATE_WORKFLOW_BUILD
-      context: vagrant
-      filters:
-        branches:
-          only:
-            - /^build-.*/
-        tags:
-          only: /.*/
-  jobs:
-    private_failure: &PRIVATE_FAILURE
-      run:
-        name: Failure handler
-        command: |
-          if [ -f .output ]; then
-            slack -m "Vagrant job has failed: *${CIRCLE_JOB}*" -s error -f .output -T 5
-          else
-            slack -m "Vagrant job has failed: *${CIRCLE_JOB}*" -s error
-          fi
-        when: on_fail
-    unit_tests: &unit_tests
-      steps:
-        - run: sudo apt-get update ; sudo apt-get -yq install bsdtar
-        - checkout
-        - restore_cache:
-            key: static-site-gems-v1-{{ checksum "Gemfile.lock" }}
-        - run:
-            command: bundle check || bundle install --path vendor/bundle
-        - save_cache:
-            key: static-site-gems-v1-{{ checksum "Gemfile.lock" }}
-            paths:
-              - ./vendor/bundle
-        - run: bundle exec rake test:unit
 jobs:
-  build:
-    <<: *builder
-    steps:
-      - checkout
-      - run: gem build vagrant.gemspec
-      - *PRIVATE_FAILURE
-      - persist_to_workspace:
-          root: .
-          paths:
-            - ./*.gem
-  store:
-    <<: *builder
-    steps:
-      - attach_workspace:
-          at: .
-      - run: |
-          gem_name=(vagrant-*.gem)
-          if [ "${CIRCLE_TAG}" == "" ]; then
-            remote_gem_name="vagrant-master.gem"
-          else
-            remote_gem_name="vagrant.gem"
-          fi
-          if [[ "${CIRCLE_BRANCH}" = "build-"* ]]; then
-            s3_dst="${ASSETS_PRIVATE_LONGTERM}/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/${CIRCLE_BRANCH##build-}/"
-          else
-            s3_dst="${ASSETS_PRIVATE_BUCKET}/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/"
-          fi
-          aws s3 cp "${gem_name}" "${s3_dst}${remote_gem_name}" > .output 2>&1
-      - *PRIVATE_FAILURE
-  release:
-    <<: *builder
-    steps:
-      - checkout
-      - attach_workspace:
-          at: .
-      - run: |
-          set +e
-          gem=(vagrant-*.gem)
-          gem_version="${gem##vagrant-}"
-          gem_version="${gem_version%%.gem}"
-          export GITHUB_TOKEN="${HASHIBOT_TOKEN}"
-          if [ "${CIRCLE_TAG}" = "" ]; then
-            version="v${gem_version}+$(git rev-parse --short "${CIRCLE_SHA1}")"
-            ghr -u ${CIRCLE_PROJECT_USERNAME} -r ${RELEASE_TARGET_REPONAME} -c master -prerelease -delete -replace ${version} ${gem} > .output 2>&1
-          else
-            version="${CIRCLE_TAG}"
-            ghr -u ${CIRCLE_PROJECT_USERNAME} -r ${RELEASE_TARGET_REPONAME} -c master -delete -replace ${version} ${gem} > .output 2>&1
-          fi
-          slack -m "New Vagrant installers release triggered: *${version}*"
-      - *PRIVATE_FAILURE
-  test_ruby24:
-    <<: *ruby24
-    <<: *unit_tests
-  test_ruby25:
-    <<: *ruby25
-    <<: *unit_tests
-  test_ruby26:
-    <<: *ruby26
-    <<: *unit_tests
   build-website:
     # setting the working_directory along with the checkout path allows us to not have
     # to cd into the website/ directory for commands
@@ -145,41 +24,6 @@ jobs:
           command: ./scripts/deploy.sh
 workflows:
   version: 2
-  builds:
-    jobs:
-      - build:
-          <<: *PRIVATE_WORKFLOW_BUILD
-      - store:
-          <<: *PRIVATE_WORKFLOW_BUILD
-          requires:
-            - build
-      - release:
-          <<: *PRIVATE_WORKFLOW_BUILD
-          requires:
-            - build
-  pull_requests:
-    jobs:
-      - test_ruby24:
-          <<: *PUBLIC_WORKFLOW
-      - test_ruby25:
-          <<: *PUBLIC_WORKFLOW
-      - test_ruby26:
-          <<: *PUBLIC_WORKFLOW
-  master:
-    jobs:
-      - test_ruby24:
-          <<: *MASTER_WORKFLOW
-      - test_ruby25:
-          <<: *MASTER_WORKFLOW
-      - test_ruby26:
-          <<: *MASTER_WORKFLOW
-      - build:
-          <<: *MASTER_WORKFLOW
-          context: vagrant
-          requires:
-            - test_ruby24
-            - test_ruby25
-            - test_ruby26
   website:
     jobs:
       - build-website:
@@ -0,0 +1,37 @@
on:
  push:
    branches:
      - master
    paths-ignore:
      - 'CHANGELOG.md'
      - 'website/**'

jobs:
  build-gem:
    name: Build Vagrant RubyGem
    runs-on: ubuntu-18.04
    steps:
      - name: Code Checkout
        uses: actions/checkout@v1
      - name: Set Ruby
        uses: actions/setup-ruby@v1
        with:
          ruby-version: '2.6'
      - name: Build RubyGem
        run: ./.ci/build.sh
        working-directory: ${{github.workspace}}
        env:
          ASSETS_LONGTERM_PREFIX: elt
          ASSETS_PRIVATE_BUCKET: est
          ASSETS_PRIVATE_LONGTERM: ${{ secrets.ASSETS_PRIVATE_LONGTERM }}
          ASSETS_PRIVATE_SHORTTERM: ${{ secrets.ASSETS_PRIVATE_SHORTTERM }}
          ASSETS_PUBLIC_BUCKET: ${{ secrets.ASSETS_PUBLIC_BUCKET }}
          ASSETS_PUBLIC_LONGTERM: ${{ secrets.ASSETS_PUBLIC_LONGTERM }}
          ASSETS_PUBLIC_SHORTTERM: ${{ secrets.ASSETS_PUBLIC_SHORTTERM }}
          ASSETS_SHORTTERM_PREFIX: ${{ secrets.ASSETS_SHORTTERM_PREFIX }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          HASHIBOT_EMAIL: ${{ secrets.HASHIBOT_EMAIL }}
          HASHIBOT_TOKEN: ${{ secrets.HASHIBOT_TOKEN }}
          HASHIBOT_USERNAME: ${{ secrets.HASHIBOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
@@ -0,0 +1,38 @@
on:
  push:
    branches:
      - 'build-*'
    tags: '*'
    paths-ignore:
      - 'CHANGELOG.md'
      - 'website/**'

jobs:
  trigger-release:
    name: Trigger Installers Build
    runs-on: ubuntu-18.04
    steps:
      - name: Code Checkout
        uses: actions/checkout@v1
      - name: Set Ruby
        uses: actions/setup-ruby@v1
        with:
          ruby-version: '2.6'
      - name: Create Builders Release
        run: ./.ci/release.sh
        working-directory: ${{github.workspace}}
        env:
          ASSETS_LONGTERM_PREFIX: elt
          ASSETS_PRIVATE_BUCKET: est
          ASSETS_PRIVATE_LONGTERM: ${{ secrets.ASSETS_PRIVATE_LONGTERM }}
          ASSETS_PRIVATE_SHORTTERM: ${{ secrets.ASSETS_PRIVATE_SHORTTERM }}
          ASSETS_PUBLIC_BUCKET: ${{ secrets.ASSETS_PUBLIC_BUCKET }}
          ASSETS_PUBLIC_LONGTERM: ${{ secrets.ASSETS_PUBLIC_LONGTERM }}
          ASSETS_PUBLIC_SHORTTERM: ${{ secrets.ASSETS_PUBLIC_SHORTTERM }}
          ASSETS_SHORTTERM_PREFIX: ${{ secrets.ASSETS_SHORTTERM_PREFIX }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          HASHIBOT_EMAIL: ${{ secrets.HASHIBOT_EMAIL }}
          HASHIBOT_TOKEN: ${{ secrets.HASHIBOT_TOKEN }}
          HASHIBOT_USERNAME: ${{ secrets.HASHIBOT_USERNAME }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
@@ -0,0 +1,29 @@
on:
  push:
    branches:
      - master
      - 'test-*'
    paths-ignore:
      - 'CHANGELOG.md'
      - 'website/**'
  pull_request:
    branches:
      - master

jobs:
  unit-tests:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        ruby: [ '2.4.x', '2.5.x', '2.6.x' ]
    name: Vagrant unit tests on Ruby ${{ matrix.ruby }}
    steps:
      - name: Code Checkout
        uses: actions/checkout@v1
      - name: Setup Ruby
        uses: actions/setup-ruby@v1
        with:
          ruby-version: ${{matrix.ruby}}
          architecture: 'x64'
      - name: Run Tests
        run: .ci/test.sh