Skip to content

Commit

Permalink
Update CircleCI config and performance test scripts
Browse files Browse the repository at this point in the history
  • Loading branch information
Claes Mogren authored and mogren committed Aug 17, 2020
1 parent f7693ab commit 0ce68f1
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 27 deletions.
4 changes: 4 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ version: 2.1
orbs:
aws-cli: circleci/aws-cli@0.1.19
k8s: circleci/kubernetes@0.11.0
jq: circleci/jq@2.2.0

references:
build_job: &build
Expand Down Expand Up @@ -92,6 +93,7 @@ jobs:
RUN_PERFORMANCE_TESTS: "true"
steps:
- checkout
- jq/install
- setup_remote_docker
- aws-cli/setup:
profile-name: awstester
Expand Down Expand Up @@ -266,6 +268,7 @@ workflows:

# triggers daily test run on master
nightly-test-run:
when: ${RUN_NIGHTLY_TESTS}
triggers:
- schedule:
cron: "0 0 * * *"
Expand All @@ -278,6 +281,7 @@ workflows:

# triggers weekly tests on master (Friday at 11 PM PST)
weekly-test-run:
when: ${RUN_WEEKLY_TESTS}
triggers:
- schedule:
cron: "0 6 * * 6"
Expand Down
49 changes: 22 additions & 27 deletions scripts/lib/performance_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,33 +12,31 @@ function check_for_timeout() {
}

# Append this run's scale-up/scale-down durations as CSV rows to $filename,
# then optionally upload the file to S3.
# Globals (read): filename, SCALE_UP_DURATION_ARRAY, SCALE_DOWN_DURATION_ARRAY,
#                 PERFORMANCE_TEST_S3_BUCKET_NAME
# Arguments: $1 - S3 key suffix appended to the bucket name (e.g. "/130-pods/")
# Outputs:   echoes the result file path and its contents to stdout
function save_results_to_file() {
    local slot

    echo "$filename"
    # CSV header, then one timestamped row per recorded slot.
    echo "Date", "\"slot1\"", "\"slot2\"" >> "$filename"
    for slot in 0 1 2; do
        printf '%s, %s, %s\n' \
            "$(date +"%Y-%m-%d-%T")" \
            "${SCALE_UP_DURATION_ARRAY[$slot]}" \
            "${SCALE_DOWN_DURATION_ARRAY[$slot]}" >> "$filename"
    done

    cat "$filename"
    # Upload only when a destination bucket was configured.
    if [[ ${#PERFORMANCE_TEST_S3_BUCKET_NAME} -gt 0 ]]; then
        aws s3 cp "$filename" "${PERFORMANCE_TEST_S3_BUCKET_NAME}${1}"
    else
        echo "No S3 bucket name given, skipping test result upload."
    fi
}

function check_for_slow_performance() {
BUCKET=s3://cni-scale-test-data${1}
FILE1=`aws s3 ls ${BUCKET} | sort | tail -n 2 | sed -n '1 p' | awk '{print $4}'`
FILE2=`aws s3 ls ${BUCKET} | sort | tail -n 3 | sed -n '1 p' | awk '{print $4}'`
FILE3=`aws s3 ls ${BUCKET} | sort | tail -n 4 | sed -n '1 p' | awk '{print $4}'`
BUCKET=s3://${PERFORMANCE_TEST_S3_BUCKET_NAME}${1}
FILE1=$(aws s3 ls "${BUCKET}" | sort | tail -n 2 | sed -n '1 p' | awk '{print $4}')
FILE2=$(aws s3 ls "${BUCKET}" | sort | tail -n 3 | sed -n '1 p' | awk '{print $4}')
FILE3=$(aws s3 ls "${BUCKET}" | sort | tail -n 4 | sed -n '1 p' | awk '{print $4}')

PAST_PERFORMANCE_UP_AVERAGE_SUM=0
PAST_PERFORMANCE_DOWN_AVERAGE_SUM=0
find_performance_duration_average $FILE1 1
find_performance_duration_average $FILE2 2
find_performance_duration_average $FILE3 3
PAST_PERFORMANCE_UP_AVERAGE=$((PAST_PERFORMANCE_UP_AVERAGE_SUM / 3))
PAST_PERFORMANCE_DOWN_AVERAGE=$((PAST_PERFORMANCE_DOWN_AVERAGE_SUM / 3))

# Divided by 3 to get current average, multiply past averages by 5/4 to get 25% window
if [[ $((CURRENT_PERFORMANCE_UP_SUM / 3)) -gt $((PAST_PERFORMANCE_UP_AVERAGE * 5 / 4)) ]]; then
Expand Down Expand Up @@ -134,7 +132,9 @@ function run_performance_test_130_pods() {

echo "TIMELINE: 130 Pod performance test took $DEPLOY_DURATION seconds."
RUNNING_PERFORMANCE=false
check_for_slow_performance "/130-pods/"
if [[ ${#PERFORMANCE_TEST_S3_BUCKET_NAME} -gt 0 ]]; then
check_for_slow_performance "/130-pods/"
fi
$KUBECTL_PATH delete -f ./testdata/deploy-130-pods.yaml
}

Expand Down Expand Up @@ -205,24 +205,17 @@ function run_performance_test_730_pods() {

echo "TIMELINE: 730 Pod performance test took $DEPLOY_DURATION seconds."
RUNNING_PERFORMANCE=false
check_for_slow_performance "/730-pods/"
if [[ ${#PERFORMANCE_TEST_S3_BUCKET_NAME} -gt 0 ]]; then
check_for_slow_performance "/730-pods/"
fi
$KUBECTL_PATH delete -f ./testdata/deploy-730-pods.yaml
}

# Scale the cluster's first auto-scaling group to 99 nodes so the cluster can
# host the 5000-pod performance test.
# Globals (written): AUTO_SCALE_GROUP_NAME
# Outputs:           echoes the resolved auto-scaling-group name to stdout
# NOTE(review): assumes the first ASG returned by describe-auto-scaling-groups
# is the test cluster's node group — confirm if multiple ASGs exist.
function scale_nodes_for_5000_pod_test() {
    # jq parses the JSON reliably; --raw-output strips the surrounding quotes.
    AUTO_SCALE_GROUP_NAME=$(aws autoscaling describe-auto-scaling-groups | jq --raw-output '.AutoScalingGroups[0].AutoScalingGroupName')
    echo "$AUTO_SCALE_GROUP_NAME"
    aws autoscaling update-auto-scaling-group \
        --auto-scaling-group-name "$AUTO_SCALE_GROUP_NAME" \
        --desired-capacity 99
}

Expand Down Expand Up @@ -293,6 +286,8 @@ function run_performance_test_5000_pods() {

echo "TIMELINE: 5000 Pod performance test took $DEPLOY_DURATION seconds."
RUNNING_PERFORMANCE=false
check_for_slow_performance "/5000-pods/"
if [[ ${#PERFORMANCE_TEST_S3_BUCKET_NAME} -gt 0 ]]; then
check_for_slow_performance "/5000-pods/"
fi
$KUBECTL_PATH delete -f ./testdata/deploy-5000-pods.yaml
}

0 comments on commit 0ce68f1

Please sign in to comment.