Commit 29211065 authored by Sergey Bronnikov, committed by Igor Munkin

ci: run performance tests

The performance tests in the perf directory are not automated; currently
we run them manually from time to time. On the other hand, source code
that is rarely exercised is prone to software rot [1].

The patch adds a CMake target "test-perf" and a GitHub workflow that runs
these tests in CI. The workflow is based on release.yml: it builds the
performance tests and then runs them.

1. https://en.wikipedia.org/wiki/Software_rot

NO_CHANGELOG=testing
NO_DOC=testing
NO_TEST=testing

(cherry picked from commit 5edcb712)
parent 4ff80886
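For local runs the same entry point the CI job uses should work; a minimal
sketch (run from the repository root, assuming the build dependencies from
install-deps-debian are already installed):

    # Configure the tree, build it, and run all performance tests.
    make -f .test.mk test-perf

    # Or, against an already configured build tree, invoke only the
    # aggregated CMake target (BUILD_DIR is your build directory).
    cmake --build ${BUILD_DIR} --target test-perf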
@@ -16,6 +16,7 @@ runs:
          lcov \
          ruby-dev \
          liblz4-dev \
          libbenchmark-dev \
          autoconf \
          automake \
          libtool
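The new libbenchmark-dev package provides the benchmark library that the
perf CMake code checks for (benchmark_FOUND below). On a local machine the
equivalent step would presumably be:

    # Assumption: a Debian/Ubuntu host matching the CI runner image.
    sudo apt-get install -y libbenchmark-dev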
name: perf_micro

on:
  push:
    branches:
      - 'master'
      - 'release/**'
    tags:
      - '**'
  pull_request:
  workflow_dispatch:

concurrency:
  # Update of a developer branch cancels the previously scheduled workflow
  # run for this branch. However, the 'master' branch, release branch, and
  # tag workflow runs are never canceled.
  #
  # We use a trick here: define the concurrency group as 'workflow run ID' +
  # 'workflow run attempt' because it is a unique combination for any run.
  # So it effectively discards grouping.
  #
  # Important: we cannot use `github.sha` as a unique identifier because
  # pushing a tag may cancel a run that works on a branch push event.
  group: ${{ (
    github.ref == 'refs/heads/master' ||
    startsWith(github.ref, 'refs/heads/release/') ||
    startsWith(github.ref, 'refs/tags/')) &&
    format('{0}-{1}', github.run_id, github.run_attempt) ||
    format('{0}-{1}', github.workflow, github.ref) }}
  cancel-in-progress: true

jobs:
  perf_micro:
    # Run on push to the 'master' and release branches of tarantool/tarantool
    # or on pull request if the 'notest' label is not set.
    if: github.repository == 'tarantool/tarantool' &&
        ( github.event_name != 'pull_request' ||
          ( github.event_name == 'pull_request' &&
            !contains(github.event.pull_request.labels.*.name, 'notest') ) )
    runs-on: ubuntu-20.04-self-hosted
    steps:
      - name: Prepare checkout
        uses: tarantool/actions/prepare-checkout@master
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          submodules: recursive
      - uses: ./.github/actions/environment
      - name: Install deps
        uses: ./.github/actions/install-deps-debian
      - name: test
        run: make -f .test.mk test-perf
      - name: Send VK Teams message on failure
        if: failure()
        uses: ./.github/actions/report-job-status
        with:
          bot-token: ${{ secrets.VKTEAMS_BOT_TOKEN }}
      - name: artifacts
        uses: actions/upload-artifact@v3
        if: failure()
        with:
          name: release
          retention-days: 21
          path: ${{ env.VARDIR }}/artifacts
      - name: Upload artifacts to S3
        uses: ./.github/actions/s3-upload-artifact
        if: ( success() || failure() ) && ( github.ref == 'refs/heads/master' ||
            startsWith(github.ref, 'refs/heads/release/') ||
            startsWith(github.ref, 'refs/tags/') )
        with:
          job-name: ${{ github.job }}
          access-key-id: ${{ secrets.MULTIVAC_S3_ACCESS_KEY_ID }}
          secret-access-key: ${{ secrets.MULTIVAC_S3_SECRET_ACCESS_KEY }}
          source: ${{ env.VARDIR }}/artifacts
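To illustrate the concurrency expression above (the values are invented for
the example, not taken from a real run):

    # Push to 'master', a release branch, or a tag: the group is unique per
    # run, so nothing is ever canceled, e.g.
    #   group = "6789012345-1"                 (run_id-run_attempt)
    # Push to a developer branch: the group is shared across pushes, so an
    # older in-progress run is canceled, e.g.
    #   group = "perf_micro-refs/heads/some-dev-branch"   (workflow-ref)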
@@ -56,6 +56,11 @@ install-test-deps:
run-test: install-test-deps
        cd test && ${TEST_RUN_ENV} ./test-run.py --force --vardir ${VARDIR} ${TEST_RUN_PARAMS} ${TEST_RUN_EXTRA_PARAMS}

.PHONY: run-perf-test
run-perf-test:
        cmake --build ${BUILD_DIR} --parallel
        cmake --build ${BUILD_DIR} --target test-perf

##############################
#           Linux            #
##############################

@@ -69,6 +74,13 @@ test-release: CMAKE_PARAMS = -DCMAKE_BUILD_TYPE=RelWithDebInfo \
test-release: build run-luajit-test run-test

.PHONY: test-perf
test-perf: CMAKE_PARAMS = -DCMAKE_BUILD_TYPE=RelWithDebInfo \
                          -DENABLE_WERROR=ON \
                          -DTEST_BUILD=ON
test-perf: build run-perf-test

# Release ASAN build
.PHONY: test-release-asan
@@ -10,6 +10,10 @@ if (NOT ${benchmark_FOUND})
    COMMAND ${CMAKE_COMMAND} -E cmake_echo_color --red ${MSG}
    COMMENT ${MSG}
  )
  add_custom_target(test-perf
    DEPENDS test-c-perf test-lua-perf
    COMMENT "Running performance tests"
  )
  return()
endif()

@@ -77,3 +81,8 @@ add_custom_target(test-c-perf
  DEPENDS ${RUN_PERF_C_TESTS_LIST}
  COMMENT "Running C performance tests"
)

add_custom_target(test-perf
  DEPENDS test-c-perf test-lua-perf
  COMMENT "Running performance tests"
)
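Purely as an illustration of how the per-test run list referenced above is
typically populated (a sketch with made-up names; the real wiring lives
earlier in this CMakeLists.txt and may differ):

    # Hypothetical benchmark binary built against Google Benchmark.
    add_executable(example.perftest example.cc)
    target_link_libraries(example.perftest PRIVATE benchmark::benchmark)

    # Wrap its execution in a target and add it to the aggregate list that
    # test-c-perf (and hence test-perf) depends on.
    add_custom_target(example.perftest-run
      COMMAND example.perftest
      DEPENDS example.perftest
      COMMENT "Running example benchmark"
    )
    list(APPEND RUN_PERF_C_TESTS_LIST example.perftest-run)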