Skip to content
Snippets Groups Projects
Commit dcf6fdb3 authored by mechanik20051988's avatar mechanik20051988 Committed by Nikita Pettik
Browse files

tests: add ability to run box/engine tests with different allocators

The ability to select an allocator for memtx has been added to
tarantool. To test a new type of allocator, all tests must also
be run with it. Implemented a new option that allows setting the allocator
for memtx. If you want to choose the allocator type for tests, run test-run.py
with --memtx-allocator="small" or --memtx-allocator="system". The allocator type
is passed via the MEMTX_ALLOCATOR environment variable to the test.
parent 279ca6fb
No related branches found
No related tags found
No related merge requests found
# CI workflow: run the box/engine test suite with the system (malloc-based)
# memtx allocator instead of the default "small" allocator. The allocator is
# selected by passing --memtx-allocator=system to test-run.py via
# TEST_RUN_EXTRA_PARAMS (see CI_MAKE / test_debian_no_deps below).
name: memtx_allocator_based_on_malloc

on:
  push:
  pull_request:
  repository_dispatch:
    types: [backend_automation]
  workflow_dispatch:

env:
  CI_MAKE: make -f .travis.mk
  # Forwarded by the make target to test-run.py; selects the malloc-based
  # memtx allocator for the whole test run.
  TEST_RUN_EXTRA_PARAMS: --memtx-allocator=system

jobs:
  memtx_allocator_based_on_malloc:
    # We want to run on external PRs, but not on our own internal PRs
    # as they'll be run by the push to the branch.
    if: ( github.event_name == 'push' ||
          github.event.pull_request.head.repo.full_name != github.repository ) &&
        ! endsWith(github.ref, '-notest')
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
    # image built by .gitlab.mk instructions and targets from .travis.mk
    container:
      image: docker.io/tarantool/testing:debian-stretch
      # Our testing expects that the init process (PID 1) will
      # reap orphan processes. At least the following test leans
      # on it: app-tap/gh-4983-tnt-e-assert-false-hangs.test.lua.
      options: '--init'
    steps:
      - uses: actions/checkout@v1
      - uses: ./.github/actions/environment
      - name: test
        run: ${CI_MAKE} test_debian_no_deps
      - name: call action to send Telegram message on failure
        env:
          TELEGRAM_TOKEN: ${{ secrets.TELEGRAM_CORE_TOKEN }}
          TELEGRAM_TO: ${{ secrets.TELEGRAM_CORE_TO }}
        uses: ./.github/actions/send-telegram-notify
        if: failure()
      - name: artifacts
        uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: memtx_allocator_based_on_malloc
          retention-days: 21
          path: test/var/artifacts
...@@ -64,7 +64,7 @@ cfg_filter(box.cfg) ...@@ -64,7 +64,7 @@ cfg_filter(box.cfg)
- - log_level - - log_level
- 5 - 5
- - memtx_allocator - - memtx_allocator
- small - <hidden>
- - memtx_dir - - memtx_dir
- <hidden> - <hidden>
- - memtx_max_tuple_size - - memtx_max_tuple_size
......
#!/usr/bin/env tarantool #!/usr/bin/env tarantool
box.cfg{listen = os.getenv("LISTEN")} box.cfg({
listen = os.getenv("LISTEN"),
memtx_allocator = os.getenv("MEMTX_ALLOCATOR")
})
require('console').listen(os.getenv('ADMIN')) require('console').listen(os.getenv('ADMIN'))
...@@ -7,6 +7,7 @@ box.cfg{ ...@@ -7,6 +7,7 @@ box.cfg{
listen = os.getenv("LISTEN"), listen = os.getenv("LISTEN"),
memtx_memory = 107374182, memtx_memory = 107374182,
pid_file = "tarantool.pid", pid_file = "tarantool.pid",
memtx_allocator = os.getenv("MEMTX_ALLOCATOR")
} }
require('console').listen(os.getenv('ADMIN')) require('console').listen(os.getenv('ADMIN'))
...@@ -15,7 +16,7 @@ local _hide = { ...@@ -15,7 +16,7 @@ local _hide = {
pid_file=1, log=1, listen=1, vinyl_dir=1, pid_file=1, log=1, listen=1, vinyl_dir=1,
memtx_dir=1, wal_dir=1, memtx_dir=1, wal_dir=1,
memtx_max_tuple_size=1, memtx_min_tuple_size=1, memtx_max_tuple_size=1, memtx_min_tuple_size=1,
replication_sync_timeout=1 replication_sync_timeout=1, memtx_allocator=1
} }
function cfg_filter(data) function cfg_filter(data)
......
...@@ -52,7 +52,7 @@ cfg_filter(box.cfg) ...@@ -52,7 +52,7 @@ cfg_filter(box.cfg)
| - - log_level | - - log_level
| - 5 | - 5
| - - memtx_allocator | - - memtx_allocator
| - small | - <hidden>
| - - memtx_dir | - - memtx_dir
| - <hidden> | - <hidden>
| - - memtx_max_tuple_size | - - memtx_max_tuple_size
...@@ -177,7 +177,7 @@ cfg_filter(box.cfg) ...@@ -177,7 +177,7 @@ cfg_filter(box.cfg)
| - - log_level | - - log_level
| - 5 | - 5
| - - memtx_allocator | - - memtx_allocator
| - small | - <hidden>
| - - memtx_dir | - - memtx_dir
| - <hidden> | - <hidden>
| - - memtx_max_tuple_size | - - memtx_max_tuple_size
......
...@@ -3,7 +3,8 @@ ...@@ -3,7 +3,8 @@
require('console').listen(os.getenv('ADMIN')) require('console').listen(os.getenv('ADMIN'))
box.cfg({ box.cfg({
listen = os.getenv("LISTEN"), listen = os.getenv("LISTEN"),
force_recovery = true, force_recovery = true,
read_only = false, read_only = false,
memtx_allocator = os.getenv("MEMTX_ALLOCATOR")
}) })
...@@ -21,7 +21,8 @@ end ...@@ -21,7 +21,8 @@ end
box.ctl.on_schema_init(on_init_trig) box.ctl.on_schema_init(on_init_trig)
box.cfg{ box.cfg{
listen = os.getenv("LISTEN") listen = os.getenv("LISTEN"),
memtx_allocator = os.getenv("MEMTX_ALLOCATOR")
} }
require('console').listen(os.getenv('ADMIN')) require('console').listen(os.getenv('ADMIN'))
...@@ -5,6 +5,7 @@ box.cfg{ ...@@ -5,6 +5,7 @@ box.cfg{
memtx_memory = 107374182, memtx_memory = 107374182,
pid_file = "tarantool.pid", pid_file = "tarantool.pid",
memtx_use_mvcc_engine = true, memtx_use_mvcc_engine = true,
memtx_allocator = os.getenv("MEMTX_ALLOCATOR")
} }
require('console').listen(os.getenv('ADMIN')) require('console').listen(os.getenv('ADMIN'))
...@@ -12,6 +12,7 @@ box.cfg{ ...@@ -12,6 +12,7 @@ box.cfg{
vinyl_page_size = 1024, vinyl_page_size = 1024,
memtx_max_tuple_size = 1024 * 1024 * 100, memtx_max_tuple_size = 1024 * 1024 * 100,
vinyl_max_tuple_size = 1024 * 1024 * 100, vinyl_max_tuple_size = 1024 * 1024 * 100,
memtx_allocator = os.getenv("MEMTX_ALLOCATOR")
} }
require('console').listen(os.getenv('ADMIN')) require('console').listen(os.getenv('ADMIN'))
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment