Commit 3110ef9a authored by Sergey Kaplun, committed by Sergey Ostanevich

perf: introduce benchmark.lua helper module

This module helps aggregate various subbenchmark runs and dump the
results either to stdout or to a specified file. It also allows
outputting the results in JSON format.

These options are usually used together to dump machine-readable
results for the benchmarks.

Also, set the `LUA_PATH` environment variable for the Lua benchmarks to
make the introduced module requirable.
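
For example, a benchmark built on top of the module can be run as
follows (mybench.lua is a hypothetical file name):

    tarantool mybench.lua --output results.json --output_format json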

NO_DOC=perf test
NO_CHANGELOG=perf test
NO_TEST=perf test
parent 435906db
 set(TARANTOOL_BIN $<TARGET_FILE:tarantool>)
 set(RUN_PERF_LUA_TESTS_LIST "")
+set(LUA_PATH "${CMAKE_CURRENT_SOURCE_DIR}/?.lua\;\;")
 function(create_perf_lua_test)
   set(prefix PERF)
   set(noValues)
@@ -18,7 +20,9 @@ function(create_perf_lua_test)
   message(STATUS "Creating Lua performance test ${PERF_NAME}_perftest")
   set(TEST_PATH ${CMAKE_CURRENT_SOURCE_DIR}/${PERF_NAME}.lua)
   add_custom_target(${PERF_NAME}_perftest
-    COMMAND ${TARANTOOL_BIN} ${TEST_PATH}
+    COMMAND ${CMAKE_COMMAND} -E env
+            LUA_PATH="${LUA_PATH}"
+            ${TARANTOOL_BIN} ${TEST_PATH}
     COMMENT Running ${PERF_NAME}_perftest
     DEPENDS tarantool ${TEST_PATH}
     WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
-- Usage:
--
-- local benchmark = require('benchmark')
-- local clock = require('clock')
--
-- local USAGE = 'tarantool mybench.lua [options]'
--
-- -- These options are parsed by the module by default:
-- -- output = 'string',
-- -- output_format = 'string',
-- local opts = benchmark.argparse(arg, {
--     <..your options..>
-- }, USAGE)
-- local bench = benchmark.new(opts)
--
-- local ITERATIONS = 10
--
-- local start_time = {
--     time = clock.time(),
--     proc = clock.proc(),
-- }
-- for _ = 1, ITERATIONS do
--     workload()
-- end
--
-- bench:add_result('subtest name', {
--     items = ITERATIONS,
--     real_time = clock.time() - start_time.time,
--     cpu_time = clock.proc() - start_time.proc,
-- })
--
-- bench:dump_results()

local json = require('json')
local fio = require('fio')

local argparse = require('internal.argparse')

local M = {}

local function format_report(bench)
    local output_format = bench.output_format
    local results = bench.results
    local report = ''
    if output_format == 'json' then
        -- The output should have the same format as the Google
        -- Benchmark JSON output format:
        -- https://github.com/google/benchmark/blob/main/docs/user_guide.md
        report = json.encode({benchmarks = results})
    else
        assert(output_format == 'console', 'unknown output format')
        for _, res in ipairs(results) do
            report = report .. ('%s %d rps\n'):format(res.name,
                                                      res.items_per_second)
        end
    end
    return report
end
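
-- For illustration (hypothetical numbers): a single result named
-- 'insert' with items_per_second = 4000000 is rendered by
-- format_report() as 'insert 4000000 rps' in the console format, and as
-- {"benchmarks": [{"name": "insert", ...}]} in the JSON format.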

local function dump_report(bench, text)
    local output = bench.output
    if output then
        local fh = assert(fio.open(output, {'O_WRONLY', 'O_CREAT', 'O_TRUNC'}))
        fh:write(text)
        fh:close()
    else
        io.stdout:write(text)
    end
end
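
-- Register one subbenchmark result. <data.items> is the total number of
-- items processed by the subbenchmark: for example (hypothetical
-- numbers), items = 10^6 processed in real_time = 0.25 s yields
-- items_per_second = 4000000.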
local function add_result(bench, name, data)
    local items_per_second = math.floor(data.items / data.real_time)
    local result = {
        name = name,
        real_time = data.real_time,
        cpu_time = data.cpu_time,
        iterations = data.items,
        items_per_second = items_per_second,
    }
    table.insert(bench.results, result)
    return result
end

local function dump_results(bench)
    dump_report(bench, format_report(bench))
end

local GENERAL_HELP = [[
 The supported general options are:

   help (same as -h)  <boolean>            - print this message
   output             <string>             - filename to dump the benchmark
                                             results to
   output_format      <string, 'console'>  - format (console, json) in which
                                             the results are dumped

 Options are passed with a '--' prefix, followed by the value if the
 option is not a boolean one.

 There are a number of suggestions on how to achieve the most stable
 results:
 https://github.com/tarantool/tarantool/wiki/Benchmarking
]]

function M.argparse(arg, argtable, custom_help)
    local benchname = fio.basename(debug.getinfo(2).short_src)
    local usageline = ('\n Usage: tarantool %s [options]\n\n'):format(benchname)
    argtable = argtable or {}
    table.insert(argtable, {'h', 'boolean'})
    table.insert(argtable, {'help', 'boolean'})
    table.insert(argtable, {'output', 'string'})
    table.insert(argtable, {'output_format', 'string'})
    local params = argparse.parse(arg, argtable)

    if params.h or params.help then
        local help_msg = usageline .. GENERAL_HELP
        if custom_help then
            help_msg = ('%s%s%s'):format(usageline, custom_help, GENERAL_HELP)
        end
        print(help_msg)
        os.exit(0)
    end

    return params
end
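
-- A custom option is declared in the same {name, type} format as the
-- default ones above; a sketch with a hypothetical 'iterations' option
-- (assuming internal.argparse supports the 'number' type):
--
--   local opts = benchmark.argparse(arg, {{'iterations', 'number'}}, USAGE)
--   print(opts.iterations) -- The value of --iterations, if given.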

function M.new(opts)
    assert(type(opts) == 'table', 'given argument should be a table')
    local output_format = opts.output_format or 'console'
    return setmetatable({
        output = opts.output,
        output_format = output_format:lower(),
        results = {},
    }, {__index = {
        add_result = add_result,
        dump_results = dump_results,
    }})
end

return M
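
Putting it all together, a complete benchmark on top of this module might
look as follows. This is a minimal sketch: the file name mybench.lua, the
subtest name, and the string-formatting workload are hypothetical.

local benchmark = require('benchmark')
local clock = require('clock')

local USAGE = 'tarantool mybench.lua [options]'

-- No extra options besides the default ones.
local opts = benchmark.argparse(arg, {}, USAGE)
local bench = benchmark.new(opts)

local ITERATIONS = 1e6

local start_time = {
    time = clock.time(),
    proc = clock.proc(),
}
-- The measured workload: format ITERATIONS numbers into strings.
for i = 1, ITERATIONS do
    local _ = ('%d'):format(i)
end

bench:add_result('string.format', {
    items = ITERATIONS,
    real_time = clock.time() - start_time.time,
    cpu_time = clock.proc() - start_time.proc,
})

bench:dump_results()

Run with no options, this prints 'string.format <N> rps' to stdout; with
--output_format json it dumps the Google Benchmark-style JSON instead.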