diff --git a/.gitmodules b/.gitmodules
index de8640ef194f4747daae53e634594b96880ded9b..aba1c2e269cf4a9cdfd2c992154d6485b67fddc0 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -11,3 +11,6 @@
        path = third_party/sophia
        url = https://github.com/tarantool/sophia.git
        branch = dev
+[submodule "test-run"]
+	path = test-run
+	url = https://github.com/tarantool/test-run.git
diff --git a/test-run b/test-run
new file mode 160000
index 0000000000000000000000000000000000000000..4c984ce318af98ff3b0891610fe042c27b638711
--- /dev/null
+++ b/test-run
@@ -0,0 +1 @@
+Subproject commit 4c984ce318af98ff3b0891610fe042c27b638711
diff --git a/test/lib/Makefile b/test/lib/Makefile
deleted file mode 100644
index 428f9811ad01af96efe84faf0c4d8f37007e4981..0000000000000000000000000000000000000000
--- a/test/lib/Makefile
+++ /dev/null
@@ -1,2 +0,0 @@
-sql.py: sql.g
-	yapps sql.g
diff --git a/test/lib/__init__.py b/test/lib/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/test/lib/admin_connection.py b/test/lib/admin_connection.py
deleted file mode 100644
index 5841a12e2a2742630101a5bb8b15e2e24f0d21c5..0000000000000000000000000000000000000000
--- a/test/lib/admin_connection.py
+++ /dev/null
@@ -1,64 +0,0 @@
-__author__ = "Konstantin Osipov <kostja.osipov@gmail.com>"
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-# 1. Redistributions of source code must retain the above copyright
-#    notice, this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright
-#    notice, this list of conditions and the following disclaimer in the
-#    documentation and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-# SUCH DAMAGE.
-
-import socket
-import yaml
-import sys
-import re
-from tarantool_connection import TarantoolConnection
-
-ADMIN_SEPARATOR = '\n'
-
-class AdminConnection(TarantoolConnection):
-    def execute_no_reconnect(self, command, silent):
-        if not command:
-            return
-        if not silent:
-            sys.stdout.write(command + ADMIN_SEPARATOR)
-        cmd = command.replace('\n', ' ') + ADMIN_SEPARATOR
-        self.socket.sendall(cmd)
-
-        bufsiz = 4096
-        res = ""
-
-        while True:
-            buf = self.socket.recv(bufsiz)
-            if not buf:
-                break
-            res = res + buf
-            if (res.rfind("\n...\n") >= 0 or res.rfind("\r\n...\r\n") >= 0):
-                break
-
-        # validate yaml by parsing it
-        try:
-            yaml.load(res)
-        finally:
-            if not silent:
-                sys.stdout.write(res.replace("\r\n", "\n"))
-        return res
-
-    def connect(self):
-        super(AdminConnection, self).connect()
-        handshake = self.socket.recv(128)
-        if not re.search(r'^Tarantool.*console.*', str(handshake)):
-            raise RuntimeError('Broken tarantool console handshake')
diff --git a/test/lib/app_server.py b/test/lib/app_server.py
deleted file mode 100644
index e7d22005f95faf61150aa2db9c8bef519ab95a99..0000000000000000000000000000000000000000
--- a/test/lib/app_server.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import os
-import re
-import sys
-import glob
-import traceback
-import shutil
-from subprocess import Popen, PIPE
-
-from lib.server import Server
-from lib.tarantool_server import Test
-
-class AppTest(Test):
-    def execute(self, server):
-        execs = []
-        proc = Popen([os.path.join(os.getcwd(), self.name)], stdout=PIPE, cwd=server.vardir)
-        sys.stdout.write(proc.communicate()[0])
-
-class AppServer(Server):
-    """A dummy server implementation for application server tests"""
-    def __new__(cls, ini=None):
-        return Server.__new__(cls)
-
-    def __init__(self, _ini=None):
-        if _ini is None:
-            _ini = {}
-        ini = {
-            'vardir': None
-        }; ini.update(_ini)
-        Server.__init__(self, ini)
-        self.testdir = os.path.abspath(os.curdir)
-        self.vardir = ini['vardir']
-        self.re_vardir_cleanup += [
-            "*.snap", "*.xlog", "*.inprogress",
-            "*.sup", "*.lua", "*.pid"]
-        self.cleanup()
-        self.builddir = ini['builddir']
-        self.debug = False
-        self.lua_libs = ini['lua_libs']
-
-    def deploy(self, vardir=None, silent=True, need_init=True):
-        self.vardir = vardir
-        if not os.access(self.vardir, os.F_OK):
-            os.makedirs(self.vardir)
-        if self.lua_libs:
-            for i in self.lua_libs:
-                source = os.path.join(self.testdir, i)
-                shutil.copy(source, self.vardir)
-
-    @classmethod
-    def find_exe(cls, builddir):
-        cls.builddir = builddir
-
-    def find_tests(self, test_suite, suite_path):
-        def patterned(test, patterns):
-            answer = []
-            for i in patterns:
-                if test.name.find(i) != -1:
-                    answer.append(test)
-            return answer
-
-        test_suite.tests = [AppTest(k, test_suite.args, test_suite.ini) for k in sorted(glob.glob(os.path.join(suite_path, "*.test.lua" )))]
-        test_suite.tests = sum(map((lambda x: patterned(x, test_suite.args.tests)), test_suite.tests), [])
-
-    def print_log(self, lines):
-        pass
diff --git a/test/lib/box_connection.py b/test/lib/box_connection.py
deleted file mode 100644
index 9d318de0da6e840a691062cf8ddf5c957816df84..0000000000000000000000000000000000000000
--- a/test/lib/box_connection.py
+++ /dev/null
@@ -1,89 +0,0 @@
-__author__ = "Konstantin Osipov <kostja.osipov@gmail.com>"
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-# 1. Redistributions of source code must retain the above copyright
-#    notice, this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright
-#    notice, this list of conditions and the following disclaimer in the
-#    documentation and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-# SUCH DAMAGE.
-import os
-import sys
-import errno
-import ctypes
-import socket
-import struct
-import warnings
-
-import sql
-from tarantool_connection import TarantoolConnection
-
-from tarantool import Connection as tnt_connection
-from tarantool import Schema
-
-class BoxConnection(TarantoolConnection):
-    def __init__(self, host, port):
-        super(BoxConnection, self).__init__(host, port)
-        self.py_con = tnt_connection(host, port, connect_now=False, socket_timeout=100)
-        self.py_con.error = False
-        self.sort = False
-
-    def connect(self):
-        self.py_con.connect()
-
-    def authenticate(self, user, password):
-        self.py_con.authenticate(user, password)
-
-    def disconnect(self):
-        self.py_con.close()
-
-    def reconnect(self):
-        if self.py_con.connected:
-            self.disconnect()
-        self.connect()
-
-    def set_schema(self, schemadict):
-        self.py_con.schema = Schema(schemadict)
-
-    def check_connection(self):
-        rc = self.py_con._sys_recv(self.py_con._socket.fileno(), '  ', 1, socket.MSG_DONTWAIT | socket.MSG_PEEK)
-        if ctypes.get_errno() == errno.EAGAIN:
-            ctypes.set_errno(0)
-            return True
-        return False
-
-    def execute(self, command, silent=True):
-        return self.execute_no_reconnect(command, silent)
-
-    def execute_no_reconnect(self, command, silent=True):
-        statement = sql.parse("sql", command)
-        if statement == None:
-            return "You have an error in your SQL syntax\n"
-        statement.sort = self.sort
-
-        if not silent:
-            print command
-
-        response = None
-        request = statement.pack(self.py_con)
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore")
-            response = self.py_con._send_request(request)
-
-        if not silent:
-            print statement.unpack(response)
-
-        return statement.unpack(response)
diff --git a/test/lib/colorer.py b/test/lib/colorer.py
deleted file mode 100644
index e46517441c2ebe35e97d732442093cfe7d3b3cb7..0000000000000000000000000000000000000000
--- a/test/lib/colorer.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import os
-import sys
-
-class Singleton(type):
-    _instances = {}
-    def __call__(cls, *args, **kwargs):
-        if cls not in cls._instances:
-            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
-        return cls._instances[cls]
-
-class CSchema(object):
-    objects = {}
-
-    def __init__(self):
-        self.main_objects = {
-            'diff_mark': {},
-            'diff_in':   {},
-            'diff_out':  {},
-            'test_pass': {},
-            'test_fail': {},
-            'test_new':  {},
-            'test_skip': {},
-            'test_disa': {},
-            'error':     {},
-            'lerror':    {},
-            'tail':      {},
-            'ts_text':   {},
-            'path':      {},
-            'info':      {},
-            'separator': {},
-            't_name':    {},
-            'serv_text': {},
-            'version':   {},
-            'tr_text':   {},
-            'log':       {},
-        }
-        self.main_objects.update(self.objects)
-
-class SchemaAscetic(CSchema):
-    objects = {
-        'diff_mark': {'fgcolor': 'magenta'},
-        'diff_in':   {'fgcolor': 'green'},
-        'diff_out':  {'fgcolor': 'red'},
-        'test_pass': {'fgcolor': 'green'},
-        'test_fail': {'fgcolor': 'red'},
-        'test_new':  {'fgcolor': 'lblue'},
-        'test_skip': {'fgcolor': 'grey'},
-        'test_disa': {'fgcolor': 'grey'},
-        'error':     {'fgcolor': 'red'},
-    }
-
-class SchemaPretty(CSchema):
-    objects = {
-        'diff_mark': {'fgcolor': 'magenta'},
-        'diff_in':   {'fgcolor': 'blue'},
-        'diff_out':  {'fgcolor': 'red'},
-        'test_pass': {'fgcolor': 'green'},
-        'test_fail': {'fgcolor': 'red'},
-        'test_new':  {'fgcolor': 'lblue'},
-        'test_skip': {'fgcolor': 'grey'},
-        'test_disa': {'fgcolor': 'grey'},
-        'error':     {'fgcolor': 'red'},
-        'lerror':    {'fgcolor': 'lred'},
-        'tail':      {'fgcolor': 'lblue'},
-        'ts_text':   {'fgcolor': 'lmagenta'},
-        'path':      {'fgcolor': 'green',  'bold':True},
-        'info':      {'fgcolor': 'yellow', 'bold':True},
-        'separator': {'fgcolor': 'blue'},
-        't_name':    {'fgcolor': 'lblue'},
-        'serv_text': {'fgcolor': 'lmagenta'},
-        'version':   {'fgcolor': 'yellow', 'bold':True},
-        'tr_text':   {'fgcolor': 'green'},
-        'log':       {'fgcolor': 'grey'}
-    }
-
-class Colorer(object):
-    """
-    Colorer/Styler based on VT220+ specifications (Not full). Based on:
-    1. ftp://ftp.cs.utk.edu/pub/shuford/terminal/dec_vt220_codes.txt
-    2. http://invisible-island.net/xterm/ctlseqs/ctlseqs.html
-    """
-    __metaclass__ = Singleton
-    fgcolor = {
-        "black"    : '0;30',
-        "red"      : '0;31',
-        "green"    : '0;32',
-        "brown"    : '0;33',
-        "blue"     : '0;34',
-        "magenta"  : '0;35',
-        "cyan"     : '0;36',
-        "grey"     : '0;37',
-        "lgrey"    : '1;30',
-        "lred"     : '1;31',
-        "lgreen"   : '1;32',
-        "yellow"   : '1;33',
-        "lblue"    : '1;34',
-        "lmagenta" : '1;35',
-        "lcyan"    : '1;36',
-        "white"    : '1;37',
-    }
-    bgcolor = {
-        "black"    : '0;40',
-        "red"      : '0;41',
-        "green"    : '0;42',
-        "brown"    : '0;43',
-        "blue"     : '0;44',
-        "magenta"  : '0;45',
-        "cyan"     : '0;46',
-        "grey"     : '0;47',
-        "lgrey"    : '1;40',
-        "lred"     : '1;41',
-        "lgreen"   : '1;42',
-        "yellow"   : '1;43',
-        "lblue"    : '1;44',
-        "lmagenta" : '1;45',
-        "lcyan"    : '1;46',
-        "white"    : '1;47',
-    }
-    attributes = {
-        "bold"      : '1',
-        "underline" : '4',
-        "blinking"  : '5',
-        "negative"  : '7',
-        "invisible" : '8',
-    }
-    begin = "\033["
-    end = "m"
-    disable = begin+'0'+end
-
-    def __init__(self):
-        self.stdout = sys.stdout
-        self.is_term = self.stdout.isatty()
-        self.colors = int(os.popen('tput colors').read()) if self.is_term else None
-        schema = os.getenv('TT_SCHEMA', 'ascetic')
-        if schema == 'ascetic':
-            self.schema = SchemaAscetic()
-        elif schema == 'pretty':
-            self.schema = SchemaPretty()
-        else:
-            self.schema = CSchema()
-        self.schema = self.schema.main_objects
-
-    def set_stdout(self):
-        sys.stdout = self
-
-    def ret_stdout(self):
-        sys.stdout = self.stdout
-
-    def write(self, *args, **kwargs):
-        flags = []
-        if 'schema' in kwargs:
-            kwargs.update(self.schema[kwargs['schema']])
-        for i in self.attributes:
-            if i in kwargs and kwargs[i] == True:
-                flags.append(self.attributes[i])
-        flags.append(self.fgcolor[kwargs['fgcolor']]) if 'fgcolor' in kwargs else None
-        flags.append(self.bgcolor[kwargs['bgcolor']]) if 'bgcolor' in kwargs else None
-
-        if self.is_term:
-            self.stdout.write(self.begin+';'.join(flags)+self.end)
-        for i in args:
-            self.stdout.write(str(i))
-        if self.is_term:
-            self.stdout.write(self.disable)
-        self.stdout.flush()
-
-    def __call__(self, *args, **kwargs):
-        self.write(*args, **kwargs)
-
-    def writeout_unidiff(self, diff):
-        for i in diff:
-            if i.startswith('+'):
-                self.write(i, schema='diff_in')
-            elif i.startswith('-'):
-                self.write(i, schema='diff_out')
-            elif i.startswith('@'):
-                self.write(i, schema='diff_mark')
-            else:
-                self.write(i)
-
-    def flush(self):
-        return self.stdout.flush()
-
-    def fileno(self):
-        return self.stdout.fileno()
-
-    def isatty(self):
-        return self.is_term
diff --git a/test/lib/parallel.py b/test/lib/parallel.py
deleted file mode 100644
index b992cbee0bf560701120a8356e277fc0ada52965..0000000000000000000000000000000000000000
--- a/test/lib/parallel.py
+++ /dev/null
@@ -1,388 +0,0 @@
-#!/usr/bin/env python2
-
-import os
-import re
-import sys
-import copy
-import glob
-import time
-import uuid
-import random
-import itertools
-import traceback
-import ConfigParser
-
-import multiprocessing
-from multiprocessing import Process as MProcess
-from multiprocessing.managers import BaseManager as MBaseManager
-
-from Queue import Empty as QueueEmpty
-from Queue import Full as QueueFull
-
-from lib.test import FilteredStream
-from lib.preprocessor import TestState
-from lib.tarantool_server import TarantoolServer
-
-import logging
-logger = multiprocessing.log_to_stderr()
-logger.setLevel(logging.INFO)
-
-import pickle
-
-from StringIO import StringIO
-
-class ParallelManager(MBaseManager): pass
-ParallelManager.register('Queue', multiprocessing.Queue)
-
-class Gopher(MProcess):
-    def __init__(self, **kwargs):
-        if kwargs and 'target' in kwargs:
-            del kwargs['target']
-        super(Gopher, self).__init__(**kwargs)
-
-    def run(self):
-        try:
-            queuein  = self._args[0]
-            queueout = self._args[1]
-            while True:
-                logger.debug("||_process.run > getting job")
-                obj = queuein.get()
-                logger.debug("||_process.run > ok, it's great")
-                assert obj
-                assert len(obj) == 2
-                assert callable(obj[0])
-                assert isinstance(obj[1], (tuple, list))
-                retv = obj[0](*obj[1])
-                logger.debug("||_process.run > job is done, let's put into outqueue")
-                queueout.put(retv)
-        except EOFError:
-            pass
-
-class GopherPoolException(Exception):
-    def __init__(self, message):
-        self.message = message
-
-class GopherPool(object):
-    DEFAULT = -1
-    INITED = 0
-    POPULATED = 1
-    STARTED = 2
-    ENDED = 3
-
-    def __init__(self, **kwargs):
-        self.status   = GopherPool.DEFAULT
-        self.pool     = []
-        self.number   = kwargs.get('processes', 1)
-        self.manager  = ParallelManager()
-        self.manager.start()
-        self.queuein  = self.manager.Queue()
-        self.queueout = self.manager.Queue()
-        self.jobs_in  = 0
-        self.jobs_out = 0
-        self.jobs_end = False
-        self.status   = GopherPool.INITED
-        self._populate()
-
-    def _populate(self):
-        assert(self.status == GopherPool.INITED)
-        for i in xrange(self.number):
-            kwargs = {
-            }
-            self.pool.append(Gopher(
-                group    = None,
-                name     = 'Worker-%d' % i,
-                args     = [
-                    self.queuein,
-                    self.queueout
-                ]
-            ))
-        self.status = GopherPool.POPULATED
-
-    def _repopulate(self):
-        assert(self.status >= GopherPool.STARTED)
-        logger.debug('||_pool.repopulate > Begin repopulation')
-        for n, proc in enumerate(self.pool):
-            if not proc.is_alive() and self.status != GopherPool.ENDED:
-                logger.debug("Manager: Process %s is dead (code %s). Recreating",
-                        repr(proc.name), proc.exitcode)
-                self.pool[n] = Gopher(
-                        group     = None,
-                        name      = proc.name,
-                        args      = [
-                            self.queuein,
-                            self.queueout
-                        ]
-                )
-                self.jobs_out += 1
-        logger.debug('||_pool.repopulate > Ending repopulation')
-        return 0
-
-    def fill(self, iterable=None):
-        logger.debug('||_pool.fill > Entering')
-        assert(self.status > GopherPool.INITED and self.status < GopherPool.ENDED)
-        if iterable == None:
-            raise GopherPoolException("Iterable must be defined \
-                    for '||_pool.fill'")
-        jobs = iterable
-        target = 0
-        while True:
-            self._repopulate()
-            try:
-                while (not self.queuein.full()   and
-                       target < 10               and
-                       not self.queueout.full()):
-                    logger.debug("I'll put a job now")
-                    job = iterable.next()
-                    self.queuein.put(job)
-                    self.jobs_in += 1
-                    target += 1
-                logger.debug("||_pool.fill > While stopped")
-            except StopIteration:
-                logger.debug("||_pool.fill > StopIteration")
-                self.jobs_end = True
-                raise StopIteration
-            yield
-            target = 0
-
-    def run(self):
-        for proc in self.pool:
-            proc.start()
-        self.status = GopherPool.STARTED
-        return Parallel_Iterator(self)
-
-class Parallel_Iterator(object):
-    def __init__(self, pool):
-        self.pool = pool
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        return self.next()
-
-    def next(self, timeout=None):
-        if self.pool.jobs_end == True and \
-                (self.pool.jobs_in == self.pool.jobs_out):
-            raise StopIteration()
-        else:
-            ans = self.pool.queueout.get(block=True, timeout=timeout)
-            self.pool.jobs_out += 1
-            return ans
-
-class Parallel_FilteredStream(object):
-    def __init__(self):
-        self.stream = StringIO()
-        self.filters = []
-
-    def write(self, fragment):
-        skipped = False
-        for line in fragment.splitlines(True):
-            original_len = len(line.strip())
-            for pattern, replacement in self.filters:
-                line = pattern.sub(replacement, line)
-                skipped = original_len and not line.strip()
-                if skipped:
-                    break
-            if not skipped:
-                self.stream.write(line)
-
-    def push_filter(self, pattern, replacement):
-        self.filters.append([re.compile(pattern), replacement])
-
-    def pop_filter(self):
-        self.filters.pop()
-
-    def clear_all_filters(self):
-        self.filters = []
-
-    def close(self):
-        self.clear_all_filters()
-
-    def getvalue(self):
-        return self.stream.getvalue()
-
-class TestStatus(object):
-    def __init__(self, status, reject = ''):
-        if isinstance(status, basestring):
-            status = status.lower()
-            if (status == "pass"):
-                status = 0
-            elif (status == "skip"):
-                status = 1
-            elif (status == "new"):
-                status = 2
-            else:
-                status = 3
-        self.reject = reject
-        self.status = status
-        self.message = ''
-
-    def set_message(self, msg):
-        self.message = msg
-        return self
-
-    def generate_printable(self):
-        pass
-
-class Supervisor(object):
-    def __init__(self, suite_path, args):
-        self.args = args
-        self.tests = []
-        self.suite_path = suite_path
-        self.ini = {
-                'core': 'tarantool',
-                'script': os.path.join(suite_path, 'parallel.lua'),
-        }
-
-        # read suite config
-        config = ConfigParser.ConfigParser()
-        config.read(os.path.join(suite_path, "suite.ini"))
-        self.ini.update(dict(config.items("default")))
-        self.ini.update(self.args.__dict__)
-        self.jobs = int(self.ini.get('jobs', 1))
-        self.count = int(self.ini.get('count', 0))
-
-        for i in ["script"]:
-            self.ini[i] = os.path.join(suite_path, self.ini[i]) if i in self.ini else None
-        self.server = TarantoolServer(self.ini)
-        self.pool = None
-        self.iterator = None
-
-    def find_tests(self):
-        self.tests += [Parallel_PythonTest(k) \
-                for k in sorted(glob.glob(os.path.join(self.suite_path, "*.test.py" )))]
-
-    def take_rand(self):
-        if self.count != 0:
-            for test in self.tests:
-                sql = self.server.sql.clone()
-                admin = self.server.admin.clone()
-                yield [test, [sql, admin]]
-        else:
-            while True:
-                sql = self.server.sql.clone()
-                admin = self.server.admin.clone()
-                yield [random.choice(self.tests), [sql, admin]]
-
-    def run_all(self):
-        self.find_tests()
-        if self.count != 0:
-            self.tests *= self.count
-            random.shuffle(self.tests)
-        self.pool = GopherPool(processes = self.jobs)
-        self.iterator = self.pool.run()
-        self.filler = self.pool.fill(self.take_rand())
-        try:
-            self.server.cleanup()
-            logger.info("Tarantool.Instance > Server cleaned up")
-            logger.info("Tarantool.Instance > Server's path: %s", self.server.binary)
-            self.server.deploy()
-            logger.info("Tarantool.Instance > Server deployed")
-            try:
-                while True:
-                    self.filler.next()
-                    logger.debug("BigBrother.run > Jobs filled %d %d" %
-                            (self.pool.queuein.qsize(), self.pool.queueout.qsize()))
-                    while True:
-                        try:
-                            logger.debug("BigBrother.run > waiting for task")
-                            task = self.iterator.next(1)
-                            logger.debug("BigBrother.run > took task")
-                            if task is None:
-                                logger.info('>>>> Test return NONE')
-                                continue
-                            stat = task.get_status()
-                            if stat.status != 3:
-                                logger.info('>>>> Test %s finished' % repr(task.name))
-                            else:
-                                logger.error('>>>> Test %s failed with %s (%s)' %
-                                        (repr(task.name), stat.message, stat.reject))
-                        except (QueueEmpty, StopIteration):
-                            break
-            except StopIteration:
-                pass
-        finally:
-            self.server.stop()
-            logger.info("Tarantool.Instance > Server stopped")
-
-class Parallel_Test(object):
-    def __init__(self, name):
-        rg = re.compile('.test.*')
-        self.name   = name
-        self.reject = None
-        self.id     = None
-        logger.debug("||_Test.__init__ > Entering test '%s'" % self.name)
-        self.result = rg.sub('.result', name)
-        self.is_executed     = False
-        self.is_executed_ok  = False
-        self.is_equal_result = False
-        self.is_new          = False
-
-    def passed(self):
-        return self.is_executed and self.is_executed_ok and self.is_equal_result
-
-    def execute(self, server, sql, admin):
-        pass
-
-    def run(self, sql, admin):
-        self.id     = uuid.uuid1().get_hex().replace('-', '')[0:6]
-        self.reject = re.sub('.test.*', '.reject_%s' % self.id, self.name)
-        self.diagnostics = "unknown"
-        save_stdout = sys.stdout
-        self.test_stdout = Parallel_FilteredStream()
-        try:
-            sys.stdout = self.test_stdout
-            logger.debug("Entering")
-            self.execute(sql, admin)
-            self.is_executed_ok = True
-        except Exception as e:
-            logger.error("||_Test.run > Exception '%s' was thrown for '%s'" % (type(e), str(e)))
-            #logger.error(traceback.format_exc())
-            with open(self.reject, 'a') as reject:
-                traceback.print_exc(e, reject)
-            self.diagnostics = str(e)
-        finally:
-            sys.stdout = save_stdout
-        self.is_executed = True
-
-        if not os.path.isfile(self.result):
-            self.is_new = True
-            with open(self.result, 'w') as result:
-                result.write(self.test_stdout.getvalue())
-
-        if self.is_executed_ok and not self.is_new:
-            self.is_equal_result = \
-                    (open(self.result, 'r').read() == self.test_stdout.getvalue())
-
-        if not self.is_equal_result:
-            with open(self.reject, 'a') as reject:
-                reject.write(self.test_stdout.getvalue())
-
-        return self
-
-    def get_status(self):
-        if self.is_executed_ok and self.is_equal_result:
-            return TestStatus("pass")
-        elif (self.is_executed_ok and not self.is_equal_result and self.is_new):
-            return TestStatus("new")
-        else:
-            where = ""
-            if not self.is_executed_ok:
-                where = "test execution aborted, reason '{0}'".format(self.diagnostics)
-            elif not self.is_equal_result:
-                where = "wrong test output"
-            return TestStatus("fail", self.reject).set_message(where)
-
-    def __call__(self, sql, admin):
-        try:
-            logger.debug("||_Test.__call__ > Entering test '%s'" % self.name)
-            return self.run(sql, admin)
-        except Exception as e:
-            logger.error("||_Test.__call__ > Exception '%s' was thrown for '%s'" % (type(e), str(e)))
-            logger.error(traceback.format_exc())
-
-class Parallel_FuncTest(Parallel_Test):
-    def execute(self, sql, admin):
-        execfile(self.name, dict(locals(), sql=sql, admin=admin))
-
-class Parallel_PythonTest(Parallel_FuncTest): pass
diff --git a/test/lib/preprocessor.py b/test/lib/preprocessor.py
deleted file mode 100644
index f5f63678679fd15f8f0aaf3d9d257cce2b2960ff..0000000000000000000000000000000000000000
--- a/test/lib/preprocessor.py
+++ /dev/null
@@ -1,230 +0,0 @@
-import os
-
-import sys
-import shlex
-import shutil
-import socket
-
-from collections import deque
-
-from lib.admin_connection import AdminConnection
-
-class Namespace(object):
-    pass
-
-class LuaPreprocessorException(Exception):
-    def __init__(self, val):
-        super(LuaPreprocessorException, self).__init__()
-        self.value = val
-    def __str__(self):
-        return "lua preprocessor error: " + repr(self.value)
-
-class TestState(object):
-    def __init__(self, suite_ini, default_server, create_server):
-        self.delimiter = ''
-        self.suite_ini = suite_ini
-        self.environ = Namespace()
-        self.create_server = create_server
-        self.servers =      { 'default': default_server }
-        self.connections =  { 'default': default_server.admin }
-        # curcon is an array since we may have many connections
-        self.curcon = [self.connections['default']]
-        nmsp = Namespace()
-        setattr(nmsp, 'admin', default_server.admin.uri)
-        setattr(nmsp, 'listen', default_server.sql.uri)
-        setattr(self.environ, 'default', nmsp)
-
-    def parse_preprocessor(self, string):
-        token_store = deque()
-        lexer = shlex.shlex(string)
-        lexer.commenters = []
-        token = lexer.get_token()
-        if not token:
-            return
-        if token == 'setopt':
-            option = lexer.get_token()
-            if not option:
-                raise LuaPreprocessorException("Wrong token for setopt: expected option name")
-            value = lexer.get_token()
-            if not value:
-                raise LuaPreprocessorException("Wrong token for setopt: expected option value")
-            return self.options(option, value)
-        token_store.append(token)
-        token = lexer.get_token()
-        if token == 'server':
-            stype = token_store.popleft()
-            sname = lexer.get_token()
-            if not sname:
-                raise LuaPreprocessorException("Wrong token for server: expected name")
-            options = {}
-            temp = lexer.get_token()
-            if not temp:
-                pass
-            elif temp == 'with':
-                while True:
-                    k = lexer.get_token()
-                    if not k:
-                        break
-                    v = lexer.get_token()
-                    if v == '=':
-                        v = lexer.get_token()
-                    options[k] = v
-                    lexer.get_token()
-            else:
-                raise LuaPreprocessorException("Wrong token for server: expected 'with', got " + repr(temp))
-            return self.server(stype, sname, options)
-        elif token == 'connection':
-            ctype = token_store.popleft()
-            cname = [lexer.get_token()]
-            if not cname[0]:
-                raise LuaPreprocessorException("Wrong token for connection: expected name")
-            cargs = None
-            temp = lexer.get_token()
-            if temp == 'to':
-                cargs = lexer.get_token()
-            elif temp == ',':
-                while True:
-                    a = lexer.get_token()
-                    if not a:
-                        break
-                    if a == ',':
-                        continue
-                    cname.append(a)
-            elif temp:
-                raise LuaPreprocessorException("Wrong token for server: expected 'to' or ',', got " + repr(temp))
-            return self.connection(ctype, cname, cargs)
-        elif token == 'filter':
-            ftype = token_store.popleft()
-            ref = None
-            ret = None
-            temp = lexer.get_token()
-            if temp:
-                ref = temp
-                if not temp:
-                    raise LuaPreprocessorException("Wrong token for filter: expected filter1")
-                if lexer.get_token() != 'to':
-                    raise LuaPreprocessorException("Wrong token for filter: expected 'to', got {0}".format(repr(temp)))
-                temp = lexer.get_token()
-                if not temp:
-                    raise LuaPreprocessorException("Wrong token for filter: expected filter2")
-                ret = temp
-            return self.filter(ftype, ref, ret)
-        elif token == 'variable':
-            ftype = token_store.popleft()
-            ref = lexer.get_token()
-            temp = lexer.get_token()
-            if temp != 'to':
-                raise LuaPreprocessorException("Wrong token for filter: exptected 'to', got {0}".format(repr(temp)))
-            ret = lexer.get_token()
-            return self.variable(ftype, ref, ret)
-        else:
-            raise LuaPreprocessorException("Wrong command: "+repr(lexer.instream.getvalue()))
-
-    def options(self, key, value):
-        if key == 'delimiter':
-            self.delimiter = value[1:-1]
-        else:
-            raise LuaPreprocessorException("Wrong option: "+repr(key))
-
-    def server(self, ctype, sname, opts):
-        if ctype == 'create':
-            if sname in self.servers:
-                raise LuaPreprocessorException('Server {0} already exists'.format(repr(sname)))
-            temp = self.create_server()
-            if 'need_init' in opts:
-                temp.need_init   = True if opts['need_init'] == 'True' else False
-            if 'script' in opts:
-                temp.script = opts['script'][1:-1]
-            temp.rpl_master = None
-            if 'rpl_master' in opts:
-                temp.rpl_master = self.servers[opts['rpl_master']]
-            temp.vardir = os.path.join(self.suite_ini['vardir'], sname)
-            temp.name = sname
-            self.servers[sname] = temp
-            self.servers[sname].deploy(silent=True)
-            nmsp = Namespace()
-            setattr(nmsp, 'admin', temp.admin.port)
-            setattr(nmsp, 'listen', temp.sql.port)
-            if temp.rpl_master:
-                setattr(nmsp, 'master', temp.rpl_master.sql.port)
-            setattr(self.environ, sname, nmsp)
-        elif ctype == 'start':
-            if sname not in self.servers:
-                raise LuaPreprocessorException('Can\'t start nonexistent server '+repr(sname))
-            self.servers[sname].start(silent=True)
-            self.connections[sname] = self.servers[sname].admin
-            try:
-                self.connections[sname]('return true', silent=True)
-            except socket.error as e:
-                LuaPreprocessorException('Can\'t start server '+repr(sname))
-        elif ctype == 'stop':
-            if sname not in self.servers:
-                raise LuaPreprocessorException('Can\'t stop nonexistent server '+repr(sname))
-            self.connections[sname].disconnect()
-            self.connections.pop(sname)
-            self.servers[sname].stop()
-        elif ctype == 'deploy':
-            self.servers[sname].deploy()
-        elif ctype == 'cleanup':
-            if sname not in self.servers:
-                raise LuaPreprocessorException('Can\'t cleanup nonexistent server '+repr(sname))
-            self.servers[sname].cleanup()
-            if sname != 'default':
-                delattr(self.environ, sname)
-        else:
-            raise LuaPreprocessorException('Unknown command for server: '+repr(ctype))
-
-    def connection(self, ctype, cnames, sname):
-        # we always get a list of connections as input here
-        cname = cnames[0]
-        if ctype == 'create':
-            if sname not in self.servers:
-                raise LuaPreprocessorException('Can\'t create connection to nonexistent server '+repr(sname))
-            if cname in self.connections:
-                raise LuaPreprocessorException('Connection {0} already exists'.format(repr(cname)))
-            self.connections[cname] = AdminConnection('localhost', self.servers[sname].admin.port)
-            self.connections[cname].connect()
-        elif ctype == 'drop':
-            if cname not in self.connections:
-                raise LuaPreprocessorException('Can\'t drop nonexistent connection '+repr(cname))
-            self.connections[cname].disconnect()
-            self.connections.pop(cname)
-        elif ctype == 'set':
-            for i in cnames:
-                if not i in self.connections:
-                    raise LuaPreprocessorException('Can\'t set nonexistent connection '+repr(cname))
-            self.curcon = [self.connections[i] for i in cnames]
-        else:
-            raise LuaPreprocessorException('Unknown command for connection: '+repr(ctype))
-
-    def filter(self, ctype, ref, ret):
-        if ctype == 'push':
-            sys.stdout.push_filter(ref[1:-1], ret[1:-1])
-        elif ctype == 'pop':
-            sys.stdout.pop_filter()
-        elif ctype == 'clear':
-            sys.stdout.clear_all_filters()
-        else:
-            raise LuaPreprocessorException("Wrong command for filters: " + repr(ctype))
-
-    def variable(self, ctype, ref, ret):
-        if ctype == 'set':
-            self.curcon[0](ref+'='+str(eval(ret[1:-1], {}, self.environ.__dict__)), silent=True)
-        else:
-            raise LuaPreprocessorException("Wrong command for variables: " + repr(ctype))
-
-    def __call__(self, string):
-        string = string[3:].strip()
-        self.parse_preprocessor(string)
-
-    def cleanup(self):
-        sys.stdout.clear_all_filters()
-        # don't stop the default server
-        self.servers.pop('default')
-        for k, v in self.servers.iteritems():
-            v.stop(silent=True)
-            v.cleanup()
-            if k in self.connections:
-                self.connections[k].disconnect()
-                self.connections.pop(k)
-
diff --git a/test/lib/server.py b/test/lib/server.py
deleted file mode 100644
index 5aed7f27a4f043480c1dacd8d5794908b699efb7..0000000000000000000000000000000000000000
--- a/test/lib/server.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import os
-import re
-import sys
-import glob
-import stat
-import time
-import shlex
-import daemon
-import shutil
-import signal
-import socket
-import subprocess
-import ConfigParser
-
-def check_port(port):
-    """Check if the port we're connecting to is available"""
-    try:
-        sock = socket.create_connection(("localhost", port))
-    except socket.error as e:
-        return
-    raise RuntimeError("The server is already running on port {0}".format(port))
-
-def prepare_gdb(binary, args):
-    """Prepare server startup arguments to run under gdb."""
-    args = shlex.split('screen -dmS tnt-gdb gdb %s -ex \'b main\' -ex run' % binary) + args
-    return args
-
-def prepare_valgrind(args, valgrind_log, valgrind_sup):
-    "Prepare server startup arguments to run under valgrind."
-    args = [ "valgrind", "--log-file={0}".format(valgrind_log),
-             "--suppressions={0}".format(valgrind_sup),
-             "--gen-suppressions=all", "--show-reachable=yes", "--leak-check=full",
-             "--read-var-info=yes", "--quiet" ] + args
-    return args
-
-def check_tmpfs_exists():
-    return os.uname()[0] in 'Linux' and os.path.isdir("/dev/shm")
-
-def create_tmpfs_vardir(vardir):
-    os.makedirs(os.path.join("/dev/shm", vardir))
-    os.symlink(os.path.join("/dev/shm", vardir), vardir)
-
-class Server(object):
-    """Server represents a single server instance. Normally, the
-    program operates with only one server, but in future we may add
-    replication slaves. The server is started once at the beginning
-    of each suite, and stopped at the end."""
-
-    @property
-    def vardir(self):
-        if not hasattr(self, '_vardir'):
-            raise ValueError("No vardir specified")
-        return self._vardir
-    @vardir.setter
-    def vardir(self, path):
-        if path == None:
-            return
-        self._vardir = os.path.abspath(path)
-
-
-    def __new__(cls, ini=None):
-        if ini == None or 'core' not in ini or ini['core'] is None:
-            return object.__new__(cls)
-        core = ini['core'].lower().strip()
-        cls.mdlname = "lib.{0}_server".format(core.replace(' ', '_'))
-        cls.clsname = "{0}Server".format(core.title().replace(' ', ''))
-        corecls = __import__(cls.mdlname, fromlist=cls.clsname).__dict__[cls.clsname]
-        return corecls.__new__(corecls, core)
-
-    def __init__(self, ini):
-        self.core = ini['core']
-        self.ini = ini
-        self.re_vardir_cleanup = ['*.core.*', 'core']
-        self.vardir = ini['vardir']
-
-    def prepare_args(self):
-        return []
-
-    def cleanup(self, full=False):
-        if full:
-            shutil.rmtree(self.vardir)
-            return
-        for re in self.re_vardir_cleanup:
-            for f in glob.glob(os.path.join(self.vardir, re)):
-                os.remove(f)
-
-    def install(self, binary=None, vardir=None, mem=None, silent=True):
-        pass
-    def init(self):
-        pass
-    def start(self, silent=True):
-        pass
-    def stop(self, silent=True):
-        pass
-    def restart(self):
-        pass
diff --git a/test/lib/sql.g b/test/lib/sql.g
deleted file mode 100644
index 226c56b6d2e96d72c4c74d98be5ae3bbb6131e82..0000000000000000000000000000000000000000
--- a/test/lib/sql.g
+++ /dev/null
@@ -1,88 +0,0 @@
-import sql_ast
-import re
-
-object_no_re = re.compile("[a-z_]*", re.I)
-
-%%
-
-# The grammar below solely covers the functionality provided by
-# Tarantool binary protocol, from which follow all the
-# limitations. For reference please see http://tarantool.org/docs/box-protocol.html
-
-parser sql:
-
-    ignore:           '\\s+'
-    token NUM:        '[+-]?[0-9]+'
-    token ID:         '[a-z_]+[0-9]+' 
-    token PROC_ID:    '[a-z_][a-z0-9_.:]*'
-    token STR:        '\'([^\']+|\\\\.)*\''
-    token PING:       'ping'
-    token INSERT:     'insert'
-    token REPLACE:    'replace'
-    token UPDATE:     'update'
-    token DELETE:     'delete'
-    token SELECT:     'select'
-    token INTO:       'into'
-    token FROM:       'from'
-    token WHERE:      'where'
-    token VALUES:     'values'
-    token SET:        'set'
-    token OR:         'or'
-    token LIMIT:      'limit'
-    token CALL:       'call'
-    token END:        '\\s*$'
-
-    rule sql:         (insert {{ stmt = insert }} |
-                      replace {{ stmt = replace }} |
-                      update {{ stmt = update }} |
-                      delete {{ stmt = delete }} |
-                      select {{ stmt = select }} |
-                      call {{ stmt = call }} |
-                      ping {{ stmt = ping }}) END {{ return stmt }}
-                      
-    rule insert:      INSERT [INTO] ident VALUES value_list
-                      {{ return sql_ast.StatementInsert(ident, value_list) }}
-    rule replace:     REPLACE [INTO] ident VALUES value_list
-                      {{ return sql_ast.StatementReplace(ident, value_list) }}
-    rule update:      UPDATE ident SET update_list opt_simple_where
-                      {{ return sql_ast.StatementUpdate(ident, update_list, opt_simple_where) }}
-    rule delete:      DELETE FROM ident opt_simple_where
-                      {{ return sql_ast.StatementDelete(ident, opt_simple_where) }}
-    rule select:      SELECT '\*' FROM ident opt_where opt_limit
-                      {{ return sql_ast.StatementSelect(ident, opt_where, opt_limit) }}
-    rule ping:        PING
-                      {{ return sql_ast.StatementPing() }}
-    rule call:        CALL PROC_ID value_list
-                      {{ return sql_ast.StatementCall(PROC_ID, value_list) }}
-    rule predicate:   ident '=' constant
-                      {{ return (ident, constant) }}
-    rule opt_simple_where:   {{ return None }}
-                      | WHERE predicate
-                      {{ return predicate }}
-    rule opt_where:   {{ return (0, None) }}
-                      | WHERE predicate 
-                      {{ return predicate }}
-    rule opt_limit:   {{ return 0xffffffff }}
-                      | LIMIT NUM {{ return int(NUM) }}
-    rule value_list:  '\(' {{ value_list = [] }}
-                         [expr {{ value_list = [expr] }} [("," expr {{ value_list.append(expr) }} )+]]
-                      '\)' {{ return value_list }}
-    rule update_list: predicate {{ update_list = [('=', predicate[0], predicate[1])] }}
-                      [(',' predicate {{ update_list.append(("=", predicate[0], predicate[1])) }})+]
-                      {{ return update_list }}
-    rule expr:        constant {{ return constant }}
-    rule constant:    NUM {{ return int(NUM) }} | STR {{ return STR[1:-1] }}
-    rule ident:       ID {{ return int(object_no_re.sub("", ID)) }}
-%%
-
-# SQL is case-insensitive, but in yapps it's not possible to
-# specify that a token must match in case-insensitive fashion.
-# This is hack to add re.IGNORECASE flag to all regular
-# expressions that represent tokens in the generated grammar.
-
-sqlScanner.patterns = map(lambda tup:
-                          (tup[0], re.compile(tup[1].pattern, re.IGNORECASE)),
-                          sqlScanner.patterns)
-
-# vim: nospell syntax=off ts=4 et
-
diff --git a/test/lib/sql.py b/test/lib/sql.py
deleted file mode 100644
index 6014e261475d985fd97393f34923d2b4a2a97d14..0000000000000000000000000000000000000000
--- a/test/lib/sql.py
+++ /dev/null
@@ -1,235 +0,0 @@
-import sql_ast
-import re
-
-object_no_re = re.compile("[a-z_]*", re.I)
-
-
-# Begin -- grammar generated by Yapps
-import sys, re
-from yapps import runtime
-
-class sqlScanner(runtime.Scanner):
-    patterns = [
-        ("','", re.compile(',')),
-        ("'\\)'", re.compile('\\)')),
-        ('","', re.compile(',')),
-        ("'\\('", re.compile('\\(')),
-        ("'='", re.compile('=')),
-        ("'\\*'", re.compile('\\*')),
-        ('\\s+', re.compile('\\s+')),
-        ('NUM', re.compile('[+-]?[0-9]+')),
-        ('ID', re.compile('[a-z_]+[0-9]+')),
-        ('PROC_ID', re.compile('[a-z_][a-z0-9_.:]*')),
-        ('STR', re.compile("'([^']+|\\\\.)*'")),
-        ('PING', re.compile('ping')),
-        ('INSERT', re.compile('insert')),
-        ('REPLACE', re.compile('replace')),
-        ('UPDATE', re.compile('update')),
-        ('DELETE', re.compile('delete')),
-        ('SELECT', re.compile('select')),
-        ('INTO', re.compile('into')),
-        ('FROM', re.compile('from')),
-        ('WHERE', re.compile('where')),
-        ('VALUES', re.compile('values')),
-        ('SET', re.compile('set')),
-        ('OR', re.compile('or')),
-        ('LIMIT', re.compile('limit')),
-        ('CALL', re.compile('call')),
-        ('END', re.compile('\\s*$')),
-    ]
-    def __init__(self, str,*args,**kw):
-        runtime.Scanner.__init__(self,None,{'\\s+':None,},str,*args,**kw)
-
-class sql(runtime.Parser):
-    Context = runtime.Context
-    def sql(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'sql', [])
-        _token = self._peek('INSERT', 'REPLACE', 'UPDATE', 'DELETE', 'SELECT', 'CALL', 'PING', context=_context)
-        if _token == 'INSERT':
-            insert = self.insert(_context)
-            stmt = insert
-        elif _token == 'REPLACE':
-            replace = self.replace(_context)
-            stmt = replace
-        elif _token == 'UPDATE':
-            update = self.update(_context)
-            stmt = update
-        elif _token == 'DELETE':
-            delete = self.delete(_context)
-            stmt = delete
-        elif _token == 'SELECT':
-            select = self.select(_context)
-            stmt = select
-        elif _token == 'CALL':
-            call = self.call(_context)
-            stmt = call
-        else: # == 'PING'
-            ping = self.ping(_context)
-            stmt = ping
-        END = self._scan('END', context=_context)
-        return stmt
-
-    def insert(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'insert', [])
-        INSERT = self._scan('INSERT', context=_context)
-        if self._peek('INTO', 'ID', context=_context) == 'INTO':
-            INTO = self._scan('INTO', context=_context)
-        ident = self.ident(_context)
-        VALUES = self._scan('VALUES', context=_context)
-        value_list = self.value_list(_context)
-        return sql_ast.StatementInsert(ident, value_list)
-
-    def replace(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'replace', [])
-        REPLACE = self._scan('REPLACE', context=_context)
-        if self._peek('INTO', 'ID', context=_context) == 'INTO':
-            INTO = self._scan('INTO', context=_context)
-        ident = self.ident(_context)
-        VALUES = self._scan('VALUES', context=_context)
-        value_list = self.value_list(_context)
-        return sql_ast.StatementReplace(ident, value_list)
-
-    def update(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'update', [])
-        UPDATE = self._scan('UPDATE', context=_context)
-        ident = self.ident(_context)
-        SET = self._scan('SET', context=_context)
-        update_list = self.update_list(_context)
-        opt_simple_where = self.opt_simple_where(_context)
-        return sql_ast.StatementUpdate(ident, update_list, opt_simple_where)
-
-    def delete(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'delete', [])
-        DELETE = self._scan('DELETE', context=_context)
-        FROM = self._scan('FROM', context=_context)
-        ident = self.ident(_context)
-        opt_simple_where = self.opt_simple_where(_context)
-        return sql_ast.StatementDelete(ident, opt_simple_where)
-
-    def select(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'select', [])
-        SELECT = self._scan('SELECT', context=_context)
-        self._scan("'\\*'", context=_context)
-        FROM = self._scan('FROM', context=_context)
-        ident = self.ident(_context)
-        opt_where = self.opt_where(_context)
-        opt_limit = self.opt_limit(_context)
-        return sql_ast.StatementSelect(ident, opt_where, opt_limit)
-
-    def ping(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'ping', [])
-        PING = self._scan('PING', context=_context)
-        return sql_ast.StatementPing()
-
-    def call(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'call', [])
-        CALL = self._scan('CALL', context=_context)
-        PROC_ID = self._scan('PROC_ID', context=_context)
-        value_list = self.value_list(_context)
-        return sql_ast.StatementCall(PROC_ID, value_list)
-
-    def predicate(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'predicate', [])
-        ident = self.ident(_context)
-        self._scan("'='", context=_context)
-        constant = self.constant(_context)
-        return (ident, constant)
-
-    def opt_simple_where(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'opt_simple_where', [])
-        _token = self._peek('WHERE', 'END', context=_context)
-        if _token == 'END':
-            return None
-        else: # == 'WHERE'
-            WHERE = self._scan('WHERE', context=_context)
-            predicate = self.predicate(_context)
-            return predicate
-
-    def opt_where(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'opt_where', [])
-        _token = self._peek('WHERE', 'LIMIT', 'END', context=_context)
-        if _token != 'WHERE':
-            return (0, None)
-        else: # == 'WHERE'
-            WHERE = self._scan('WHERE', context=_context)
-            predicate = self.predicate(_context)
-            return predicate
-
-    def opt_limit(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'opt_limit', [])
-        _token = self._peek('LIMIT', 'END', context=_context)
-        if _token == 'END':
-            return 0xffffffff
-        else: # == 'LIMIT'
-            LIMIT = self._scan('LIMIT', context=_context)
-            NUM = self._scan('NUM', context=_context)
-            return int(NUM)
-
-    def value_list(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'value_list', [])
-        self._scan("'\\('", context=_context)
-        value_list = []
-        if self._peek("'\\)'", '","', 'NUM', 'STR', context=_context) in ['NUM', 'STR']:
-            expr = self.expr(_context)
-            value_list = [expr]
-            if self._peek('","', "'\\)'", context=_context) == '","':
-                while 1:
-                    self._scan('","', context=_context)
-                    expr = self.expr(_context)
-                    value_list.append(expr)
-                    if self._peek('","', "'\\)'", context=_context) != '","': break
-        self._scan("'\\)'", context=_context)
-        return value_list
-
-    def update_list(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'update_list', [])
-        predicate = self.predicate(_context)
-        update_list = [('=', predicate[0], predicate[1])]
-        if self._peek("','", 'WHERE', 'END', context=_context) == "','":
-            while 1:
-                self._scan("','", context=_context)
-                predicate = self.predicate(_context)
-                update_list.append(("=", predicate[0], predicate[1]))
-                if self._peek("','", 'WHERE', 'END', context=_context) != "','": break
-        return update_list
-
-    def expr(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'expr', [])
-        constant = self.constant(_context)
-        return constant
-
-    def constant(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'constant', [])
-        _token = self._peek('NUM', 'STR', context=_context)
-        if _token == 'NUM':
-            NUM = self._scan('NUM', context=_context)
-            return int(NUM)
-        else: # == 'STR'
-            STR = self._scan('STR', context=_context)
-            return STR[1:-1]
-
-    def ident(self, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'ident', [])
-        ID = self._scan('ID', context=_context)
-        return int(object_no_re.sub("", ID))
-
-
-def parse(rule, text):
-    P = sql(sqlScanner(text))
-    return runtime.wrap_error_reporter(P, rule)
-
-# End -- grammar generated by Yapps
-
-
-
-# SQL is case-insensitive, but in yapps it's not possible to
-# specify that a token must match in case-insensitive fashion.
-# This is hack to add re.IGNORECASE flag to all regular
-# expressions that represent tokens in the generated grammar.
-
-sqlScanner.patterns = map(lambda tup:
-                          (tup[0], re.compile(tup[1].pattern, re.IGNORECASE)),
-                          sqlScanner.patterns)
-
-# vim: nospell syntax=off ts=4 et
-
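The case-insensitivity workaround above simply recompiles every generated token regex with re.IGNORECASE. A minimal standalone sketch of the same idea (the token table below is illustrative, not the generated sqlScanner.patterns):

    import re

    # Illustrative (name, compiled regex) pairs in the same shape that
    # Yapps stores on a generated scanner's `patterns` attribute.
    patterns = [
        ('SELECT', re.compile('select')),
        ('NUM', re.compile('[0-9]+')),
    ]

    # Recompile each regex with IGNORECASE, preserving the original order.
    patterns = [(name, re.compile(rx.pattern, re.IGNORECASE))
                for name, rx in patterns]

    assert patterns[0][1].match('SeLeCt')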
diff --git a/test/lib/sql_ast.py b/test/lib/sql_ast.py
deleted file mode 100644
index f49a42ac6f8bb2ada158b1dcabb4cbfe01bcff38..0000000000000000000000000000000000000000
--- a/test/lib/sql_ast.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import os
-import re
-import sys
-import ctypes
-import struct
-
-from lib.utils import check_libs
-check_libs()
-
-from tarantool.request import (
-        RequestPing,
-        RequestInsert,
-        RequestReplace,
-        RequestSelect,
-        RequestCall,
-        RequestUpdate,
-        RequestDelete,
-)
-
-ER = {
-     0: "ER_OK"                 ,
-     1: "ER_ILLEGAL_PARAMS"     ,
-     2: "ER_MEMORY_ISSUE"       ,
-     3: "ER_TUPLE_FOUND"        ,
-     4: "ER_TUPLE_NOT_FOUND"    ,
-     5: "ER_UNSUPPORTED"        ,
-     6: "ER_NONMASTER"          ,
-     7: "ER_READONLY"           ,
-     8: "ER_INJECTION"          ,
-     9: "ER_CREATE_SPACE"       ,
-    10: "ER_SPACE_EXISTS"       ,
-    11: "ER_DROP_SPACE"         ,
-    12: "ER_ALTER_SPACE"        ,
-    13: "ER_INDEX_TYPE"         ,
-    14: "ER_MODIFY_INDEX"       ,
-    15: "ER_LAST_DROP"          ,
-    16: "ER_TUPLE_FORMAT_LIMIT" ,
-    17: "ER_DROP_PRIMARY_KEY"   ,
-    18: "ER_KEY_FIELD_TYPE"     ,
-    19: "ER_EXACT_MATCH"        ,
-    20: "ER_INVALID_MSGPACK"    ,
-    21: "ER_PROC_RET"           ,
-    22: "ER_TUPLE_NOT_ARRAY"    ,
-    23: "ER_FIELD_TYPE"         ,
-    24: "ER_FIELD_TYPE_MISMATCH",
-    25: "ER_SPLICE"             ,
-    26: "ER_ARG_TYPE"           ,
-    27: "ER_TUPLE_IS_TOO_LONG"  ,
-    28: "ER_UNKNOWN_UPDATE_OP"  ,
-    29: "ER_UPDATE_FIELD"       ,
-    30: "ER_FIBER_STACK"        ,
-    31: "ER_KEY_PART_COUNT"     ,
-    32: "ER_PROC_LUA"           ,
-    33: "ER_NO_SUCH_PROC"       ,
-    34: "ER_NO_SUCH_TRIGGER"    ,
-    35: "ER_NO_SUCH_INDEX"      ,
-    36: "ER_NO_SUCH_SPACE"      ,
-    37: "ER_NO_SUCH_FIELD"      ,
-    38: "ER_SPACE_ARITY"        ,
-    39: "ER_INDEX_ARITY"        ,
-    40: "ER_WAL_IO"
-}
-
-errstr = """---
-- error:
-    errcode: {0}
-    errmsg: {1}
-..."""
-
-def format_error(response):
-    return errstr.format(
-        ER.get(response.return_code, "ER_UNKNOWN (%d)" % response.return_code),
-        response.return_message)
-
-def format_yamllike(response):
-    table = ("\n"+"\n".join(["- "+str(list(k)) for k in response])) \
-            if len(response) else ""
-    return "---{0}\n...".format(table)
-
-class Statement(object):
-    def __init__(self):
-        pass
-    def pack(self, connection):
-        pass
-    def unpack(self, response):
-        pass
-
-class StatementPing(Statement):
-    def pack(self, connection):
-        return RequestPing(connection)
-
-    def unpack(self, response):
-        if response._return_code:
-            return format_error(response)
-        return "---\n- ok\n..."
-
-class StatementInsert(Statement):
-    def __init__(self, table_name, value_list):
-        self.space_no = table_name
-        self.value_list = value_list
-
-    def pack(self, connection):
-        return RequestInsert(connection, self.space_no, self.value_list)
-
-    def unpack(self, response):
-        if response.return_code:
-            return format_error(response)
-        return format_yamllike(response)
-
-class StatementReplace(Statement):
-    def __init__(self, table_name, value_list):
-        self.space_no = table_name
-        self.value_list = value_list
-
-    def pack(self, connection):
-        return RequestReplace(connection, self.space_no, self.value_list)
-
-    def unpack(self, response):
-        if response.return_code:
-            return format_error(response)
-        return format_yamllike(response)
-
-class StatementUpdate(Statement):
-    def __init__(self, table_name, update_list, where):
-        self.space_no = table_name
-        self.key_no = where[0]
-        if self.key_no != 0:
-            raise RuntimeError("UPDATE can only be made by the"
-                    " primary key (#0)")
-        self.value_list = where[1]
-        if not isinstance(self.value_list, (list, tuple)):
-            self.value_list = [self.value_list]
-        self.update_list = update_list
-
-    def pack(self, connection):
-        return RequestUpdate(connection, self.space_no, 0, self.value_list, self.update_list)
-
-    def unpack(self, response):
-        if response.return_code:
-            return format_error(response)
-        return format_yamllike(response)
-
-class StatementDelete(Statement):
-    def __init__(self, table_name, where):
-        self.space_no = table_name
-        key_no = where[0]
-        if key_no != 0:
-            raise RuntimeError("DELETE can only be made by the "
-                    "primary key (#0)")
-        self.value_list = where[1]
-        if not isinstance(self.value_list, (list, tuple)):
-            self.value_list = [self.value_list]
-
-    def pack(self, connection):
-        return RequestDelete(connection, self.space_no, 0, self.value_list)
-
-    def unpack(self, response):
-        if response.return_code:
-            return format_error(response)
-        return format_yamllike(response)
-
-class StatementSelect(Statement):
-    def __init__(self, table_name, where, limit):
-        self.space_no = table_name
-        (self.index_no, self.key) = where
-        if not isinstance(self.key, (list, tuple)):
-            self.key = [self.key]
-        self.offset = 0
-        self.limit = limit
-        self.iterator = 0
-
-    def pack(self, connection):
-        return RequestSelect(connection, self.space_no, self.index_no,
-                self.key, self.offset, self.limit, self.iterator)
-
-    def unpack(self, response):
-        if response.return_code:
-            return format_error(response)
-        # 'sort' is never set in __init__, so treat a missing flag as False
-        if getattr(self, "sort", False):
-            response = sorted(response[0:])
-        return format_yamllike(response)
-
-class StatementCall(StatementSelect):
-    def __init__(self, proc_name, value_list):
-        self.proc_name = proc_name
-        self.value_list = value_list
-
-    def pack(self, connection):
-        return RequestCall(connection, self.proc_name, self.value_list)
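Every Statement subclass above follows the same two-step contract: pack() turns the parsed SQL into a tarantool-python request object for a live connection, and unpack() renders the response either as an error block or as YAML-like text. A self-contained sketch of that contract with stub classes (the stubs are illustrative and only stand in for the real request/response objects):

    class StubResponse(object):
        return_code = 0
        return_message = ''

    class StubRequest(object):
        """Stands in for RequestInsert/RequestSelect/... in this sketch."""
        def __init__(self, connection, *args):
            self.connection = connection
            self.args = args

    class StatementNoop(object):
        """Follows the same pack()/unpack() contract as the classes above."""
        def pack(self, connection):
            return StubRequest(connection)

        def unpack(self, response):
            if response.return_code:
                return "---\n- error:\n    errcode: %d\n..." % response.return_code
            return "---\n- ok\n..."

    print(StatementNoop().unpack(StubResponse()))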
diff --git a/test/lib/tarantool_connection.py b/test/lib/tarantool_connection.py
deleted file mode 100644
index caf1a17d5e0febf50f90d6fd8b249ce18e9d5e9e..0000000000000000000000000000000000000000
--- a/test/lib/tarantool_connection.py
+++ /dev/null
@@ -1,91 +0,0 @@
-__author__ = "Konstantin Osipov <kostja.osipov@gmail.com>"
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-# 1. Redistributions of source code must retain the above copyright
-#    notice, this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright
-#    notice, this list of conditions and the following disclaimer in the
-#    documentation and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-# SUCH DAMAGE.
-
-import socket
-import sys
-import errno
-import re
-
-class TarantoolConnection(object):
-
-    @property
-    def uri(self):
-        if self.host == 'unix/' or re.search(r'^/', str(self.port)):
-            return self.port
-        else:
-            return self.host+':'+str(self.port)
-
-    def __init__(self, host, port):
-        self.host = host
-        self.port = port
-        self.is_connected = False
-
-    def connect(self):
-        if self.host == 'unix/' or re.search(r'^/', str(self.port)):
-            self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            self.socket.connect(self.port)
-        else:
-            self.socket = socket.create_connection((self.host, self.port))
-            self.socket.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
-        self.is_connected = True
-
-    def disconnect(self):
-        if self.is_connected:
-            self.socket.close()
-            self.is_connected = False
-
-    def reconnect(self):
-        self.disconnect()
-        self.connect()
-
-    def opt_reconnect(self):
-        """ On a socket which was disconnected, recv of 0 bytes immediately
-            returns with no data. On a socket which is alive, it returns EAGAIN.
-            Make use of this property and detect whether or not the socket is
-            dead. Reconnect a dead socket, do nothing if the socket is good."""
-        try:
-            if not self.is_connected or \
-                    self.socket.recv(0, socket.MSG_DONTWAIT) == '':
-                self.reconnect()
-        except socket.error as e:
-            if e.errno == errno.EAGAIN:
-                pass
-            else:
-                self.reconnect()
-
-    def clone(self):
-        return type(self)(self.host, self.port)
-
-    def execute(self, command, silent=True):
-        self.opt_reconnect()
-        return self.execute_no_reconnect(command, silent)
-
-    def __enter__(self):
-        self.connect()
-        return self
-
-    def __exit__(self, type, value, tb):
-        self.disconnect()
-
-    def __call__(self, command, silent=False, simple=False):
-        return self.execute(command, silent)
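opt_reconnect() above relies on the assumption that a zero-byte non-blocking recv() returns empty data on a closed socket but raises EAGAIN on a live one. That behaviour is platform-dependent, so the standalone probe below only illustrates the harness's assumption rather than a portable liveness check:

    import errno
    import socket

    def socket_seems_alive(sock):
        """Probe a connected socket without consuming any data."""
        try:
            # Empty data is treated as "the peer closed the connection".
            return sock.recv(0, socket.MSG_DONTWAIT) != b''
        except socket.error as e:
            # EAGAIN means the socket is alive but has nothing to read.
            return e.errno == errno.EAGAIN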
diff --git a/test/lib/tarantool_server.py b/test/lib/tarantool_server.py
deleted file mode 100644
index 04c32ad7788a33c8edb67a3f179cc15d350d3c3d..0000000000000000000000000000000000000000
--- a/test/lib/tarantool_server.py
+++ /dev/null
@@ -1,625 +0,0 @@
-import os
-import re
-import sys
-import glob
-import time
-import yaml
-import errno
-import shlex
-import daemon
-import random
-import shutil
-import signal
-import socket
-import difflib
-import filecmp
-import traceback
-import subprocess
-import collections
-import os.path
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from lib.test import Test
-from lib.server import Server
-from lib.preprocessor import TestState
-from lib.box_connection import BoxConnection
-from lib.admin_connection import AdminConnection
-
-from lib.colorer import Colorer
-color_stdout = Colorer()
-
-def check_port(port, rais=True):
-    try:
-        if isinstance(port, (int, long)):
-            sock = socket.create_connection(("localhost", port))
-        else:
-            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            sock.connect(port)
-
-    except socket.error:
-        return True
-    if rais:
-        raise RuntimeError("The server is already running on port {0}".format(port))
-    return False
-
-def find_port(port):
-    while port < 65536:
-        if check_port(port, False):
-            return port
-        port += 1
-    return find_port(34000)
-
-def find_in_path(name):
-    path = os.curdir + os.pathsep + os.environ["PATH"]
-    for _dir in path.split(os.pathsep):
-        exe = os.path.join(_dir, name)
-        if os.access(exe, os.X_OK):
-            return exe
-    return ''
-
-
-class FuncTest(Test):
-    def execute(self, server):
-        execfile(self.name, dict(locals(), **server.__dict__))
-
-
-class LuaTest(FuncTest):
-    def execute(self, server):
-        ts = TestState(self.suite_ini, server, TarantoolServer)
-        cmd = None
-
-        def send_command(command):
-            result = ts.curcon[0](command, silent=True)
-            for conn in ts.curcon[1:]:
-                conn(command, silent=True)
-            return result
-
-        for line in open(self.name, 'r'):
-            if not cmd:
-                cmd = StringIO()
-            if line.find('--#') == 0:
-                rescom = cmd.getvalue().replace('\n\n', '\n')
-                if rescom:
-                    sys.stdout.write(cmd.getvalue())
-                    result = send_command(rescom)
-                    sys.stdout.write(result.replace("\r\n", "\n"))
-                sys.stdout.write(line)
-                ts(line)
-            elif line.find('--') == 0:
-                sys.stdout.write(line)
-            else:
-                if line.strip() or cmd.getvalue():
-                    cmd.write(line)
-                delim_len = -len(ts.delimiter) if len(ts.delimiter) else None
-                if line.endswith(ts.delimiter+'\n') and cmd.getvalue().strip()[:delim_len].strip():
-                    sys.stdout.write(cmd.getvalue())
-                    rescom = cmd.getvalue()[:delim_len].replace('\n\n', '\n')
-                    result = send_command(rescom)
-                    sys.stdout.write(result.replace("\r\n", "\n"))
-                    cmd.close()
-                    cmd = None
-        # stop any servers created by the test, except the default one
-        ts.cleanup()
-
-
-class PythonTest(FuncTest):
-    def execute(self, server):
-        execfile(self.name, dict(locals(), **server.__dict__))
-
-
-class TarantoolLog(object):
-    def __init__(self, path):
-        self.path = path
-        self.log_begin = 0
-
-    def positioning(self):
-        if os.path.exists(self.path):
-            with open(self.path, 'r') as f:
-                f.seek(0, os.SEEK_END)
-                self.log_begin = f.tell()
-        return self
-
-    def seek_once(self, msg):
-        if not os.path.exists(self.path):
-            return -1
-        with open(self.path, 'r') as f:
-            f.seek(self.log_begin, os.SEEK_SET)
-            while True:
-                log_str = f.readline()
-
-                if not log_str:
-                    return -1
-                pos = log_str.find(msg)
-                if pos != -1:
-                    return pos
-
-    def seek_wait(self, msg, proc=None):
-        while True:
-            if os.path.exists(self.path):
-                break
-            time.sleep(0.001)
-
-        with open(self.path, 'r') as f:
-            f.seek(self.log_begin, os.SEEK_SET)
-            cur_pos = self.log_begin
-            while True:
-                if proc is not None and proc.poll() is not None:
-                    raise OSError("Can't start Tarantool")
-                log_str = f.readline()
-                if not log_str:
-                    time.sleep(0.001)
-                    f.seek(cur_pos, os.SEEK_SET)
-                    continue
-                if re.findall(msg, log_str):
-                    return
-                cur_pos = f.tell()
-
-class Mixin(object):
-    pass
-
-class ValgrindMixin(Mixin):
-    default_valgr = {
-            "logfile":       "valgrind.log",
-            "suppress_path": "share/",
-            "suppress_name": "tarantool.sup"
-    }
-
-    @property
-    def valgrind_log(self):
-        return os.path.join(self.vardir, self.default_valgr['logfile'])
-
-    @property
-    def valgrind_sup(self):
-        if not hasattr(self, '_valgrind_sup') or not self._valgrind_sup:
-            return os.path.join(self.testdir,
-                                self.default_valgr['suppress_path'],
-                                self.default_valgr['suppress_name'])
-        return self._valgrind_sup
-    @valgrind_sup.setter
-    def valgrind_sup(self, val):
-        self._valgrind_sup = os.path.abspath(val)
-
-    @property
-    def valgrind_sup_output(self):
-        return os.path.join(self.vardir, self.default_valgr['suppress_name'])
-
-    def prepare_args(self):
-        if not find_in_path('valgrind'):
-            raise OSError('`valgrind` executable not found in PATH')
-        return  shlex.split("valgrind --log-file={log} --suppressions={sup} \
-                --gen-suppressions=all --leak-check=full \
-                --read-var-info=yes --quiet {bin}".format(log = self.valgrind_log,
-                                                        sup = self.valgrind_sup,
-                                                        bin = self.script_dst if self.script else self.binary))
-
-    def wait_stop(self):
-        return self.process.wait()
-
-class GdbMixin(Mixin):
-    default_gdb = {
-        "name": "tarantool-gdb"
-    }
-
-    def prepare_args(self):
-        if not find_in_path('screen'):
-            raise OSError('`screen` executable not found in PATH')
-        if not find_in_path('gdb'):
-            raise OSError('`gdb` executable not found in PATH')
-        color_stdout('You started the server in gdb mode.\n', schema='info')
-        color_stdout('To attach, use `screen -r tarantool-gdb`\n', schema='info')
-        return shlex.split("screen -dmS {0} gdb {1} -ex \
-                \'b main\' -ex \'run {2} >> {3} 2>> {3}\'".format(self.default_gdb['name'],
-                                                       self.binary, self.script_dst if self.script else '',
-                                                       self.logfile))
-
-    def wait_stop(self):
-        self.kill_old_server()
-        self.process.wait()
-
-class TarantoolServer(Server):
-    default_tarantool = {
-        "bin":     "tarantool",
-        "logfile": "tarantool.log",
-        "pidfile": "tarantool.pid",
-        "name":    "default"
-    }
-#----------------------------------PROPERTIES----------------------------------#
-    @property
-    def debug(self):
-        return self.test_debug()
-    @property
-    def name(self):
-        if not hasattr(self, '_name') or not self._name:
-            return self.default_tarantool["name"]
-        return self._name
-    @name.setter
-    def name(self, val):
-        self._name = val
-
-    @property
-    def logfile(self):
-        if not hasattr(self, '_logfile') or not self._logfile:
-            return os.path.join(self.vardir, self.default_tarantool["logfile"])
-        return self._logfile
-    @logfile.setter
-    def logfile(self, val):
-        self._logfile = os.path.join(self.vardir, val)
-
-    @property
-    def pidfile(self):
-        if not hasattr(self, '_pidfile') or not self._pidfile:
-            return os.path.join(self.vardir, self.default_tarantool["pidfile"])
-        return self._pidfile
-    @pidfile.setter
-    def pidfile(self, val):
-        self._pidfile = os.path.join(self.vardir, val)
-
-    @property
-    def builddir(self):
-        if not hasattr(self, '_builddir'):
-            raise ValueError("No build-dir is specified")
-        return self._builddir
-    @builddir.setter
-    def builddir(self, val):
-        if val is None:
-            return
-        self._builddir = os.path.abspath(val)
-
-    @property
-    def script_dst(self):
-        return os.path.join(self.vardir, os.path.basename(self.script))
-
-    @property
-    def logfile_pos(self):
-        if not hasattr(self, '_logfile_pos'): self._logfile_pos = None
-        return self._logfile_pos
-    @logfile_pos.setter
-    def logfile_pos(self, val):
-        self._logfile_pos = TarantoolLog(val).positioning()
-
-    @property
-    def script(self):
-        if not hasattr(self, '_script'): self._script = None
-        return self._script
-    @script.setter
-    def script(self, val):
-        if val is None:
-            if hasattr(self, '_script'):
-                delattr(self, '_script')
-            return
-        self._script = os.path.abspath(val)
-
-    @property
-    def _admin(self):
-        if not hasattr(self, 'admin'): self.admin = None
-        return self.admin
-    @_admin.setter
-    def _admin(self, port):
-        if hasattr(self, 'admin'):
-            del self.admin
-        self.admin = AdminConnection('localhost', port)
-
-    @property
-    def _sql(self):
-        if not hasattr(self, 'sql'): self.sql = None
-        return self.sql
-    @_sql.setter
-    def _sql(self, port):
-        try:
-            port = int(port)
-        except ValueError as e:
-            raise ValueError("Bad port number: '%s'" % port)
-        if hasattr(self, 'sql'):
-            del self.sql
-        self.sql = BoxConnection('localhost', port)
-
-    @property
-    def log_des(self):
-        if not hasattr(self, '_log_des'): self._log_des = open(self.logfile, 'a')
-        return self._log_des
-    @log_des.deleter
-    def log_des(self):
-        if not hasattr(self, '_log_des'): return
-        if not self._log_des.closed: self._log_des.close()
-        delattr(self, '_log_des')
-
-    @property
-    def rpl_master(self):
-        if not hasattr(self, '_rpl_master'): self._rpl_master = None
-        return self._rpl_master
-    @rpl_master.setter
-    def rpl_master(self, val):
-        if not isinstance(val, (TarantoolServer, type(None))):
-            raise ValueError('Replication master must be a TarantoolServer'
-                    ' instance, an instance of a subclass, or None')
-        self._rpl_master = val
-
-#------------------------------------------------------------------------------#
-
-    def __new__(cls, ini=None):
-        if ini is None:
-            ini = {'core': 'tarantool'}
-        if ('valgrind' in ini and ini['valgrind']) and ('gdb' in ini and ini['gdb']):
-            raise OSError('Can\'t run under valgrind and gdb simultaneously')
-        if 'valgrind' in ini and ini['valgrind']:
-            cls = type('ValgrindTarantoolServer', (ValgrindMixin, TarantoolServer), {})
-        elif 'gdb' in ini and ini['gdb']:
-            cls = type('GdbTarantoolServer', (GdbMixin, TarantoolServer), {})
-
-        return super(TarantoolServer, cls).__new__(cls)
-
-    def __init__(self, _ini=None):
-        if _ini is None:
-            _ini = {}
-        ini = {
-            'core': 'tarantool',
-            'gdb': False,
-            'script': None,
-            'lua_libs': [],
-            'valgrind': False,
-            'vardir': None,
-            'use_unix_sockets': False,
-            'tarantool_port': None
-        }
-        ini.update(_ini)
-        Server.__init__(self, ini)
-        self.testdir = os.path.abspath(os.curdir)
-        self.sourcedir = os.path.abspath(os.path.join(os.path.basename(
-            sys.argv[0]), "..", ".."))
-        self.re_vardir_cleanup += [
-            "*.snap", "*.xlog", "*.inprogress",
-            "*.sup", "*.lua", "*.pid"]
-        self.name = "default"
-        self.conf = {}
-        self.status = None
-        #-----InitBasicVars-----#
-        self.core = ini['core']
-        self.gdb = ini['gdb']
-        self.script = ini['script']
-        self.lua_libs = ini['lua_libs']
-        self.valgrind = ini['valgrind']
-        self.use_unix_sockets = ini['use_unix_sockets']
-        self._start_against_running = ini['tarantool_port']
-
-    def __del__(self):
-        self.stop()
-
-    @classmethod
-    def find_exe(cls, builddir, silent=True):
-        cls.builddir = os.path.abspath(builddir)
-        builddir = os.path.join(builddir, "src")
-        path = builddir + os.pathsep + os.environ["PATH"]
-        if not silent:
-            color_stdout("Looking for server binary in ", schema='serv_text')
-            color_stdout(path + ' ...\n', schema='path')
-        for _dir in path.split(os.pathsep):
-            exe = os.path.join(_dir, cls.default_tarantool["bin"])
-            if os.access(exe, os.X_OK):
-                cls.binary = os.path.abspath(exe)
-                os.environ["PATH"] = os.path.abspath(_dir) + ":" + os.environ["PATH"]
-                return exe
-        raise RuntimeError("Can't find server executable in " + path)
-
-    def install(self, silent=True):
-        if self._start_against_running:
-            self._sql = self._start_against_running
-            self._admin = int(self._start_against_running) + 1
-            return
-
-        if not silent:
-            color_stdout('Installing the server ...\n', schema='serv_text')
-            color_stdout('    Found executable at ', schema='serv_text')
-            color_stdout(self.binary + '\n', schema='path')
-            color_stdout('    Creating and populating working directory in ', schema='serv_text')
-            color_stdout(self.vardir + ' ...\n', schema='path')
-        if not os.path.exists(self.vardir):
-            os.makedirs(self.vardir)
-        else:
-            if not silent:
-                color_stdout('    Found old vardir, deleting ...\n', schema='serv_text')
-            self.kill_old_server()
-            self.cleanup()
-        self.copy_files()
-        port = random.randrange(3300, 9999)
-
-        if self.use_unix_sockets:
-            self._admin = os.path.join(self.vardir, "socket-admin")
-        else:
-            self._admin = find_port(port)
-        self._sql = find_port(port + 1)
-
-    def deploy(self, silent=True, wait=True):
-        self.install(silent)
-        self.start(silent=silent, wait=wait)
-
-    def copy_files(self):
-        if self.script:
-            shutil.copy(self.script, self.script_dst)
-            os.chmod(self.script_dst, 0777)
-        if self.lua_libs:
-            for i in self.lua_libs:
-                source = os.path.join(self.testdir, i)
-                shutil.copy(source, self.vardir)
-
-    def prepare_args(self):
-        return shlex.split(self.script_dst if self.script else self.binary)
-
-    def start(self, silent=True, wait = True):
-        if self._start_against_running:
-            return
-        if self.status == 'started':
-            if not silent:
-                color_stdout('The server is already started.\n', schema='lerror')
-            return
-        if not silent:
-            color_stdout("Starting the server ...\n", schema='serv_text')
-            color_stdout("Starting ", schema='serv_text')
-            color_stdout((os.path.basename(self.binary) if not self.script else self.script_dst) + " \n", schema='path')
-            color_stdout(self.version() + "\n", schema='version')
-
-        check_port(self.admin.port)
-
-        os.putenv("LISTEN", self.sql.uri)
-        os.putenv("ADMIN", self.admin.uri)
-        if self.rpl_master:
-            os.putenv("MASTER", self.rpl_master.sql.uri)
-        args = self.prepare_args()
-        self.logfile_pos = self.logfile
-        self.process = subprocess.Popen(args,
-                cwd = self.vardir,
-                stdout=self.log_des,
-                stderr=self.log_des)
-        if wait:
-            self.wait_until_started()
-        self.status = 'started'
-
-    def wait_stop(self):
-        self.process.wait()
-
-    def stop(self, silent=True):
-        if self._start_against_running:
-            return
-        if self.status != 'started':
-            if not silent:
-                color_stdout('The server is not started.\n', schema='lerror')
-            return
-        if not silent:
-            color_stdout('Stopping the server ...\n', schema='serv_text')
-        self.process.terminate()
-        self.wait_stop()
-        self.status = None
-        if re.search(r'^/', str(self._admin.port)):
-            if os.path.exists(self._admin.port):
-                os.unlink(self._admin.port)
-
-    def restart(self):
-        self.stop()
-        self.start()
-
-    def kill_old_server(self, silent=True):
-        pid = self.read_pidfile()
-        if pid == -1:
-            return False
-        if not silent:
-            color_stdout('    Found old server, pid {0}, killing ...'.format(pid), schema='info')
-        try:
-            os.kill(pid, signal.SIGTERM)
-        except OSError:
-            pass
-        self.wait_until_stopped(pid)
-        return True
-
-    def wait_until_started(self):
-        """ Wait until server is started.
-
-        Server consists of two parts:
-        1) wait until server is listening on sockets
-        2) wait until server tells us his status
-
-        """
-        msg = 'entering the event loop|will retry binding'
-        self.logfile_pos.seek_wait(
-            msg, self.process if not self.gdb else None)
-        while True:
-            try:
-                temp = AdminConnection('localhost', self.admin.port)
-                ans = yaml.load(temp.execute('box.info.status'))[0]
-                if ans in ('running', 'hot_standby', 'orphan'):
-                    return True
-                else:
-                    raise Exception("Strange output for `box.info.status`: %s" % (ans))
-            except socket.error as e:
-                if e.errno == errno.ECONNREFUSED:
-                    time.sleep(0.1)
-                    continue
-                raise
-
-    def wait_until_stopped(self, pid):
-        while True:
-            try:
-                time.sleep(0.01)
-                os.kill(pid, 0)
-                continue
-            except OSError as err:
-                break
-
-    def read_pidfile(self):
-        pid = -1
-        if os.path.exists(self.pidfile):
-            try:
-                with open(self.pidfile) as f:
-                    pid = int(f.read())
-            except:
-                pass
-        return pid
-
-    def print_log(self, lines):
-        color_stdout("\nLast {0} lines of Tarantool Log file:\n".format(lines), schema='error')
-        if os.path.exists(self.logfile):
-            with open(self.logfile, 'r') as log:
-                return log.readlines()[-lines:]
-        color_stdout("    Can't find log:\n", schema='error')
-
-    def test_option_get(self, option_list_str, silent=False):
-        args = [self.binary] + shlex.split(option_list_str)
-        if not silent:
-            print " ".join([os.path.basename(self.binary)] + args[1:])
-        output = subprocess.Popen(args, cwd = self.vardir, stdout=subprocess.PIPE,
-                stderr=subprocess.STDOUT).stdout.read()
-        return output
-
-    def test_option(self, option_list_str):
-        print self.test_option_get(option_list_str)
-
-    def test_debug(self):
-        if re.findall(r"-Debug", self.test_option_get("-V", True), re.I):
-            return True
-        return False
-
-    def find_tests(self, test_suite, suite_path):
-        tests  = [PythonTest(k, test_suite.args, test_suite.ini) \
-                for k in sorted(glob.glob(os.path.join(suite_path, "*.test.py" )))]
-        tests += [LuaTest(k, test_suite.args, test_suite.ini)    \
-                for k in sorted(glob.glob(os.path.join(suite_path, "*.test.lua")))]
-        test_suite.tests = []
-        # don't sort, command line arguments must be run in
-        # the specified order
-        for name in test_suite.args.tests:
-            for test in tests:
-                if test.name.find(name) != -1:
-                    test_suite.tests.append(test)
-
-    def get_param(self, param = None):
-        if param is not None:
-            return yaml.load(self.admin("box.info." + param, silent=True))[0]
-        return yaml.load(self.admin("box.info", silent=True))
-
-    def get_lsn(self, node_id):
-        nodes = self.get_param("vclock")
-        if type(nodes) == dict and node_id in nodes:
-            return int(nodes[node_id])
-        elif type(nodes) == list and node_id <= len(nodes):
-            return int(nodes[node_id - 1])
-        else:
-            return -1
-
-    def wait_lsn(self, node_id, lsn):
-        while (self.get_lsn(node_id) < lsn):
-            #print("wait_lsn", node_id, lsn, self.get_param("vclock"))
-            time.sleep(0.01)
-
-    def version(self):
-        p = subprocess.Popen([self.binary, "--version"],
-                             cwd = self.vardir,
-                             stdout = subprocess.PIPE)
-        version = p.stdout.read().rstrip()
-        p.wait()
-        return version
-
-    def get_log(self):
-        return TarantoolLog(self.logfile).positioning()
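wait_until_started() above combines two waits: tailing the log until 'entering the event loop' appears, then polling box.info.status over the admin connection. A standalone sketch of the log-tailing half (the file name and message in the usage comment are illustrative):

    import os
    import re
    import time

    def wait_for_log_line(path, pattern, timeout=10.0, poll=0.001):
        """Poll a log file until a line matching `pattern` shows up."""
        deadline = time.time() + timeout
        position = 0
        regex = re.compile(pattern)
        while time.time() < deadline:
            if os.path.exists(path):
                with open(path, 'r') as f:
                    f.seek(position)
                    while True:
                        line = f.readline()
                        if not line:
                            break
                        if regex.search(line):
                            return True
                    position = f.tell()
            time.sleep(poll)
        return False

    # e.g. wait_for_log_line('tarantool.log', 'entering the event loop')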
diff --git a/test/lib/test.py b/test/lib/test.py
deleted file mode 100644
index 00996a46fc2bead7f00cbcc8a5b441e43a7db15e..0000000000000000000000000000000000000000
--- a/test/lib/test.py
+++ /dev/null
@@ -1,192 +0,0 @@
-import os
-import re
-import sys
-import time
-import filecmp
-import difflib
-import traceback
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from lib.colorer import Colorer
-from lib.utils import check_valgrind_log, print_tail_n
-color_stdout = Colorer()
-
-class FilteredStream:
-    """Helper class to filter .result file output"""
-    def __init__(self, filename):
-        self.stream = open(filename, "w+")
-        self.filters = []
-
-    def write(self, fragment):
-        """Apply all filters, then write result to the undelrying stream.
-        Do line-oriented filtering: the fragment doesn't have to represent
-        just one line."""
-        fragment_stream = StringIO(fragment)
-        skipped = False
-        for line in fragment_stream:
-            original_len = len(line.strip())
-            for pattern, replacement in self.filters:
-                line = re.sub(pattern, replacement, line)
-                # don't write lines that are completely filtered out:
-                skipped = original_len and not line.strip()
-                if skipped:
-                    break
-            if not skipped:
-                self.stream.write(line)
-
-    def push_filter(self, pattern, replacement):
-        self.filters.append([pattern, replacement])
-
-    def pop_filter(self):
-        self.filters.pop()
-
-    def clear_all_filters(self):
-        self.filters = []
-
-    def close(self):
-        self.clear_all_filters()
-        self.stream.close()
-
-    def flush(self):
-        self.stream.flush()
-
-
-class Test:
-    """An individual test file. A test object can run itself
-    and remembers the completion state of the run.
-
-    If a file <test_name>.skipcond exists, it is executed before the
-    test; if it sets self.skip to True, the test is skipped.
-    """
-
-    def __init__(self, name, args, suite_ini):
-        """Initialize test properties: path to test file, path to
-        temporary result file, path to the client program, test status."""
-        rg = re.compile('.test.*')
-        self.name = name
-        self.args = args
-        self.suite_ini = suite_ini
-        self.result = rg.sub('.result', name)
-        self.skip_cond = rg.sub('.skipcond', name)
-        self.tmp_result = os.path.join(self.args.vardir,
-                                       os.path.basename(self.result))
-        self.reject = rg.sub('.reject', name)
-        self.is_executed = False
-        self.is_executed_ok = None
-        self.is_equal_result = None
-        self.is_valgrind_clean = True
-        self.is_terminated = False
-
-    def passed(self):
-        """Return true if this test was run successfully."""
-        return self.is_executed and self.is_executed_ok and self.is_equal_result
-
-    def execute(self, server):
-        pass
-
-    def run(self, server):
-        """Execute the test assuming it's a python program.
-        If the test aborts, print its output to stdout, and raise
-        an exception. Otherwise, compare the produced output with the
-        recorded .result file. If there is a difference, print it to
-        stdout and raise an exception. The exception is raised only if
-        the is_force flag is not set."""
-        diagnostics = "unknown"
-        save_stdout = sys.stdout
-        try:
-            self.skip = False
-            if os.path.exists(self.skip_cond):
-                sys.stdout = FilteredStream(self.tmp_result)
-                stdout_fileno = sys.stdout.stream.fileno()
-                execfile(self.skip_cond, dict(locals(), **server.__dict__))
-                sys.stdout.close()
-                sys.stdout = save_stdout
-            if not self.skip:
-                sys.stdout = FilteredStream(self.tmp_result)
-                stdout_fileno = sys.stdout.stream.fileno()
-                self.execute(server)
-                sys.stdout.flush()
-            self.is_executed_ok = True
-        except Exception as e:
-            traceback.print_exc()
-            diagnostics = str(e)
-        finally:
-            if sys.stdout and sys.stdout != save_stdout:
-                sys.stdout.close()
-            sys.stdout = save_stdout
-        self.is_executed = True
-        sys.stdout.flush()
-
-        if not self.skip:
-            if self.is_executed_ok and os.path.isfile(self.result):
-                self.is_equal_result = filecmp.cmp(self.result, self.tmp_result)
-        else:
-            self.is_equal_result = 1
-
-        if self.args.valgrind:
-            self.is_valgrind_clean = not check_valgrind_log(server.valgrind_log)
-
-        if self.skip:
-            color_stdout("[ skip ]\n", schema='test_skip')
-            if os.path.exists(self.tmp_result):
-                os.remove(self.tmp_result)
-        elif self.is_executed_ok and self.is_equal_result and self.is_valgrind_clean:
-            color_stdout("[ pass ]\n", schema='test_pass')
-            if os.path.exists(self.tmp_result):
-                os.remove(self.tmp_result)
-        elif (self.is_executed_ok and not self.is_equal_result and not
-              os.path.isfile(self.result)):
-            os.rename(self.tmp_result, self.result)
-            color_stdout("[ new ]\n", schema='test_new')
-        else:
-            os.rename(self.tmp_result, self.reject)
-            color_stdout("[ fail ]\n", schema='test_fail')
-
-            where = ""
-            if not self.is_executed_ok:
-                self.print_diagnostics(self.reject, "Test failed! Last 10 lines of the result file:\n")
-                server.print_log(15)
-                where = ": test execution aborted, reason '{0}'".format(diagnostics)
-            elif not self.is_equal_result:
-                self.print_unidiff()
-                server.print_log(15)
-                where = ": wrong test output"
-            elif not self.is_valgrind_clean:
-                os.remove(self.reject)
-                self.print_diagnostics(server.valgrind_log, "Test failed! Last 10 lines of valgrind.log:\n")
-                where = ": there were warnings in valgrind.log"
-
-            if not self.args.is_force:
-                raise RuntimeError("Failed to run test " + self.name + where)
-
-    def print_diagnostics(self, logfile, message):
-        """Print 10 lines of client program output leading to test
-        failure. Used to diagnose a failure of the client program"""
-
-        color_stdout(message, schema='error')
-        print_tail_n(logfile, 10)
-
-    def print_unidiff(self):
-        """Print a unified diff between .test and .result files. Used
-        to establish the cause of a failure when .test differs
-        from .result."""
-
-        color_stdout("\nTest failed! Result content mismatch:\n", schema='error')
-        with open(self.result, "r") as result:
-            with open(self.reject, "r") as reject:
-                result_time = time.ctime(os.stat(self.result).st_mtime)
-                reject_time = time.ctime(os.stat(self.reject).st_mtime)
-                diff = difflib.unified_diff(result.readlines(),
-                                            reject.readlines(),
-                                            self.result,
-                                            self.reject,
-                                            result_time,
-                                            reject_time)
-
-                color_stdout.writeout_unidiff(diff)
-
-
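FilteredStream above is what keeps .result files deterministic: tests push (pattern, replacement) filters that rewrite or drop volatile fragments such as ports and paths before they reach the file. The core of that logic in isolation (the sample filter is illustrative):

    import re

    def apply_filters(line, filters):
        """Apply (pattern, replacement) filters; return None to drop the line."""
        original_len = len(line.strip())
        for pattern, replacement in filters:
            line = re.sub(pattern, replacement, line)
            if original_len and not line.strip():
                return None  # the line was completely filtered out
        return line

    filters = [(r'port: \d+', 'port: <PORT>')]
    print(apply_filters('listening on port: 33013\n', filters))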
diff --git a/test/lib/test_suite.py b/test/lib/test_suite.py
deleted file mode 100644
index 856d9a4fa391b1d50df3e1333762167b5bb4ff54..0000000000000000000000000000000000000000
--- a/test/lib/test_suite.py
+++ /dev/null
@@ -1,133 +0,0 @@
-import os
-import re
-import sys
-import time
-import shutil
-import difflib
-import threading
-import traceback
-import collections
-import ConfigParser
-
-
-from lib.tarantool_server import TarantoolServer
-from lib.server import Server
-from lib.colorer import Colorer
-from lib.utils import check_valgrind_log, print_tail_n
-
-color_stdout = Colorer()
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-class TestSuite:
-    """Each test suite contains a number of related tests files,
-    located in the same directory on disk. Each test file has
-    extention .test and contains a listing of server commands,
-    followed by their output. The commands are executed, and
-    obtained results are compared with pre-recorded output. In case
-    of a comparision difference, an exception is raised. A test suite
-    must also contain suite.ini, which describes how to start the
-    server for this suite, the client program to execute individual
-    tests and other suite properties. The server is started once per
-    suite."""
-
-    def __init__(self, suite_path, args):
-        """Initialize a test suite: check that it exists and contains
-        a syntactically correct configuration file. Then create
-        a test instance for each found test."""
-        self.args = args
-        self.tests = []
-        self.ini = {}
-        self.suite_path = suite_path
-        self.ini["core"] = "tarantool"
-
-        if not os.access(suite_path, os.F_OK):
-            raise RuntimeError("Suite %s doesn't exist" % repr(suite_path))
-
-        # read the suite config
-        config = ConfigParser.ConfigParser()
-        config.read(os.path.join(suite_path, "suite.ini"))
-        self.ini.update(dict(config.items("default")))
-        self.ini.update(self.args.__dict__)
-        if self.args.stress is None and self.ini['core'] == 'stress':
-            return
-
-        for i in ["script"]:
-            self.ini[i] = os.path.join(suite_path, self.ini[i]) if i in self.ini else None
-        for i in ["disabled", "valgrind_disabled", "release_disabled"]:
-            self.ini[i] = dict.fromkeys(self.ini[i].split()) if i in self.ini else dict()
-        for i in ["lua_libs"]:
-            self.ini[i] = map(lambda x: os.path.join(suite_path, x),
-                    dict.fromkeys(self.ini[i].split()) if i in self.ini else dict())
-        try:
-            if self.ini['core'] in ['tarantool', 'stress']:
-                self.server = TarantoolServer(self.ini)
-            else:
-                self.server = Server(self.ini)
-            self.ini["server"] = self.server
-        except Exception as e:
-            print e
-            raise RuntimeError("Unknown server: core = {0}".format(
-                               self.ini["core"]))
-        color_stdout("Collecting tests in ", schema='ts_text')
-        color_stdout(repr(suite_path), schema='path')
-        color_stdout(": ", self.ini["description"], ".\n", schema='ts_text')
-        self.server.find_tests(self, suite_path)
-        color_stdout("Found ", str(len(self.tests)), " tests.\n", schema='path')
-
-    def run_all(self):
-        """For each file in the test suite, run client program
-        assuming each file represents an individual test."""
-        if not self.tests:
-            # nothing to test, exit
-            return []
-        self.server.deploy(silent=False)
-
-        longsep = '='*70
-        shortsep = '-'*60
-        color_stdout(longsep, "\n", schema='separator')
-        color_stdout("TEST".ljust(48), schema='t_name')
-        color_stdout("RESULT\n", schema='test_pass')
-        color_stdout(shortsep, "\n", schema='separator')
-        failed_tests = []
-        try:
-            for test in self.tests:
-                color_stdout(test.name.ljust(48), schema='t_name')
-                # for better diagnostics in case of a long-running test
-
-                test_name = os.path.basename(test.name)
-
-                if (test_name in self.ini["disabled"]
-                    or not self.server.debug and test_name in self.ini["release_disabled"]
-                    or self.args.valgrind and test_name in self.ini["valgrind_disabled"]):
-                    color_stdout("[ disabled ]\n", schema='t_name')
-                else:
-                    test.run(self.server)
-                    if not test.passed():
-                        failed_tests.append(test.name)
-            color_stdout(shortsep, "\n", schema='separator')
-            self.server.stop(silent=False)
-            # don't delete core files or state of the data dir
-            # in case of exception, which is raised when the
-            # server crashes
-            self.server.cleanup()
-        except (KeyboardInterrupt) as e:
-            color_stdout("\n%s\n" % shortsep, schema='separator')
-            self.server.stop(silent=False)
-            raise
-
-        if failed_tests:
-            color_stdout("Failed {0} tests: {1}.\n".format(len(failed_tests),
-                                                ", ".join(failed_tests)),
-                                                schema='error')
-
-        if self.args.valgrind and check_valgrind_log(self.server.valgrind_log):
-            color_stdout(shortsep, "\n", schema='separator')
-            color_stdout("  Error! There were warnings/errors in valgrind log file:\n", schema='error')
-            print_tail_n(self.server.valgrind_log, 20)
-            color_stdout(shortsep, "\n", schema='separator')
-            return ['valgrind error in ' + self.suite_path]
-        return failed_tests
-
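TestSuite above drives everything from a per-suite suite.ini read with ConfigParser; the space-separated options (disabled, lua_libs, ...) are split into Python containers after the raw read. A minimal sketch of that read (the section name 'default' matches the code above; the helper itself is illustrative):

    try:
        import ConfigParser as configparser  # Python 2, as used above
    except ImportError:
        import configparser                  # Python 3 fallback

    def read_suite_ini(path):
        """Read suite.ini and split the space-separated list options."""
        config = configparser.ConfigParser()
        config.read(path)
        ini = dict(config.items('default'))
        for key in ('disabled', 'valgrind_disabled', 'release_disabled'):
            ini[key] = ini.get(key, '').split()
        return ini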
diff --git a/test/lib/unittest_server.py b/test/lib/unittest_server.py
deleted file mode 100644
index 47f62fe352c3bb3229288cb9bfaf8ac9c05b25c0..0000000000000000000000000000000000000000
--- a/test/lib/unittest_server.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import os
-import re
-import sys
-import glob
-import traceback
-import subprocess
-from subprocess import Popen, PIPE, STDOUT
-
-from lib.server import Server
-from lib.tarantool_server import Test
-
-class UnitTest(Test):
-    def execute(self, server):
-        execs = [os.path.join(server.builddir, "test", self.name)]
-        proc = Popen(execs, stdout=PIPE, stderr=STDOUT)
-        sys.stdout.write(proc.communicate()[0])
-
-class UnittestServer(Server):
-    """A dummy server implementation for unit test suite"""
-    def __new__(cls, ini=None):
-        return Server.__new__(cls)
-
-    def __init__(self, _ini=None):
-        if _ini is None:
-            _ini = {}
-        ini = {
-            'vardir': None
-        }
-        ini.update(_ini)
-        Server.__init__(self, ini)
-        self.vardir = ini['vardir']
-        self.builddir = ini['builddir']
-        self.debug = False
-
-    def deploy(self, vardir=None, silent=True, wait=True):
-        self.vardir = vardir
-        if not os.access(self.vardir, os.F_OK):
-            os.makedirs(self.vardir)
-
-    @classmethod
-    def find_exe(cls, builddir):
-        cls.builddir = builddir
-
-    def find_tests(self, test_suite, suite_path):
-        def patterned(test, patterns):
-            answer = []
-            for i in patterns:
-                if test.name.find(i) != -1:
-                    answer.append(test)
-            return answer
-
-        test_suite.tests = [UnitTest(k, test_suite.args, test_suite.ini) for k in sorted(glob.glob(os.path.join(suite_path, "*.test" )))]
-        test_suite.tests = sum(map((lambda x: patterned(x, test_suite.args.tests)), test_suite.tests), [])
-
-    def print_log(self, lines):
-        pass
diff --git a/test/lib/utils.py b/test/lib/utils.py
deleted file mode 100644
index 9287599044c13b87d072e59d1348eb8058c6d6f7..0000000000000000000000000000000000000000
--- a/test/lib/utils.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-import sys
-import collections
-
-
-from lib.colorer import Colorer
-color_stdout = Colorer()
-
-def check_libs():
-    deps = [
-        ('msgpack', 'msgpack-python'),
-        ('tarantool', 'tarantool-python')
-    ]
-    base_path = os.path.dirname(os.path.abspath(__file__))
-
-    for (mod_name, mod_dir) in deps:
-        mod_path = os.path.join(base_path, mod_dir)
-        if mod_path not in sys.path:
-            sys.path = [mod_path] + sys.path
-
-    for (mod_name, _mod_dir) in deps:
-        try:
-            __import__(mod_name)
-        except ImportError as e:
-            color_stdout("\n\nNo %s library found\n" % mod_name, schema='error')
-            print(e)
-            sys.exit(1)
-
-def check_valgrind_log(path_to_log):
-    """ Check that there were no warnings in the log."""
-    return os.path.exists(path_to_log) and os.path.getsize(path_to_log) != 0
-
-def print_tail_n(filename, num_lines):
-    """Print N last lines of a file."""
-    with open(filename, "r+") as logfile:
-        tail_n = collections.deque(logfile, num_lines)
-        for line in tail_n:
-            color_stdout(line, schema='tail')
-
-
-
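print_tail_n() above works because a collections.deque constructed with a bounded length keeps only the last items fed to it, so building one from a file object retains just the last N lines while streaming through the file. The same idea in isolation:

    import collections

    def tail(path, num_lines):
        """Return the last num_lines lines of a text file."""
        with open(path, 'r') as f:
            return list(collections.deque(f, num_lines))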
diff --git a/test/lib/yapps/__init__.py b/test/lib/yapps/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/test/lib/yapps/runtime.py b/test/lib/yapps/runtime.py
deleted file mode 100644
index 5d9d1d6594c235376393d821eabd9e692cd8c387..0000000000000000000000000000000000000000
--- a/test/lib/yapps/runtime.py
+++ /dev/null
@@ -1,442 +0,0 @@
-# Yapps 2 Runtime, part of Yapps 2 - yet another python parser system
-# Copyright 1999-2003 by Amit J. Patel <amitp@cs.stanford.edu>
-# Enhancements copyright 2003-2004 by Matthias Urlichs <smurf@debian.org>
-#
-# This version of the Yapps 2 Runtime can be distributed under the
-# terms of the MIT open source license, either found in the LICENSE file
-# included with the Yapps distribution
-# <http://theory.stanford.edu/~amitp/yapps/> or at
-# <http://www.opensource.org/licenses/mit-license.php>
-#
-
-"""Run time libraries needed to run parsers generated by Yapps.
-
-This module defines parse-time exception classes, a scanner class, a
-base class for parsers produced by Yapps, and a context class that
-keeps track of the parse stack.
-
-"""
-
-import sys, re
-
-MIN_WINDOW=4096
-# File lookup window
-
-class SyntaxError(Exception):
-	"""When we run into an unexpected token, this is the exception to use"""
-	def __init__(self, pos=None, msg="Bad Token", context=None):
-		Exception.__init__(self)
-		self.pos = pos
-		self.msg = msg
-		self.context = context
-		
-	def __str__(self):
-		if not self.pos: return 'SyntaxError'
-		else: return 'SyntaxError@%s(%s)' % (repr(self.pos), self.msg)
-
-class NoMoreTokens(Exception):
-	"""Another exception object, for when we run out of tokens"""
-	pass
-
-class Token(object):
-	"""Yapps token.
-
-	This is a container for a scanned token.
-	"""
-
-	def __init__(self, type,value, pos=None):
-		"""Initialize a token."""
-		self.type = type
-		self.value = value
-		self.pos = pos
-
-	def __repr__(self):
-		output = '<%s: %s' % (self.type, repr(self.value))
-		if self.pos:
-			output += " @ "
-			if self.pos[0]:
-				output += "%s:" % self.pos[0]
-			if self.pos[1]:
-				output += "%d" % self.pos[1]
-			if self.pos[2] is not None:
-				output += ".%d" % self.pos[2]
-		output += ">"
-		return output
-
-in_name=0
-class Scanner(object):
-	"""Yapps scanner.
-
-	The Yapps scanner can work in context sensitive or context
-	insensitive modes.  The token(i) method is used to retrieve the
-	i-th token.  It takes a restrict set that limits the set of tokens
-	it is allowed to return.  In context sensitive mode, this restrict
-	set guides the scanner.  In context insensitive mode, there is no
-	restriction (the set is always the full set of tokens).
-	
-	"""
-	
-	def __init__(self, patterns, ignore, input="",
-			file=None,filename=None,stacked=False):
-		"""Initialize the scanner.
-
-		Parameters:
-		  patterns : [(terminal, uncompiled regex), ...] or None
-		  ignore : {terminal:None, ...}
-		  input : string
-
-		If patterns is None, we assume that the subclass has
-		defined self.patterns : [(terminal, compiled regex), ...].
-		Note that the patterns parameter expects uncompiled regexes,
-		whereas the self.patterns field expects compiled regexes.
-
-		The 'ignore' value is either None or a callable, which is called
-		with the scanner and the to-be-ignored match object; this can
-		be used for include file or comment handling.
-		"""
-
-		if not filename:
-			global in_name
-			filename="<f.%d>" % in_name
-			in_name += 1
-
-		self.input = input
-		self.ignore = ignore
-		self.file = file
-		self.filename = filename
-		self.pos = 0
-		self.del_pos = 0 # skipped
-		self.line = 1
-		self.del_line = 0 # skipped
-		self.col = 0
-		self.tokens = []
-		self.stack = None
-		self.stacked = stacked
-		
-		self.last_read_token = None
-		self.last_token = None
-		self.last_types = None
-
-		if patterns is not None:
-			# Compile the regex strings into regex objects
-			self.patterns = []
-			for terminal, regex in patterns:
-				self.patterns.append( (terminal, re.compile(regex)) )
-
-	def stack_input(self, input="", file=None, filename=None):
-		"""Temporarily parse from a second file."""
-
-		# Already reading from somewhere else: Go on top of that, please.
-		if self.stack:
-			# autogenerate a recursion-level-identifying filename
-			if not filename:
-				filename = 1
-			else:
-				try:
-					filename += 1
-				except TypeError:
-					pass
-				# now pass off to the include file
-			self.stack.stack_input(input,file,filename)
-		else:
-
-			try:
-				filename += 0
-			except TypeError:
-				pass
-			else:
-				filename = "<str_%d>" % filename
-
-#			self.stack = object.__new__(self.__class__)
-#			Scanner.__init__(self.stack,self.patterns,self.ignore,input,file,filename, stacked=True)
-
-			# Note that the pattern+ignore are added by the generated
-			# scanner code
-			self.stack = self.__class__(input,file,filename, stacked=True)
-
-	def get_pos(self):
-		"""Return a file/line/char tuple."""
-		if self.stack: return self.stack.get_pos()
-
-		return (self.filename, self.line+self.del_line, self.col)
-
-#	def __repr__(self):
-#		"""Print the last few tokens that have been scanned in"""
-#		output = ''
-#		for t in self.tokens:
-#			output += '%s\n' % (repr(t),)
-#		return output
-	
-	def print_line_with_pointer(self, pos, length=0, out=sys.stderr):
-		"""Print the line of 'text' that includes position 'p',
-		along with a second line with a single caret (^) at position p"""
-
-		file,line,p = pos
-		if file != self.filename:
-			if self.stack: return self.stack.print_line_with_pointer(pos,length=length,out=out)
-			print >>out, "(%s: not in input buffer)" % file
-			return
-
-		text = self.input
-		p += length-1 # starts at pos 1
-
-		origline=line
-		line -= self.del_line
-		spos=0
-		if line > 0:
-			while 1:
-				line = line - 1
-				try:
-					cr = text.index("\n",spos)
-				except ValueError:
-					if line:
-						text = ""
-					break
-				if line == 0:
-					text = text[spos:cr]
-					break
-				spos = cr+1
-		else:
-			print >>out, "(%s:%d not in input buffer)" % (file,origline)
-			return
-
-		# Now try printing part of the line
-		text = text[max(p-80, 0):p+80]
-		p = p - max(p-80, 0)
-
-		# Strip to the left
-		i = text[:p].rfind('\n')
-		j = text[:p].rfind('\r')
-		if i < 0 or (0 <= j < i): i = j
-		if 0 <= i < p:
-			p = p - i - 1
-			text = text[i+1:]
-
-		# Strip to the right
-		i = text.find('\n', p)
-		j = text.find('\r', p)
-		if i < 0 or (0 <= j < i): i = j
-		if i >= 0:
-			text = text[:i]
-
-		# Now shorten the text
-		while len(text) > 70 and p > 60:
-			# Cut off 10 chars
-			text = "..." + text[10:]
-			p = p - 7
-
-		# Now print the string, along with an indicator
-		print >>out, '> ',text
-		print >>out, '> ',' '*p + '^'
-	
-	def grab_input(self):
-		"""Get more input if possible."""
-		if not self.file: return
-		if len(self.input) - self.pos >= MIN_WINDOW: return
-
-		data = self.file.read(MIN_WINDOW)
-		if data is None or data == "":
-			self.file = None
-
-		# Drop bytes from the start, if necessary.
-		if self.pos > 2*MIN_WINDOW:
-			self.del_pos += MIN_WINDOW
-			self.del_line += self.input[:MIN_WINDOW].count("\n")
-			self.pos -= MIN_WINDOW
-			self.input = self.input[MIN_WINDOW:] + data
-		else:
-			self.input = self.input + data
-
-	def getchar(self):
-		"""Return the next character."""
-		self.grab_input()
-
-		c = self.input[self.pos]
-		self.pos += 1
-		return c
-
-	def token(self, restrict, context=None):
-		"""Scan for another token."""
-
-		while 1:
-			if self.stack:
-				try:
-					return self.stack.token(restrict, context)
-				except StopIteration:
-					self.stack = None
-
-		# Keep looking for a token, ignoring any in self.ignore
-			self.grab_input()
-
-			# special handling for end-of-file
-			if self.stacked and self.pos==len(self.input):
-				raise StopIteration
-
-			# Search the patterns for the longest match, with earlier
-			# tokens in the list having preference
-			best_match = -1
-			best_pat = '(error)'
-			best_m = None
-			for p, regexp in self.patterns:
-				# First check to see if we're ignoring this token
-				if restrict and p not in restrict and p not in self.ignore:
-					continue
-				m = regexp.match(self.input, self.pos)
-				if m and m.end()-m.start() > best_match:
-					# We got a match that's better than the previous one
-					best_pat = p
-					best_match = m.end()-m.start()
-					best_m = m
-					
-			# If we didn't find anything, raise an error
-			if best_pat == '(error)' and best_match < 0:
-				msg = 'Bad Token'
-				if restrict:
-					msg = 'Trying to find one of '+', '.join(restrict)
-				raise SyntaxError(self.get_pos(), msg, context=context)
-
-			ignore = best_pat in self.ignore
-			value = self.input[self.pos:self.pos+best_match]
-			if not ignore:
-				tok=Token(type=best_pat, value=value, pos=self.get_pos())
-
-			self.pos += best_match
-
-			npos = value.rfind("\n")
-			if npos > -1:
-				self.col = best_match-npos
-				self.line += value.count("\n")
-			else:
-				self.col += best_match
-
-			# If we found something that isn't to be ignored, return it
-			if not ignore:
-				if len(self.tokens) >= 10:
-					del self.tokens[0]
-				self.tokens.append(tok)
-				self.last_read_token = tok
-				# print repr(tok)
-				return tok
-			else:
-				ignore = self.ignore[best_pat]
-				if ignore:
-					ignore(self, best_m)
-
-	def peek(self, *types, **kw):
-		"""Returns the token type for lookahead; if there are any args
-		then the list of args is the set of token types to allow"""
-		context = kw.get("context",None)
-		if self.last_token is None:
-			self.last_types = types
-			self.last_token = self.token(types,context)
-		elif self.last_types:
-			for t in types:
-				if t not in self.last_types:
-					raise NotImplementedError("Unimplemented: restriction set changed")
-		return self.last_token.type
-		
-	def scan(self, type, **kw):
-		"""Returns the matched text, and moves to the next token"""
-		context = kw.get("context",None)
-
-		if self.last_token is None:
-			tok = self.token([type],context)
-		else:
-			if self.last_types and type not in self.last_types:
-				raise NotImplementedError("Unimplemented: restriction set changed")
-
-			tok = self.last_token
-			self.last_token = None
-		if tok.type != type:
-			if not self.last_types: self.last_types=[]
-			raise SyntaxError(tok.pos, 'Trying to find '+type+': '+ ', '.join(self.last_types)+", got "+tok.type, context=context)
-		return tok.value
-
-class Parser(object):
-	"""Base class for Yapps-generated parsers.
-
-	"""
-	
-	def __init__(self, scanner):
-		self._scanner = scanner
-		
-	def _stack(self, input="",file=None,filename=None):
-		"""Temporarily read from someplace else"""
-		self._scanner.stack_input(input,file,filename)
-		self._tok = None
-
-	def _peek(self, *types, **kw):
-		"""Returns the token type for lookahead; if there are any args
-		then the list of args is the set of token types to allow"""
-		return self._scanner.peek(*types, **kw)
-		
-	def _scan(self, type, **kw):
-		"""Returns the matched text, and moves to the next token"""
-		return self._scanner.scan(type, **kw)
-
-class Context(object):
-	"""Class to represent the parser's call stack.
-
-	Every rule creates a Context that links to its parent rule.  The
-	contexts can be used for debugging.
-
-	"""
-	
-	def __init__(self, parent, scanner, rule, args=()):
-		"""Create a new context.
-
-		Args:
-		parent: Context object or None
-		scanner: Scanner object
-		rule: string (name of the rule)
-		args: tuple listing parameters to the rule
-
-		"""
-		self.parent = parent
-		self.scanner = scanner
-		self.rule = rule
-		self.args = args
-		while scanner.stack: scanner = scanner.stack
-		self.token = scanner.last_read_token
-
-	def __str__(self):
-		output = ''
-		if self.parent: output = str(self.parent) + ' > '
-		output += self.rule
-		return output
-	
-def print_error(err, scanner, max_ctx=None):
-	"""Print error messages, the parser stack, and the input text -- for human-readable error messages."""
-	# NOTE: this function assumes 80 columns :-(
-	# Figure out the line number
-	pos = err.pos
-	if not pos:
-		pos = scanner.get_pos()
-
-	file_name, line_number, column_number = pos
-	print >>sys.stderr, '%s:%d:%d: %s' % (file_name, line_number, column_number, err.msg)
-
-	scanner.print_line_with_pointer(pos)
-		
-	context = err.context
-	token = None
-	while context:
-		print >>sys.stderr, 'while parsing %s%s:' % (context.rule, tuple(context.args))
-		if context.token:
-			token = context.token
-		if token:
-			scanner.print_line_with_pointer(token.pos, length=len(token.value))
-		context = context.parent
-		if max_ctx:
-			max_ctx = max_ctx-1
-			if not max_ctx:
-				break
-
-def wrap_error_reporter(parser, rule, *args,**kw):
-	try:
-		return getattr(parser, rule)(*args,**kw)
-	except SyntaxError, e:
-		print_error(e, parser._scanner)
-	except NoMoreTokens:
-		print >>sys.stderr, 'Could not complete parsing; stopped around here:'
-		print >>sys.stderr, parser._scanner
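
The hunk above removes the tail of the Yapps parser runtime that the old in-tree harness carried: the Scanner position/tokenizing helpers, the Parser and Context base classes, print_error and wrap_error_reporter. As a rough orientation only, this is how such a runtime is normally driven; the scanner/parser class names and the "sql" start rule below are hypothetical, and only Parser, Scanner and wrap_error_reporter come from the removed code:

    # A minimal sketch, not part of the removed file.
    scanner = SQLScanner(sql_text)                 # hypothetical Yapps-generated Scanner subclass
    parser = SQLParser(scanner)                    # hypothetical Yapps-generated Parser subclass
    result = wrap_error_reporter(parser, "sql")    # "sql" is a hypothetical start rule
    # On a SyntaxError, wrap_error_reporter() calls print_error(), which walks the
    # Context chain and uses Scanner.print_line_with_pointer() to show where parsing failed.
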
diff --git a/test/test-run.py b/test/test-run.py
deleted file mode 100755
index 1f619e0624d0c630d0dabf1adcd04f637b1a504d..0000000000000000000000000000000000000000
--- a/test/test-run.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python2
-"""Tarantool regression test suite front-end."""
-
-__author__ = "Konstantin Osipov <kostja.osipov@gmail.com>"
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-# 1. Redistributions of source code must retain the above copyright
-#    notice, this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright
-#    notice, this list of conditions and the following disclaimer in the
-#    documentation and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-# SUCH DAMAGE.
-
-import os
-import sys
-import time
-import string
-import shutil
-import os.path
-import argparse
-
-from lib.colorer          import Colorer
-from lib.parallel         import Supervisor
-from lib.test_suite       import TestSuite
-from lib.tarantool_server import TarantoolServer
-from lib.unittest_server  import UnittestServer
-color_stdout = Colorer()
-#
-# Run a collection of tests.
-#
-
-class Options:
-    """Handle options of test-runner"""
-    def __init__(self):
-        """Add all program options, with their defaults."""
-
-        parser = argparse.ArgumentParser(
-                description = "Tarantool regression test suite front-end.")
-
-        parser.epilog = "For a complete description, use 'pydoc ./" +\
-                os.path.basename(sys.argv[0]) + "'"
-
-        parser.add_argument(
-                "tests",
-                metavar="test",
-                nargs="*",
-                default = [""],
-                help="""Can be empty. List of test names, to look for in suites. Each
-                name is used as a substring to look for in the path to test file,
-                e.g. "show" will run all tests that have "show" in their name in all
-                suites, "box/show" will only enable tests starting with "show" in
-                "box" suite. Default: run all tests in all specified suites.""")
-
-        parser.add_argument(
-                "--suite",
-                dest = 'suites',
-                metavar = "suite",
-                nargs="*",
-                default = [],
-                help = """List of test suites to look for tests in. Default: "" -
-                means find all available.""")
-
-        parser.add_argument(
-                "--force",
-                dest = "is_force",
-                action = "store_true",
-                default = False,
-                help = """Go on with other tests in case of an individual test failure.
-                Default: false.""")
-
-        parser.add_argument(
-                "--gdb",
-                dest = "gdb",
-                action = "store_true",
-                default = False,
-                help = """Start the server under 'gdb' debugger in detached
-                Screen. This option is mutually exclusive with --valgrind.
-                Default: false.""")
-
-        parser.add_argument(
-                "--valgrind",
-                dest = "valgrind",
-                action = "store_true",
-                default = False,
-                help = "Run the server under 'valgrind'. Default: false.")
-
-        parser.add_argument(
-                "--builddir",
-                dest = "builddir",
-                default = "..",
-                help = """Path to project build directory. Default: " + "../.""")
-
-        parser.add_argument(
-                "--stress",
-                dest = "stress",
-                default = None,
-                help = """Name of streess TestSuite to run""")
-
-        parser.add_argument(
-                "--tarantool-port",
-                dest = "tarantool_port",
-                default = None,
-                help = """Listen port number to run tests against. Admin port number must be listen+1""")
-
-        parser.add_argument(
-                "--vardir",
-                dest = "vardir",
-                default = "var",
-                help = """Path to data directory. Default: var.""")
-
-        self.args = parser.parse_args()
-        self.check()
-
-    def check(self):
-        """Check the arguments for correctness."""
-        check_error = False
-        if self.args.gdb and self.args.valgrind:
-            color_stdout("Error: option --gdb is not compatible with option --valgrind", schema='error')
-            check_error = True
-        if check_error:
-            exit(-1)
-
-
-def setenv():
-    os.putenv("TARANTOOL_SRC_DIR", os.path.abspath('..'))
-
-#######################################################################
-# Program body
-#######################################################################
-
-def main():
-    options = Options()
-    oldcwd = os.getcwd()
-    # Change the current working directory to where all test
-    # collections are supposed to reside.
-    # If the script is invoked as "python test-run.py", dirname() is '',
-    # so fall back to ".".
-    path = os.path.dirname(sys.argv[0])
-    if not path:
-        path = '.'
-    os.chdir(path)
-    setenv()
-
-    failed_tests = []
-
-    try:
-        TarantoolServer.find_exe(options.args.builddir)
-        UnittestServer.find_exe(options.args.builddir)
-
-        color_stdout("Started {0}\n".format(" ".join(sys.argv)), schema='tr_text')
-        suite_names = options.args.suites
-        if suite_names == []:
-            for root, dirs, names in os.walk(os.getcwd()):
-                if "suite.ini" in names:
-                    suite_names.append(os.path.basename(root))
-
-        if options.args.stress is None:
-            suites = [TestSuite(suite_name, options.args) for suite_name in sorted(suite_names)]
-            for suite in suites:
-                failed_tests.extend(suite.run_all())
-        else:
-            suite_names = [suite_name for suite_name in suite_names if suite_name.find(options.args.stress) != -1]
-            suites = [Supervisor(suite_name, options.args) for suite_name in sorted(suite_names)]
-            for suite in suites:
-                suite.run_all()
-    except RuntimeError as e:
-        color_stdout("\nFatal error: %s. Execution aborted.\n" % e, schema='error')
-        if options.args.gdb:
-            time.sleep(100)
-        return (-1)
-    finally:
-        os.chdir(oldcwd)
-
-    if failed_tests and options.args.is_force:
-        color_stdout("\n===== %d tests failed:\n" % len(failed_tests), schema='error')
-        for test in failed_tests:
-            color_stdout("----- %s\n" % test, schema='info')
-
-    return (-1 if failed_tests else 0)
-
-if __name__ == "__main__":
-    exit(main())
diff --git a/test/test-run.py b/test/test-run.py
new file mode 120000
index 0000000000000000000000000000000000000000..21bb780a6b9cb168e5fc1416d3ce22fd3e32093f
--- /dev/null
+++ b/test/test-run.py
@@ -0,0 +1 @@
+../test-run/test-run.py
\ No newline at end of file
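
The deleted front-end is replaced by a symlink (file mode 120000) whose relative target, ../test-run/test-run.py, points outside the test/ directory. A fresh checkout therefore only gets a working test/test-run.py once the top-level test-run tree is present; in a plain git clone that presumably means initializing it first (for example with git submodule update --init, assuming test-run is tracked as a submodule). A small, hypothetical sanity check, run from the repository root:

    import os

    # The symlink target is relative to test/, so the link only resolves
    # once ../test-run has been populated next to it.
    link = os.path.join("test", "test-run.py")
    print(os.readlink(link))     # -> ../test-run/test-run.py
    print(os.path.exists(link))  # False while the test-run checkout is missing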