From c8793c31e58befa6ae07326a3743bf1879633206 Mon Sep 17 00:00:00 2001 From: Kaitmazian Maksim <m.kaitmazian@picodata.io> Date: Sat, 2 Mar 2024 13:10:27 +0300 Subject: [PATCH] sync conftest.py with picodata --- pgproto/Pipfile | 2 + pgproto/Pipfile.lock | 154 +++++--- pgproto/test/auth_test.py | 10 +- pgproto/test/conftest.py | 590 +++++++++++++++++++++++----- pgproto/test/extended_query_test.py | 15 +- pgproto/test/simple_query_test.py | 24 +- pgproto/test/ssl_test.py | 5 +- 7 files changed, 618 insertions(+), 182 deletions(-) diff --git a/pgproto/Pipfile b/pgproto/Pipfile index 488cfe8ea7..938478e293 100644 --- a/pgproto/Pipfile +++ b/pgproto/Pipfile @@ -19,6 +19,8 @@ pytest-repeat = "*" msgpack = "*" pexpect = "*" psycopg = "*" +pyyaml = "*" +tomli = {version = ">=1.1.0", markers = "python_version < '3.11'"} [requires] python_version = "3.10" diff --git a/pgproto/Pipfile.lock b/pgproto/Pipfile.lock index 3bc8ef7137..0040ac1524 100644 --- a/pgproto/Pipfile.lock +++ b/pgproto/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "057c44857918eb60623cab8a07d0c05810a9a6c4cd3d70759759cd10d4d19278" + "sha256": "adbe218c8ab30608196f115eec0e644f3c5cc20f37072b42bf76207257542239" }, "pipfile-spec": 6, "requires": { @@ -25,31 +25,31 @@ }, "black": { "hashes": [ - "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50", - "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f", - "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e", - "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec", - "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055", - "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3", - "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5", - "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54", - "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b", - "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e", - "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e", - "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba", - "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea", - "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59", - "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d", - "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0", - "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9", - "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a", - "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e", - "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba", - "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2", - "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2" + "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8", + "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8", + "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd", + "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9", + "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31", + "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92", + "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f", + 
"sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29", + "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4", + "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693", + "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218", + "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a", + "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23", + "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0", + "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982", + "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894", + "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540", + "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430", + "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b", + "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2", + "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6", + "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d" ], "index": "pypi", - "version": "==23.12.1" + "version": "==24.2.0" }, "click": { "hashes": [ @@ -266,27 +266,27 @@ }, "pg8000": { "hashes": [ - "sha256:2fa6964fff591a5e076fa6dd21a317c74de2caaa52991bb1f8b3d8ef2e56d172", - "sha256:64bbe27b11588a53cee08e840988416227263dc5191b649fab963949f3ddd84d" + "sha256:072f7ad00cd723695cb2e9fc02c1dfb84c781455e97b8de6f4c4281eea08078c", + "sha256:1abf18da652b0ad8e9cbfe57ed841c350b5330c33d8151303555db1fe5ce57f8" ], "index": "pypi", - "version": "==1.30.4" + "version": "==1.30.5" }, "platformdirs": { "hashes": [ - "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380", - "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420" + "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068", + "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768" ], "markers": "python_version >= '3.8'", - "version": "==4.1.0" + "version": "==4.2.0" }, "pluggy": { "hashes": [ - "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", - "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" + "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981", + "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be" ], "markers": "python_version >= '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "pprintpp": { "hashes": [ @@ -297,19 +297,19 @@ }, "prettytable": { "hashes": [ - "sha256:a71292ab7769a5de274b146b276ce938786f56c31cf7cea88b6f3775d82fe8c8", - "sha256:f4ed94803c23073a90620b201965e5dc0bccf1760b7a7eaf3158cab8aaffdf34" + "sha256:6536efaf0757fdaa7d22e78b3aac3b69ea1b7200538c2c6995d649365bddab92", + "sha256:9665594d137fb08a1117518c25551e0ede1687197cf353a4fdc78d27e1073568" ], "index": "pypi", - "version": "==3.9.0" + "version": "==3.10.0" }, "psycopg": { "hashes": [ - "sha256:437e7d7925459f21de570383e2e10542aceb3b9cb972ce957fdd3826ca47edc6", - "sha256:96b7b13af6d5a514118b759a66b2799a8a4aa78675fa6bb0d3f7d52d67eff002" + "sha256:31144d3fb4c17d78094d9e579826f047d4af1da6a10427d91dfcfb6ecdf6f12b", + "sha256:4d5a0a5a8590906daa58ebd5f3cfc34091377354a1acced269dd10faf55da60e" ], "index": "pypi", - "version": "==3.1.17" + "version": "==3.1.18" }, "ptyprocess": { "hashes": [ @@ -344,11 +344,11 @@ }, "pytest": { "hashes": [ - "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280", - 
"sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8" + "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd", + "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096" ], "index": "pypi", - "version": "==7.4.4" + "version": "==8.0.2" }, "pytest-clarity": { "hashes": [ @@ -391,10 +391,67 @@ }, "pytz": { "hashes": [ - "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b", - "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7" + "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812", + "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319" + ], + "version": "==2024.1" + }, + "pyyaml": { + "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", + "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", + "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", + "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", + "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", + "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595", + "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", + "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", + "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", + "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", + "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", + "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", + "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", + "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", + "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", + "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", + "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", + "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", + "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", + "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", + "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", + "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", + "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef", + "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", + "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd", + "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3", + "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", + "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515", + "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c", + 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", + "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924", + "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", + "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", + "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", + "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", + "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", + "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", + "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", + "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", + "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585", + "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d", + "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" ], - "version": "==2023.3.post1" + "index": "pypi", + "version": "==6.0.1" }, "rich": { "hashes": [ @@ -433,16 +490,17 @@ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], + "index": "pypi", "markers": "python_version < '3.11'", "version": "==2.0.1" }, "typing-extensions": { "hashes": [ - "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783", - "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd" + "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", + "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb" ], "markers": "python_version >= '3.8'", - "version": "==4.9.0" + "version": "==4.10.0" }, "wcwidth": { "hashes": [ diff --git a/pgproto/test/auth_test.py b/pgproto/test/auth_test.py index 7a7972c683..6c2a43f4c3 100644 --- a/pgproto/test/auth_test.py +++ b/pgproto/test/auth_test.py @@ -11,9 +11,8 @@ def test_auth(postgres: Postgres): i1 = postgres.instance user = "user" - password = "fANPIOUWEh79p12hdunqwADI" - i1.eval("box.cfg{auth_type='md5'}") - i1.call("pico.create_user", user, password, dict(timeout=3)) + password = "P@ssw0rd" + i1.sql(f"CREATE USER \"{user}\" WITH PASSWORD '{password}' USING md5") # test successful authentication conn = pg.Connection(user, password=password, host=host, port=port) @@ -32,9 +31,8 @@ def test_auth(postgres: Postgres): pg.Connection("unknown-user", password="aaa", host=host, port=port) sha_user = "chap-sha-enjoyer" - sha_password = "231321fnijphui217h08" - i1.eval("box.cfg{auth_type='chap-sha1'}") - i1.call("pico.create_user", sha_user, sha_password, dict(timeout=3)) + password = "P@ssw0rd" + i1.sql(f"CREATE USER \"{sha_user}\" WITH PASSWORD '{password}' USING md5") # test authentication with an unsupported method with pytest.raises( diff --git a/pgproto/test/conftest.py b/pgproto/test/conftest.py index 8f074d370f..c0782e9d31 100644 --- a/pgproto/test/conftest.py +++ b/pgproto/test/conftest.py @@ -1,3 +1,11 @@ +""" +This is a copy of conftest.py from picodata that instead of building picodata +tries to find it in PATH or in PICODATA_EXECUTABLE. + +This changes are reflected in cargo_build and binary_path fixtures, +the rest is the same to the picodata's version, except pgproto's fixtures. 
+""" + import io import os import re @@ -5,9 +13,10 @@ import socket import sys import time import threading +from types import SimpleNamespace import logging import shutil -from types import SimpleNamespace +import yaml # type: ignore import pytest import signal import subprocess @@ -15,11 +24,22 @@ import msgpack # type: ignore from functools import reduce from datetime import datetime from shutil import rmtree -from typing import Any, Callable, Literal, Generator, Iterator, Dict, List, Tuple, Type +from typing import ( + Any, + Callable, + Literal, + Generator, + Iterator, + Dict, + List, + Optional, + Tuple, + Type, +) from itertools import count from contextlib import contextmanager, suppress from dataclasses import dataclass, field -from tarantool.connection import Connection # type: ignore +import tarantool # type: ignore from tarantool.error import ( # type: ignore tnt_strerror, DatabaseError, @@ -33,6 +53,8 @@ BASE_HOST = "127.0.0.1" BASE_PORT = 3300 PORT_RANGE = 200 +MAX_LOGIN_ATTEMPTS = 4 + def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) @@ -54,6 +76,23 @@ def pytest_addoption(parser: pytest.Parser): default=False, help="Whether gather flamegraphs or not (for benchmarks only)", ) + parser.addoption( + "--with-webui", + action="store_true", + default=False, + help="Whether to run Web UI tests", + ) + + +def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]): + # https://docs.pytest.org/en/7.4.x/how-to/writing_hook_functions.html + # https://docs.pytest.org/en/7.4.x/example/simple.html#control-skipping-of-tests-according-to-command-line-option + + if not config.getoption("--with-webui"): + skip = pytest.mark.skip(reason="run: pytest --with-webui") + for item in items: + if "webui" in item.keywords: + item.add_marker(skip) @pytest.fixture(scope="session") @@ -87,17 +126,17 @@ def seed(pytestconfig): """Return a seed for randomized tests. Unless passed via command-line options it is generated automatically. 
""" - return pytestconfig.getoption("seed") + return pytestconfig.getoption("--seed") @pytest.fixture(scope="session") def delay(pytestconfig): - return pytestconfig.getoption("delay") + return pytestconfig.getoption("--delay") @pytest.fixture(scope="session") def with_flamegraph(pytestconfig): - return bool(pytestconfig.getoption("with_flamegraph")) + return bool(pytestconfig.getoption("--with-flamegraph")) @pytest.fixture(scope="session") @@ -170,6 +209,10 @@ def normalize_net_box_result(func): case _: raise exc from exc + # This is special case for Connection.__init__ + if result is None: + return + match result.data: case []: return None @@ -206,14 +249,6 @@ class KeyDef: return """{{ {} }}""".format(parts) -@dataclass(frozen=True) -class RaftStatus: - id: int - raft_state: str - term: int - leader_id: int | None = None - - class CasRange: key_min = dict(kind="unbounded", key=None) key_max = dict(kind="unbounded", key=None) @@ -389,6 +424,41 @@ OUT_LOCK = threading.Lock() POSITION_IN_SPACE_INSTANCE_ID = 3 +class Connection(tarantool.Connection): # type: ignore + @normalize_net_box_result + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + @normalize_net_box_result + def call(self, func_name, *args, on_push=None, on_push_ctx=None): + return super().call(func_name, *args, on_push=on_push, on_push_ctx=on_push_ctx) + + @normalize_net_box_result + def eval(self, expr, *args, on_push=None, on_push_ctx=None): + return super().eval(expr, *args, on_push=on_push, on_push_ctx=on_push_ctx) + + def sql(self, sql: str, *params, options={}) -> dict: + """Run SQL query and return result""" + return self.call("pico.sql", sql, params, options) + + def sudo_sql( + self, + sql: str, + *params, + ) -> dict: + """Run SQL query as admin and return result""" + old_euid = self.eval( + """ + local before = box.session.euid() + box.session.su('admin') + return before + """ + ) + ret = self.sql(sql, *params) + self.eval("box.session.su(...)", old_euid) + return ret + + @dataclass class Instance: binary_path: str @@ -397,17 +467,21 @@ class Instance: peers: list[str] host: str port: int - init_replication_factor: int color: Callable[[str], str] + audit: str | bool = True + tier: str | None = None + init_replication_factor: int | None = None + init_cfg_path: str | None = None instance_id: str | None = None replicaset_id: str | None = None failure_domain: dict[str, str] = field(default_factory=dict) + service_password_file: str | None = None env: dict[str, str] = field(default_factory=dict) process: subprocess.Popen | None = None raft_id: int = INVALID_RAFT_ID - _on_output_callbacks: list[Callable[[str], None]] = field(default_factory=list) + _on_output_callbacks: list[Callable[[bytes], None]] = field(default_factory=list) @property def listen(self): @@ -416,7 +490,7 @@ class Instance: def current_grade(self, instance_id=None): if instance_id is None: instance_id = self.instance_id - return self.call("pico.instance_info", instance_id)["current_grade"] + return self.call(".proc_instance_info", instance_id)["current_grade"] def instance_uuid(self): return self.eval("return box.info.uuid") @@ -424,8 +498,25 @@ class Instance: def replicaset_uuid(self): return self.eval("return box.info.cluster.uuid") + @property + def audit_flag_value(self): + """ + This property abstracts away peculiarities of the audit config. + This is the value we're going to pass via `--audit`, or `None` + if audit is disabled for this instance. 
+ """ + if self.audit: + if isinstance(self.audit, bool): + return os.path.join(self.data_dir, "audit.log") + if isinstance(self.audit, str): + return self.audit + return None + @property def command(self): + audit = self.audit_flag_value + service_password = self.service_password_file + # fmt: off return [ self.binary_path, "run", @@ -436,7 +527,13 @@ class Instance: "--listen", self.listen, "--peer", ','.join(self.peers), *(f"--failure-domain={k}={v}" for k, v in self.failure_domain.items()), - "--init-replication-factor", f"{self.init_replication_factor}" + *(["--init-replication-factor", f"{self.init_replication_factor}"] + if self.init_replication_factor is not None else []), + *(["--init-cfg", self.init_cfg_path] + if self.init_cfg_path is not None else []), + *(["--tier", self.tier] if self.tier is not None else []), + *(["--audit", audit] if audit else []), + *(["--service-password-file", service_password] if service_password else []), ] # fmt: on @@ -447,6 +544,14 @@ class Instance: def connect( self, timeout: int | float, user: str | None = None, password: str | None = None ): + if user is None: + user = "pico_service" + if password is None and self.service_password_file is not None: + with open(self.service_password_file, "r") as f: + password = f.readline() + if password.endswith("\n"): + password = password[:-1] + c = Connection( self.host, self.port, @@ -462,7 +567,6 @@ class Instance: finally: c.close() - @normalize_net_box_result def call( self, fn, @@ -474,7 +578,6 @@ class Instance: with self.connect(timeout, user=user, password=password) as conn: return conn.call(fn, args) - @normalize_net_box_result def eval( self, expr, @@ -509,9 +612,47 @@ class Instance: ) return self.eval(lua) - def sql(self, sql: str, *params, timeout: int | float = 1) -> dict: + def sql( + self, + sql: str, + *params, + options={}, + user: str | None = None, + password: str | None = None, + timeout: int | float = 1, + ) -> dict: """Run SQL query and return result""" - return self.call("pico.sql", sql, params, timeout=timeout) + with self.connect(timeout=timeout, user=user, password=password) as conn: + return conn.sql(sql, *params, options=options) + + def sudo_sql( + self, + sql: str, + *params, + user: str | None = None, + password: str | None = None, + timeout: int | float = 1, + ) -> dict: + """Run SQL query as admin and return result""" + with self.connect(timeout, user=user, password=password) as conn: + return conn.sudo_sql(sql, params) + + def create_user( + self, + with_name: str, + with_password: str, + user: str | None = None, + password: str | None = None, + timeout: int | float = 1, + ): + self.sql( + f""" + CREATE USER "{with_name}" WITH PASSWORD '{with_password}' USING chap-sha1 + """, + user=user, + password=password, + timeout=timeout, + ) def terminate(self, kill_after_seconds=10) -> int | None: """Terminate the instance gracefully with SIGTERM""" @@ -532,22 +673,28 @@ class Instance: finally: self.kill() - def _process_output(self, src, out): + def _process_output(self, src, out: io.TextIOWrapper): id = self.instance_id or f":{self.port}" prefix = f"{id:<3} | " if sys.stdout.isatty(): prefix = self.color(prefix) - for line in io.TextIOWrapper(src, line_buffering=True): + prefix_bytes = prefix.encode("utf-8") + + # `iter(callable, sentinel)` form: calls callable until it returns sentinel + for line in iter(src.readline, b""): with OUT_LOCK: - out.write(prefix) - out.write(line) + out.buffer.write(prefix_bytes) + out.buffer.write(line) out.flush() for cb in 
self._on_output_callbacks: cb(line) - def on_output_line(self, cb: Callable[[str], None]): + # Close the stream, because `Instance.fail_to_start` is waiting for it + src.close() + + def on_output_line(self, cb: Callable[[bytes], None]): self._on_output_callbacks.append(cb) def start(self, peers=[]): @@ -567,12 +714,17 @@ class Instance: if os.environ.get("RUST_BACKTRACE") is not None: env.update(RUST_BACKTRACE=str(os.environ.get("RUST_BACKTRACE"))) + if os.getenv("NOLOG"): + out = subprocess.DEVNULL + else: + out = subprocess.PIPE + self.process = subprocess.Popen( self.command, env=env or None, stdin=subprocess.DEVNULL, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stdout=out, + stderr=out, # Picodata instance consists of two processes: a supervisor # and a child. Pytest manages it at the level of linux # process groups that is a collection of related processes @@ -590,6 +742,12 @@ class Instance: start_new_session=True, ) + # Assert a new process group is created + assert os.getpgid(self.process.pid) == self.process.pid + + if out == subprocess.DEVNULL: + return + for src, out in [ (self.process.stdout, sys.stdout), (self.process.stderr, sys.stderr), @@ -600,21 +758,41 @@ class Instance: daemon=True, ).start() - # Assert a new process group is created - assert os.getpgid(self.process.pid) == self.process.pid - def fail_to_start(self, timeout: int = 5): assert self.process is None self.start() assert self.process try: rc = self.process.wait(timeout) + + # Wait for all the output to be handled in the separate threads + while not self.process.stdout.closed or not self.process.stderr.closed: # type: ignore + time.sleep(0.1) + self.process = None assert rc != 0 except Exception as e: self.kill() raise e from e + def wait_process_stopped(self, timeout: int = 5): + if self.process is None: + return + + # FIXME: copy-pasted from above + self.process.wait(timeout) + + # When logs are disabled stdour and stderr are set to None + if not (self.process.stdout or self.process.stderr): + self.process = None + return + + # Wait for all the output to be handled in the separate threads + while not self.process.stdout.closed or not self.process.stderr.closed: # type: ignore + time.sleep(0.1) + + self.process = None + def restart(self, kill: bool = False, remove_data: bool = False): if kill: self.kill() @@ -629,11 +807,6 @@ class Instance: def remove_data(self): rmtree(self.data_dir) - def _raft_status(self) -> RaftStatus: - status = self.call("pico.raft_status") - assert isinstance(status, dict) - return RaftStatus(**status) - def raft_propose_nop(self): return self.call("pico.raft_propose_nop") @@ -669,10 +842,11 @@ class Instance: case _: raise TypeError("space must be str or int") + # FIXME: this method's parameters are out of sync with Cluster.cas def cas( self, op_kind: Literal["insert", "replace", "delete"], - space: str | int, + table: str | int, tuple: Tuple | List | None = None, index: int | None = None, term: int | None = None, @@ -694,14 +868,14 @@ class Instance: elif term is None: term = self.raft_term_by_index(index) - space_id = self.space_id(space) + table_id = self.space_id(table) predicate_ranges = [] if ranges is not None: for range in ranges: predicate_ranges.append( dict( - space=space_id, + table=table_id, key_min=range.key_min_packed, key_max=range.key_max_packed, ) @@ -717,30 +891,38 @@ class Instance: op = dict( kind="dml", op_kind=op_kind, - space=space_id, + table=table_id, tuple=msgpack.packb(tuple), ) elif op_kind == "delete": op = dict( kind="dml", op_kind=op_kind, - 
space=space_id, + table=table_id, key=msgpack.packb(tuple), ) else: raise Exception(f"unsupported {op_kind=}") + # guest has super privs for now by default this should be equal + # to ADMIN_USER_ID on the rust side + as_user = 1 + op["initiator"] = as_user + eprint(f"CaS:\n {predicate=}\n {op=}") - return self.call(".proc_cas", self.cluster_id, predicate, op)[0]["index"] + return self.call(".proc_cas", self.cluster_id, predicate, op, as_user)["index"] - def next_schema_version(self) -> int: - t = self.call("box.space._pico_property:get", "next_schema_version") - if t is None: - return 1 + def pico_property(self, key: str): + tup = self.call("box.space._pico_property:get", key) + if tup is None: + return None + + return tup[1] - return t[1] + def next_schema_version(self) -> int: + return self.pico_property("next_schema_version") or 1 - def create_space(self, params: dict, timeout: float = 3.0) -> int: + def create_table(self, params: dict, timeout: float = 3.0) -> int: """ Creates a space. Returns a raft index at which a newly created space has to exist on all peers. @@ -749,16 +931,16 @@ class Instance: which is more low level and directly proposes a raft entry. """ params["timeout"] = timeout - index = self.call("pico.create_space", params, timeout, timeout=timeout + 0.5) + index = self.call("pico.create_table", params, timeout, timeout=timeout + 0.5) return index - def drop_space(self, space: int | str, timeout: float = 3.0): + def drop_table(self, space: int | str, timeout: float = 3.0): """ Drops the space. Returns a raft index at which the space has to be dropped on all peers. """ index = self.call( - "pico.drop_space", space, dict(timeout=timeout), timeout=timeout + 0.5 + "pico.drop_table", space, dict(timeout=timeout), timeout=timeout + 0.5 ) return index @@ -786,7 +968,7 @@ class Instance: op = dict( kind="ddl_prepare", schema_version=self.next_schema_version(), - ddl=dict(kind="create_space", **space_def), + ddl=dict(kind="create_table", **space_def), ) # TODO: rewrite the test using pico.cas index = self.call("pico.raft_propose", op, timeout=timeout) @@ -798,21 +980,26 @@ class Instance: return index_fin def assert_raft_status(self, state, leader_id=None): - status = self._raft_status() + status = self.call(".proc_raft_info") if leader_id is None: - leader_id = status.leader_id + leader_id = status["leader_id"] assert { - "raft_state": status.raft_state, - "leader_id": status.leader_id, + "raft_state": status["state"], + "leader_id": status["leader_id"], } == {"raft_state": state, "leader_id": leader_id} - def wait_online(self, timeout: int | float = 6, rps: int | float = 5): - """Wait until instance attains Online grade + def wait_online( + self, timeout: int | float = 6, rps: int | float = 5, expected_incarnation=None + ): + """Wait until instance attains Online grade. + + This function will periodically check the current instance's grade and + reset the timeout each time the grade changes. 
Args: - timeout (int | float, default=6): total time limit + timeout (int | float, default=6): time limit since last grade change rps (int | float, default=5): retries per second Raises: @@ -825,28 +1012,65 @@ class Instance: if self.process is None: raise ProcessDead("process was not started") - def fetch_info(): + def fetch_current_grade() -> Tuple[str, int]: try: - exit_code = self.process.wait(timeout=0) + exit_code = self.process.wait(timeout=0) # type: ignore except subprocess.TimeoutExpired: # it's fine, the process is still running pass else: raise ProcessDead(f"process exited unexpectedly, {exit_code=}") - whoami = self.call("pico.whoami") - assert isinstance(whoami, dict) - assert isinstance(whoami["raft_id"], int) - assert isinstance(whoami["instance_id"], str) - self.raft_id = whoami["raft_id"] - self.instance_id = whoami["instance_id"] - - myself = self.call("pico.instance_info", self.instance_id) + myself = self.call(".proc_instance_info") assert isinstance(myself, dict) + + assert isinstance(myself["raft_id"], int) + self.raft_id = myself["raft_id"] + + assert isinstance(myself["instance_id"], str) + self.instance_id = myself["instance_id"] + assert isinstance(myself["current_grade"], dict) - assert myself["current_grade"]["variant"] == "Online" + return ( + myself["current_grade"]["variant"], + myself["current_grade"]["incarnation"], + ) + + now = time.monotonic() + deadline = now + timeout + next_retry = now + last_grade = None + while True: + now = time.monotonic() + assert now < deadline, "timeout" + + # Throttling + if now < next_retry: + time.sleep(next_retry - now) + next_retry = time.monotonic() + 1 / rps + + try: + # Fetch grade + grade = fetch_current_grade() + if grade != last_grade: + last_grade = grade + deadline = time.monotonic() + timeout + + # Check grade + variant, incarnation = grade + assert variant == "Online" + if expected_incarnation is not None: + assert incarnation == expected_incarnation + + # Success! + break + + except ProcessDead as e: + raise e from e + except Exception as e: + if time.monotonic() > deadline: + raise e from e - Retriable(timeout, rps, fatal=ProcessDead).call(fetch_info) eprint(f"{self} is online") def raft_term(self) -> int: @@ -891,12 +1115,18 @@ class Instance: See `crate::traft::node::Node::wait_index`. """ - return self.call( - "pico.raft_wait_index", - target, - timeout, # this timeout is passed as an argument - timeout=timeout + 1, # this timeout is for network call - ) + def make_attempt(): + return self.call( + "pico.raft_wait_index", + target, + timeout, # this timeout is passed as an argument + timeout=timeout + 1, # this timeout is for network call + ) + + index = Retriable(timeout=timeout + 1, rps=10).call(make_attempt) + + assert index is not None + return index def get_vclock(self) -> int: """Get current vclock""" @@ -928,7 +1158,7 @@ class Instance: eprint(f"{self} is trying to become a leader, {attempt=}") # 1. Force the node to campaign. - self.call("pico.raft_timeout_now") + self.call(".proc_raft_promote") # 2. Wait until the miracle occurs. 
Retriable(timeout, rps).call(self.assert_raft_status, "Leader") @@ -936,6 +1166,31 @@ class Instance: Retriable(timeout=3, rps=1).call(make_attempt, timeout=1, rps=10) eprint(f"{self} is a leader now") + def grant_privilege( + self, user, privilege: str, object_type: str, object_name: Optional[str] = None + ): + # do it as admin because some privileges can be granted only by admin + return self.eval( + """ + box.session.su("admin") + user, privilege, object_type, object_name = ... + return pico.grant_privilege(user, privilege, object_type, object_name) + """, + [user, privilege, object_type, object_name], + ) + + def revoke_privilege( + self, user, privilege: str, object_type: str, object_name: Optional[str] = None + ): + return self.eval( + """ + box.session.su("admin") + user, privilege, object_type, object_name = ... + return pico.revoke_privilege(user, privilege, object_type, object_name) + """, + [user, privilege, object_type, object_name], + ) + CLUSTER_COLORS = ( color.cyan, @@ -960,6 +1215,7 @@ class Cluster: base_port: int max_port: int instances: list[Instance] = field(default_factory=list) + cfg_path: str | None = None def __repr__(self): return f'Cluster("{self.base_host}:{self.base_port}", n={len(self.instances)})' @@ -968,13 +1224,19 @@ class Cluster: return self.instances[item] def deploy( - self, *, instance_count: int, init_replication_factor: int = 1 + self, + *, + instance_count: int, + init_replication_factor: int | None = None, + tier: str | None = None, ) -> list[Instance]: assert not self.instances, "Already deployed" for _ in range(instance_count): self.add_instance( - wait_online=False, init_replication_factor=init_replication_factor + wait_online=False, + tier=tier, + init_replication_factor=init_replication_factor, ) for instance in self.instances: @@ -986,6 +1248,13 @@ class Cluster: eprint(f" {self} deployed ".center(80, "=")) return self.instances + def set_init_cfg(self, cfg: dict): + assert self.cfg_path is None + self.cfg_path = self.data_dir + "/tier.yaml" + with open(self.cfg_path, "w") as yaml_file: + dump = yaml.dump(cfg, default_flow_style=False) + yaml_file.write(dump) + def add_instance( self, wait_online=True, @@ -993,7 +1262,8 @@ class Cluster: instance_id: str | bool = True, replicaset_id: str | None = None, failure_domain=dict(), - init_replication_factor=1, + init_replication_factor: int | None = None, + tier: str | None = None, ) -> Instance: """Add an `Instance` into the list of instances of the cluster and wait for it to attain Online grade unless `wait_online` is `False`. @@ -1037,9 +1307,12 @@ class Cluster: host=self.base_host, port=port, peers=peers or [f"{self.base_host}:{self.base_port + 1}"], - init_replication_factor=init_replication_factor, color=CLUSTER_COLORS[len(self.instances) % len(CLUSTER_COLORS)], failure_domain=failure_domain, + init_replication_factor=init_replication_factor, + tier=tier, + init_cfg_path=self.cfg_path, + audit=True, ) self.instances.append(instance) @@ -1055,7 +1328,8 @@ class Cluster: peers=None, instance_id: str | bool = True, failure_domain=dict(), - init_replication_factor=1, + init_replication_factor: int | None = None, + tier: str = "storage", ): instance = self.add_instance( wait_online=False, @@ -1063,6 +1337,7 @@ class Cluster: instance_id=instance_id, failure_domain=failure_domain, init_replication_factor=init_replication_factor, + tier=tier, ) self.instances.remove(instance) instance.fail_to_start() @@ -1107,8 +1382,7 @@ class Cluster: """ Waits for all peers to commit an entry with index `index`. 
""" - import time - + assert type(index) is int deadline = time.time() + timeout for instance in self.instances: if instance.process is not None: @@ -1117,18 +1391,18 @@ class Cluster: timeout = 0 instance.raft_wait_index(index, timeout) - def create_space(self, params: dict, timeout: float = 3.0): + def create_table(self, params: dict, timeout: float = 3.0): """ Creates a space. Waits for all online peers to be aware of it. """ - index = self.instances[0].create_space(params, timeout) + index = self.instances[0].create_table(params, timeout) self.raft_wait_index(index, timeout) - def drop_space(self, space: int | str, timeout: float = 3.0): + def drop_table(self, space: int | str, timeout: float = 3.0): """ Drops the space. Waits for all online peers to be aware of it. """ - index = self.instances[0].drop_space(space, timeout) + index = self.instances[0].drop_table(space, timeout) self.raft_wait_index(index, timeout) def abort_ddl(self, timeout: float = 3.0): @@ -1140,14 +1414,19 @@ class Cluster: def cas( self, - dml_kind: Literal["insert", "replace", "delete"], - space: str, - tuple: Tuple | List, + dml_kind: Literal["insert", "replace", "delete", "update"], + table: str, + tuple: Tuple | List | None = None, + *, + key: Tuple | List | None = None, + ops: Tuple | List | None = None, index: int | None = None, term: int | None = None, ranges: List[CasRange] | None = None, # If specified send CaS through this instance instance: Instance | None = None, + user: str | None = None, + password: str | None = None, ) -> int: """ Performs a clusterwide compare and swap operation. @@ -1165,7 +1444,7 @@ class Cluster: for range in ranges: predicate_ranges.append( dict( - space=space, + table=table, key_min=range.key_min, key_max=range.key_max, ) @@ -1176,21 +1455,77 @@ class Cluster: term=term, ranges=predicate_ranges, ) - if dml_kind in ["insert", "replace", "delete"]: + if dml_kind in ["insert", "replace", "delete", "update"]: dml = dict( - space=space, + table=table, kind=dml_kind, tuple=tuple, + key=key, + ops=ops, ) else: raise Exception(f"unsupported {dml_kind=}") eprint(f"CaS:\n {predicate=}\n {dml=}") - return instance.call("pico.cas", dml, predicate) + return instance.call("pico.cas", dml, predicate, user=user, password=password) + + def masters(self) -> List[Instance]: + ret = [] + for instance in self.instances: + if not instance.eval("return box.info.ro"): + ret.append(instance) + + return ret + + def grant_box_privilege( + self, user, privilege: str, object_type: str, object_name: Optional[str] = None + ): + """ + Sometimes in our tests we go beyond picodata privilege model and need + to grant priveleges on something that is not part of the picodata access control model. + For example execute access on universe mainly needed to invoke functions. + """ + for instance in self.masters(): + instance.eval( + """ + box.session.su("admin") + user, privilege, object_type, object_name = ... 
+ return box.schema.user.grant(user, privilege, object_type, object_name) + """, + [user, privilege, object_type, object_name], + ) + + +@dataclass +class PortalStorage: + instance: Instance + + @property + def descriptors(self): + return self.instance.call("pico.pg_portals") + + def bind(self, *params): + return self.instance.call("pico.pg_bind", *params, False) + + def close(self, descriptor: int): + return self.instance.call("pico.pg_close", descriptor) + + def describe(self, descriptor: int) -> dict: + return self.instance.call("pico.pg_describe", descriptor, False) + + def execute(self, descriptor: int) -> dict: + return self.instance.call("pico.pg_execute", descriptor, False) + + def flush(self): + for descriptor in self.descriptors["available"]: + self.close(descriptor) + + def parse(self, sql: str) -> int: + return self.instance.call("pico.pg_parse", sql, False) @pytest.fixture(scope="session") -def binary_path() -> str: +def binary_path(cargo_build: None) -> str: """Path to the picodata binary, e.g. `./target/debug/picodata`.""" path = os.getenv("PICODATA_EXECUTABLE") if path: @@ -1206,6 +1541,29 @@ def binary_path() -> str: raise Exception("can't find picodata executable!") +@pytest.fixture(scope="session") +def cargo_build(pytestconfig: pytest.Config) -> None: + """Run cargo build before tests. Skipped in CI""" + + # Start test logs with a newline. This makes them prettier with + # `pytest -s` (a shortcut for `pytest --capture=no`) + eprint("") + + if os.environ.get("CI") is not None: + eprint("Skipping cargo build") + return + + features = ["error_injection"] + if bool(pytestconfig.getoption("--with-webui")): + features.append("webui") + + # We work with picodata executabe so there is no need in building it. + + # cmd = ["cargo", "build", "--features", ",".join(features)] + # eprint(f"Running {cmd}") + # assert subprocess.call(cmd) == 0, "cargo build failed" + + @pytest.fixture(scope="session") def cluster_ids(xdist_worker_number) -> Iterator[str]: """Unique `clister_id` generator.""" @@ -1231,10 +1589,27 @@ def cluster( @pytest.fixture -def instance(cluster: Cluster) -> Generator[Instance, None, None]: +def instance(cluster: Cluster, pytestconfig) -> Generator[Instance, None, None]: """Returns a deployed instance forming a single-node cluster.""" - cluster.deploy(instance_count=1) - yield cluster[0] + instance = cluster.add_instance(wait_online=False) + + has_webui = bool(pytestconfig.getoption("--with-webui")) + if has_webui: + instance.env["PICODATA_HTTP_LISTEN"] = ( + f"{cluster.base_host}:{cluster.base_port+80}" + ) + + instance.start() + instance.wait_online() + yield instance + + +@pytest.fixture +def pg_portals(instance: Instance) -> Generator[PortalStorage, None, None]: + """Returns a PG portal storage on a single instance.""" + portals = PortalStorage(instance) + yield portals + portals.flush() def retrying(fn, timeout=3): @@ -1273,6 +1648,17 @@ def pgrep_tree(pid): return [pid] +class log_crawler: + def __init__(self, instance: Instance, search_str: str) -> None: + self.matched = False + self.search_str = search_str.encode("utf-8") + instance.on_output_line(self._cb) + + def _cb(self, line: bytes): + if self.search_str in line: + self.matched = True + + @dataclass class Postgres: instance: Instance @@ -1282,7 +1668,7 @@ class Postgres: package.cpath="{os.environ['LUA_CPATH']}" box.schema.func.create('libpgproto.server_start', {{ language = 'C' }}) - box.schema.user.grant('guest', 'execute', 'function', 'libpgproto.server_start') + 
box.schema.user.grant('pico_service', 'execute', 'function', 'libpgproto.server_start') """ self.instance.eval(code) return self diff --git a/pgproto/test/extended_query_test.py b/pgproto/test/extended_query_test.py index a72fb68bca..2116576471 100644 --- a/pgproto/test/extended_query_test.py +++ b/pgproto/test/extended_query_test.py @@ -2,7 +2,6 @@ import pytest import pg8000.native as pg # type: ignore import os from conftest import Postgres -from conftest import ReturnError from pg8000.exceptions import DatabaseError # type: ignore # We use psycopg for parameterized queries because pg8000 @@ -24,9 +23,8 @@ def test_extended_query(postgres: Postgres): i1 = postgres.instance user = "admin" - password = "password" - i1.eval("box.cfg{auth_type='md5'}") - i1.eval(f"box.schema.user.passwd('{user}', '{password}')") + password = "P@ssw0rd" + i1.sql(f"ALTER USER \"{user}\" WITH PASSWORD '{password}' USING md5") os.environ["PGSSLMODE"] = "disable" conn = pg.Connection(user, password=password, host=host, port=port) @@ -47,8 +45,8 @@ def test_extended_query(postgres: Postgres): ) # statement is prepared, but not executed yet - with pytest.raises(ReturnError, match="space TALL not found"): - i1.sql(""" select * from tall """) + with pytest.raises(DatabaseError, match="space TALL not found"): + conn.run(""" select * from tall """) ps.run() @@ -109,9 +107,8 @@ def test_parameterized_queries(postgres: Postgres): i1 = postgres.instance user = "admin" - password = "password" - i1.eval("box.cfg{auth_type='md5'}") - i1.eval(f"box.schema.user.passwd('{user}', '{password}')") + password = "P@ssw0rd" + i1.sql(f"ALTER USER \"{user}\" WITH PASSWORD '{password}' USING md5") conn = psycopg.connect( f"user = {user} password={password} host={host} port={port} sslmode=disable" diff --git a/pgproto/test/simple_query_test.py b/pgproto/test/simple_query_test.py index 77a7eba473..b99ba569af 100644 --- a/pgproto/test/simple_query_test.py +++ b/pgproto/test/simple_query_test.py @@ -12,9 +12,8 @@ def test_simple_query_flow_errors(postgres: Postgres): i1 = postgres.instance user = "admin" - password = "fANPIOUWEh79p12hdunqwADI" - i1.eval("box.cfg{auth_type='md5'}") - i1.eval(f"box.schema.user.passwd('{user}', '{password}')") + password = "P@ssw0rd" + i1.sql(f"ALTER USER \"{user}\" WITH PASSWORD '{password}' USING md5") with pytest.raises(pg.InterfaceError, match="Server refuses SSL"): pg.Connection(user, password=password, host=host, port=port, ssl_context=True) @@ -49,9 +48,8 @@ def test_simple_flow_session(postgres: Postgres): i1 = postgres.instance user = "admin" - password = "password" - i1.eval("box.cfg{auth_type='md5'}") - i1.eval(f"box.schema.user.passwd('{user}', '{password}')") + password = "P@ssw0rd" + i1.sql(f"ALTER USER \"{user}\" WITH PASSWORD '{password}' USING md5") os.environ["PGSSLMODE"] = "disable" conn = pg.Connection(user, password=password, host=host, port=port) @@ -107,9 +105,8 @@ def test_explain(postgres: Postgres): i1 = postgres.instance user = "admin" - password = "password" - i1.eval("box.cfg{auth_type='md5'}") - i1.eval(f"box.schema.user.passwd('{user}', '{password}')") + password = "P@ssw0rd" + i1.sql(f"ALTER USER \"{user}\" WITH PASSWORD '{password}' USING md5") os.environ["PGSSLMODE"] = "disable" conn = pg.Connection(user, password=password, host=host, port=port) @@ -133,7 +130,7 @@ def test_explain(postgres: Postgres): cur.execute("explain " + query) plan = cur.fetchall() assert 'insert "explain" on conflict: fail' in plan[0] - assert ' motion [policy: local segment([ref("COLUMN_1")])]' in 
plan[1] + assert ' motion [policy: segment([ref("COLUMN_1")])]' in plan[1] assert " values" in plan[2] assert " value row (data=ROW(0::unsigned))" in plan[3] assert "execution options:" in plan[4] @@ -142,7 +139,7 @@ def test_explain(postgres: Postgres): cur.execute("explain " + query) plan = cur.fetchall() assert 'insert "explain" on conflict: fail' in plan[0] - assert ' motion [policy: local segment([ref("COLUMN_1")])]' in plan[1] + assert ' motion [policy: segment([ref("COLUMN_1")])]' in plan[1] assert " values" in plan[2] assert " value row (data=ROW(0::unsigned))" in plan[3] assert "execution options:" in plan[4] @@ -176,9 +173,8 @@ def test_aggregate_error(postgres: Postgres): i1 = postgres.instance user = "admin" - password = "password" - i1.eval("box.cfg{auth_type='md5'}") - i1.eval(f"box.schema.user.passwd('{user}', '{password}')") + password = "P@ssw0rd" + i1.sql(f"ALTER USER \"{user}\" WITH PASSWORD '{password}' USING md5") os.environ["PGSSLMODE"] = "disable" conn = pg.Connection(user, password=password, host=host, port=port) diff --git a/pgproto/test/ssl_test.py b/pgproto/test/ssl_test.py index eeb963ea9f..f29bcd1382 100644 --- a/pgproto/test/ssl_test.py +++ b/pgproto/test/ssl_test.py @@ -12,9 +12,8 @@ def test_ssl_request_handling(postgres: Postgres): i1 = postgres.instance user = "user" - password = "password" - i1.eval("box.cfg{auth_type='md5'}") - i1.call("pico.create_user", user, password, dict(timeout=3)) + password = "P@ssw0rd" + i1.sql(f"CREATE USER \"{user}\" WITH PASSWORD '{password}' USING md5") # disable: only try a non-SSL connection os.environ["PGSSLMODE"] = "disable" -- GitLab
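
Note (illustrative, not part of the patch): the sketch below shows how a pgproto test is expected to look against the synced conftest.py -- users are now provisioned through SQL (CREATE USER ... USING md5) rather than `pico.create_user` plus `box.cfg{auth_type=...}`, matching the auth_test.py hunks above. The test name and the host/port values are assumptions; the real tests take them from the module-level setup around the `Postgres` fixture. Since this conftest.py no longer builds picodata, point the suite at a prebuilt binary via PICODATA_EXECUTABLE (or PATH) before running pytest.

    # Sketch only -- assumes the Postgres fixture from pgproto/test/conftest.py
    # and a pgproto server listening on the host/port below.
    import pg8000.native as pg  # type: ignore

    from conftest import Postgres

    host = "127.0.0.1"  # assumed; the real tests define these next to the fixture
    port = 5432


    def test_md5_login_sketch(postgres: Postgres):
        i1 = postgres.instance
        user = "md5_user"
        password = "P@ssw0rd"

        # Provision the user through SQL, as the updated tests do.
        i1.sql(f"CREATE USER \"{user}\" WITH PASSWORD '{password}' USING md5")

        # A successful pg8000 connection exercises MD5 auth end to end.
        conn = pg.Connection(user, password=password, host=host, port=port)
        conn.close()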