From 1f8448e7ad9efa24c59e08ca0a575b9b04930f93 Mon Sep 17 00:00:00 2001
From: Kaitmazian Maksim <m.kaitmazian@picodata.io>
Date: Mon, 16 Dec 2024 15:05:15 +0300
Subject: [PATCH] fix: flaky test_extreme_integer_values

This test is flaky due to vhsard rebalancing, so migrating to global
tables resolves the issue, since we don't use vshard for them. However,
after this migration one of the `pytest.raises` blocks's been failed,
as turned out, because u64::max value cannot be inserted in global tables.
We decided to don't fix this problem in this commit, so this check
was removed from the test and a new issue was added.

New issue: https://git.picodata.io/core/picodata/-/issues/1216
---
 test/int/test_sql.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/test/int/test_sql.py b/test/int/test_sql.py
index 2af3287457..332891baf5 100644
--- a/test/int/test_sql.py
+++ b/test/int/test_sql.py
@@ -5822,6 +5822,7 @@ def test_extreme_integer_values(cluster: Cluster):
     ddl = i1.sql(
         """
         CREATE TABLE T (uid UNSIGNED PRIMARY KEY, iid INTEGER)
+        DISTRIBUTED GLOBALLY
         OPTION (TIMEOUT = 3)
     """
     )
@@ -5857,13 +5858,8 @@ def test_extreme_integer_values(cluster: Cluster):
     data = i1.sql(f"SELECT * FROM T WHERE iid = {I64_MAX} LIMIT 1")
     assert sorted(data) == [[U64_MAX, I64_MAX]]
 
-    with pytest.raises(
-        TarantoolError, match="Failed to cast 9223372036854775808 to integer"
-    ):
-        data = i1.sql(f"SELECT iid + 1 FROM T WHERE iid = {I64_MAX} LIMIT 1")
-
     with pytest.raises(TarantoolError, match="integer is overflowed"):
-        data = i1.sql(f"SELECT uid + 1 FROM T WHERE uid = {U64_MAX} LIMIT 1")
+        i1.sql(f"SELECT uid + 1 FROM T WHERE uid = {U64_MAX} LIMIT 1")
 
 
 def test_vdbe_steps_and_vtable_rows(cluster: Cluster):
-- 
GitLab