diff --git a/src/lib/json/json.c b/src/lib/json/json.c
index 7ef511d4e63bf8946932b0a99c4f9394135e5f69..1bfef172ad35e6f26a9eb1347da8ac0576131dd4 100644
--- a/src/lib/json/json.c
+++ b/src/lib/json/json.c
@@ -251,6 +251,9 @@ json_lexer_next_token(struct json_lexer *lexer, struct json_token *token)
 			return lexer->symbol_count + 1;
 		return json_parse_identifier(lexer, token);
 	default:
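+		/* A name that does not start the path must follow '.'. */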
+		if (last_offset != 0)
+			return lexer->symbol_count;
 		json_revert_symbol(lexer, last_offset);
 		return json_parse_identifier(lexer, token);
 	}
diff --git a/test/unit/json.c b/test/unit/json.c
index 2b7236ea8472aca4459cba75450bbabae19418d6..4d3423142a7b87bdf197245bf416fdd43d9742eb 100644
--- a/test/unit/json.c
+++ b/test/unit/json.c
@@ -97,7 +97,7 @@ void
 test_errors()
 {
 	header();
-	plan(21);
+	plan(22);
 	const char *path;
 	int len;
 	struct json_lexer lexer;
@@ -146,6 +146,11 @@ test_errors()
 	json_lexer_next_token(&lexer, &token);
 	is(json_lexer_next_token(&lexer, &token), 3, "can not write <field.[index]>")
 
+	reset_to_new_path("[1]key")
+	json_lexer_next_token(&lexer, &token);
+	is(json_lexer_next_token(&lexer, &token), 4, "can not omit '.' before "
+	   "a key following []");
+
 	reset_to_new_path("f.")
 	json_lexer_next_token(&lexer, &token);
 	is(json_lexer_next_token(&lexer, &token), 3, "error in leading <.>");
@@ -563,11 +568,11 @@ test_path_multikey()
 		int rc;
 	} test_cases[] = {
 		{"", 0},
-		{"[1]Data[1]extra[1]", 18},
-		{"[*]Data[1]extra[1]", 0},
-		{"[*]Data[*]extra[1]", 0},
-		{"[1]Data[*]extra[1]", 7},
-		{"[1]Data[1]extra[*]", 15},
+		{"[1].Data[1].extra[1]", 20},
+		{"[*].Data[1].extra[1]", 0},
+		{"[*].Data[*].extra[1]", 0},
+		{"[1].Data[*].extra[1]", 8},
+		{"[1].Data[1].extra[*]", 17},
 	};
 
 	header();
diff --git a/test/unit/json.result b/test/unit/json.result
index 1c3e7f7d2492bfeb4d45ac8cbb4202f812cb7ef0..fb9d1490f538dda831108ff92a9c298dea744019 100644
--- a/test/unit/json.result
+++ b/test/unit/json.result
@@ -76,7 +76,7 @@
 ok 1 - subtests
 	*** test_basic: done ***
 	*** test_errors ***
-    1..21
+    1..22
     ok 1 - error on position 2 for <[[>
     ok 2 - error on position 2 for <[field]>
     ok 3 - error on position 1 for <'field1'.field2>
@@ -94,10 +94,11 @@ ok 1 - subtests
     ok 15 - error on position 6 for <['aaaÂÂ']>
     ok 16 - error on position 2 for <.ÂÂ>
     ok 17 - can not write <field.[index]>
-    ok 18 - error in leading <.>
-    ok 19 - space inside identifier
-    ok 20 - tab inside identifier
-    ok 21 - invalid token for index_base 1
+    ok 18 - can not omit '.' before a key following []
+    ok 19 - error in leading <.>
+    ok 20 - space inside identifier
+    ok 21 - tab inside identifier
+    ok 22 - invalid token for index_base 1
 ok 2 - subtests
 	*** test_errors: done ***
 	*** test_tree ***
@@ -197,11 +198,11 @@ ok 5 - subtests
 	*** test_path_multikey ***
     1..6
     ok 1 - Test json_path_multikey_offset with : have 0 expected 0
-    ok 2 - Test json_path_multikey_offset with [1]Data[1]extra[1]: have 18 expected 18
-    ok 3 - Test json_path_multikey_offset with [*]Data[1]extra[1]: have 0 expected 0
-    ok 4 - Test json_path_multikey_offset with [*]Data[*]extra[1]: have 0 expected 0
-    ok 5 - Test json_path_multikey_offset with [1]Data[*]extra[1]: have 7 expected 7
-    ok 6 - Test json_path_multikey_offset with [1]Data[1]extra[*]: have 15 expected 15
+    ok 2 - Test json_path_multikey_offset with [1].Data[1].extra[1]: have 20 expected 20
+    ok 3 - Test json_path_multikey_offset with [*].Data[1].extra[1]: have 0 expected 0
+    ok 4 - Test json_path_multikey_offset with [*].Data[*].extra[1]: have 0 expected 0
+    ok 5 - Test json_path_multikey_offset with [1].Data[*].extra[1]: have 8 expected 8
+    ok 6 - Test json_path_multikey_offset with [1].Data[1].extra[*]: have 17 expected 17
 ok 6 - subtests
 	*** test_path_multikey: done ***
 	*** main: done ***