From 5570784cbafd8dafd7bfc09d217324f1c029d5a5 Mon Sep 17 00:00:00 2001
From: Alexandr Lyapunov <a.lyapunov@corp.mail.ru>
Date: Tue, 25 Aug 2015 17:27:05 +0300
Subject: [PATCH] minor fixes of arena and its comments

---
 src/lib/small/slab_arena.c | 16 +++++++++++-----
 src/lib/small/slab_arena.h |  3 +--
 src/lib/small/slab_cache.c |  5 ++++-
 src/memory.cc              |  2 +-
 4 files changed, 17 insertions(+), 9 deletions(-)

diff --git a/src/lib/small/slab_arena.c b/src/lib/small/slab_arena.c
index 8d108f629a..0fb4939693 100644
--- a/src/lib/small/slab_arena.c
+++ b/src/lib/small/slab_arena.c
@@ -42,12 +42,13 @@
 #define MAP_ANONYMOUS MAP_ANON
 #endif
 
-void
+static void
 munmap_checked(void *addr, size_t size)
 {
 	if (munmap(addr, size)) {
 		char buf[64];
-		intptr_t ignore_it = (intptr_t)strerror_r(errno, buf, sizeof(buf));
+		intptr_t ignore_it = (intptr_t)strerror_r(errno, buf,
+							  sizeof(buf));
 		(void)ignore_it;
 		fprintf(stderr, "Error in munmap(%p, %zu): %s\n",
 			addr, size, buf);
@@ -76,7 +77,7 @@ mmap_checked(size_t size, size_t align, int flags)
 	munmap_checked(map, size);
 
 	/*
-	 * mmap twice the requested amount to be able to align
+	 * mmap a large enough area to be able to align
 	 * the mapped address.  This can lead to virtual memory
 	 * fragmentation depending on the kernels allocation
 	 * strategy.
@@ -187,8 +188,13 @@ slab_map(struct slab_arena *arena)
 	if (used <= arena->prealloc)
 		return arena->arena + used - arena->slab_size;
 
-	return mmap_checked(arena->slab_size, arena->slab_size,
-			    arena->flags);
+	ptr = mmap_checked(arena->slab_size, arena->slab_size,
+			   arena->flags);
+	if (!ptr) {
+		__sync_sub_and_fetch(&arena->used, arena->slab_size);
+		quota_release(arena->quota, arena->slab_size);
+	}
+	return ptr;
 }
 
 void
diff --git a/src/lib/small/slab_arena.h b/src/lib/small/slab_arena.h
index 308fa26b89..86c7bd107e 100644
--- a/src/lib/small/slab_arena.h
+++ b/src/lib/small/slab_arena.h
@@ -68,9 +68,8 @@ struct slab_arena {
 	 */
 	size_t prealloc;
 	/**
-	 * How much memory in the preallocated arena has
+	 * How much memory in the arena has
 	 * already been initialized for slabs.
-	 * @invariant used <= prealloc.
 	 */
 	size_t used;
 	/**
diff --git a/src/lib/small/slab_cache.c b/src/lib/small/slab_cache.c
index aad3a3412f..44a79f3358 100644
--- a/src/lib/small/slab_cache.c
+++ b/src/lib/small/slab_cache.c
@@ -109,9 +109,12 @@ slab_is_free(struct slab *slab)
 static inline void
 slab_poison(struct slab *slab)
 {
-	static const char poison_char = 'P';
+	(void)slab;
+#ifndef NDEBUG
+	const char poison_char = 'P';
 	memset((char *) slab + slab_sizeof(), poison_char,
 	       slab->size - slab_sizeof());
+#endif
 }
 
 static inline void
diff --git a/src/memory.cc b/src/memory.cc
index 81265a28c9..1c4e55093a 100644
--- a/src/memory.cc
+++ b/src/memory.cc
@@ -37,7 +37,7 @@ void
 memory_init()
 {
 	static struct quota runtime_quota;
-	static const size_t SLAB_SIZE = 4 * 1024 * 1024;
+	const size_t SLAB_SIZE = 4 * 1024 * 1024;
 	/* default quota initialization */
 	quota_init(&runtime_quota, QUOTA_MAX);
 
-- 
GitLab