--- a/include/jemalloc/internal/jemalloc_internal_inlines_c.h (working tree)
+++ b/include/jemalloc/internal/jemalloc_internal_inlines_c.h (index)
@@ -254,8 +254,14 @@ malloc_initialized(void) {
* fastpath supports ticker and profiling, both of which will also
* tail-call to the slowpath if they fire.
*/
+#if defined(JEMALLOC_ZONE)
+extern void je_assure_zone_register();
+#endif
JEMALLOC_ALWAYS_INLINE void *
imalloc_fastpath(size_t size, void *(fallback_alloc)(size_t)) {
+#if defined(JEMALLOC_ZONE)
+ je_assure_zone_register();
+#endif
LOG("core.malloc.entry", "size: %zu", size);
if (tsd_get_allocates() && unlikely(!malloc_initialized())) {
return fallback_alloc(size);
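
Note: the hunk only declares je_assure_zone_register(); its definition is not part of this diff. Below is a minimal sketch, assuming the intent is to make sure jemalloc's macOS malloc zone is registered exactly once before the first fastpath allocation; register_default_zone_placeholder is a hypothetical stand-in for whatever routine performs the actual registration.

#include <pthread.h>

/*
 * Placeholder for the real zone-registration routine; on macOS this
 * would ultimately hand jemalloc's zone to malloc_zone_register().
 */
static void
register_default_zone_placeholder(void) {
    /* ... perform the one-time malloc zone registration ... */
}

static pthread_once_t zone_register_once = PTHREAD_ONCE_INIT;

void
je_assure_zone_register(void) {
    /*
     * pthread_once() guarantees the registration body runs at most
     * once, even if several threads reach the allocation fastpath
     * concurrently before the zone has been registered.
     */
    pthread_once(&zone_register_once, register_default_zone_placeholder);
}

With a once-guard of this shape, the extra call added to imalloc_fastpath() costs roughly one pthread_once() check per allocation after the first registration has completed.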