Browse Source

Add alloc-pool extra with auto pool sizing

pull/847/head
Sami Vaarala 9 years ago
parent
commit
8a08cadb13
  1. 35
      extras/alloc-pool/Makefile
  2. 38
      extras/alloc-pool/README.rst
  3. 389
      extras/alloc-pool/duk_alloc_pool.c
  4. 175
      extras/alloc-pool/duk_alloc_pool.h
  5. 16
      extras/alloc-pool/ptrcomp.yaml
  6. 2
      extras/alloc-pool/ptrcomp_fixup.h
  7. 114
      extras/alloc-pool/test.c

35
extras/alloc-pool/Makefile

@ -0,0 +1,35 @@
# For manual testing; say 'make' in extras/alloc-pool and run ./test.

# Declare and install a custom fatal error handler (my_fatal(), defined in
# test.c) so assertion/fatal paths are visible during manual testing.
DEFS='-DDUK_OPT_DECLARE=extern void my_fatal(const char *msg);'
DEFS+='-DDUK_OPT_FATAL_HANDLER(udata,msg)=my_fatal((msg))'
# Uncomment for verbose debug output from the pool allocator.
#DEFS+='-DDUK_ALLOC_POOL_DEBUG'

# Basic build: pool allocator with the default duk_config.h (no pointer
# compression).  -m32 keeps pointer sizes small, matching low memory targets.
.PHONY: test
test:
gcc -std=c99 -Wall -Wextra -m32 -Os -otest \
-I../../src ../../src/duktape.c \
$(DEFS) \
duk_alloc_pool.c test.c \
-lm
./test 'print("foo", "bar", 1, 2, 3)'
./test 'alert("foo", "bar", 1, 2, 3)'

# Pointer compression build: genconfig produces a duk_config.h combining the
# low_memory.yaml example, ptrcomp.yaml (pointer compression options), and
# ptrcomp_fixup.h (makes duk_config.h include duk_alloc_pool.h).
.PHONY: ptrcomptest
ptrcomptest:
tar -x -v -z -f ../../config/genconfig_metadata.tar.gz examples/low_memory.yaml
python ../../config/genconfig.py \
--metadata ../../config/genconfig_metadata.tar.gz \
--output ./duk_config.h \
--option-file examples/low_memory.yaml \
--option-file ptrcomp.yaml \
--fixup-file ptrcomp_fixup.h \
duk-config-header
cp ../../src/duktape.c .
cp ../../src/duktape.h .
gcc -std=c99 -Wall -Wextra -m32 -Os -optrcomptest \
-I. ./duktape.c \
$(DEFS) \
duk_alloc_pool.c test.c \
-lm
./ptrcomptest 'print("foo", "bar", 1, 2, 3)'
./ptrcomptest 'alert("foo", "bar", 1, 2, 3)'

38
extras/alloc-pool/README.rst

@ -0,0 +1,38 @@
=====================================
Pool allocator for low memory targets
=====================================
A simple pool allocator which satisfies allocations from preallocated pools
containing blocks of a certain size. The caller provides a contiguous memory
region and a pool configuration when initializing the allocator.
The pool configuration specifies the block sizes used, and parameters to
control how many entries are allocated for each block size. The parameters
are specified with respect to an arbitrary floating point scaling parameter
``t`` as follows::
bytes = A*t + B
count = floor(bytes / block_size)
= floor((A*t + B) / block_size)
A: constant which indicates how quickly more bytes are assigned for this
block size as the total allocation grows
B: constant which indicates the base allocation for this block size, i.e.
the allocation needed by Duktape initialization
Pool initialization finds the largest floating point ``t`` which still fits in
the memory region provided. Any leftover bytes are sprinkled to the pools to
minimize wasted space.
A pool configuration can be written manually (by trial and error) or using
some automatic tooling such as ``pool_simulator.py``.
When using pointer compression only a single global pool is supported. This
reduces code footprint and is usually sufficient in low memory targets.
Pointer compression functions are defined as inline functions in
``duk_alloc_pool.h`` to allow the compiler to inline pointer compression when
appropriate. As a side effect ``duk_config.h`` must include
``duk_alloc_pool.h`` so that the declarations are visible when compiling
Duktape.

389
extras/alloc-pool/duk_alloc_pool.c

@ -0,0 +1,389 @@
/*
* Pool allocator for low memory targets.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <stdarg.h>
#include "duktape.h"
#include "duk_alloc_pool.h"
/* Define to enable some debug printfs. */
/* #define DUK_ALLOC_POOL_DEBUG */

#if defined(DUK_ALLOC_POOL_ROMPTR_COMPRESSION)
#if 0 /* This extern declaration is provided by duktape.h, array provided by duktape.c. */
extern const void * const duk_rom_compressed_pointers[];
#endif
/* Cached [low, high] address range of the ROM compressed pointer table;
 * filled in by duk__alloc_pool_romptr_init() during pool init so that
 * "is this a ROM pointer?" becomes two comparisons.
 */
const void *duk_alloc_pool_romptr_low = NULL;
const void *duk_alloc_pool_romptr_high = NULL;
static void duk__alloc_pool_romptr_init(void);
#endif

#if defined(DUK_USE_HEAPPTR16)
/* Base address for 16-bit heap pointer compression; assumes a single
 * globally registered pool (set in duk_alloc_pool_init()).
 */
void *duk_alloc_pool_ptrcomp_base = NULL;
#endif

#if defined(DUK_ALLOC_POOL_DEBUG)
/* printf()-style debug logging to stderr; compiled in only when
 * DUK_ALLOC_POOL_DEBUG is defined.
 */
static void duk__alloc_pool_dprintf(const char *fmt, ...) {
	va_list ap;
	va_start(ap, fmt);
	vfprintf(stderr, fmt, ap);
	va_end(ap);
}
#endif
/*
* Pool initialization
*/
/* Initialize the pool allocator: compute per-pool block counts by binary
 * searching the largest scale parameter 't' that fits 'size' (target bytes
 * for a pool are A*t + B, see duk_pool_config), sprinkle leftover bytes to
 * the pools, carve 'buffer' into free lists, and return the allocator
 * userdata pointer ("(void *) global"), or NULL if no configuration fits.
 *
 * 'configs' must be in ascending block size order; 'configs', 'states' and
 * 'global' must stay valid for the lifetime of the pool.
 */
void *duk_alloc_pool_init(char *buffer,
                          size_t size,
                          const duk_pool_config *configs,
                          duk_pool_state *states,
                          int num_pools,
                          duk_pool_global *global) {
	double t_min, t_max, t_curr, x;
	int step, i, j, n;
	size_t total;
	char *p;

	/* XXX: check that 'size' is not too large when using pointer
	 * compression.
	 */

	/* To optimize pool counts first come up with a 't' which still allows
	 * total pool size to fit within user provided region.  After that
	 * sprinkle any remaining bytes to the counts.  Binary search with a
	 * fixed step count; last round uses 't_min' as 't_curr' to ensure it
	 * succeeds.
	 */
	t_min = 0.0;  /* Unless config is insane, this should always be "good". */
	t_max = 1e6;

	for (step = 0; ; step++) {
		if (step >= 100) {
			/* Force "known good", rerun config, and break out.
			 * Deals with rounding corner cases where t_curr is
			 * persistently "bad" even though t_min is a valid
			 * solution.
			 */
			t_curr = t_min;
		} else {
			t_curr = (t_min + t_max) / 2.0;
		}

		for (i = 0, total = 0; i < num_pools; i++) {
			states[i].size = configs[i].size;

			/* Target bytes = A*t + B ==> target count = (A*t + B) / block_size.
			 * Rely on A and B being small enough so that 'x' won't wrap.
			 */
			x = ((double) configs[i].a * t_curr + (double) configs[i].b) / (double) configs[i].size;

			states[i].count = (unsigned int) x;
			total += (size_t) states[i].size * (size_t) states[i].count;
			if (total > size) {
				goto bad;
			}
		}

		/* t_curr is good. */
#if defined(DUK_ALLOC_POOL_DEBUG)
		duk__alloc_pool_dprintf("duk_alloc_pool_init: step=%d, t=[%lf %lf %lf] -> total %ld/%ld (good)\n",
		                        step, t_min, t_curr, t_max, (long) total, (long) size);
#endif
		if (step >= 100) {
			/* Keep state[] initialization state.  The state was
			 * created using the highest 't_min'.
			 */
			break;
		}
		t_min = t_curr;
		continue;

	 bad:
		/* t_curr is bad. */
#if defined(DUK_ALLOC_POOL_DEBUG)
		duk__alloc_pool_dprintf("duk_alloc_pool_init: step=%d, t=[%lf %lf %lf] -> total %ld/%ld (bad)\n",
		                        step, t_min, t_curr, t_max, (long) total, (long) size);
#endif
		/* Limit must match the forced-t_min limit above: once t_curr is
		 * forced to t_min (step >= 100) a "bad" outcome can never improve,
		 * so give up immediately.  (Previously this checked step >= 1000
		 * which re-ran the identical forced t_min config ~900 times before
		 * failing.)  This only happens when the config is bad or 'size' is
		 * so small that even the baseline allocation won't fit.
		 */
		if (step >= 100) {
			return NULL;
		}
		t_max = t_curr;
		/* continue */
	}

	/* The base configuration is now good; sprinkle any leftovers to
	 * pools in descending order.  Note that for good t_curr, 'total'
	 * indicates allocated bytes so far and 'size - total' indicates
	 * leftovers.
	 */
	for (i = num_pools - 1; i >= 0; i--) {
		while (size - total >= states[i].size) {
			/* Ignore potential wrapping of states[i].count as the count
			 * is 32 bits and shouldn't wrap in practice.
			 */
			states[i].count++;
			total += states[i].size;
#if defined(DUK_ALLOC_POOL_DEBUG)
			duk__alloc_pool_dprintf("duk_alloc_pool_init: sprinkle %ld bytes (%ld left after)\n",
			                        (long) states[i].size, (long) (size - total));
#endif
		}
	}

	/* Pool counts are final.  Allocate the user supplied region based
	 * on the final counts, initialize free lists for each block size,
	 * and otherwise finalize 'state' for use.
	 */
	p = buffer;

	global->states = states;
	global->num_pools = num_pools;

#if defined(DUK_USE_HEAPPTR16)
	/* Register global base value for pointer compression, assumes
	 * a single active pool.  -4 allows a single subtract to be used and
	 * still ensures no non-NULL pointer encodes to zero.
	 */
	duk_alloc_pool_ptrcomp_base = (void *) (p - 4);
#endif

	for (i = 0; i < num_pools; i++) {
		n = states[i].count;
		if (n > 0) {
			/* Chain the pool's blocks into a singly linked free list;
			 * the last block terminates the list with NULL.
			 */
			states[i].first = (duk_pool_free *) p;
			for (j = 0; j < n; j++) {
				char *p_next = p + states[i].size;
				((duk_pool_free *) p)->next =
					(j == n - 1) ? (duk_pool_free *) NULL : (duk_pool_free *) p_next;
				p = p_next;
			}
		} else {
			states[i].first = (duk_pool_free *) NULL;
		}
		states[i].alloc_end = p;  /* All members of 'state' now initialized. */

#if defined(DUK_ALLOC_POOL_DEBUG)
		duk__alloc_pool_dprintf("duk_alloc_pool_init: block size %5ld, count %5ld, %8ld total bytes, "
		                        "end %p\n",
		                        (long) states[i].size, (long) states[i].count,
		                        (long) states[i].size * (long) states[i].count,
		                        (void *) states[i].alloc_end);
#endif
	}

#if defined(DUK_ALLOC_POOL_ROMPTR_COMPRESSION)
	/* ROM pointer compression precomputation.  Assumes a single active
	 * pool.
	 */
	duk__alloc_pool_romptr_init();
#endif

	/* Use 'global' as udata. */
	return (void *) global;
}
/*
* Allocation providers
*/
/* Duktape alloc callback: return a free block from the smallest pool whose
 * block size can hold 'size', or NULL if no pool has a free block (or the
 * request is zero bytes).
 */
void *duk_alloc_pool(void *udata, duk_size_t size) {
	duk_pool_global *global = (duk_pool_global *) udata;
	int idx;

#if defined(DUK_ALLOC_POOL_DEBUG)
	duk__alloc_pool_dprintf("duk_alloc_pool: %p %ld\n", udata, (long) size);
#endif

	if (size == 0) {
		return NULL;
	}

	/* Pools are in ascending block size order.  If the best-fit pool is
	 * exhausted we effectively "borrow" from the next larger size; there
	 * is currently no way to prevent a borrow.
	 */
	for (idx = 0; idx < global->num_pools; idx++) {
		duk_pool_state *state = global->states + idx;
		duk_pool_free *block;

		if (size > state->size) {
			continue;
		}
		block = state->first;
		if (block == NULL) {
			continue;
		}
		/* Pop the free list head and hand it out. */
		state->first = block->next;
		return (void *) block;
	}
	return NULL;
}
void *duk_realloc_pool(void *udata, void *ptr, duk_size_t size) {
	/* Duktape realloc callback.  NULL ptr behaves as alloc; zero size
	 * behaves as free.  Otherwise locate the owning pool, and either keep
	 * the block (possibly moving it to a smaller pool), move it to a
	 * larger pool, or fail with NULL (original block stays valid).
	 */
	duk_pool_global *g = (duk_pool_global *) udata;
	int i, j, n;

#if defined(DUK_ALLOC_POOL_DEBUG)
	duk__alloc_pool_dprintf("duk_realloc_pool: %p %p %ld\n", udata, ptr, (long) size);
#endif

	/* Standard realloc() edge cases. */
	if (ptr == NULL) {
		return duk_alloc_pool(udata, size);
	}
	if (size == 0) {
		duk_free_pool(udata, ptr);
		return NULL;
	}

	/* Non-NULL pointers are necessarily from the pool so we should
	 * always be able to find the allocation.
	 */
	for (i = 0, n = g->num_pools; i < n; i++) {
		duk_pool_state *st = g->states + i;
		char *new_ptr;

		/* Because 'ptr' is assumed to be in the pool and pools are
		 * allocated in sequence, it suffices to check for end pointer
		 * only.
		 */
		if ((char *) ptr >= st->alloc_end) {
			continue;
		}

		if (size <= st->size) {
			/* Allocation still fits existing allocation.  Check if
			 * we can shrink the allocation to a smaller block size
			 * (smallest possible).
			 */
			for (j = 0; j < i; j++) {
				duk_pool_state *st2 = g->states + j;

				if (size <= st2->size && st2->first != NULL) {
#if defined(DUK_ALLOC_POOL_DEBUG)
					duk__alloc_pool_dprintf("duk_realloc_pool: shrink, block size %ld -> %ld\n",
					                        (long) st->size, (long) st2->size);
#endif
					/* Pop a free block from the smaller pool, copy the
					 * payload ('size' bytes is all the caller may rely
					 * on), then push the old block onto its pool's free
					 * list.  Order matters: copy before recycling 'ptr'.
					 */
					new_ptr = (char *) st2->first;
					st2->first = ((duk_pool_free *) new_ptr)->next;
					memcpy((void *) new_ptr, (const void *) ptr, (size_t) size);
					((duk_pool_free *) ptr)->next = st->first;
					st->first = (duk_pool_free *) ptr;
					return (void *) new_ptr;
				}
			}

			/* Failed to shrink; return existing pointer. */
			return ptr;
		}

		/* Find first free larger block. */
		for (j = i + 1; j < n; j++) {
			duk_pool_state *st2 = g->states + j;

			if (size <= st2->size && st2->first != NULL) {
				/* Grow: copy the entire old block (st->size bytes) to
				 * the new larger block, then recycle the old block.
				 */
				new_ptr = (char *) st2->first;
				st2->first = ((duk_pool_free *) new_ptr)->next;
				memcpy((void *) new_ptr, (const void *) ptr, (size_t) st->size);
				((duk_pool_free *) ptr)->next = st->first;
				st->first = (duk_pool_free *) ptr;
				return (void *) new_ptr;
			}
		}

		/* Failed to resize. */
		return NULL;
	}

	/* We should never be here because 'ptr' should be a valid pool
	 * entry and thus always found above.
	 */
	return NULL;
}
void duk_free_pool(void *udata, void *ptr) {
duk_pool_global *g = (duk_pool_global *) udata;
int i, n;
#if defined(DUK_ALLOC_POOL_DEBUG)
duk__alloc_pool_dprintf("duk_free_pool: %p %p\n", udata, ptr);
#endif
if (ptr == NULL) {
return;
}
for (i = 0, n = g->num_pools; i < n; i++) {
duk_pool_state *st = g->states + i;
/* Enough to check end address only. */
if ((char *) ptr >= st->alloc_end) {
continue;
}
((duk_pool_free *) ptr)->next = st->first;
st->first = (duk_pool_free *) ptr;
return;
}
/* We should never be here because 'ptr' should be a valid pool
* entry and thus always found above.
*/
}
/*
* Pointer compression
*/
#if defined(DUK_ALLOC_POOL_ROMPTR_COMPRESSION)
static void duk__alloc_pool_romptr_init(void) {
	/* Precompute the [low, high] address range of the ROM compressed
	 * pointer table so that "is 'p' a ROM pointer?" can later be
	 * answered with just two comparisons.
	 */
	const void * const * scan = (const void * const *) duk_rom_compressed_pointers;
	const void *lo;
	const void *hi;

	lo = hi = (const void *) *scan;
	for (; *scan != NULL; scan++) {
		const void *cand = (const void *) *scan;
		if (cand > hi) {
			hi = cand;
		}
		if (cand < lo) {
			lo = cand;
		}
	}
	duk_alloc_pool_romptr_low = lo;
	duk_alloc_pool_romptr_high = hi;
}
#endif
/* Encode/decode functions are defined in the header to allow inlining. */
#if defined(DUK_ALLOC_POOL_ROMPTR_COMPRESSION)
/* Compress a ROM pointer: its index in duk_rom_compressed_pointers[] offset
 * by DUK_ALLOC_POOL_ROMPTR_FIRST.  Linear scan over the (~1K entry) table;
 * compact but slow -- fine as long as ROM pointer compression is rare.
 */
duk_uint16_t duk_alloc_pool_enc16_rom(void *ptr) {
	const void * const * scan;

	for (scan = duk_rom_compressed_pointers; *scan != NULL; scan++) {
		if (*scan == ptr) {
			return DUK_ALLOC_POOL_ROMPTR_FIRST + (duk_uint16_t) (scan - duk_rom_compressed_pointers);
		}
	}

	/* Should never happen: Duktape only compresses pointers present in
	 * the ROM compressed pointers list, which is fixed at 'make dist'
	 * time.  Returning 0 surfaces as a pointer compression error.
	 */
	return 0;
}
#endif

175
extras/alloc-pool/duk_alloc_pool.h

@ -0,0 +1,175 @@
#ifndef DUK_ALLOC_POOL_H_INCLUDED
#define DUK_ALLOC_POOL_H_INCLUDED
#include "duktape.h"
/* Pointer compression with ROM strings/objects:
 *
 * For now, use DUK_USE_ROM_OBJECTS to signal the need for compressed ROM
 * pointers.  DUK_USE_ROM_PTRCOMP_FIRST is provided for the ROM pointer
 * compression range minimum to avoid duplication in user code.
 */
#if defined(DUK_USE_ROM_OBJECTS) && defined(DUK_USE_HEAPPTR16)
#define DUK_ALLOC_POOL_ROMPTR_COMPRESSION
#define DUK_ALLOC_POOL_ROMPTR_FIRST DUK_USE_ROM_PTRCOMP_FIRST

/* This extern declaration is provided by duktape.h, array provided by duktape.c.
 * Because duk_config.h may include this file (to get the inline functions) we
 * need to forward declare this also here.
 */
extern const void * const duk_rom_compressed_pointers[];
#endif

/* Pool configuration for a certain block size. */
typedef struct {
	unsigned int size;  /* block size in bytes */
	unsigned int a;  /* bytes (not count) to allocate: a*t + b, t is an arbitrary scale parameter */
	unsigned int b;  /* baseline bytes, independent of 't' */
} duk_pool_config;

/* Freelist entry, must fit into the smallest block size. */
struct duk_pool_free;
typedef struct duk_pool_free duk_pool_free;
struct duk_pool_free {
	duk_pool_free *next;  /* next free block in this pool, NULL terminates */
};

/* Pool state for a certain block size. */
typedef struct {
	duk_pool_free *first;  /* head of free list, NULL when pool exhausted */
	char *alloc_end;  /* end address (exclusive) of this pool's region */
	unsigned int size;  /* block size in bytes */
	unsigned int count;  /* total number of blocks in this pool */
} duk_pool_state;

/* Top level state for all pools.  Pointer to this struct is used as the allocator
 * userdata pointer.
 */
typedef struct {
	int num_pools;
	duk_pool_state *states;  /* array of num_pools states, ascending block size */
} duk_pool_global;

/* Initialize a pool allocator, arguments:
 * - buffer and size: contiguous region to use for pool
 * - config: configuration for pools in ascending block size
 * - state: state for pools, matches config order
 * - num_pools: number of entries in 'config' and 'state'
 * - global: global state structure
 *
 * The 'config', 'state', and 'global' pointers must be valid beyond the init
 * call, as long as the pool is used.
 *
 * Returns a void pointer to be used as userdata for the allocator functions.
 * Concretely the return value will be "(void *) global", i.e. the global
 * state struct.  If pool init fails, the return value will be NULL.
 */
void *duk_alloc_pool_init(char *buffer,
                          size_t size,
                          const duk_pool_config *configs,
                          duk_pool_state *states,
                          int num_pools,
                          duk_pool_global *global);

/* Duktape allocation providers.  Typing matches Duktape requirements. */
void *duk_alloc_pool(void *udata, duk_size_t size);
void *duk_realloc_pool(void *udata, void *ptr, duk_size_t size);
void duk_free_pool(void *udata, void *ptr);

/* Duktape pointer compression global state (assumes single pool). */
#if defined(DUK_USE_ROM_OBJECTS) && defined(DUK_USE_HEAPPTR16)
extern const void *duk_alloc_pool_romptr_low;
extern const void *duk_alloc_pool_romptr_high;
duk_uint16_t duk_alloc_pool_enc16_rom(void *ptr);
#endif
#if defined(DUK_USE_HEAPPTR16)
extern void *duk_alloc_pool_ptrcomp_base;
#endif

#if 0
duk_uint16_t duk_alloc_pool_enc16(void *ptr);
void *duk_alloc_pool_dec16(duk_uint16_t val);
#endif

/* Inlined pointer compression functions.  Gcc and clang -Os won't in
 * practice inline these because it's more size efficient (by about
 * 3kB) to use explicit calls instead.  Having these defined inline
 * here allows performance optimized builds to inline pointer compression
 * operations.
 *
 * Pointer compression assumes there's a single globally registered memory
 * pool which makes pointer compression more efficient.  This would be easy
 * to fix by adding a userdata pointer to the compression functions and
 * plumbing the heap userdata from the compression/decompression macros.
 */
#if defined(DUK_USE_HEAPPTR16)
/* Compress a heap (or ROM) pointer to 16 bits.  NULL maps to 0; ROM
 * pointers map to indices >= DUK_ALLOC_POOL_ROMPTR_FIRST; pool pointers
 * map to (ptr - base) >> 2.
 */
static inline duk_uint16_t duk_alloc_pool_enc16(void *ptr) {
	if (ptr == NULL) {
		/* NOTE(codegen): gcc/clang -Os emit slightly redundant code for
		 * this NULL path (the zero result gets reloaded even when the
		 * register is already known zero); alternatives such as
		 * 'return (duk_uint16_t) (intptr_t) ptr;' generate similarly
		 * inefficient code, so the plain form is kept.
		 */
		return 0;
	}
#if defined(DUK_ALLOC_POOL_ROMPTR_COMPRESSION)
	if (ptr >= duk_alloc_pool_romptr_low && ptr <= duk_alloc_pool_romptr_high) {
		/* ROM pointers need a table scan; complex enough to live in a
		 * separate out-of-line function.  The range check above is the
		 * fast "is 'ptr' in ROM?" test.
		 */
		return duk_alloc_pool_enc16_rom(ptr);
	}
#endif
	/* Pool pointer: subtract the registered base (buffer - 4, so no
	 * non-NULL pointer encodes to zero) and divide by the 4-byte
	 * alignment granularity.
	 */
	return (duk_uint16_t) (((size_t) ((char *) ptr - (char *) duk_alloc_pool_ptrcomp_base)) >> 2);
}
/* Decompress a 16-bit compressed pointer: 0 maps to NULL, values >=
 * DUK_ALLOC_POOL_ROMPTR_FIRST index the ROM pointer table, anything else
 * is (base + (val << 2)).
 */
static inline void *duk_alloc_pool_dec16(duk_uint16_t val) {
	if (val == 0) {
		/* NOTE(codegen): as with enc16, gcc/clang -Os produce a slightly
		 * redundant zero reload on this path; no known better form.
		 */
		return NULL;
	}
#if defined(DUK_ALLOC_POOL_ROMPTR_COMPRESSION)
	if (val >= DUK_ALLOC_POOL_ROMPTR_FIRST) {
		/* Blind table lookup; index validity is not checked because
		 * Duktape never decompresses an out-of-bounds ROM pointer.
		 */
		return (void *) (intptr_t) (duk_rom_compressed_pointers[val - DUK_ALLOC_POOL_ROMPTR_FIRST]);
	}
#endif
	return (void *) ((char *) duk_alloc_pool_ptrcomp_base + (((size_t) val) << 2));
}
#endif
#endif /* DUK_ALLOC_POOL_H_INCLUDED */

16
extras/alloc-pool/ptrcomp.yaml

@ -0,0 +1,16 @@
# Pointer compression options layered on top of examples/low_memory.yaml
# by the Makefile 'ptrcomptest' target.

# 16-bit heap header fields to reduce per-object overhead.
DUK_USE_REFCOUNT16: true
DUK_USE_STRHASH16: true
DUK_USE_STRLEN16: true
DUK_USE_BUFLEN16: true
DUK_USE_OBJSIZES16: true
# Drop optional structures to save memory.
DUK_USE_HSTRING_CLEN: false
DUK_USE_HOBJECT_HASH_PART: false
# 16-bit compressed heap pointers, mapped to the pool allocator's inline
# enc16/dec16 helpers (the 'ud' argument is unused: single global pool).
DUK_USE_HEAPPTR16: true
DUK_USE_HEAPPTR_ENC16:
  verbatim: "#define DUK_USE_HEAPPTR_ENC16(ud,p) duk_alloc_pool_enc16((p))"
DUK_USE_HEAPPTR_DEC16:
  verbatim: "#define DUK_USE_HEAPPTR_DEC16(ud,p) duk_alloc_pool_dec16((p))"
# ROM built-ins support, disabled here.
#DUK_USE_ROM_OBJECTS: true
#DUK_USE_ROM_STRINGS: true
#DUK_USE_ROM_GLOBAL_INHERIT: true

2
extras/alloc-pool/ptrcomp_fixup.h

@ -0,0 +1,2 @@
/* Genconfig fixup: pulled into the generated duk_config.h so that the
 * declarations and inline pointer compression functions in
 * duk_alloc_pool.h are visible when compiling Duktape itself.
 */
#include "duk_alloc_pool.h"

114
extras/alloc-pool/test.c

@ -0,0 +1,114 @@
#include <stdio.h>
#include <stdlib.h>
#include "duktape.h"
#include "duk_alloc_pool.h"
/* Fatal error handler installed via DUK_OPT_FATAL_HANDLER: report the
 * message to stderr and terminate; never returns.
 */
void my_fatal(const char *msg) {
	const char *text = (msg != NULL) ? msg : "no message";

	fprintf(stderr, "*** FATAL: %s\n", text);
	fflush(stderr);
	abort();
}
/* print()/alert() binding: join all arguments with a single space and
 * write the result to stdout; leaves the joined string as return value.
 */
static duk_ret_t my_print(duk_context *ctx) {
	duk_idx_t nargs = duk_get_top(ctx);

	/* Insert the separator below the arguments, then join them. */
	duk_push_string(ctx, " ");
	duk_insert(ctx, 0);
	duk_join(ctx, nargs);
	printf("%s\n", duk_safe_to_string(ctx, -1));
	return 1;
}
/* Print a per-pool and total used/free summary for the pool allocator.
 * Used counts are derived by walking each pool's free list.
 */
static void dump_pool_state(duk_pool_global *g) {
	long total_size = 0;
	long total_used = 0;
	int idx;

	for (idx = 0; idx < g->num_pools; idx++) {
		duk_pool_state *st = g->states + idx;
		duk_pool_free *entry;
		int free_count = 0;  /* renamed from 'free' to avoid shadowing stdlib free() */
		int used_count;

		for (entry = st->first; entry != NULL; entry = entry->next) {
			free_count++;
		}
		used_count = st->count - free_count;

		printf("Pool %2d: block size %5d, count %4d/%4d, bytes %6d/%6d\n",
		       idx, (int) st->size, used_count, (int) st->count,
		       (int) st->size * used_count, (int) st->size * (int) st->count);

		total_size += (long) st->size * (long) st->count;
		total_used += (long) st->size * (long) used_count;
	}
	printf("=== Total: %ld/%ld, free %ld\n",
	       total_used, total_size, total_size - total_used);
}
/* Manual test driver: initialize the pool allocator over a static buffer,
 * create a Duktape heap on top of it, install print/alert, then eval each
 * command line argument.  Exit code is nonzero if pool/heap init or any
 * eval fails.
 */
int main(int argc, char *argv[]) {
	/* NOTE! This pool configuration is NOT a good pool configuration
	 * for practical use (and is not intended to be one).  A production
	 * pool configuration should be created using measurements.
	 */
	const duk_pool_config pool_configs[15] = {
		{ 16, 20, 200 },
		{ 20, 40, 100 },
		{ 24, 40, 100 },
		{ 32, 60, 50 },
		{ 40, 60, 50 },
		{ 48, 60, 50 },
		{ 56, 60, 50 },
		{ 64, 60, 50 },
		{ 80, 60, 50 },
		{ 256, 100, 10 },
		{ 1024, 20, 2 },
		{ 2048, 20, 2 },
		{ 4096, 100, 2 },
		{ 6144, 60, 2 },
		{ 8192, 100, 2 },
	};
	duk_pool_state pool_states[15];  /* Count must match pool_configs[]. */
	duk_pool_global pool_global;
	char buffer[200000];
	void *pool_udata;
	duk_context *ctx;
	int exitcode = 0;
	int arg;

	pool_udata = duk_alloc_pool_init(buffer,
	                                 sizeof(buffer),
	                                 pool_configs,
	                                 pool_states,
	                                 sizeof(pool_configs) / sizeof(pool_configs[0]),
	                                 &pool_global);
	if (!pool_udata) {
		return 1;
	}
	printf("Pool after pool init:\n");
	dump_pool_state(&pool_global);

	ctx = duk_create_heap(duk_alloc_pool, duk_realloc_pool, duk_free_pool, pool_udata, NULL);
	if (!ctx) {
		return 1;
	}
	printf("Pool after Duktape heap creation:\n");
	dump_pool_state(&pool_global);

	/* Minimal print()/alert() bindings (both map to my_print). */
	duk_push_c_function(ctx, my_print, DUK_VARARGS);
	duk_put_global_string(ctx, "print");
	duk_push_c_function(ctx, my_print, DUK_VARARGS);
	duk_put_global_string(ctx, "alert");
	printf("top after init: %ld\n", (long) duk_get_top(ctx));

	for (arg = 1; arg < argc; arg++) {
		printf("Evaling: %s\n", argv[arg]);
		if (duk_peval_string(ctx, argv[arg]) != 0) {
			exitcode = 1;
		}
		printf("--> %s\n", duk_safe_to_string(ctx, -1));
		duk_pop(ctx);
	}

	printf("Pool after evaling code:\n");
	dump_pool_state(&pool_global);

	printf("Done\n");
	duk_destroy_heap(ctx);
	return exitcode;
}
Loading…
Cancel
Save