tinycc/lib/bcheck.c

1056 lines
30 KiB
C
Raw Normal View History

2002-01-04 07:12:29 +08:00
/*
* Tiny C Memory and bounds checker
*
2002-01-04 07:12:29 +08:00
* Copyright (c) 2002 Fabrice Bellard
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
2002-01-04 07:12:29 +08:00
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
2002-01-04 07:12:29 +08:00
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2002-01-04 07:12:29 +08:00
*/
2002-07-25 06:11:56 +08:00
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#if !defined(__FreeBSD__) \
&& !defined(__FreeBSD_kernel__) \
&& !defined(__DragonFly__) \
&& !defined(__OpenBSD__) \
&& !defined(__NetBSD__)
2002-07-25 06:11:56 +08:00
#include <malloc.h>
2002-12-08 22:34:30 +08:00
#endif
2012-12-10 09:51:49 +08:00
#if !defined(_WIN32)
lib/bcheck: Don't assume heap goes right after bss At startup __bound_init() wants to mark malloc zone as invalid memory, so that any access to memory on heap, not allocated through malloc be invalid. Other pages are initialized as empty regions, access to which is not treated as invalid by bounds-checking. The problem is code incorrectly assumed that heap goes right after bss, and that is not correct for two cases: 1) if we are running from `tcc -b -run`, program text data and bss will be already in malloced memory, possibly in mmaped region insead of heap, and marking memory as invalid from _end will not cover heap and probably wrongly mark correct regions. 2) if address space randomization is turned on, again heap does not start from _end, and we'll mark as invalid something else instead of malloc area. For example with the following diagnostic patch ... diff --git a/tcc.c b/tcc.c index 5dd5725..31c46e8 100644 --- a/tcc.c +++ b/tcc.c @@ -479,6 +479,8 @@ static int parse_args(TCCState *s, int argc, char **argv) return optind; } +extern int _etext, _edata, _end; + int main(int argc, char **argv) { int i; @@ -487,6 +489,18 @@ int main(int argc, char **argv) int64_t start_time = 0; const char *default_file = NULL; + void *brk; + + brk = sbrk(0); + + fprintf(stderr, "\n>>> TCC\n\n"); + fprintf(stderr, "etext:\t%10p\n", &_etext); + fprintf(stderr, "edata:\t%10p\n", &_edata); + fprintf(stderr, "end:\t%10p\n", &_end); + fprintf(stderr, "brk:\t%10p\n", brk); + fprintf(stderr, "stack:\t%10p\n", &brk); + + fprintf(stderr, "&errno: %p\n", &errno); s = tcc_new(); output_type = TCC_OUTPUT_EXE; diff --git a/tccrun.c b/tccrun.c index 531f46a..25ed30a 100644 --- a/tccrun.c +++ b/tccrun.c @@ -91,6 +91,8 @@ LIBTCCAPI int tcc_run(TCCState *s1, int argc, char **argv) int (*prog_main)(int, char **); int ret; + fprintf(stderr, "\n\ntcc_run() ...\n\n"); + if (tcc_relocate(s1, TCC_RELOCATE_AUTO) < 0) return -1; diff --git a/lib/bcheck.c b/lib/bcheck.c index ea5b233..8b26a5f 100644 --- 
a/lib/bcheck.c +++ b/lib/bcheck.c @@ -296,6 +326,8 @@ static void mark_invalid(unsigned long addr, unsigned long size) start = addr; end = addr + size; + fprintf(stderr, "mark_invalid %10p - %10p\n", (void *)addr, (void *)end); + t2_start = (start + BOUND_T3_SIZE - 1) >> BOUND_T3_BITS; if (end != 0) t2_end = end >> BOUND_T3_BITS; ... Look how memory is laid out for `tcc -b -run ...`: $ ./tcc -B. -b -DTCC_TARGET_I386 -DCONFIG_MULTIARCHDIR=\"i386-linux-gnu\" -run \ -DONE_SOURCE ./tcc.c -B. -c x.c >>> TCC etext: 0x8065477 edata: 0x8070220 end: 0x807a95c brk: 0x807b000 stack: 0xaffff0f0 &errno: 0xa7e25688 tcc_run() ... mark_invalid 0xfff80000 - (nil) mark_invalid 0xa7c31d98 - 0xafc31d98 >>> TCC etext: 0xa7c22767 edata: 0xa7c2759c end: 0xa7c31d98 brk: 0x8211000 stack: 0xafffeff0 &errno: 0xa7e25688 Runtime error: dereferencing invalid pointer ./tccpp.c:1953: at 0xa7beebdf parse_number() (included from ./libtcc.c, ./tcc.c) ./tccpp.c:3003: by 0xa7bf0708 next() (included from ./libtcc.c, ./tcc.c) ./tccgen.c:4465: by 0xa7bfe348 block() (included from ./libtcc.c, ./tcc.c) ./tccgen.c:4440: by 0xa7bfe212 block() (included from ./libtcc.c, ./tcc.c) ./tccgen.c:5529: by 0xa7c01929 gen_function() (included from ./libtcc.c, ./tcc.c) ./tccgen.c:5767: by 0xa7c02602 decl0() (included from ./libtcc.c, ./tcc.c) The second mark_invalid goes right after in-memory-compiled program's _end, and oops, that's not where malloc zone is (starts from brk), and oops again, mark_invalid covers e.g. errno. Then compiled tcc is crasshing by bcheck on errno access: 1776 static void parse_number(const char *p) 1777 { 1778 int b, t, shift, frac_bits, s, exp_val, ch; ... 
1951 *q = '\0'; 1952 t = toup(ch); 1953 errno = 0; The solution here is to use sbrk(0) as approximation for the program break start instead of &_end: - if we are a separately compiled program, __bound_init() runs early, and sbrk(0) should be equal or very near to start_brk (in case other constructors malloc something), or - if we are running from under `tcc -b -run`, sbrk(0) will return start of heap portion which is under this program control, and not mark as invalid earlier allocated memory. With this patch `tcc -b -run tcc.c ...` succeeds compiling above small-test program (diagnostic patch is still applied too): $ ./tcc -B. -b -DTCC_TARGET_I386 -DCONFIG_MULTIARCHDIR=\"i386-linux-gnu\" -run \ -DONE_SOURCE ./tcc.c -B. -c x.c >>> TCC etext: 0x8065477 edata: 0x8070220 end: 0x807a95c brk: 0x807b000 stack: 0xaffff0f0 &errno: 0xa7e25688 tcc_run() ... mark_invalid 0xfff80000 - (nil) mark_invalid 0x8211000 - 0x10211000 >>> TCC etext: 0xa7c22777 edata: 0xa7c275ac end: 0xa7c31da8 brk: 0x8211000 stack: 0xafffeff0 &errno: 0xa7e25688 (completes ok) but running `tcc -b -run tcc.c -run tests/tcctest.c` sigsegv's - that's the plot for the next patch.
2012-12-09 22:48:48 +08:00
#include <unistd.h>
2012-12-10 09:51:49 +08:00
#endif
2002-01-04 07:12:29 +08:00
2019-12-10 15:07:25 +08:00
#define BOUND_DEBUG
2002-01-04 07:12:29 +08:00
#ifdef BOUND_DEBUG
/* Debug tracing: only prints when TCC_BOUNDS_PRINT_CALLS is set.
   Wrapped in do/while(0) so dprintf(...) is exactly one statement and
   the internal 'if (print_calls)' cannot capture an 'else' that follows
   a dprintf() call site (classic dangling-else macro hazard). */
#define dprintf(a...) do { if (print_calls) fprintf(a); } while (0)
#else
#define dprintf(a...)
#endif
2019-12-10 15:07:25 +08:00
/* Check memalign */
2002-12-08 22:34:30 +08:00
#define HAVE_MEMALIGN
#if defined(__FreeBSD__) \
|| defined(__FreeBSD_kernel__) \
|| defined(__DragonFly__) \
|| defined(__OpenBSD__) \
|| defined(__NetBSD__) \
|| defined(__dietlibc__) \
|| defined(_WIN32)
2002-12-08 22:34:30 +08:00
#undef HAVE_MEMALIGN
2019-12-10 15:07:25 +08:00
#define INIT_SEM()
#define EXIT_SEM()
#define WAIT_SEM()
#define POST_SEM()
#define HAS_ENVIRON 0
#define MALLOC_REDIR (0)
#else
#include <sys/mman.h>
#include <errno.h>
#include <semaphore.h>
static sem_t bounds_sem;
#define INIT_SEM() sem_init (&bounds_sem, 0, 1)
#define EXIT_SEM() sem_destroy (&bounds_sem)
#define WAIT_SEM() while (sem_wait (&bounds_sem) < 0 && errno == EINTR);
#define POST_SEM() sem_post (&bounds_sem)
#define HAS_ENVIRON 0 /* Disabled for now */
#define __USE_GNU /* get RTLD_NEXT */
#include <dlfcn.h>
#define MALLOC_REDIR (1)
static void *(*malloc_redir) (size_t) = NULL;
static void *(*calloc_redir) (size_t, size_t) = NULL;
static void (*free_redir) (void *) = NULL;
static void *(*realloc_redir) (void *, size_t) = NULL;
static void *(*memalign_redir) (size_t, size_t) = NULL;
2002-12-08 22:34:30 +08:00
#endif
2002-01-04 07:12:29 +08:00
/* this pointer is generated when bound check is incorrect */
#define INVALID_POINTER ((void *)(-2))
2019-12-10 15:07:25 +08:00
/* One node of the splay tree of known memory regions.  Regions cover
   heap blocks, stack locals, static data, program arguments and
   mmap'ed areas alike. */
typedef struct tree_node Tree;
struct tree_node {
    Tree * left, * right;   /* children in the splay tree */
    size_t start;           /* first byte of the region */
    size_t size;            /* length of the region in bytes */
    size_t is_invalid; /* true if pointers outside region are invalid */
};

/* One alloca/VLA allocation, recorded per caller frame pointer so that
   __bound_local_delete() can discard the entries when the owning
   function returns. */
typedef struct alloca_list_struct {
    size_t fp;                          /* frame pointer of the owner */
    void *p;                            /* start of the alloca'ed block */
    struct alloca_list_struct *next;
} alloca_list_type;

/* splay tree primitives, defined at the bottom of this file */
static Tree * splay (size_t addr, Tree *t);
static Tree * splay_end (size_t addr, Tree *t);
static Tree * splay_insert(size_t addr, size_t size, Tree * t);
static Tree * splay_delete(size_t addr, Tree *t);
void splay_printtree(Tree * t, int d);
2002-01-04 07:12:29 +08:00
/* external interface */
void __bound_init(void);
#ifdef __attribute__
/* an __attribute__ macro is defined in the system headers */
#undef __attribute__
#endif
2004-11-07 23:43:33 +08:00
#define FASTCALL __attribute__((regparm(3)))
2019-12-10 15:07:25 +08:00
#if !MALLOC_REDIR
void *__bound_malloc(size_t size, const void *caller);
void *__bound_memalign(size_t size, size_t align, const void *caller);
void __bound_free(void *ptr, const void *caller);
void *__bound_realloc(void *ptr, size_t size, const void *caller);
2019-12-10 15:07:25 +08:00
void *__bound_calloc(size_t nmemb, size_t size);
#endif
2002-01-04 07:12:29 +08:00
2019-12-10 15:07:25 +08:00
#define FREE_REUSE_SIZE (100)
static int free_reuse_index = 0;
static void *free_reuse_list[FREE_REUSE_SIZE];
2002-11-03 08:42:33 +08:00
/* error message, just for TCC */
const char *__bound_error_msg;
2002-08-18 21:25:38 +08:00
2002-07-25 06:11:56 +08:00
/* runtime error output */
extern void rt_error(size_t pc, const char *fmt, ...);
2002-07-25 06:11:56 +08:00
2019-12-10 15:07:25 +08:00
static Tree *tree = NULL;
#define TREE_REUSE (1)
#if TREE_REUSE
static Tree *tree_free_list = NULL;
2002-01-04 07:12:29 +08:00
#endif
2019-12-10 15:07:25 +08:00
static alloca_list_type *alloca_list = NULL;
2002-01-04 07:12:29 +08:00
2019-12-10 15:07:25 +08:00
static int inited = 0;
static int print_calls = 0;
static int never_fatal = 0;
static int no_checking = 0;
/* enable/disable checking. This can be used for signal handlers. */
void __bound_checking (int no_check)
{
    /* non-zero disables every pointer check (no locking needed: a
       plain int store) */
    no_checking = no_check;
}
/* print a bound error message */
2002-11-03 08:42:33 +08:00
static void bound_error(const char *fmt, ...)
{
    /* keep the raw message for tcc's runtime error reporting */
    __bound_error_msg = fmt;
    /* NOTE(review): the variadic arguments are accepted but never
       formatted — only the fmt string itself is printed. */
    fprintf(stderr,"%s %s: %s\n", __FILE__, __FUNCTION__, fmt);
    if (never_fatal == 0)
        *(void **)0 = 0; /* deliberate NULL store to force a runtime
                            error that the tcc runtime can catch */
}
/* out-of-memory inside the checker itself; fatal unless never_fatal */
static void bound_alloc_error(void)
{
    bound_error("not enough memory for bound checking code");
}
2002-01-06 00:16:47 +08:00
/* return '(p + offset)' for pointer arithmetic (a pointer can reach
the end of a region in this case */
void * FASTCALL __bound_ptr_add(void *p, size_t offset)
{
    size_t addr = (size_t)p;

    if (no_checking) {
        return p + offset;
    }
    dprintf(stderr, "%s %s: %p 0x%x\n",
            __FILE__, __FUNCTION__, p, (unsigned)offset);
    WAIT_SEM ();
    if (tree) {
        /* splay the region containing addr (if any) to the root */
        tree = splay (addr, tree);
        addr -= tree->start;
        if (addr >= tree->size) {
            /* addr is not inside the splayed region; retry keyed on the
               region end so a pointer just past a region is still
               matched to it */
            addr = (size_t)p;
            tree = splay_end (addr, tree);
            addr -= tree->start;
        }
        if (addr <= tree->size) {
            /* p belongs to the root region (possibly one past its end);
               the sum is flagged once it reaches or passes the end */
            addr += offset;
            if (tree->is_invalid || addr >= tree->size) {
                fprintf(stderr,"%s %s: %p is outside of the region\n",
                        __FILE__, __FUNCTION__, p + offset);
                if (never_fatal == 0) {
                    POST_SEM ();
                    return INVALID_POINTER; /* return an invalid pointer */
                }
            }
        }
    }
    POST_SEM ();
    return p + offset;
}
2002-01-06 00:16:47 +08:00
/* return '(p + offset)' for pointer indirection (the resulting must
be strictly inside the region */
2019-12-10 15:07:25 +08:00
/* Generates __bound_ptr_indir1/2/4/8/12/16: like __bound_ptr_add, but
   for a dereference of a 'dsize'-byte object, so p + offset + dsize
   must not go past the region end. */
#define BOUND_PTR_INDIR(dsize)                                           \
void * FASTCALL __bound_ptr_indir ## dsize (void *p, size_t offset)      \
{                                                                        \
    size_t addr = (size_t)p;                                             \
                                                                         \
    if (no_checking) {                                                   \
        return p + offset;                                               \
    }                                                                    \
    dprintf(stderr, "%s %s: %p 0x%x start\n",                            \
            __FILE__, __FUNCTION__, p, (unsigned)offset);                \
    WAIT_SEM ();                                                         \
    if (tree) {                                                          \
        /* find the region containing addr (or ending at addr) */       \
        tree = splay (addr, tree);                                       \
        addr -= tree->start;                                             \
        if (addr >= tree->size) {                                        \
            addr = (size_t)p;                                            \
            tree = splay_end (addr, tree);                               \
            addr -= tree->start;                                         \
        }                                                                \
        if (addr <= tree->size) {                                        \
            /* the whole accessed object must fit inside the region */   \
            addr += offset + dsize;                                      \
            if (tree->is_invalid || addr > tree->size) {                 \
                fprintf(stderr,"%s %s: %p is outside of the region\n",   \
                        __FILE__, __FUNCTION__, p + offset);             \
                if (never_fatal == 0) {                                  \
                    POST_SEM ();                                         \
                    return INVALID_POINTER; /* invalid pointer */        \
                }                                                        \
            }                                                            \
        }                                                                \
    }                                                                    \
    POST_SEM ();                                                         \
    return p + offset;                                                   \
}

BOUND_PTR_INDIR(1)
BOUND_PTR_INDIR(2)
BOUND_PTR_INDIR(4)
BOUND_PTR_INDIR(8)
BOUND_PTR_INDIR(12)
BOUND_PTR_INDIR(16)
#if defined(__GNUC__) && (__GNUC__ >= 6)
/*
* At least gcc 6.2 complains when __builtin_frame_address is used with
* nonzero argument.
*/
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wframe-address"
#endif
/* return the frame pointer of the caller */
/* __builtin_frame_address(1) yields the caller's frame; gcc >= 6 warns
   about nonzero levels, hence the surrounding -Wframe-address pragmas */
#define GET_CALLER_FP(fp)\
{\
    fp = (size_t)__builtin_frame_address(1);\
}
/* Called when entering a function to register all its local regions.
   p1 points to pairs (frame-relative address, size) terminated by an
   address of 0; an address of 1 is a marker meaning "this function
   uses alloca/vla" and carries no region. */
void FASTCALL __bound_local_new(void *p1)
{
    size_t addr, size, fp, *p = p1;

    if (no_checking)
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s local new p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    for(;;) {
        addr = p[0];
        if (addr == 0)
            break;
        size = p[1];
        p += 2;
        if (addr == 1) {
            /* alloca/vla marker: there is no region to register and
               p[1] holds no valid size — skip instead of inserting a
               bogus (1, garbage) region with an uninitialized size */
            dprintf(stderr, "%s, %s() alloca/vla used\n",
                    __FILE__, __FUNCTION__);
            continue;
        }
        addr += fp; /* frame-relative -> absolute */
        dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                __FILE__, __FUNCTION__, (void *) addr, (unsigned long) size);
        tree = splay_insert(addr, size, tree);
    }
    POST_SEM ();
}
/* called when leaving a function to delete all the local regions */
void FASTCALL __bound_local_delete(void *p1)
{
    size_t addr, fp, *p = p1;

    if (no_checking)
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s local delete p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    for(;;) {
        addr = p[0];
        if (addr == 0)
            break;
        if (addr == 1) {
            /* alloca/vla marker: drop every alloca entry that was
               recorded for this frame pointer */
            while (alloca_list && alloca_list->fp == fp) {
                dprintf(stderr, "%s, %s() remove alloca/vla %p\n",
                        __FILE__, __FUNCTION__, alloca_list->p);
                alloca_list_type *next = alloca_list->next;
                tree = splay_delete ((size_t) alloca_list->p, tree);
#if MALLOC_REDIR
                free_redir (alloca_list);
#else
                free (alloca_list);
#endif
                alloca_list = next;
            }
        }
        else {
            addr += fp; /* frame-relative -> absolute */
            dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                    __FILE__, __FUNCTION__, (void *) addr, (unsigned long) p[1]);
        }
        p += 2;
        /* NOTE(review): for the addr == 1 marker this deletes whatever
           region starts at address 1 (normally none) — mirrors the
           insert side; confirm this is intended. */
        tree = splay_delete(addr, tree);
    }
    POST_SEM ();
}
#if defined(__GNUC__) && (__GNUC__ >= 6)
#pragma GCC diagnostic pop
#endif
2002-01-04 07:12:29 +08:00
/* One-time initialisation: read the environment switches, create the
   semaphore and (when MALLOC_REDIR) resolve the real allocator entry
   points via dlsym. */
void __bound_init(void)
{
    if (inited)
        return;
    inited = 1;
    print_calls = getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL;
    never_fatal = getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL;
    dprintf(stderr, "%s, %s() start\n", __FILE__, __FUNCTION__);

    INIT_SEM ();

#if MALLOC_REDIR
    {
        void *addr = RTLD_NEXT;

        /* tcc -run required RTLD_DEFAULT. Normal usage requires RTLD_NEXT */
        *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        if (malloc_redir == NULL) {
            dprintf(stderr, "%s, %s() use RTLD_DEFAULT\n",
                    __FILE__, __FUNCTION__);
            addr = RTLD_DEFAULT;
            *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        }
        *(void **) (&calloc_redir) = dlsym (addr, "calloc");
        *(void **) (&free_redir) = dlsym (addr, "free");
        *(void **) (&realloc_redir) = dlsym (addr, "realloc");
        *(void **) (&memalign_redir) = dlsym (addr, "memalign");
        dprintf(stderr, "%s, %s() malloc_redir %p\n",
                __FILE__, __FUNCTION__, malloc_redir);
        dprintf(stderr, "%s, %s() free_redir %p\n",
                __FILE__, __FUNCTION__, free_redir);
        dprintf(stderr, "%s, %s() realloc_redir %p\n",
                __FILE__, __FUNCTION__, realloc_redir);
        dprintf(stderr, "%s, %s() memalign_redir %p\n",
                __FILE__, __FUNCTION__, memalign_redir);
    }
#endif

    tree = NULL;
    /* reset the free() quarantine table */
    memset (free_reuse_list, 0, sizeof (free_reuse_list));
    dprintf(stderr, "%s, %s() end\n\n", __FILE__, __FUNCTION__);
}
2019-12-10 15:07:25 +08:00
void __bounds_add_static_var (size_t *p)
2009-07-07 03:10:14 +08:00
{
dprintf(stderr, "%s, %s()\n", __FILE__, __FUNCTION__);
2019-12-10 15:07:25 +08:00
/* add all static bound check values */
WAIT_SEM ();
while (p[0] != 0) {
dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
__FILE__, __FUNCTION__, (void *) p[0], (unsigned long) p[1]);
tree = splay_insert(p[0], p[1], tree);
p += 2;
2002-01-04 07:12:29 +08:00
}
2019-12-10 15:07:25 +08:00
POST_SEM ();
2002-01-04 07:12:29 +08:00
}
2019-12-10 15:07:25 +08:00
/* Register the program arguments as valid regions: each argv string,
   the argv pointer array itself and, when HAS_ENVIRON, the environment
   in the same way. */
void __bound_main_arg(char **p)
{
    char *start = (char *) p;

    WAIT_SEM ();
    while (*p) {
        /* each string including its terminating NUL */
        dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                __FILE__, __FUNCTION__, *p, (unsigned long)(strlen (*p) + 1));
        tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
        p++;
    }
    /* the pointer array itself, up to (not including) the NULL slot */
    dprintf(stderr, "%s, %s() argv (%p 0x%lx)\n",
            __FILE__, __FUNCTION__, start, (unsigned long)((char *) p - start));
    tree = splay_insert((size_t) start, (char *) p - start, tree);
#if HAS_ENVIRON
    {
        extern char **environ;

        p = environ;
        start = (char *) p;
        while (*p) {
            dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                    __FILE__, __FUNCTION__, *p, (unsigned long)(strlen (*p) + 1));
            tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
            p++;
        }
        dprintf(stderr, "%s, %s() environ(%p 0x%lx)\n",
                __FILE__, __FUNCTION__, start, (unsigned long)((char *) p - start));
        tree = splay_insert((size_t) start, (char *) p - start, tree);
    }
#endif
    POST_SEM ();
}
2019-12-10 15:07:25 +08:00
void __bound_exit(void)
2002-01-04 07:12:29 +08:00
{
2019-12-10 15:07:25 +08:00
int i;
2019-12-10 15:07:25 +08:00
dprintf(stderr, "%s, %s()\n", __FILE__, __FUNCTION__);
while (tree) {
tree = splay_delete (tree->start, tree);
2002-01-04 07:12:29 +08:00
}
2019-12-10 15:07:25 +08:00
#if TREE_REUSE
while (tree_free_list) {
Tree *next = tree_free_list->left;
#if MALLOC_REDIR
free_redir (tree_free_list);
#else
free (tree_free_list);
#endif
2019-12-10 15:07:25 +08:00
tree_free_list = next;
}
#endif
2019-12-10 15:07:25 +08:00
for (i = 0; i < FREE_REUSE_SIZE; i++) {
#if MALLOC_REDIR
free_redir (free_reuse_list[i]);
#else
free (free_reuse_list[i]);
#endif
}
EXIT_SEM ();
inited = 0;
}
2002-01-04 07:12:29 +08:00
/* XXX: we should use a malloc which ensure that it is unlikely that
two malloc'ed data have the same address if 'free' are made in
between. */
2019-12-10 15:07:25 +08:00
#if MALLOC_REDIR
void *malloc(size_t size)
#else
void *__bound_malloc(size_t size, const void *caller)
2019-12-10 15:07:25 +08:00
#endif
2002-01-04 07:12:29 +08:00
{
void *ptr;
2019-12-10 15:07:25 +08:00
#if MALLOC_REDIR
/* This will catch the first dlsym call from __bound_init */
if (malloc_redir == NULL) {
static int pool_index = 0;
static unsigned char pool[256];
void *retval;
retval = &pool[pool_index];
pool_index = (pool_index + size + 7) & ~8;
dprintf (stderr, "%s, %s initial (%p, 0x%x)\n",
__FILE__, __FUNCTION__, retval, (unsigned)size);
return retval;
}
#endif
2002-01-04 07:12:29 +08:00
/* we allocate one more byte to ensure the regions will be
separated by at least one byte. With the glibc malloc, it may
be in fact not necessary */
2019-12-10 15:07:25 +08:00
WAIT_SEM ();
#if MALLOC_REDIR
ptr = malloc_redir (size);
#else
ptr = malloc(size + 1);
#endif
2019-12-10 15:07:25 +08:00
dprintf(stderr, "%s, %s (%p, 0x%x)\n",
__FILE__, __FUNCTION__, ptr, (unsigned)size);
2019-12-10 15:07:25 +08:00
if (ptr) {
tree = splay_insert ((size_t) ptr, size, tree);
}
POST_SEM ();
2002-01-04 07:12:29 +08:00
return ptr;
}
2019-12-10 15:07:25 +08:00
/* Checked memalign.
   NOTE(review): the parameter order here is (size, align), the reverse
   of the C library memalign(align, size); when MALLOC_REDIR intercepts
   the libc symbol the roles appear swapped — confirm against tcc's
   caller convention. */
#if MALLOC_REDIR
void *memalign(size_t size, size_t align)
#else
void *__bound_memalign(size_t size, size_t align, const void *caller)
#endif
{
    void *ptr;

    WAIT_SEM ();
#ifndef HAVE_MEMALIGN
    if (align > 4) {
        /* XXX: handle it ? */
        ptr = NULL;
    } else {
        /* we suppose that malloc aligns to at least four bytes */
#if MALLOC_REDIR
        ptr = malloc_redir(size + 1);
#else
        ptr = malloc(size + 1);
#endif
    }
#else
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc, it may
       be in fact not necessary */
#if MALLOC_REDIR
    ptr = memalign_redir(size + 1, align);
#else
    ptr = memalign(size + 1, align);
#endif
#endif
    if (ptr) {
        dprintf(stderr, "%s, %s (%p, 0x%x)\n",
                __FILE__, __FUNCTION__, ptr, (unsigned)size);
        /* only 'size' bytes are registered as valid */
        tree = splay_insert((size_t) ptr, size, tree);
    }
    POST_SEM ();
    return ptr;
}
2019-12-10 15:07:25 +08:00
/* Checked free.  Freed blocks are poisoned and kept in a small FIFO
   quarantine so their addresses are not immediately reused, which helps
   catch use-after-free. */
#if MALLOC_REDIR
void free(void *ptr)
#else
void __bound_free(void *ptr, const void *caller)
#endif
{
    size_t addr = (size_t) ptr;
    void *p;

    /* NOTE(review): a non-NULL ptr while the tree is empty is silently
       dropped (never handed to the underlying free) — confirm. */
    if (ptr == NULL || tree == NULL)
        return;
    dprintf(stderr, "%s, %s (%p)\n", __FILE__, __FUNCTION__, ptr);
    WAIT_SEM ();
    tree = splay (addr, tree);
    if (tree->start == addr) {
        if (tree->is_invalid) {
            /* second free of the same tracked block */
            bound_error("freeing invalid region");
            POST_SEM ();
            return;
        }
        /* poison the contents and quarantine the block; actually free
           the oldest entry that falls out of the FIFO */
        tree->is_invalid = 1;
        memset (ptr, 0x5a, tree->size);
        p = free_reuse_list[free_reuse_index];
        free_reuse_list[free_reuse_index] = ptr;
        free_reuse_index = (free_reuse_index + 1) % FREE_REUSE_SIZE;
        if (p) {
            tree = splay_delete((size_t)p, tree);
        }
        ptr = p; /* may be NULL: nothing evicted yet */
    }
#if MALLOC_REDIR
    free_redir (ptr);
#else
    free(ptr);
#endif
    POST_SEM ();
}
2019-12-10 15:07:25 +08:00
#if MALLOC_REDIR
void *realloc(void *ptr, size_t size)
#else
void *__bound_realloc(void *ptr, size_t size, const void *caller)
2019-12-10 15:07:25 +08:00
#endif
2002-01-04 07:12:29 +08:00
{
void *ptr1;
2019-12-10 15:07:25 +08:00
size_t last_size;
2002-01-04 07:12:29 +08:00
if (size == 0) {
2019-12-10 15:07:25 +08:00
#if MALLOC_REDIR
free(ptr);
#else
__bound_free(ptr, caller);
2019-12-10 15:07:25 +08:00
#endif
2002-01-04 07:12:29 +08:00
return NULL;
} else {
2019-12-10 15:07:25 +08:00
WAIT_SEM ();
tree = splay ((size_t) ptr, tree);
if (tree->start != (size_t) ptr) {
#if MALLOC_REDIR
ptr = realloc_redir (ptr, size);
#else
ptr = realloc (ptr, size);
#endif
if (ptr) {
tree = splay_insert ((size_t) ptr, size, tree);
}
POST_SEM ();
return ptr;
}
else {
last_size = tree->size;
POST_SEM ();
#if MALLOC_REDIR
ptr1 = malloc(size);
#else
ptr1 = __bound_malloc(size, caller);
#endif
if (ptr == NULL || ptr1 == NULL)
return ptr1;
memcpy(ptr1, ptr, last_size < size ? last_size : size);
#if MALLOC_REDIR
free(ptr);
#else
__bound_free(ptr, caller);
#endif
2002-01-04 07:12:29 +08:00
return ptr1;
2019-12-10 15:07:25 +08:00
}
2002-01-04 07:12:29 +08:00
}
}
2019-12-10 15:07:25 +08:00
/* Checked calloc: zero-initialised allocation of nmemb * size bytes. */
#if MALLOC_REDIR
void *calloc(size_t nmemb, size_t size)
#else
void *__bound_calloc(size_t nmemb, size_t size)
#endif
{
    void *ptr;

    /* reject nmemb * size overflow instead of silently allocating a
       short block */
    if (size != 0 && nmemb > (size_t) -1 / size)
        return NULL;
    size *= nmemb;
#if MALLOC_REDIR
    ptr = malloc(size);
#else
    ptr = __bound_malloc(size, NULL);
#endif
    if (ptr) {
        memset (ptr, 0, size);
    }
    return ptr;
}
2019-12-10 15:07:25 +08:00
#if !defined(_WIN32)
/* Checked mmap: register successful mappings as valid regions. */
void *__bound_mmap (void *start, size_t size, int prot,
                    int flags, int fd, off_t offset)
{
    void *result;

    dprintf(stderr, "%s, %s (%p, 0x%x)\n",
            __FILE__, __FUNCTION__, start, (unsigned)size);
    result = mmap (start, size, prot, flags, fd, offset);
    /* mmap reports failure with MAP_FAILED ((void *)-1), not NULL; the
       old 'if (result)' test registered failed mappings as valid */
    if (result != MAP_FAILED) {
        WAIT_SEM ();
        tree = splay_insert((size_t)result, size, tree);
        POST_SEM ();
    }
    return result;
}
/* Checked munmap: forget the region, then release the mapping. */
int __bound_munmap (void *start, size_t size)
{
    dprintf(stderr, "%s, %s (%p, 0x%x)\n",
            __FILE__, __FUNCTION__, start, (unsigned)size);
    WAIT_SEM ();
    tree = splay_delete ((size_t) start, tree);
    POST_SEM ();
    return munmap (start, size);
}
#endif
2019-12-10 15:07:25 +08:00
/* used by alloca */
void __bound_new_region(void *p, size_t size)
2002-01-04 07:12:29 +08:00
{
2019-12-10 15:07:25 +08:00
size_t fp;
alloca_list_type *last;
alloca_list_type *cur;
dprintf(stderr, "%s, %s (%p, 0x%x)\n",
__FILE__, __FUNCTION__, p, (unsigned)size);
WAIT_SEM ();
last = NULL;
cur = alloca_list;
while (cur && cur->fp == fp) {
if (cur->p == p) {
dprintf(stderr, "%s, %s() remove alloca/vla %p\n",
__FILE__, __FUNCTION__, alloca_list->p);
if (last) {
last->next = cur->next;
2002-01-04 07:12:29 +08:00
}
2019-12-10 15:07:25 +08:00
else {
alloca_list->next = cur->next;
}
tree = splay_delete((size_t)p, tree);
#if MALLOC_REDIR
free_redir (cur);
#else
free (cur);
#endif
2002-01-04 07:12:29 +08:00
}
2019-12-10 15:07:25 +08:00
last = cur;
cur = cur->next;
2002-01-04 07:12:29 +08:00
}
2019-12-10 15:07:25 +08:00
tree = splay_insert((size_t)p, size, tree);
#if MALLOC_REDIR
cur = malloc_redir (sizeof (alloca_list_type));
#else
cur = malloc (sizeof (alloca_list_type));
2002-01-04 07:12:29 +08:00
#endif
2019-12-10 15:07:25 +08:00
if (cur) {
GET_CALLER_FP (fp);
cur->fp = fp;
cur->p = p;
cur->next = alloca_list;
alloca_list = cur;
}
POST_SEM ();
}
/* some useful checked functions */
/* check that (p ... p + size - 1) lies inside 'p' region, if any */
static void __bound_check(const void *p, size_t size)
{
2019-12-10 15:07:25 +08:00
if (no_checking)
return;
if (size == 0)
return;
p = __bound_ptr_add((void *)p, size - 1);
if (p == INVALID_POINTER)
2002-11-03 08:42:33 +08:00
bound_error("invalid pointer");
}
void *__bound_memcpy(void *dst, const void *src, size_t size)
{
void* p;
__bound_check(dst, size);
__bound_check(src, size);
/* check also region overlap */
2019-12-10 15:07:25 +08:00
if (no_checking == 0 && src >= dst && src < dst + size)
2002-11-03 08:42:33 +08:00
bound_error("overlapping regions in memcpy()");
p = memcpy(dst, src, size);
return p;
}
/* Checked memmove: overlap is allowed, only the regions are validated. */
void *__bound_memmove(void *dst, const void *src, size_t size)
{
    __bound_check(dst, size);
    __bound_check(src, size);
    return memmove(dst, src, size);
}
/* Checked memset: validate the destination region, then delegate. */
void *__bound_memset(void *dst, int c, size_t size)
{
    __bound_check(dst, size);
    return memset(dst, c, size);
}
/* XXX: could be optimized */
/* Checked strlen: every byte read is validated through the 1-byte
   indirection checker. */
int __bound_strlen(const char *s)
{
    size_t n = 0;

    for (;;) {
        const char *q = __bound_ptr_indir1((char *)s, n);

        if (q == INVALID_POINTER)
            bound_error("bad pointer in strlen()");
        if (*q == '\0')
            return n;
        n++;
    }
}
/* Checked strcpy: measure with the checked strlen, then copy the
   string plus its terminating NUL with the checked memcpy. */
char *__bound_strcpy(char *dst, const char *src)
{
    size_t n = __bound_strlen(src);

    return __bound_memcpy(dst, src, n + 1);
}
2019-12-10 15:07:25 +08:00
/*
An implementation of top-down splaying with sizes
D. Sleator <sleator@cs.cmu.edu>, January 1994.
This extends top-down-splay.c to maintain a size field in each node.
This is the number of nodes in the subtree rooted there. This makes
it possible to efficiently compute the rank of a key. (The rank is
the number of nodes to the left of the given key.) It it also
possible to quickly find the node of a given rank. Both of these
operations are illustrated in the code below. The remainder of this
introduction is taken from top-down-splay.c.
"Splay trees", or "self-adjusting search trees" are a simple and
efficient data structure for storing an ordered set. The data
structure consists of a binary tree, with no additional fields. It
allows searching, insertion, deletion, deletemin, deletemax,
splitting, joining, and many other operations, all with amortized
logarithmic performance. Since the trees adapt to the sequence of
requests, their performance on real access patterns is typically even
better. Splay trees are described in a number of texts and papers
[1,2,3,4].
The code here is adapted from simple top-down splay, at the bottom of
page 669 of [2]. It can be obtained via anonymous ftp from
spade.pc.cs.cmu.edu in directory /usr/sleator/public.
The chief modification here is that the splay operation works even if the
item being splayed is not in the tree, and even if the tree root of the
tree is NULL. So the line:
t = splay(i, t);
causes it to search for item with key i in the tree rooted at t. If it's
there, it is splayed to the root. If it isn't there, then the node put
at the root is the last one before NULL that would have been reached in a
normal binary search for i. (It's a neighbor of i in the tree.) This
allows many other operations to be easily implemented, as shown below.
[1] "Data Structures and Their Algorithms", Lewis and Denenberg,
Harper Collins, 1991, pp 243-251.
[2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
JACM Volume 32, No 3, July 1985, pp 652-686.
[3] "Data Structure and Algorithm Analysis", Mark Weiss,
Benjamin Cummins, 1992, pp 119-130.
[4] "Data Structures, Algorithms, and Performance", Derick Wood,
Addison-Wesley, 1993, pp 367-375
*/
/* Code adapted for tcc */
/* Region comparison: <0 if start is below [tstart, tstart+tsize),
   0 if inside it, >0 if at or beyond its end. */
#define compare(start,tstart,tsize) (start < tstart ? -1 : \
                                     start >= tstart+tsize ? 1 : 0)

static Tree * splay (size_t addr, Tree *t)
/* Splay using the key start (which may or may not be in the tree.) */
/* The starting root is t, and the tree used is defined by rat */
{
    Tree N, *l, *r, *y;
    int comp;

    if (t == NULL) return t;
    /* N collects the split-off left and right subtrees (top-down splay) */
    N.left = N.right = NULL;
    l = r = &N;
    for (;;) {
        comp = compare(addr, t->start, t->size);
        if (comp < 0) {
            y = t->left;
            if (y == NULL) break;
            if (compare(addr, y->start, y->size) < 0) {
                t->left = y->right; /* rotate right */
                y->right = t;
                t = y;
                if (t->left == NULL) break;
            }
            r->left = t; /* link right */
            r = t;
            t = t->left;
        } else if (comp > 0) {
            y = t->right;
            if (y == NULL) break;
            if (compare(addr, y->start, y->size) > 0) {
                t->right = y->left; /* rotate left */
                y->left = t;
                t = y;
                if (t->right == NULL) break;
            }
            l->right = t; /* link left */
            l = t;
            t = t->right;
        } else {
            break; /* addr lies inside t's region */
        }
    }
    /* reattach the collected subtrees around the new root */
    l->right = t->left; /* assemble */
    r->left = t->right;
    t->left = N.right;
    t->right = N.left;
    return t;
}
/* Comparison against a region END address: used to find the region a
   one-past-the-end pointer belongs to. */
#define compare_end(start,tend) (start < tend ? -1 : \
                                 start > tend ? 1 : 0)

static Tree * splay_end (size_t addr, Tree *t)
/* Splay using the key start (which may or may not be in the tree.) */
/* The starting root is t, and the tree used is defined by rat */
{
    Tree N, *l, *r, *y;
    int comp;

    if (t == NULL) return t;
    /* identical top-down splay to splay(), keyed on start + size */
    N.left = N.right = NULL;
    l = r = &N;
    for (;;) {
        comp = compare_end(addr, t->start + t->size);
        if (comp < 0) {
            y = t->left;
            if (y == NULL) break;
            if (compare_end(addr, y->start + y->size) < 0) {
                t->left = y->right; /* rotate right */
                y->right = t;
                t = y;
                if (t->left == NULL) break;
            }
            r->left = t; /* link right */
            r = t;
            t = t->left;
        } else if (comp > 0) {
            y = t->right;
            if (y == NULL) break;
            if (compare_end(addr, y->start + y->size) > 0) {
                t->right = y->left; /* rotate left */
                y->left = t;
                t = y;
                if (t->right == NULL) break;
            }
            l->right = t; /* link left */
            l = t;
            t = t->right;
        } else {
            break; /* addr equals this region's end */
        }
    }
    l->right = t->left; /* assemble */
    r->left = t->right;
    t->left = N.right;
    t->right = N.left;
    return t;
}
static Tree * splay_insert(size_t addr, size_t size, Tree * t)
/* Insert key start into the tree t, if it is not already there. */
/* Return a pointer to the resulting tree. */
{
    Tree * new;

    if (t != NULL) {
        t = splay(addr,t);
        if (compare(addr, t->start, t->size)==0) {
            return t; /* it's already there */
        }
    }
#if TREE_REUSE
    /* prefer a recycled node (chained via 'left') over the allocator */
    if (tree_free_list) {
        new = tree_free_list;
        tree_free_list = new->left;
    }
    else
#endif
    {
#if MALLOC_REDIR
        new = (Tree *) malloc_redir (sizeof (Tree));
#else
        new = (Tree *) malloc (sizeof (Tree));
#endif
    }
    if (new == NULL) {bound_alloc_error();}
    /* the splayed root becomes a child of the new node, split by key */
    if (t == NULL) {
        new->left = new->right = NULL;
    } else if (compare(addr, t->start, t->size) < 0) {
        new->left = t->left;
        new->right = t;
        t->left = NULL;
    } else {
        new->right = t->right;
        new->left = t;
        t->right = NULL;
    }
    new->start = addr;
    new->size = size;
    new->is_invalid = 0;
    return new;
}
/* Exact-start comparison for deletion: only matches a region that
   begins precisely at 'start'. */
#define compare_destroy(start,tstart) (start < tstart ? -1 : \
                                       start > tstart ? 1 : 0)

static Tree * splay_delete(size_t addr, Tree *t)
/* Deletes addr from the tree if it's there. */
/* Return a pointer to the resulting tree. */
{
    Tree * x;

    if (t==NULL) return NULL;
    t = splay(addr,t);
    if (compare_destroy(addr, t->start) == 0) { /* found it */
        if (t->left == NULL) {
            x = t->right;
        } else {
            /* splay the left subtree so its maximum is at its root,
               which then has a free right slot for t's right subtree */
            x = splay(addr, t->left);
            x->right = t->right;
        }
#if TREE_REUSE
        /* recycle the node on the free list, chained through 'left' */
        t->left = tree_free_list;
        tree_free_list = t;
#else
#if MALLOC_REDIR
        free_redir(t);
#else
        free(t);
#endif
#endif
        return x;
    } else {
        return t; /* It wasn't there */
    }
}
/* Debug helper: dump the tree to stderr, right subtree first, indented
   by depth so the output reads as the tree rotated 90 degrees. */
void splay_printtree(Tree * t, int d)
{
    int i;

    if (t == NULL)
        return;
    splay_printtree(t->right, d + 1);
    for (i = 0; i < d; i++)
        fprintf(stderr," ");
    fprintf(stderr,"%p(0x%lx:%u)\n",
            (void *) t->start, (unsigned long) t->size, (unsigned)t->is_invalid);
    splay_printtree(t->left, d + 1);
}