/*
 * Tiny C Memory and bounds checker
 *
 * Copyright (c) 2002 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <setjmp.h>

#if !defined(__FreeBSD__) \
 && !defined(__FreeBSD_kernel__) \
 && !defined(__DragonFly__) \
 && !defined(__OpenBSD__) \
 && !defined(__APPLE__) \
 && !defined(__NetBSD__)
#include <malloc.h>
#endif

#if !defined(_WIN32)
#include <unistd.h>
#include <sys/syscall.h>
#endif

#define BOUND_DEBUG             (1)
#define BOUND_STATISTIC         (1)

#if BOUND_DEBUG
#define dprintf(a...)           if (print_calls) fprintf(a)
#else
#define dprintf(a...)
#endif
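
/* Note: dprintf() only produces output when print_calls is set, which
   __bound_init() below derives from the TCC_BOUNDS_PRINT_CALLS environment
   variable. */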

#ifdef __attribute__
  /* an __attribute__ macro is defined in the system headers */
  #undef __attribute__
#endif
#define FASTCALL __attribute__((regparm(3)))

#ifdef _WIN32
# define DLL_EXPORT __declspec(dllexport)
#else
# define DLL_EXPORT
#endif

#if defined(__FreeBSD__) \
 || defined(__FreeBSD_kernel__) \
 || defined(__DragonFly__) \
 || defined(__OpenBSD__) \
 || defined(__NetBSD__) \
 || defined(__dietlibc__)

#include <sys/mman.h>
#define INIT_SEM()
#define EXIT_SEM()
#define WAIT_SEM()
#define POST_SEM()
#define TRY_SEM()
#define HAVE_MEMALIGN        (0)
#define MALLOC_REDIR         (0)
#define HAVE_PTHREAD_CREATE  (0)
#define HAVE_CTYPE           (0)
#define HAVE_ERRNO           (0)
#define HAVE_SIGNAL          (0)
#define HAVE_SIGACTION       (0)
#define HAVE_FORK            (0)
#define HAVE_TLS_FUNC        (0)
#define HAVE_TLS_VAR         (0)

#elif defined(_WIN32)

#include <windows.h>
#include <signal.h>
static CRITICAL_SECTION bounds_sem;
#define INIT_SEM()  InitializeCriticalSection(&bounds_sem)
#define EXIT_SEM()  DeleteCriticalSection(&bounds_sem)
#define WAIT_SEM()  EnterCriticalSection(&bounds_sem)
#define POST_SEM()  LeaveCriticalSection(&bounds_sem)
#define TRY_SEM()   TryEnterCriticalSection(&bounds_sem)
#define HAVE_MEMALIGN        (0)
#define MALLOC_REDIR         (0)
#define HAVE_PTHREAD_CREATE  (0)
#define HAVE_CTYPE           (0)
#define HAVE_ERRNO           (0)
#define HAVE_SIGNAL          (1)
#define HAVE_SIGACTION       (0)
#define HAVE_FORK            (0)
#define HAVE_TLS_FUNC        (1)
#define HAVE_TLS_VAR         (0)

#else

#define __USE_GNU       /* get RTLD_NEXT */
#include <sys/mman.h>
#include <ctype.h>
#include <pthread.h>
#include <dlfcn.h>
#include <errno.h>
#include <signal.h>
#ifdef __APPLE__
#include <dispatch/dispatch.h>
static dispatch_semaphore_t bounds_sem;
#define INIT_SEM()  bounds_sem = dispatch_semaphore_create(1)
#define EXIT_SEM()  dispatch_release(*(dispatch_object_t*)&bounds_sem)
#define WAIT_SEM()  if (use_sem) dispatch_semaphore_wait(bounds_sem, DISPATCH_TIME_FOREVER)
#define POST_SEM()  if (use_sem) dispatch_semaphore_signal(bounds_sem)
#define TRY_SEM()   if (use_sem) dispatch_semaphore_wait(bounds_sem, DISPATCH_TIME_NOW)
#elif 0
#include <semaphore.h>
static sem_t bounds_sem;
#define INIT_SEM()  sem_init (&bounds_sem, 0, 1)
#define EXIT_SEM()  sem_destroy (&bounds_sem)
#define WAIT_SEM()  if (use_sem) while (sem_wait (&bounds_sem) < 0 \
                                        && errno == EINTR)
#define POST_SEM()  if (use_sem) sem_post (&bounds_sem)
#define TRY_SEM()   if (use_sem) while (sem_trywait (&bounds_sem) < 0 \
                                        && errno == EINTR)
#elif 0
static pthread_mutex_t bounds_mtx;
#define INIT_SEM()  pthread_mutex_init (&bounds_mtx, NULL)
#define EXIT_SEM()  pthread_mutex_destroy (&bounds_mtx)
#define WAIT_SEM()  if (use_sem) pthread_mutex_lock (&bounds_mtx)
#define POST_SEM()  if (use_sem) pthread_mutex_unlock (&bounds_mtx)
#define TRY_SEM()   if (use_sem) pthread_mutex_trylock (&bounds_mtx)
#else
static pthread_spinlock_t bounds_spin;
/* about 25% faster than semaphore. */
#define INIT_SEM()  pthread_spin_init (&bounds_spin, 0)
#define EXIT_SEM()  pthread_spin_destroy (&bounds_spin)
#define WAIT_SEM()  if (use_sem) pthread_spin_lock (&bounds_spin)
#define POST_SEM()  if (use_sem) pthread_spin_unlock (&bounds_spin)
#define TRY_SEM()   if (use_sem) pthread_spin_trylock (&bounds_spin)
#endif
#define HAVE_MEMALIGN        (1)
#define MALLOC_REDIR         (1)
#define HAVE_PTHREAD_CREATE  (1)
#define HAVE_CTYPE           (1)
#define HAVE_ERRNO           (1)
#define HAVE_SIGNAL          (1)
#define HAVE_SIGACTION       (1)
#define HAVE_FORK            (1)
#if !defined(__APPLE__) && defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
#define HAVE_TLS_FUNC        (0)
#define HAVE_TLS_VAR         (1)
#else
#define HAVE_TLS_FUNC        (1)
#define HAVE_TLS_VAR         (0)
#endif
#ifdef TCC_MUSL
# undef HAVE_CTYPE
#endif
#endif
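
/* WAIT_SEM/POST_SEM/TRY_SEM serialize all accesses to the global splay tree
   and to the alloca/setjmp bookkeeping lists; on Linux the default
   implementation is a pthread spinlock, and locking is only performed once
   use_sem has been set by __bound_init(). */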

#if MALLOC_REDIR
static void *(*malloc_redir) (size_t);
static void *(*calloc_redir) (size_t, size_t);
static void (*free_redir) (void *);
static void *(*realloc_redir) (void *, size_t);
static unsigned int pool_index;
static unsigned char __attribute__((aligned(16))) initial_pool[256];
#endif
#if HAVE_MEMALIGN
static void *(*memalign_redir) (size_t, size_t);
#endif
#if HAVE_PTHREAD_CREATE
static int (*pthread_create_redir) (pthread_t *thread,
                                    const pthread_attr_t *attr,
                                    void *(*start_routine)(void *), void *arg);
#endif
#if HAVE_SIGNAL
typedef void (*bound_sig)(int);
static bound_sig (*signal_redir) (int signum, bound_sig handler);
#endif
#if HAVE_SIGACTION
static int (*sigaction_redir) (int signum, const struct sigaction *act,
                               struct sigaction *oldact);
#endif
#if HAVE_FORK
static int (*fork_redir) (void);
#endif

#define TCC_TYPE_NONE           (0)
#define TCC_TYPE_MALLOC         (1)
#define TCC_TYPE_CALLOC         (2)
#define TCC_TYPE_REALLOC        (3)
#define TCC_TYPE_MEMALIGN       (4)
#define TCC_TYPE_STRDUP         (5)

/* this pointer is generated when bound check is incorrect */
#define INVALID_POINTER ((void *)(-2))
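
/* Every valid region known to the checker (static variable, bounds-checked
   stack local, heap allocation, mmap'ed area) is kept in a splay tree keyed
   by its start address; splaying moves the region just looked up to the
   root, so repeated accesses to the same object avoid a full search. */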
typedef struct tree_node Tree;
struct tree_node {
    Tree * left, * right;
    size_t start;
    size_t size;
    unsigned char type;
    unsigned char is_invalid; /* true if pointers outside region are invalid */
};

typedef struct alloca_list_struct {
    size_t fp;
    void *p;
    size_t size;
    struct alloca_list_struct *next;
} alloca_list_type;

#if defined(_WIN32)
#define BOUND_TID_TYPE   DWORD
#define BOUND_GET_TID    GetCurrentThreadId()
#elif defined(__OpenBSD__)
#define BOUND_TID_TYPE   pid_t
#define BOUND_GET_TID    syscall (SYS_getthrid)
#elif defined(__FreeBSD__) || defined(__NetBSD__)
#define BOUND_TID_TYPE   pid_t
#define BOUND_GET_TID    0
#elif defined(__i386__) || defined(__x86_64__) || defined(__arm__) || defined(__aarch64__) || defined(__riscv)
#define BOUND_TID_TYPE   pid_t
#define BOUND_GET_TID    syscall (SYS_gettid)
#else
#define BOUND_TID_TYPE   int
#define BOUND_GET_TID    0
#endif

typedef struct jmp_list_struct {
    void *penv;
    size_t fp;
    size_t end_fp;
    BOUND_TID_TYPE tid;
    struct jmp_list_struct *next;
} jmp_list_type;

#define BOUND_STATISTIC_SPLAY   (0)
static Tree * splay (size_t addr, Tree *t);
static Tree * splay_end (size_t addr, Tree *t);
static Tree * splay_insert(size_t addr, size_t size, Tree * t);
static Tree * splay_delete(size_t addr, Tree *t);
void splay_printtree(Tree * t, int d);

/* external interface */
void __bounds_checking (int no_check);
void __bound_never_fatal (int no_check);
DLL_EXPORT void * __bound_ptr_add(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir1(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir2(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir4(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir8(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir12(void *p, size_t offset);
DLL_EXPORT void * __bound_ptr_indir16(void *p, size_t offset);
DLL_EXPORT void FASTCALL __bound_local_new(void *p1);
DLL_EXPORT void FASTCALL __bound_local_delete(void *p1);
void __bound_init(size_t *, int);
void __bound_main_arg(int argc, char **argv, char **envp);
void __bound_exit(void);
#if !defined(_WIN32)
void *__bound_mmap (void *start, size_t size, int prot, int flags, int fd,
                    off_t offset);
int __bound_munmap (void *start, size_t size);
DLL_EXPORT void __bound_siglongjmp(jmp_buf env, int val);
#endif
DLL_EXPORT void __bound_new_region(void *p, size_t size);
DLL_EXPORT void __bound_setjmp(jmp_buf env);
DLL_EXPORT void __bound_longjmp(jmp_buf env, int val);
DLL_EXPORT void *__bound_memcpy(void *dst, const void *src, size_t size);
DLL_EXPORT int __bound_memcmp(const void *s1, const void *s2, size_t size);
DLL_EXPORT void *__bound_memmove(void *dst, const void *src, size_t size);
DLL_EXPORT void *__bound_memset(void *dst, int c, size_t size);
DLL_EXPORT int __bound_strlen(const char *s);
DLL_EXPORT char *__bound_strcpy(char *dst, const char *src);
DLL_EXPORT char *__bound_strncpy(char *dst, const char *src, size_t n);
DLL_EXPORT int __bound_strcmp(const char *s1, const char *s2);
DLL_EXPORT int __bound_strncmp(const char *s1, const char *s2, size_t n);
DLL_EXPORT char *__bound_strcat(char *dest, const char *src);
DLL_EXPORT char *__bound_strchr(const char *string, int ch);
DLL_EXPORT char *__bound_strdup(const char *s);

#if defined(__arm__) && defined(__ARM_EABI__)
DLL_EXPORT void *__bound___aeabi_memcpy(void *dst, const void *src, size_t size);
DLL_EXPORT void *__bound___aeabi_memmove(void *dst, const void *src, size_t size);
DLL_EXPORT void *__bound___aeabi_memmove4(void *dst, const void *src, size_t size);
DLL_EXPORT void *__bound___aeabi_memmove8(void *dst, const void *src, size_t size);
DLL_EXPORT void *__bound___aeabi_memset(void *dst, int c, size_t size);
DLL_EXPORT void *__aeabi_memcpy(void *dst, const void *src, size_t size);
DLL_EXPORT void *__aeabi_memmove(void *dst, const void *src, size_t size);
DLL_EXPORT void *__aeabi_memmove4(void *dst, const void *src, size_t size);
DLL_EXPORT void *__aeabi_memmove8(void *dst, const void *src, size_t size);
DLL_EXPORT void *__aeabi_memset(void *dst, int c, size_t size);
#endif

#if MALLOC_REDIR
#define BOUND_MALLOC(a)          malloc_redir(a)
#define BOUND_MEMALIGN(a,b)      memalign_redir(a,b)
#define BOUND_FREE(a)            free_redir(a)
#define BOUND_REALLOC(a,b)       realloc_redir(a,b)
#define BOUND_CALLOC(a,b)        calloc_redir(a,b)
#else
#define BOUND_MALLOC(a)          malloc(a)
#define BOUND_MEMALIGN(a,b)      memalign(a,b)
#define BOUND_FREE(a)            free(a)
#define BOUND_REALLOC(a,b)       realloc(a,b)
#define BOUND_CALLOC(a,b)        calloc(a,b)
DLL_EXPORT void *__bound_malloc(size_t size, const void *caller);
DLL_EXPORT void *__bound_memalign(size_t size, size_t align, const void *caller);
DLL_EXPORT void __bound_free(void *ptr, const void *caller);
DLL_EXPORT void *__bound_realloc(void *ptr, size_t size, const void *caller);
DLL_EXPORT void *__bound_calloc(size_t nmemb, size_t size);
#endif

#define FREE_REUSE_SIZE (100)
static unsigned int free_reuse_index;
static void *free_reuse_list[FREE_REUSE_SIZE];

static Tree *tree = NULL;
#define TREE_REUSE (1)
#if TREE_REUSE
static Tree *tree_free_list;
#endif
static alloca_list_type *alloca_list;
static jmp_list_type *jmp_list;

static unsigned char inited;
static unsigned char print_warn_ptr_add;
static unsigned char print_calls;
static unsigned char print_heap;
static unsigned char print_statistic;
static unsigned char no_strdup;
static unsigned char use_sem;
static int never_fatal;
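
/* Per-thread "checking disabled" flag: with gcc on i386/x86_64 a plain
   __thread variable is used (HAVE_TLS_VAR); otherwise the flag lives behind
   a TLS key (HAVE_TLS_FUNC), or falls back to a single shared counter. */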
#if HAVE_TLS_FUNC
#if defined(_WIN32)
static int no_checking = 0;
static DWORD no_checking_key;
#define NO_CHECKING_CHECK() if (!p) {                                  \
                                p = (int *) LocalAlloc(LPTR, sizeof(int)); \
                                if (!p) bound_alloc_error("tls malloc"); \
                                *p = 0;                                \
                                TlsSetValue(no_checking_key, p);       \
                            }
#define NO_CHECKING_GET()   ({ int *p = TlsGetValue(no_checking_key);  \
                               NO_CHECKING_CHECK();                    \
                               *p;                                     \
                            })
#define NO_CHECKING_SET(v)  { int *p = TlsGetValue(no_checking_key);   \
                              NO_CHECKING_CHECK();                     \
                              *p = v;                                  \
                            }
#else
static int no_checking = 0;
static pthread_key_t no_checking_key;
#define NO_CHECKING_CHECK() if (!p) {                                  \
                                p = (int *) BOUND_MALLOC(sizeof(int)); \
                                if (!p) bound_alloc_error("tls malloc"); \
                                *p = 0;                                \
                                pthread_setspecific(no_checking_key, p); \
                            }
#define NO_CHECKING_GET()   ({ int *p = pthread_getspecific(no_checking_key); \
                               NO_CHECKING_CHECK();                    \
                               *p;                                     \
                            })
#define NO_CHECKING_SET(v)  { int *p = pthread_getspecific(no_checking_key); \
                              NO_CHECKING_CHECK();                     \
                              *p = v;                                  \
                            }
#endif
#elif HAVE_TLS_VAR
static __thread int no_checking = 0;
#define NO_CHECKING_GET()   no_checking
#define NO_CHECKING_SET(v)  no_checking = v
#else
static int no_checking = 0;
#define NO_CHECKING_GET()   no_checking
#define NO_CHECKING_SET(v)  no_checking = v
#endif
static char exec[100];

#if BOUND_STATISTIC
static unsigned long long bound_ptr_add_count;
static unsigned long long bound_ptr_indir1_count;
static unsigned long long bound_ptr_indir2_count;
static unsigned long long bound_ptr_indir4_count;
static unsigned long long bound_ptr_indir8_count;
static unsigned long long bound_ptr_indir12_count;
static unsigned long long bound_ptr_indir16_count;
static unsigned long long bound_local_new_count;
static unsigned long long bound_local_delete_count;
static unsigned long long bound_malloc_count;
static unsigned long long bound_calloc_count;
static unsigned long long bound_realloc_count;
static unsigned long long bound_free_count;
static unsigned long long bound_memalign_count;
static unsigned long long bound_mmap_count;
static unsigned long long bound_munmap_count;
static unsigned long long bound_alloca_count;
static unsigned long long bound_setjmp_count;
static unsigned long long bound_longjmp_count;
static unsigned long long bound_mempcy_count;
static unsigned long long bound_memcmp_count;
static unsigned long long bound_memmove_count;
static unsigned long long bound_memset_count;
static unsigned long long bound_strlen_count;
static unsigned long long bound_strcpy_count;
static unsigned long long bound_strncpy_count;
static unsigned long long bound_strcmp_count;
static unsigned long long bound_strncmp_count;
static unsigned long long bound_strcat_count;
static unsigned long long bound_strchr_count;
static unsigned long long bound_strdup_count;
static unsigned long long bound_not_found;
#define INCR_COUNT(x)         ++x
#else
#define INCR_COUNT(x)
#endif
#if BOUND_STATISTIC_SPLAY
static unsigned long long bound_splay;
static unsigned long long bound_splay_end;
static unsigned long long bound_splay_insert;
static unsigned long long bound_splay_delete;
#define INCR_COUNT_SPLAY(x)   ++x
#else
#define INCR_COUNT_SPLAY(x)
#endif

int tcc_backtrace(const char *fmt, ...);

/* print a bound error message */
#define bound_warning(...) \
    tcc_backtrace("^bcheck.c^BCHECK: " __VA_ARGS__)

#define bound_error(...)             \
    do {                             \
        bound_warning(__VA_ARGS__);  \
        if (never_fatal == 0)        \
            exit(255);               \
    } while (0)

static void bound_alloc_error(const char *s)
{
    fprintf(stderr,"FATAL: %s\n",s);
    exit (1);
}

static void bound_not_found_warning(const char *file, const char *function,
                                    void *ptr)
{
    dprintf(stderr, "%s%s, %s(): Not found %p\n", exec, file, function, ptr);
}
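
/* Atomically add 'value' to '*variable'.  Despite the name nothing is
   returned; it is only used to adjust the shared no_checking/never_fatal
   counters when no per-thread flag is available. */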
static void fetch_and_add(int* variable, int value)
{
#if defined __i386__ || defined __x86_64__
    __asm__ volatile("lock; addl %0, %1"
      : "+r" (value), "+m" (*variable) // input+output
      : // No input-only
      : "memory"
    );
#elif defined __arm__
    extern void fetch_and_add_arm(int* variable, int value);
    fetch_and_add_arm(variable, value);
#elif defined __aarch64__
    extern void fetch_and_add_arm64(int* variable, int value);
    fetch_and_add_arm64(variable, value);
#elif defined __riscv
    extern void fetch_and_add_riscv64(int* variable, int value);
    fetch_and_add_riscv64(variable, value);
#else
    *variable += value;
#endif
}

/* enable/disable checking. This can be used in signal handlers. */
void __bounds_checking (int no_check)
{
#if HAVE_TLS_FUNC || HAVE_TLS_VAR
    NO_CHECKING_SET(NO_CHECKING_GET() + no_check);
#else
    fetch_and_add (&no_checking, no_check);
#endif
}

/* enable/disable treating bound errors as non-fatal. This can be used in
   signal handlers. */
void __bound_never_fatal (int neverfatal)
{
    fetch_and_add (&never_fatal, neverfatal);
}
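
/* Typical use from checked code (a sketch, not a fixed API contract):
 *
 *     void handler(int sig)
 *     {
 *         __bounds_checking(1);      suspend bound checking
 *         ...
 *         __bounds_checking(-1);     re-enable it
 *     }
 *
 * The argument is added to a counter, so such calls may be nested. */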
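
/* How these entry points are used (a sketch): code compiled with 'tcc -b'
   routes pointer arithmetic through __bound_ptr_add() and checks every
   N-byte memory access through __bound_ptr_indirN(), so an out-of-bounds
   access either reports an error or yields INVALID_POINTER and traps on the
   following dereference.  The instrumentation itself is emitted by the
   compiler, not by this file. */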
/* return '(p + offset)' for pointer arithmetic (a pointer can reach
   the end of a region in this case) */
void * __bound_ptr_add(void *p, size_t offset)
{
    size_t addr = (size_t)p;

    if (NO_CHECKING_GET())
        return p + offset;

    dprintf(stderr, "%s, %s(): %p 0x%lx\n",
            __FILE__, __FUNCTION__, p, (unsigned long)offset);

    WAIT_SEM ();
    INCR_COUNT(bound_ptr_add_count);
    if (tree) {
        addr -= tree->start;
        if (addr >= tree->size) {
            addr = (size_t)p;
            tree = splay (addr, tree);
            addr -= tree->start;
        }
        if (addr >= tree->size) {
            addr = (size_t)p;
            tree = splay_end (addr, tree);
            addr -= tree->start;
        }
        if (addr <= tree->size) {
            if (tree->is_invalid || addr + offset > tree->size) {
                POST_SEM ();
                if (print_warn_ptr_add)
                    bound_warning("%p is outside of the region", p + offset);
                if (never_fatal <= 0)
                    return INVALID_POINTER; /* return an invalid pointer */
                return p + offset;
            }
        }
        else if (p) {  /* Allow NULL + offset. offsetof is using it. */
            INCR_COUNT(bound_not_found);
            POST_SEM ();
            bound_not_found_warning (__FILE__, __FUNCTION__, p);
            return p + offset;
        }
    }
    POST_SEM ();
    return p + offset;
}
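
/* BOUND_PTR_INDIR(dsize) instantiates one checker per access size: the
   resulting __bound_ptr_indir{1,2,4,8,12,16}() functions verify that the
   whole dsize-byte access starting at 'p + offset' fits inside the region
   found for 'p'. */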
/* return '(p + offset)' for pointer indirection (the resulting pointer must
   be strictly inside the region) */
#define BOUND_PTR_INDIR(dsize)                                          \
void * __bound_ptr_indir ## dsize (void *p, size_t offset)             \
{                                                                       \
    size_t addr = (size_t)p;                                            \
                                                                        \
    if (NO_CHECKING_GET())                                              \
        return p + offset;                                              \
                                                                        \
    dprintf(stderr, "%s, %s(): %p 0x%lx\n",                             \
            __FILE__, __FUNCTION__, p, (unsigned long)offset);          \
    WAIT_SEM ();                                                        \
    INCR_COUNT(bound_ptr_indir ## dsize ## _count);                     \
    if (tree) {                                                         \
        addr -= tree->start;                                            \
        if (addr >= tree->size) {                                       \
            addr = (size_t)p;                                           \
            tree = splay (addr, tree);                                  \
            addr -= tree->start;                                        \
        }                                                               \
        if (addr >= tree->size) {                                       \
            addr = (size_t)p;                                           \
            tree = splay_end (addr, tree);                              \
            addr -= tree->start;                                        \
        }                                                               \
        if (addr <= tree->size) {                                       \
            if (tree->is_invalid || addr + offset + dsize > tree->size) {\
                POST_SEM ();                                            \
                bound_warning("%p is outside of the region", p + offset); \
                if (never_fatal <= 0)                                   \
                    return INVALID_POINTER; /* return an invalid pointer */ \
                return p + offset;                                      \
            }                                                           \
        }                                                               \
        else {                                                          \
            INCR_COUNT(bound_not_found);                                \
            POST_SEM ();                                                \
            bound_not_found_warning (__FILE__, __FUNCTION__, p);        \
            return p + offset;                                          \
        }                                                               \
    }                                                                   \
    POST_SEM ();                                                        \
    return p + offset;                                                  \
}

BOUND_PTR_INDIR(1)
BOUND_PTR_INDIR(2)
BOUND_PTR_INDIR(4)
BOUND_PTR_INDIR(8)
BOUND_PTR_INDIR(12)
BOUND_PTR_INDIR(16)

#if defined(__GNUC__) && (__GNUC__ >= 6)
/*
 * At least gcc 6.2 complains when __builtin_frame_address is used with
 * nonzero argument.
 */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wframe-address"
#endif

/* return the frame pointer of the caller */
#define GET_CALLER_FP(fp)\
{\
    fp = (size_t)__builtin_frame_address(1);\
}
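
/* __bound_local_new()/__bound_local_delete() receive a pointer to an array
   of (offset, size) pairs terminated by a zero offset; each offset is
   relative to the caller's frame pointer and describes one bounds-checked
   local variable. */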
/* called when entering a function to add all the local regions */
void FASTCALL __bound_local_new(void *p1)
{
    size_t addr, fp, *p = p1;

    if (NO_CHECKING_GET())
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s(): p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    while ((addr = p[0])) {
        INCR_COUNT(bound_local_new_count);
        tree = splay_insert(addr + fp, p[1], tree);
        p += 2;
    }
    POST_SEM ();
#if BOUND_DEBUG
    if (print_calls) {
        p = p1;
        while ((addr = p[0])) {
            dprintf(stderr, "%s, %s(): %p 0x%lx\n",
                    __FILE__, __FUNCTION__,
                    (void *) (addr + fp), (unsigned long) p[1]);
            p += 2;
        }
    }
#endif
}

/* called when leaving a function to delete all the local regions */
void FASTCALL __bound_local_delete(void *p1)
{
    size_t addr, fp, *p = p1;

    if (NO_CHECKING_GET())
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s(): p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    while ((addr = p[0])) {
        INCR_COUNT(bound_local_delete_count);
        tree = splay_delete(addr + fp, tree);
        p += 2;
    }
    if (alloca_list) {
        alloca_list_type *last = NULL;
        alloca_list_type *cur = alloca_list;

        do {
            if (cur->fp == fp) {
                if (last)
                    last->next = cur->next;
                else
                    alloca_list = cur->next;
                tree = splay_delete ((size_t) cur->p, tree);
                dprintf(stderr, "%s, %s(): remove alloca/vla %p\n",
                        __FILE__, __FUNCTION__, cur->p);
                BOUND_FREE (cur);
                cur = last ? last->next : alloca_list;
            }
            else {
                last = cur;
                cur = cur->next;
            }
        } while (cur);
    }
    if (jmp_list) {
        jmp_list_type *last = NULL;
        jmp_list_type *cur = jmp_list;

        do {
            if (cur->fp == fp) {
                if (last)
                    last->next = cur->next;
                else
                    jmp_list = cur->next;
                dprintf(stderr, "%s, %s(): remove setjmp %p\n",
                        __FILE__, __FUNCTION__, cur->penv);
                BOUND_FREE (cur);
                cur = last ? last->next : jmp_list;
            }
            else {
                last = cur;
                cur = cur->next;
            }
        } while (cur);
    }

    POST_SEM ();
#if BOUND_DEBUG
    if (print_calls) {
        p = p1;
        while ((addr = p[0])) {
            if (addr != 1) {
                dprintf(stderr, "%s, %s(): %p 0x%lx\n",
                        __FILE__, __FUNCTION__,
                        (void *) (addr + fp), (unsigned long) p[1]);
            }
            p += 2;
        }
    }
#endif
}
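
/* alloca() and VLA regions are registered below together with the caller's
   frame pointer, so they can be dropped either when a later overlapping
   allocation reuses the space (see the overlap test) or when the owning
   function returns (__bound_local_delete above). */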
/* used by alloca */
void __bound_new_region(void *p, size_t size)
{
    size_t fp;
    alloca_list_type *last;
    alloca_list_type *cur;
    alloca_list_type *new;

    if (NO_CHECKING_GET())
        return;

    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, p, (unsigned long)size);
    GET_CALLER_FP (fp);
    new = BOUND_MALLOC (sizeof (alloca_list_type));
    WAIT_SEM ();
    INCR_COUNT(bound_alloca_count);
    last = NULL;
    cur = alloca_list;
    while (cur) {
#if defined(__i386__) || (defined(__arm__) && !defined(__ARM_EABI__))
        int align = 4;
#elif defined(__arm__)
        int align = 8;
#else
        int align = 16;
#endif
        void *cure = (void *)((char *)cur->p + ((cur->size + align) & -align));
        void *pe = (void *)((char *)p + ((size + align) & -align));
        if (cur->fp == fp && ((cur->p <= p && cure > p) ||
                              (p <= cur->p && pe > cur->p))) {
            if (last)
                last->next = cur->next;
            else
                alloca_list = cur->next;
            tree = splay_delete((size_t)cur->p, tree);
            break;
        }
        last = cur;
        cur = cur->next;
    }
    tree = splay_insert((size_t)p, size, tree);
    if (new) {
        new->fp = fp;
        new->p = p;
        new->size = size;
        new->next = alloca_list;
        alloca_list = new;
    }
    POST_SEM ();
    if (cur) {
        dprintf(stderr, "%s, %s(): remove alloca/vla %p\n",
                __FILE__, __FUNCTION__, cur->p);
        BOUND_FREE (cur);
    }
}

void __bound_setjmp(jmp_buf env)
{
    jmp_list_type *jl;
    void *e = (void *) env;

    if (NO_CHECKING_GET() == 0) {
        dprintf(stderr, "%s, %s(): %p\n", __FILE__, __FUNCTION__, e);
        WAIT_SEM ();
        INCR_COUNT(bound_setjmp_count);
        jl = jmp_list;
        while (jl) {
            if (jl->penv == e)
                break;
            jl = jl->next;
        }
        if (jl == NULL) {
            jl = BOUND_MALLOC (sizeof (jmp_list_type));
            if (jl) {
                jl->penv = e;
                jl->next = jmp_list;
                jmp_list = jl;
            }
        }
        if (jl) {
            size_t fp;

            GET_CALLER_FP (fp);
            jl->fp = fp;
            jl->end_fp = (size_t)__builtin_frame_address(0);
            jl->tid = BOUND_GET_TID;
        }
        POST_SEM ();
    }
}
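
/* Common implementation of __bound_longjmp()/__bound_siglongjmp(): once the
   matching setjmp record of this thread is found, the setjmp records and the
   stack regions belonging to the frames being abandoned (between the current
   frame pointer and the one saved at setjmp time) are removed before the
   real longjmp is performed. */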
static void __bound_long_jump(jmp_buf env, int val, int sig, const char *func)
{
    jmp_list_type *jl;
    void *e;
    BOUND_TID_TYPE tid;

    if (NO_CHECKING_GET() == 0) {
        e = (void *)env;
        tid = BOUND_GET_TID;
        dprintf(stderr, "%s, %s(): %p\n", __FILE__, func, e);
        WAIT_SEM();
        INCR_COUNT(bound_longjmp_count);
        jl = jmp_list;
        while (jl) {
            if (jl->penv == e && jl->tid == tid) {
                size_t start_fp = (size_t)__builtin_frame_address(0);
                size_t end_fp = jl->end_fp;
                jmp_list_type *cur = jmp_list;
                jmp_list_type *last = NULL;

                while (cur->penv != e || cur->tid != tid) {
                    if (cur->tid == tid) {
                        dprintf(stderr, "%s, %s(): remove setjmp %p\n",
                                __FILE__, func, cur->penv);
                        if (last)
                            last->next = cur->next;
                        else
                            jmp_list = cur->next;
                        BOUND_FREE (cur);
                        cur = last ? last->next : jmp_list;
                    }
                    else {
                        last = cur;
                        cur = cur->next;
                    }
                }
                for (;;) {
                    Tree *t = tree;
                    alloca_list_type *last;
                    alloca_list_type *cur;

                    while (t && (t->start < start_fp || t->start > end_fp))
                        if (t->start < start_fp)
                            t = t->right;
                        else
                            t = t->left;
                    if (t == NULL)
                        break;
                    last = NULL;
                    cur = alloca_list;
                    while (cur) {
                        if ((size_t) cur->p == t->start) {
                            dprintf(stderr, "%s, %s(): remove alloca/vla %p\n",
                                    __FILE__, func, cur->p);
                            if (last)
                                last->next = cur->next;
                            else
                                alloca_list = cur->next;
                            BOUND_FREE (cur);
                            break;
                        }
                        last = cur;
                        cur = cur->next;
                    }
                    dprintf(stderr, "%s, %s(): delete %p\n",
                            __FILE__, func, (void *) t->start);
                    tree = splay_delete(t->start, tree);
                }
                break;
            }
            jl = jl->next;
        }
        POST_SEM();
    }
#if !defined(_WIN32)
    sig ? siglongjmp(env, val) :
#endif
    longjmp (env, val);
}

void __bound_longjmp(jmp_buf env, int val)
{
    __bound_long_jump(env,val, 0, __FUNCTION__);
}

#if !defined(_WIN32)
void __bound_siglongjmp(jmp_buf env, int val)
{
    __bound_long_jump(env,val, 1, __FUNCTION__);
}
#endif

#if defined(__GNUC__) && (__GNUC__ >= 6)
#pragma GCC diagnostic pop
#endif
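
/* Initialize the checker.  'p' optionally points to a (start, size) table
   describing the static variables of the module being registered; 'mode' is
   < 0 for lazy initialization from the constructor, 0 for a normally linked
   program and > 0 for 'tcc -run'. */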
void __bound_init(size_t *p, int mode)
{
    dprintf(stderr, "%s, %s(): start %s\n", __FILE__, __FUNCTION__,
            mode < 0 ? "lazy" : mode == 0 ? "normal use" : "for -run");

    if (inited) {
        WAIT_SEM();
        goto add_bounds;
    }
    inited = 1;

#if HAVE_TLS_FUNC
#if defined(_WIN32)
    no_checking_key = TlsAlloc();
    TlsSetValue(no_checking_key, &no_checking);
#else
    pthread_key_create(&no_checking_key, NULL);
    pthread_setspecific(no_checking_key, &no_checking);
#endif
#endif
    NO_CHECKING_SET(1);

    print_warn_ptr_add = getenv ("TCC_BOUNDS_WARN_POINTER_ADD") != NULL;
    print_calls = getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL;
    print_heap = getenv ("TCC_BOUNDS_PRINT_HEAP") != NULL;
    print_statistic = getenv ("TCC_BOUNDS_PRINT_STATISTIC") != NULL;
    never_fatal = getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL;

    INIT_SEM ();

#if MALLOC_REDIR
    {
        void *addr = mode > 0 ? RTLD_DEFAULT : RTLD_NEXT;

        /* tcc -run requires RTLD_DEFAULT. Normal usage requires RTLD_NEXT,
           but using RTLD_NEXT with -run segfaults on MacOS in dyld as the
           generated code segment isn't registered with dyld and hence the
           caller image of dlsym isn't known to it */
        *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        if (malloc_redir == NULL) {
            dprintf(stderr, "%s, %s(): use RTLD_DEFAULT\n",
                    __FILE__, __FUNCTION__);
            addr = RTLD_DEFAULT;
            *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        }
        *(void **) (&calloc_redir) = dlsym (addr, "calloc");
        *(void **) (&free_redir) = dlsym (addr, "free");
        *(void **) (&realloc_redir) = dlsym (addr, "realloc");
        *(void **) (&memalign_redir) = dlsym (addr, "memalign");
        dprintf(stderr, "%s, %s(): malloc_redir %p\n",
                __FILE__, __FUNCTION__, malloc_redir);
        dprintf(stderr, "%s, %s(): free_redir %p\n",
                __FILE__, __FUNCTION__, free_redir);
        dprintf(stderr, "%s, %s(): realloc_redir %p\n",
                __FILE__, __FUNCTION__, realloc_redir);
        dprintf(stderr, "%s, %s(): memalign_redir %p\n",
                __FILE__, __FUNCTION__, memalign_redir);
        if (malloc_redir == NULL || free_redir == NULL)
            bound_alloc_error ("Cannot redirect malloc/free");
#if HAVE_PTHREAD_CREATE
        *(void **) (&pthread_create_redir) = dlsym (addr, "pthread_create");
        dprintf(stderr, "%s, %s(): pthread_create_redir %p\n",
                __FILE__, __FUNCTION__, pthread_create_redir);
        if (pthread_create_redir == NULL)
            bound_alloc_error ("Cannot redirect pthread_create");
#endif
#if HAVE_SIGNAL
        *(void **) (&signal_redir) = dlsym (addr, "signal");
        dprintf(stderr, "%s, %s(): signal_redir %p\n",
                __FILE__, __FUNCTION__, signal_redir);
        if (signal_redir == NULL)
            bound_alloc_error ("Cannot redirect signal");
#endif
#if HAVE_SIGACTION
        *(void **) (&sigaction_redir) = dlsym (addr, "sigaction");
        dprintf(stderr, "%s, %s(): sigaction_redir %p\n",
                __FILE__, __FUNCTION__, sigaction_redir);
        if (sigaction_redir == NULL)
            bound_alloc_error ("Cannot redirect sigaction");
#endif
#if HAVE_FORK
        *(void **) (&fork_redir) = dlsym (addr, "fork");
        dprintf(stderr, "%s, %s(): fork_redir %p\n",
                __FILE__, __FUNCTION__, fork_redir);
        if (fork_redir == NULL)
            bound_alloc_error ("Cannot redirect fork");
#endif
    }
#endif

#ifdef __linux__
    {
        FILE *fp;
        unsigned char found;
        unsigned long start;
        unsigned long end;
        unsigned long ad =
            (unsigned long) __builtin_return_address(0);
        char line[1000];

        /* Display exec name. Useful when a lot of code is compiled with tcc */
        fp = fopen ("/proc/self/comm", "r");
        if (fp) {
            memset (exec, 0, sizeof(exec));
            fread (exec, 1, sizeof(exec) - 2, fp);
            if (strchr(exec,'\n'))
                *strchr(exec,'\n') = '\0';
            strcat (exec, ":");
            fclose (fp);
        }
        /* check if dlopen is used (is there a better way?) */
        found = 0;
        fp = fopen ("/proc/self/maps", "r");
        if (fp) {
            while (fgets (line, sizeof(line), fp)) {
                if (sscanf (line, "%lx-%lx", &start, &end) == 2 &&
                    ad >= start && ad < end) {
                    found = 1;
                    break;
                }
                if (strstr (line,"[heap]"))
                    break;
            }
            fclose (fp);
        }
        if (found == 0) {
            use_sem = 1;
            no_strdup = 1;
        }
    }
#endif

    WAIT_SEM ();

#if HAVE_CTYPE
#ifdef __APPLE__
    tree = splay_insert((size_t) &_DefaultRuneLocale,
                        sizeof (_DefaultRuneLocale), tree);
#else
    /* XXX: Does not work if locale is changed */
    tree = splay_insert((size_t) __ctype_b_loc(),
                        sizeof (unsigned short *), tree);
    tree = splay_insert((size_t) (*__ctype_b_loc() - 128),
                        384 * sizeof (unsigned short), tree);
    tree = splay_insert((size_t) __ctype_tolower_loc(),
                        sizeof (__int32_t *), tree);
    tree = splay_insert((size_t) (*__ctype_tolower_loc() - 128),
                        384 * sizeof (__int32_t), tree);
    tree = splay_insert((size_t) __ctype_toupper_loc(),
                        sizeof (__int32_t *), tree);
    tree = splay_insert((size_t) (*__ctype_toupper_loc() - 128),
                        384 * sizeof (__int32_t), tree);
#endif
#endif
#if HAVE_ERRNO
    tree = splay_insert((size_t) (&errno), sizeof (int), tree);
#endif

add_bounds:
    if (!p)
        goto no_bounds;

    /* add all static bound check values */
    while (p[0] != 0) {
        tree = splay_insert(p[0], p[1], tree);
#if BOUND_DEBUG
        if (print_calls) {
            dprintf(stderr, "%s, %s(): static var %p 0x%lx\n",
                    __FILE__, __FUNCTION__,
                    (void *) p[0], (unsigned long) p[1]);
        }
#endif
        p += 2;
    }
no_bounds:

    POST_SEM ();
    NO_CHECKING_SET(0);
    dprintf(stderr, "%s, %s(): end\n\n", __FILE__, __FUNCTION__);
}
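
/* Register the program arguments and the environment strings as valid
   regions.  On glibc >= 2.4 and on Windows this runs automatically as a
   constructor; on other targets it must be called explicitly at startup. */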
void
#if (defined(__GLIBC__) && (__GLIBC_MINOR__ >= 4)) || defined(_WIN32)
__attribute__((constructor))
#endif
__bound_main_arg(int argc, char **argv, char **envp)
{
    __bound_init (0, -1);
    if (argc && argv) {
        int i;

        WAIT_SEM ();
        for (i = 0; i < argc; i++)
            tree = splay_insert((size_t) argv[i], strlen (argv[i]) + 1, tree);
        tree = splay_insert((size_t) argv, (argc + 1) * sizeof(char *), tree);
        POST_SEM ();
#if BOUND_DEBUG
        if (print_calls) {
            for (i = 0; i < argc; i++)
                dprintf(stderr, "%s, %s(): arg %p 0x%lx\n",
                        __FILE__, __FUNCTION__,
                        argv[i], (unsigned long)(strlen (argv[i]) + 1));
            dprintf(stderr, "%s, %s(): argv %p %d\n",
                    __FILE__, __FUNCTION__, argv,
                    (int)((argc + 1) * sizeof(char *)));
        }
#endif
    }

    if (envp && *envp) {
        char **p = envp;

        WAIT_SEM ();
        while (*p) {
            tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
            ++p;
        }
        tree = splay_insert((size_t) envp, (++p - envp) * sizeof(char *), tree);
        POST_SEM ();
#if BOUND_DEBUG
        if (print_calls) {
            p = envp;
            while (*p) {
                dprintf(stderr, "%s, %s(): env %p 0x%lx\n",
                        __FILE__, __FUNCTION__,
                        *p, (unsigned long)(strlen (*p) + 1));
                ++p;
            }
            dprintf(stderr, "%s, %s(): environ %p %d\n",
                    __FILE__, __FUNCTION__, envp,
                    (int)((++p - envp) * sizeof(char *)));
        }
#endif
    }
}
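
/* Destructor: release all bookkeeping memory, report heap blocks that are
   still allocated when TCC_BOUNDS_PRINT_HEAP is set, and dump the call
   counters when TCC_BOUNDS_PRINT_STATISTIC is set. */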
void __attribute__((destructor)) __bound_exit(void)
{
    int i;
    static const char * const alloc_type[] = {
        "", "malloc", "calloc", "realloc", "memalign", "strdup"
    };

    dprintf(stderr, "%s, %s():\n", __FILE__, __FUNCTION__);

    if (inited) {
#if !defined(_WIN32) && !defined(__APPLE__) && !defined TCC_MUSL && \
    !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__NetBSD__)
        if (print_heap) {
            extern void __libc_freeres (void);
            __libc_freeres ();
        }
#endif

        NO_CHECKING_SET(1);

        TRY_SEM ();
        while (alloca_list) {
            alloca_list_type *next = alloca_list->next;

            tree = splay_delete ((size_t) alloca_list->p, tree);
            BOUND_FREE (alloca_list);
            alloca_list = next;
        }
        while (jmp_list) {
            jmp_list_type *next = jmp_list->next;

            BOUND_FREE (jmp_list);
            jmp_list = next;
        }
        for (i = 0; i < FREE_REUSE_SIZE; i++) {
            if (free_reuse_list[i]) {
                tree = splay_delete ((size_t) free_reuse_list[i], tree);
                BOUND_FREE (free_reuse_list[i]);
            }
        }
        while (tree) {
            if (print_heap && tree->type != 0)
                fprintf (stderr, "%s, %s(): %s found size %lu\n",
                         __FILE__, __FUNCTION__, alloc_type[tree->type],
                         (unsigned long) tree->size);
            tree = splay_delete (tree->start, tree);
        }
#if TREE_REUSE
        while (tree_free_list) {
            Tree *next = tree_free_list->left;
            BOUND_FREE (tree_free_list);
            tree_free_list = next;
        }
#endif
        POST_SEM ();
        EXIT_SEM ();
#if HAVE_TLS_FUNC
#if defined(_WIN32)
        TlsFree(no_checking_key);
#else
        pthread_key_delete(no_checking_key);
#endif
#endif
        inited = 0;
        if (print_statistic) {
#if BOUND_STATISTIC
            fprintf (stderr, "bound_ptr_add_count %llu\n", bound_ptr_add_count);
            fprintf (stderr, "bound_ptr_indir1_count %llu\n", bound_ptr_indir1_count);
            fprintf (stderr, "bound_ptr_indir2_count %llu\n", bound_ptr_indir2_count);
            fprintf (stderr, "bound_ptr_indir4_count %llu\n", bound_ptr_indir4_count);
            fprintf (stderr, "bound_ptr_indir8_count %llu\n", bound_ptr_indir8_count);
            fprintf (stderr, "bound_ptr_indir12_count %llu\n", bound_ptr_indir12_count);
            fprintf (stderr, "bound_ptr_indir16_count %llu\n", bound_ptr_indir16_count);
            fprintf (stderr, "bound_local_new_count %llu\n", bound_local_new_count);
            fprintf (stderr, "bound_local_delete_count %llu\n", bound_local_delete_count);
|
|
|
|
fprintf (stderr, "bound_malloc_count %llu\n", bound_malloc_count);
|
|
|
|
fprintf (stderr, "bound_calloc_count %llu\n", bound_calloc_count);
|
|
|
|
fprintf (stderr, "bound_realloc_count %llu\n", bound_realloc_count);
|
|
|
|
fprintf (stderr, "bound_free_count %llu\n", bound_free_count);
|
|
|
|
fprintf (stderr, "bound_memalign_count %llu\n", bound_memalign_count);
|
|
|
|
fprintf (stderr, "bound_mmap_count %llu\n", bound_mmap_count);
|
|
|
|
fprintf (stderr, "bound_munmap_count %llu\n", bound_munmap_count);
|
|
|
|
fprintf (stderr, "bound_alloca_count %llu\n", bound_alloca_count);
|
2020-05-24 02:02:41 +08:00
|
|
|
fprintf (stderr, "bound_setjmp_count %llu\n", bound_setjmp_count);
|
|
|
|
fprintf (stderr, "bound_longjmp_count %llu\n", bound_longjmp_count);
|
2019-12-13 03:49:35 +08:00
|
|
|
fprintf (stderr, "bound_mempcy_count %llu\n", bound_mempcy_count);
|
|
|
|
fprintf (stderr, "bound_memcmp_count %llu\n", bound_memcmp_count);
|
|
|
|
fprintf (stderr, "bound_memmove_count %llu\n", bound_memmove_count);
|
|
|
|
fprintf (stderr, "bound_memset_count %llu\n", bound_memset_count);
|
|
|
|
fprintf (stderr, "bound_strlen_count %llu\n", bound_strlen_count);
|
|
|
|
fprintf (stderr, "bound_strcpy_count %llu\n", bound_strcpy_count);
|
|
|
|
fprintf (stderr, "bound_strncpy_count %llu\n", bound_strncpy_count);
|
|
|
|
fprintf (stderr, "bound_strcmp_count %llu\n", bound_strcmp_count);
|
|
|
|
fprintf (stderr, "bound_strncmp_count %llu\n", bound_strncmp_count);
|
|
|
|
fprintf (stderr, "bound_strcat_count %llu\n", bound_strcat_count);
|
|
|
|
fprintf (stderr, "bound_strchr_count %llu\n", bound_strchr_count);
|
|
|
|
fprintf (stderr, "bound_strdup_count %llu\n", bound_strdup_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
fprintf (stderr, "bound_not_found %llu\n", bound_not_found);
|
|
|
|
#endif
|
|
|
|
#if BOUND_STATISTIC_SPLAY
|
|
|
|
fprintf (stderr, "bound_splay %llu\n", bound_splay);
|
|
|
|
fprintf (stderr, "bound_splay_end %llu\n", bound_splay_end);
|
|
|
|
fprintf (stderr, "bound_splay_insert %llu\n", bound_splay_insert);
|
|
|
|
fprintf (stderr, "bound_splay_delete %llu\n", bound_splay_delete);
|
2019-12-13 03:49:35 +08:00
|
|
|
#endif
|
2020-01-15 15:53:19 +08:00
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2002-01-05 08:41:11 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
#if HAVE_PTHREAD_CREATE
|
2020-09-08 20:31:58 +08:00
|
|
|
typedef struct {
|
|
|
|
void *(*start_routine) (void *);
|
|
|
|
void *arg;
|
2020-11-09 17:57:14 +08:00
|
|
|
sigset_t old_mask;
|
2020-09-08 20:31:58 +08:00
|
|
|
} bound_thread_create_type;
|
|
|
|
|
|
|
|
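/* thread start trampoline: set up the per-thread "no checking" flag,
   restore the signal mask saved by the pthread_create() wrapper and
   run the user's start routine. */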
static void *bound_thread_create(void *bdata)
|
|
|
|
{
|
|
|
|
bound_thread_create_type *data = (bound_thread_create_type *) bdata;
|
|
|
|
void *retval;
|
2020-11-09 17:57:14 +08:00
|
|
|
#if HAVE_TLS_FUNC
|
2020-09-08 20:31:58 +08:00
|
|
|
int *p = (int *) BOUND_MALLOC(sizeof(int));
|
|
|
|
|
|
|
|
if (!p) bound_alloc_error("bound_thread_create malloc");
|
|
|
|
*p = 0;
|
|
|
|
pthread_setspecific(no_checking_key, p);
|
2020-11-09 17:57:14 +08:00
|
|
|
#endif
|
|
|
|
pthread_sigmask(SIG_SETMASK, &data->old_mask, NULL);
|
2020-09-08 20:31:58 +08:00
|
|
|
retval = data->start_routine(data->arg);
|
2020-11-09 17:57:14 +08:00
|
|
|
#if HAVE_TLS_FUNC
|
2020-09-08 20:31:58 +08:00
|
|
|
pthread_setspecific(no_checking_key, NULL);
|
|
|
|
BOUND_FREE (p);
|
2020-11-09 17:57:14 +08:00
|
|
|
#endif
|
2020-09-08 20:31:58 +08:00
|
|
|
BOUND_FREE (data);
|
|
|
|
return retval;
|
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
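/* pthread_create() wrapper: block all signals while the trampoline
   data is set up, start the thread through bound_thread_create() and
   restore the caller's signal mask afterwards. */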
int pthread_create(pthread_t *thread, const pthread_attr_t *attr,
|
|
|
|
void *(*start_routine) (void *), void *arg)
|
|
|
|
{
|
2020-11-09 17:57:14 +08:00
|
|
|
int retval;
|
|
|
|
bound_thread_create_type *data;
|
|
|
|
sigset_t mask;
|
|
|
|
sigset_t old_mask;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
use_sem = 1;
|
2020-05-05 14:31:57 +08:00
|
|
|
dprintf (stderr, "%s, %s()\n", __FILE__, __FUNCTION__);
|
2020-11-09 17:57:14 +08:00
|
|
|
sigfillset(&mask);
|
|
|
|
pthread_sigmask(SIG_SETMASK, &mask, &old_mask);
|
|
|
|
data = (bound_thread_create_type *) BOUND_MALLOC(sizeof(bound_thread_create_type));
|
|
|
|
if (!data) bound_alloc_error("bound_thread_create malloc");
|
|
|
|
data->start_routine = start_routine;
|
|
|
|
data->arg = arg;
|
|
|
|
data->old_mask = old_mask;
|
|
|
|
retval = pthread_create_redir(thread, attr, bound_thread_create, data);
|
|
|
|
pthread_sigmask(SIG_SETMASK, &old_mask, NULL);
|
|
|
|
return retval;
|
2020-09-08 20:31:58 +08:00
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#if HAVE_SIGNAL || HAVE_SIGACTION
|
|
|
|
typedef union {
|
|
|
|
#if HAVE_SIGNAL
|
|
|
|
bound_sig signal_handler;
|
|
|
|
#endif
|
|
|
|
#if HAVE_SIGACTION
|
|
|
|
void (*sig_handler)(int);
|
|
|
|
void (*sig_sigaction)(int, siginfo_t *, void *);
|
|
|
|
#endif
|
|
|
|
} bound_sig_type;
|
|
|
|
|
|
|
|
static unsigned char bound_sig_used[NSIG];
|
|
|
|
static bound_sig_type bound_sig_data[NSIG];
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#if HAVE_SIGNAL
|
|
|
|
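/* run the user's signal handler with bounds checking disabled, since
   the signal may interrupt code that is inside the checking
   functions. */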
static void signal_handler(int sig)
|
|
|
|
{
|
|
|
|
__bounds_checking(1);
|
|
|
|
bound_sig_data[sig].signal_handler(sig);
|
|
|
|
__bounds_checking(-1);
|
|
|
|
}
|
|
|
|
|
|
|
|
bound_sig signal(int signum, bound_sig handler)
|
|
|
|
{
|
|
|
|
bound_sig retval;
|
|
|
|
|
|
|
|
dprintf (stderr, "%s, %s() %d %p\n", __FILE__, __FUNCTION__,
|
|
|
|
signum, handler);
|
|
|
|
retval = signal_redir(signum, handler ? signal_handler : handler);
|
|
|
|
if (retval != SIG_ERR) {
|
|
|
|
if (bound_sig_used[signum])
|
|
|
|
retval = bound_sig_data[signum].signal_handler;
|
|
|
|
if (handler) {
|
|
|
|
bound_sig_used[signum] = 1;
|
|
|
|
bound_sig_data[signum].signal_handler = handler;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return retval;
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#if HAVE_SIGACTION
|
|
|
|
static void sig_handler(int sig)
|
|
|
|
{
|
|
|
|
__bounds_checking(1);
|
|
|
|
bound_sig_data[sig].sig_handler(sig);
|
|
|
|
__bounds_checking(-1);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void sig_sigaction(int sig, siginfo_t *info, void *ucontext)
|
|
|
|
{
|
|
|
|
__bounds_checking(1);
|
|
|
|
bound_sig_data[sig].sig_sigaction(sig, info, ucontext);
|
|
|
|
__bounds_checking(-1);
|
|
|
|
}
|
|
|
|
|
|
|
|
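/* sigaction() wrapper: install a trampoline that disables bounds
   checking around the user's handler and report the user's previous
   handler through *oldact. */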
int sigaction(int signum, const struct sigaction *act, struct sigaction *oldact)
|
|
|
|
{
|
|
|
|
int retval;
|
|
|
|
struct sigaction nact, oact;
|
|
|
|
|
|
|
|
dprintf (stderr, "%s, %s() %d %p %p\n", __FILE__, __FUNCTION__,
|
|
|
|
signum, act, oldact);
|
2021-01-26 23:51:20 +08:00
|
|
|
|
|
|
|
if (sigaction_redir == NULL)
|
|
|
|
__bound_init(0,-1);
|
|
|
|
|
2020-09-08 20:31:58 +08:00
|
|
|
if (act) {
|
|
|
|
nact = *act;
|
|
|
|
if (nact.sa_flags & SA_SIGINFO)
|
|
|
|
nact.sa_sigaction = sig_sigaction;
|
|
|
|
else
|
|
|
|
nact.sa_handler = sig_handler;
|
|
|
|
retval = sigaction_redir(signum, &nact, &oact);
|
|
|
|
}
|
|
|
|
else
|
|
|
|
retval = sigaction_redir(signum, act, &oact);
|
|
|
|
if (retval >= 0) {
|
|
|
|
if (bound_sig_used[signum]) {
|
|
|
|
if (oact.sa_flags & SA_SIGINFO)
|
|
|
|
oact.sa_sigaction = bound_sig_data[signum].sig_sigaction;
|
|
|
|
else
|
|
|
|
oact.sa_handler = bound_sig_data[signum].sig_handler;
|
|
|
|
}
|
|
|
|
if (oldact) {
|
|
|
|
*oldact = oact;
|
|
|
|
}
|
|
|
|
if (act) {
|
|
|
|
bound_sig_used[signum] = 1;
|
|
|
|
if (act->sa_flags & SA_SIGINFO)
|
|
|
|
bound_sig_data[signum].sig_sigaction = act->sa_sigaction;
|
|
|
|
else
|
|
|
|
bound_sig_data[signum].sig_handler = act->sa_handler;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return retval;
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#if HAVE_FORK
|
|
|
|
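/* fork() wrapper: hold the bounds-checking lock across the fork so
   the child does not start with it locked; the child re-creates its
   own semaphore, the parent simply releases it. */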
pid_t fork(void)
|
|
|
|
{
|
2020-09-14 14:24:01 +08:00
|
|
|
pid_t retval;
|
2020-09-08 20:31:58 +08:00
|
|
|
|
2020-09-14 14:24:01 +08:00
|
|
|
WAIT_SEM();
|
|
|
|
retval = (*fork_redir)();
|
2020-09-15 01:31:56 +08:00
|
|
|
if (retval == 0)
|
|
|
|
INIT_SEM();
|
|
|
|
else
|
|
|
|
POST_SEM();
|
2020-09-08 20:31:58 +08:00
|
|
|
return retval;
|
2020-01-15 15:53:19 +08:00
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
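/* malloc() replacement: allocate one extra guard byte and record the
   region in the splay tree. Requests that arrive before the real
   malloc has been resolved via dlsym are served from a small static
   pool. */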
#if MALLOC_REDIR
|
|
|
|
void *malloc(size_t size)
|
|
|
|
#else
|
2002-01-05 08:41:11 +08:00
|
|
|
void *__bound_malloc(size_t size, const void *caller)
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2002-01-04 07:12:29 +08:00
|
|
|
{
|
|
|
|
void *ptr;
|
2015-07-30 04:53:57 +08:00
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
/* This will catch the first dlsym call from __bound_init */
|
|
|
|
if (malloc_redir == NULL) {
|
2020-05-22 11:06:08 +08:00
|
|
|
__bound_init (0, -1);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (malloc_redir == NULL) {
|
|
|
|
ptr = &initial_pool[pool_index];
|
|
|
|
pool_index = (pool_index + size + 15) & ~15;
|
|
|
|
if (pool_index >= sizeof (initial_pool))
|
|
|
|
bound_alloc_error ("initial memory pool too small");
|
|
|
|
dprintf (stderr, "%s, %s(): initial %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
|
|
|
return ptr;
|
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
|
|
|
#endif
|
2002-01-04 07:12:29 +08:00
|
|
|
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc,
       this may in fact not be necessary */
|
2020-05-24 02:02:41 +08:00
|
|
|
ptr = BOUND_MALLOC (size + 1);
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
2015-07-30 04:53:57 +08:00
|
|
|
|
2020-09-08 20:31:58 +08:00
|
|
|
if (NO_CHECKING_GET() == 0) {
|
2020-01-15 15:53:19 +08:00
|
|
|
WAIT_SEM ();
|
|
|
|
INCR_COUNT(bound_malloc_count);
|
2015-03-29 16:28:02 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
if (ptr) {
|
2020-06-16 13:39:48 +08:00
|
|
|
tree = splay_insert ((size_t) ptr, size ? size : size + 1, tree);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (tree && tree->start == (size_t) ptr)
|
|
|
|
tree->type = TCC_TYPE_MALLOC;
|
2019-12-11 02:47:33 +08:00
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
POST_SEM ();
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2002-01-04 07:12:29 +08:00
|
|
|
return ptr;
|
|
|
|
}
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
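/* memalign() replacement: allocate with the requested alignment plus
   one guard byte and record the region; falls back to plain malloc
   when memalign is not available and the alignment is small. */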
#if MALLOC_REDIR
|
|
|
|
void *memalign(size_t size, size_t align)
|
|
|
|
#else
|
2002-01-05 08:41:11 +08:00
|
|
|
void *__bound_memalign(size_t size, size_t align, const void *caller)
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2002-01-05 08:41:11 +08:00
|
|
|
{
|
|
|
|
void *ptr;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
#if HAVE_MEMALIGN
|
|
|
|
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc,
       this may in fact not be necessary */
|
2020-05-24 02:02:41 +08:00
|
|
|
ptr = BOUND_MEMALIGN(size + 1, align);
|
2020-01-15 15:53:19 +08:00
|
|
|
#else
|
2002-12-08 22:34:30 +08:00
|
|
|
if (align > 4) {
|
|
|
|
/* XXX: handle it ? */
|
|
|
|
ptr = NULL;
|
|
|
|
} else {
|
|
|
|
        /* we assume that malloc aligns to at least four bytes */
|
2020-05-24 02:02:41 +08:00
|
|
|
ptr = BOUND_MALLOC(size + 1);
|
2002-12-08 22:34:30 +08:00
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
|
|
|
|
2020-09-08 20:31:58 +08:00
|
|
|
if (NO_CHECKING_GET() == 0) {
|
2020-01-15 15:53:19 +08:00
|
|
|
WAIT_SEM ();
|
|
|
|
INCR_COUNT(bound_memalign_count);
|
|
|
|
|
|
|
|
if (ptr) {
|
2020-06-16 13:39:48 +08:00
|
|
|
tree = splay_insert((size_t) ptr, size ? size : size + 1, tree);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (tree && tree->start == (size_t) ptr)
|
|
|
|
tree->type = TCC_TYPE_MEMALIGN;
|
2019-12-11 02:47:33 +08:00
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
POST_SEM ();
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2002-01-05 08:41:11 +08:00
|
|
|
return ptr;
|
|
|
|
}
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
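/* free() replacement: mark the region invalid, fill it with a 0x5a
   pattern and keep the pointer in a small reuse queue so that
   use-after-free accesses can still be detected; the oldest queued
   pointer is the one actually released. */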
#if MALLOC_REDIR
|
|
|
|
void free(void *ptr)
|
|
|
|
#else
|
2002-01-05 08:41:11 +08:00
|
|
|
void __bound_free(void *ptr, const void *caller)
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2002-01-04 07:12:29 +08:00
|
|
|
{
|
2019-12-10 15:07:25 +08:00
|
|
|
size_t addr = (size_t) ptr;
|
|
|
|
void *p;
|
|
|
|
|
2020-08-13 17:19:11 +08:00
|
|
|
if (ptr == NULL || tree == NULL
|
2019-12-11 02:47:33 +08:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
|| ((unsigned char *) ptr >= &initial_pool[0] &&
|
|
|
|
(unsigned char *) ptr < &initial_pool[sizeof(initial_pool)])
|
|
|
|
#endif
|
|
|
|
)
|
2002-01-04 07:12:29 +08:00
|
|
|
return;
|
2002-01-05 08:41:11 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p\n", __FILE__, __FUNCTION__, ptr);
|
2019-12-10 15:07:25 +08:00
|
|
|
|
2020-09-08 20:31:58 +08:00
|
|
|
if (NO_CHECKING_GET() == 0) {
|
2020-08-13 17:19:11 +08:00
|
|
|
WAIT_SEM ();
|
|
|
|
INCR_COUNT(bound_free_count);
|
|
|
|
tree = splay (addr, tree);
|
|
|
|
if (tree->start == addr) {
|
|
|
|
if (tree->is_invalid) {
|
|
|
|
POST_SEM ();
|
|
|
|
bound_error("freeing invalid region");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
tree->is_invalid = 1;
|
|
|
|
memset (ptr, 0x5a, tree->size);
|
|
|
|
p = free_reuse_list[free_reuse_index];
|
|
|
|
free_reuse_list[free_reuse_index] = ptr;
|
|
|
|
free_reuse_index = (free_reuse_index + 1) % FREE_REUSE_SIZE;
|
|
|
|
if (p)
|
|
|
|
tree = splay_delete((size_t)p, tree);
|
|
|
|
ptr = p;
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2020-08-13 17:19:11 +08:00
|
|
|
POST_SEM ();
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2020-05-24 02:02:41 +08:00
|
|
|
BOUND_FREE (ptr);
|
2002-01-04 07:12:29 +08:00
|
|
|
}
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
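/* realloc() replacement: remove the old region from the splay tree,
   reallocate with one guard byte and register the new region; a size
   of zero behaves like free(). */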
#if MALLOC_REDIR
|
|
|
|
void *realloc(void *ptr, size_t size)
|
|
|
|
#else
|
2002-01-05 08:41:11 +08:00
|
|
|
void *__bound_realloc(void *ptr, size_t size, const void *caller)
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2002-01-04 07:12:29 +08:00
|
|
|
{
|
2020-01-15 15:53:19 +08:00
|
|
|
void *new_ptr;
|
|
|
|
|
2002-01-04 07:12:29 +08:00
|
|
|
if (size == 0) {
|
2019-12-10 15:07:25 +08:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
free(ptr);
|
|
|
|
#else
|
2002-01-05 08:41:11 +08:00
|
|
|
__bound_free(ptr, caller);
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2002-01-04 07:12:29 +08:00
|
|
|
return NULL;
|
2019-12-11 02:47:33 +08:00
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
|
2020-06-16 13:39:48 +08:00
|
|
|
new_ptr = BOUND_REALLOC (ptr, size + 1);
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, new_ptr, (unsigned long)size);
|
|
|
|
|
2020-09-08 20:31:58 +08:00
|
|
|
if (NO_CHECKING_GET() == 0) {
|
2019-12-11 02:47:33 +08:00
|
|
|
WAIT_SEM ();
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_realloc_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
|
|
|
|
if (ptr)
|
|
|
|
tree = splay_delete ((size_t) ptr, tree);
|
|
|
|
if (new_ptr) {
|
2020-06-16 13:39:48 +08:00
|
|
|
tree = splay_insert ((size_t) new_ptr, size ? size : size + 1, tree);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (tree && tree->start == (size_t) new_ptr)
|
2019-12-11 02:47:33 +08:00
|
|
|
tree->type = TCC_TYPE_REALLOC;
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2019-12-11 02:47:33 +08:00
|
|
|
POST_SEM ();
|
2002-01-04 07:12:29 +08:00
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
return new_ptr;
|
2002-01-04 07:12:29 +08:00
|
|
|
}
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
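/* calloc() replacement: like the malloc() replacement but the memory
   is zero-filled; note that the nmemb * size multiplication is not
   checked for overflow here. */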
#if MALLOC_REDIR
|
|
|
|
void *calloc(size_t nmemb, size_t size)
|
|
|
|
#else
|
2002-03-04 06:45:55 +08:00
|
|
|
void *__bound_calloc(size_t nmemb, size_t size)
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
2002-03-04 06:45:55 +08:00
|
|
|
{
|
|
|
|
void *ptr;
|
2019-12-10 15:07:25 +08:00
|
|
|
|
|
|
|
size *= nmemb;
|
|
|
|
#if MALLOC_REDIR
|
2019-12-11 02:47:33 +08:00
|
|
|
/* This will catch the first dlsym call from __bound_init */
|
|
|
|
if (malloc_redir == NULL) {
|
2020-05-22 11:06:08 +08:00
|
|
|
__bound_init (0, -1);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (malloc_redir == NULL) {
|
|
|
|
ptr = &initial_pool[pool_index];
|
|
|
|
pool_index = (pool_index + size + 15) & ~15;
|
|
|
|
if (pool_index >= sizeof (initial_pool))
|
|
|
|
bound_alloc_error ("initial memory pool too small");
|
|
|
|
dprintf (stderr, "%s, %s(): initial %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
|
|
|
memset (ptr, 0, size);
|
|
|
|
return ptr;
|
|
|
|
}
|
2019-12-11 02:47:33 +08:00
|
|
|
}
|
|
|
|
#endif
|
2020-05-24 02:02:41 +08:00
|
|
|
ptr = BOUND_MALLOC(size + 1);
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf (stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
if (ptr) {
|
|
|
|
memset (ptr, 0, size);
|
2020-09-08 20:31:58 +08:00
|
|
|
if (NO_CHECKING_GET() == 0) {
|
2020-01-15 15:53:19 +08:00
|
|
|
WAIT_SEM ();
|
|
|
|
INCR_COUNT(bound_calloc_count);
|
2020-06-16 13:39:48 +08:00
|
|
|
tree = splay_insert ((size_t) ptr, size ? size : size + 1, tree);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (tree && tree->start == (size_t) ptr)
|
|
|
|
tree->type = TCC_TYPE_CALLOC;
|
|
|
|
POST_SEM ();
|
2019-12-11 02:47:33 +08:00
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2002-03-04 06:45:55 +08:00
|
|
|
return ptr;
|
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
|
|
|
|
#if !defined(_WIN32)
|
|
|
|
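/* mmap()/munmap() wrappers: register and unregister the mapped
   region in the splay tree so that pointers into the mapping are
   accepted by the checks. */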
void *__bound_mmap (void *start, size_t size, int prot,
|
|
|
|
int flags, int fd, off_t offset)
|
|
|
|
{
|
|
|
|
void *result;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, start, (unsigned long)size);
|
2019-12-10 15:07:25 +08:00
|
|
|
result = mmap (start, size, prot, flags, fd, offset);
|
2020-09-08 20:31:58 +08:00
|
|
|
if (result && NO_CHECKING_GET() == 0) {
|
2019-12-10 15:07:25 +08:00
|
|
|
WAIT_SEM ();
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_mmap_count);
|
2019-12-10 15:07:25 +08:00
|
|
|
tree = splay_insert((size_t)result, size, tree);
|
|
|
|
POST_SEM ();
|
|
|
|
}
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
int __bound_munmap (void *start, size_t size)
|
|
|
|
{
|
|
|
|
int result;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, start, (unsigned long)size);
|
2020-09-08 20:31:58 +08:00
|
|
|
if (start && NO_CHECKING_GET() == 0) {
|
2020-01-15 15:53:19 +08:00
|
|
|
WAIT_SEM ();
|
|
|
|
INCR_COUNT(bound_munmap_count);
|
|
|
|
tree = splay_delete ((size_t) start, tree);
|
|
|
|
POST_SEM ();
|
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
result = munmap (start, size);
|
|
|
|
return result;
|
|
|
|
}
|
2002-03-04 06:45:55 +08:00
|
|
|
#endif
|
|
|
|
|
2002-01-06 01:03:56 +08:00
|
|
|
/* some useful checked functions */
|
|
|
|
|
|
|
|
/* check that (p ... p + size - 1) lies inside p's region, if any */
|
2019-12-13 20:45:09 +08:00
|
|
|
static void __bound_check(const void *p, size_t size, const char *function)
|
2002-01-06 01:03:56 +08:00
|
|
|
{
|
2020-08-13 17:19:11 +08:00
|
|
|
if (size != 0 && __bound_ptr_add((void *)p, size) == INVALID_POINTER) {
|
2020-01-18 05:58:39 +08:00
|
|
|
bound_error("invalid pointer %p, size 0x%lx in %s",
|
2020-01-15 15:53:19 +08:00
|
|
|
p, (unsigned long)size, function);
|
|
|
|
}
|
2002-01-06 01:03:56 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
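/* report overlapping source and destination regions for the checked
   memory and string functions; returns nonzero only when errors are
   configured as non-fatal (never_fatal < 0), in which case the
   caller skips the real operation. */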
static int check_overlap (const void *p1, size_t n1,
|
|
|
|
const void *p2, size_t n2,
|
|
|
|
const char *function)
|
2002-01-06 01:03:56 +08:00
|
|
|
{
|
2020-01-15 15:53:19 +08:00
|
|
|
const void *p1e = (const void *) ((const char *) p1 + n1);
|
|
|
|
const void *p2e = (const void *) ((const char *) p2 + n2);
|
|
|
|
|
2020-09-08 20:31:58 +08:00
|
|
|
    if (NO_CHECKING_GET() == 0 && n1 != 0 && n2 != 0 &&
|
2020-01-15 15:53:19 +08:00
|
|
|
((p1 <= p2 && p1e > p2) || /* p1----p2====p1e----p2e */
|
|
|
|
(p2 <= p1 && p2e > p1))) { /* p2----p1====p2e----p1e */
|
2020-01-18 05:58:39 +08:00
|
|
|
bound_error("overlapping regions %p(0x%lx), %p(0x%lx) in %s",
|
2020-01-15 15:53:19 +08:00
|
|
|
p1, (unsigned long)n1, p2, (unsigned long)n2, function);
|
|
|
|
return never_fatal < 0;
|
|
|
|
}
|
|
|
|
return 0;
|
|
|
|
}
|
2015-04-10 20:17:22 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
void *__bound_memcpy(void *dest, const void *src, size_t n)
|
|
|
|
{
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_mempcy_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(dest, n, "memcpy dest");
|
|
|
|
__bound_check(src, n, "memcpy src");
|
|
|
|
if (check_overlap(dest, n, src, n, "memcpy"))
|
|
|
|
return dest;
|
|
|
|
return memcpy(dest, src, n);
|
2002-01-06 01:03:56 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
int __bound_memcmp(const void *s1, const void *s2, size_t n)
|
2019-12-13 03:49:35 +08:00
|
|
|
{
|
2020-01-15 15:53:19 +08:00
|
|
|
const unsigned char *u1 = (const unsigned char *) s1;
|
|
|
|
const unsigned char *u2 = (const unsigned char *) s2;
|
|
|
|
int retval = 0;
|
|
|
|
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, s1, s2, (unsigned long)n);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_memcmp_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
for (;;) {
|
|
|
|
if ((ssize_t) --n == -1)
|
|
|
|
break;
|
|
|
|
else if (*u1 != *u2) {
|
|
|
|
retval = *u1++ - *u2++;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
++u1;
|
|
|
|
++u2;
|
|
|
|
}
|
|
|
|
__bound_check(s1, (const void *)u1 - s1, "memcmp s1");
|
|
|
|
__bound_check(s2, (const void *)u2 - s2, "memcmp s2");
|
|
|
|
return retval;
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
void *__bound_memmove(void *dest, const void *src, size_t n)
|
2002-01-06 01:03:56 +08:00
|
|
|
{
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_memmove_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(dest, n, "memmove dest");
|
|
|
|
__bound_check(src, n, "memmove src");
|
|
|
|
return memmove(dest, src, n);
|
2002-01-06 01:03:56 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
void *__bound_memset(void *s, int c, size_t n)
|
2002-01-06 01:03:56 +08:00
|
|
|
{
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %d, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, s, c, (unsigned long)n);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_memset_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(s, n, "memset");
|
|
|
|
return memset(s, c, n);
|
2002-01-06 01:03:56 +08:00
|
|
|
}
|
|
|
|
|
2021-01-14 02:41:04 +08:00
|
|
|
#if defined(__arm__) && defined(__ARM_EABI__)
|
2020-06-16 13:39:48 +08:00
|
|
|
void *__bound___aeabi_memcpy(void *dest, const void *src, size_t n)
|
|
|
|
{
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_mempcy_count);
|
|
|
|
__bound_check(dest, n, "memcpy dest");
|
|
|
|
__bound_check(src, n, "memcpy src");
|
|
|
|
if (check_overlap(dest, n, src, n, "memcpy"))
|
|
|
|
return dest;
|
|
|
|
return __aeabi_memcpy(dest, src, n);
|
|
|
|
}
|
|
|
|
|
|
|
|
void *__bound___aeabi_memmove(void *dest, const void *src, size_t n)
|
|
|
|
{
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_memmove_count);
|
|
|
|
__bound_check(dest, n, "memmove dest");
|
|
|
|
__bound_check(src, n, "memmove src");
|
|
|
|
return __aeabi_memmove(dest, src, n);
|
|
|
|
}
|
|
|
|
|
|
|
|
void *__bound___aeabi_memmove4(void *dest, const void *src, size_t n)
|
|
|
|
{
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_memmove_count);
|
|
|
|
__bound_check(dest, n, "memmove dest");
|
|
|
|
__bound_check(src, n, "memmove src");
|
|
|
|
return __aeabi_memmove4(dest, src, n);
|
|
|
|
}
|
|
|
|
|
|
|
|
void *__bound___aeabi_memmove8(void *dest, const void *src, size_t n)
|
|
|
|
{
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_memmove_count);
|
|
|
|
__bound_check(dest, n, "memmove dest");
|
|
|
|
__bound_check(src, n, "memmove src");
|
|
|
|
return __aeabi_memmove8(dest, src, n);
|
|
|
|
}
|
|
|
|
|
|
|
|
void *__bound___aeabi_memset(void *s, int c, size_t n)
|
|
|
|
{
|
|
|
|
dprintf(stderr, "%s, %s(): %p, %d, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, s, c, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_memset_count);
|
|
|
|
__bound_check(s, n, "memset");
|
|
|
|
return __aeabi_memset(s, c, n);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2002-01-27 02:53:47 +08:00
|
|
|
int __bound_strlen(const char *s)
|
|
|
|
{
|
2019-12-13 03:49:35 +08:00
|
|
|
const char *p = s;
|
2002-01-27 02:53:47 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p\n",
|
|
|
|
__FILE__, __FUNCTION__, s);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_strlen_count);
|
|
|
|
while (*p++);
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(s, p - s, "strlen");
|
|
|
|
return (p - s) - 1;
|
2002-01-27 02:53:47 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
char *__bound_strcpy(char *dest, const char *src)
|
2002-01-27 02:53:47 +08:00
|
|
|
{
|
2015-04-10 20:17:22 +08:00
|
|
|
size_t len;
|
2020-01-15 15:53:19 +08:00
|
|
|
const char *p = src;
|
2015-04-10 20:17:22 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_strcpy_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
while (*p++);
|
|
|
|
len = p - src;
|
|
|
|
__bound_check(dest, len, "strcpy dest");
|
|
|
|
__bound_check(src, len, "strcpy src");
|
|
|
|
if (check_overlap(dest, len, src, len, "strcpy"))
|
|
|
|
return dest;
|
|
|
|
return strcpy (dest, src);
|
2002-01-27 02:53:47 +08:00
|
|
|
}
|
2019-12-10 15:07:25 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
char *__bound_strncpy(char *dest, const char *src, size_t n)
|
2019-12-13 03:49:35 +08:00
|
|
|
{
|
2019-12-13 17:02:20 +08:00
|
|
|
size_t len = n;
|
|
|
|
const char *p = src;
|
2019-12-13 03:49:35 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_strncpy_count);
|
2019-12-13 17:02:20 +08:00
|
|
|
while (len-- && *p++);
|
|
|
|
len = p - src;
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(dest, len, "strncpy dest");
|
|
|
|
__bound_check(src, len, "strncpy src");
|
|
|
|
if (check_overlap(dest, len, src, len, "strncpy"))
|
|
|
|
return dest;
|
|
|
|
return strncpy(dest, src, n);
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
int __bound_strcmp(const char *s1, const char *s2)
|
|
|
|
{
|
|
|
|
const unsigned char *u1 = (const unsigned char *) s1;
|
|
|
|
const unsigned char *u2 = (const unsigned char *) s2;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p\n",
|
|
|
|
__FILE__, __FUNCTION__, s1, s2);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_strcmp_count);
|
|
|
|
while (*u1 && *u1 == *u2) {
|
2020-01-15 15:53:19 +08:00
|
|
|
++u1;
|
|
|
|
++u2;
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(s1, ((const char *)u1 - s1) + 1, "strcmp s1");
|
|
|
|
__bound_check(s2, ((const char *)u2 - s2) + 1, "strcmp s2");
|
|
|
|
return *u1 - *u2;
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
int __bound_strncmp(const char *s1, const char *s2, size_t n)
|
|
|
|
{
|
2019-12-13 17:02:20 +08:00
|
|
|
const unsigned char *u1 = (const unsigned char *) s1;
|
|
|
|
const unsigned char *u2 = (const unsigned char *) s2;
|
|
|
|
int retval = 0;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, s1, s2, (unsigned long)n);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_strncmp_count);
|
2019-12-13 17:02:20 +08:00
|
|
|
do {
|
|
|
|
if ((ssize_t) --n == -1)
|
|
|
|
break;
|
|
|
|
else if (*u1 != *u2) {
|
2020-01-15 15:53:19 +08:00
|
|
|
retval = *u1++ - *u2++;
|
2019-12-13 17:02:20 +08:00
|
|
|
break;
|
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
++u2;
|
2019-12-13 17:02:20 +08:00
|
|
|
} while (*u1++);
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(s1, (const char *)u1 - s1, "strncmp s1");
|
|
|
|
__bound_check(s2, (const char *)u2 - s2, "strncmp s2");
|
2019-12-13 17:02:20 +08:00
|
|
|
return retval;
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
char *__bound_strcat(char *dest, const char *src)
|
|
|
|
{
|
|
|
|
char *r = dest;
|
|
|
|
const char *s = src;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_strcat_count);
|
|
|
|
while (*dest++);
|
2020-01-15 15:53:19 +08:00
|
|
|
while (*src++);
|
|
|
|
__bound_check(r, (dest - r) + (src - s) - 1, "strcat dest");
|
|
|
|
__bound_check(s, src - s, "strcat src");
|
|
|
|
if (check_overlap(r, (dest - r) + (src - s) - 1, s, src - s, "strcat"))
|
|
|
|
        return r;
|
|
|
|
return strcat(r, s);
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
char *__bound_strchr(const char *s, int c)
|
2019-12-13 03:49:35 +08:00
|
|
|
{
|
2020-01-15 15:53:19 +08:00
|
|
|
const unsigned char *str = (const unsigned char *) s;
|
|
|
|
unsigned char ch = c;
|
2019-12-13 03:49:35 +08:00
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %d\n",
|
|
|
|
__FILE__, __FUNCTION__, s, ch);
|
2019-12-13 03:49:35 +08:00
|
|
|
INCR_COUNT(bound_strchr_count);
|
2020-01-15 15:53:19 +08:00
|
|
|
while (*str) {
|
|
|
|
if (*str == ch)
|
2019-12-13 03:49:35 +08:00
|
|
|
break;
|
2020-01-15 15:53:19 +08:00
|
|
|
++str;
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
2020-01-15 15:53:19 +08:00
|
|
|
__bound_check(s, ((const char *)str - s) + 1, "strchr");
|
|
|
|
return *str == ch ? (char *) str : NULL;
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
char *__bound_strdup(const char *s)
|
|
|
|
{
|
|
|
|
const char *p = s;
|
|
|
|
char *new;
|
|
|
|
|
|
|
|
INCR_COUNT(bound_strdup_count);
|
|
|
|
while (*p++);
|
2019-12-13 20:45:09 +08:00
|
|
|
__bound_check(s, p - s, "strdup");
|
2020-05-24 02:02:41 +08:00
|
|
|
new = BOUND_MALLOC ((p - s) + 1);
|
2020-01-15 15:53:19 +08:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
        __FILE__, __FUNCTION__, new, (unsigned long)(p - s));
|
2019-12-13 03:49:35 +08:00
|
|
|
if (new) {
|
2020-09-08 20:31:58 +08:00
|
|
|
if (NO_CHECKING_GET() == 0 && no_strdup == 0) {
|
2020-01-15 15:53:19 +08:00
|
|
|
WAIT_SEM ();
|
|
|
|
tree = splay_insert((size_t)new, p - s, tree);
|
|
|
|
if (tree && tree->start == (size_t) new)
|
|
|
|
tree->type = TCC_TYPE_STRDUP;
|
|
|
|
POST_SEM ();
|
2019-12-13 03:49:35 +08:00
|
|
|
}
|
|
|
|
memcpy (new, s, p - s);
|
|
|
|
}
|
|
|
|
return new;
|
|
|
|
}
|
|
|
|
|
2019-12-10 15:07:25 +08:00
|
|
|
/*
|
|
|
|
An implementation of top-down splaying with sizes
|
|
|
|
D. Sleator <sleator@cs.cmu.edu>, January 1994.
|
|
|
|
|
|
|
|
This extends top-down-splay.c to maintain a size field in each node.
|
|
|
|
This is the number of nodes in the subtree rooted there. This makes
|
|
|
|
it possible to efficiently compute the rank of a key. (The rank is
|
|
|
|
   the number of nodes to the left of the given key.) It is also
|
|
|
|
possible to quickly find the node of a given rank. Both of these
|
|
|
|
operations are illustrated in the code below. The remainder of this
|
|
|
|
introduction is taken from top-down-splay.c.
|
|
|
|
|
|
|
|
"Splay trees", or "self-adjusting search trees" are a simple and
|
|
|
|
efficient data structure for storing an ordered set. The data
|
|
|
|
structure consists of a binary tree, with no additional fields. It
|
|
|
|
allows searching, insertion, deletion, deletemin, deletemax,
|
|
|
|
splitting, joining, and many other operations, all with amortized
|
|
|
|
logarithmic performance. Since the trees adapt to the sequence of
|
|
|
|
requests, their performance on real access patterns is typically even
|
|
|
|
better. Splay trees are described in a number of texts and papers
|
|
|
|
[1,2,3,4].
|
|
|
|
|
|
|
|
The code here is adapted from simple top-down splay, at the bottom of
|
|
|
|
page 669 of [2]. It can be obtained via anonymous ftp from
|
|
|
|
spade.pc.cs.cmu.edu in directory /usr/sleator/public.
|
|
|
|
|
|
|
|
The chief modification here is that the splay operation works even if the
|
|
|
|
item being splayed is not in the tree, and even if the tree root of the
|
|
|
|
tree is NULL. So the line:
|
|
|
|
|
|
|
|
t = splay(i, t);
|
|
|
|
|
|
|
|
causes it to search for item with key i in the tree rooted at t. If it's
|
|
|
|
there, it is splayed to the root. If it isn't there, then the node put
|
|
|
|
at the root is the last one before NULL that would have been reached in a
|
|
|
|
normal binary search for i. (It's a neighbor of i in the tree.) This
|
|
|
|
allows many other operations to be easily implemented, as shown below.
|
|
|
|
|
|
|
|
[1] "Data Structures and Their Algorithms", Lewis and Denenberg,
|
|
|
|
Harper Collins, 1991, pp 243-251.
|
|
|
|
[2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
|
|
|
|
JACM Volume 32, No 3, July 1985, pp 652-686.
|
|
|
|
[3] "Data Structure and Algorithm Analysis", Mark Weiss,
|
|
|
|
Benjamin Cummins, 1992, pp 119-130.
|
|
|
|
[4] "Data Structures, Algorithms, and Performance", Derick Wood,
|
|
|
|
Addison-Wesley, 1993, pp 367-375
|
|
|
|
*/
|
|
|
|
|
|
|
|
/* Code adapted for tcc */
|
|
|
|
|
|
|
|
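/* In this adaptation each node describes a memory region: 'start' is
   the base address, 'size' is its length in bytes, and compare()
   returns 0 for any address inside [start, start + size), so looking
   up any pointer into a region splays that region's node to the
   root. */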
#define compare(start,tstart,tsize) (start < tstart ? -1 : \
|
|
|
|
start >= tstart+tsize ? 1 : 0)
|
|
|
|
|
|
|
|
static Tree * splay (size_t addr, Tree *t)
|
|
|
|
/* Splay using the key start (which may or may not be in the tree.) */
|
2020-01-15 15:53:19 +08:00
|
|
|
/* The starting root is t, and the tree used is defined by rat */
|
2019-12-10 15:07:25 +08:00
|
|
|
{
|
|
|
|
Tree N, *l, *r, *y;
|
|
|
|
int comp;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
INCR_COUNT_SPLAY(bound_splay);
|
2019-12-10 15:07:25 +08:00
|
|
|
if (t == NULL) return t;
|
|
|
|
N.left = N.right = NULL;
|
|
|
|
l = r = &N;
|
|
|
|
|
|
|
|
for (;;) {
|
|
|
|
comp = compare(addr, t->start, t->size);
|
|
|
|
if (comp < 0) {
|
|
|
|
y = t->left;
|
|
|
|
if (y == NULL) break;
|
|
|
|
if (compare(addr, y->start, y->size) < 0) {
|
|
|
|
t->left = y->right; /* rotate right */
|
|
|
|
y->right = t;
|
|
|
|
t = y;
|
|
|
|
if (t->left == NULL) break;
|
|
|
|
}
|
|
|
|
r->left = t; /* link right */
|
|
|
|
r = t;
|
|
|
|
t = t->left;
|
|
|
|
} else if (comp > 0) {
|
|
|
|
y = t->right;
|
|
|
|
if (y == NULL) break;
|
|
|
|
if (compare(addr, y->start, y->size) > 0) {
|
|
|
|
t->right = y->left; /* rotate left */
|
|
|
|
y->left = t;
|
|
|
|
t = y;
|
|
|
|
if (t->right == NULL) break;
|
|
|
|
}
|
|
|
|
l->right = t; /* link left */
|
|
|
|
l = t;
|
|
|
|
t = t->right;
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
l->right = t->left; /* assemble */
|
|
|
|
r->left = t->right;
|
|
|
|
t->left = N.right;
|
|
|
|
t->right = N.left;
|
|
|
|
|
|
|
|
return t;
|
|
|
|
}
|
|
|
|
|
|
|
|
#define compare_end(start,tend) (start < tend ? -1 : \
|
|
|
|
start > tend ? 1 : 0)
|
|
|
|
|
|
|
|
static Tree * splay_end (size_t addr, Tree *t)
|
|
|
|
/* Splay using the key start (which may or may not be in the tree.) */
|
|
|
|
/* The starting root is t, and the tree used is defined by rat */
|
|
|
|
{
|
|
|
|
Tree N, *l, *r, *y;
|
|
|
|
int comp;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
INCR_COUNT_SPLAY(bound_splay_end);
|
2019-12-10 15:07:25 +08:00
|
|
|
if (t == NULL) return t;
|
|
|
|
N.left = N.right = NULL;
|
|
|
|
l = r = &N;
|
|
|
|
|
|
|
|
for (;;) {
|
|
|
|
comp = compare_end(addr, t->start + t->size);
|
|
|
|
if (comp < 0) {
|
|
|
|
y = t->left;
|
|
|
|
if (y == NULL) break;
|
|
|
|
if (compare_end(addr, y->start + y->size) < 0) {
|
|
|
|
t->left = y->right; /* rotate right */
|
|
|
|
y->right = t;
|
|
|
|
t = y;
|
|
|
|
if (t->left == NULL) break;
|
|
|
|
}
|
|
|
|
r->left = t; /* link right */
|
|
|
|
r = t;
|
|
|
|
t = t->left;
|
|
|
|
} else if (comp > 0) {
|
|
|
|
y = t->right;
|
|
|
|
if (y == NULL) break;
|
|
|
|
if (compare_end(addr, y->start + y->size) > 0) {
|
|
|
|
t->right = y->left; /* rotate left */
|
|
|
|
y->left = t;
|
|
|
|
t = y;
|
|
|
|
if (t->right == NULL) break;
|
|
|
|
}
|
|
|
|
l->right = t; /* link left */
|
|
|
|
l = t;
|
|
|
|
t = t->right;
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
l->right = t->left; /* assemble */
|
|
|
|
r->left = t->right;
|
|
|
|
t->left = N.right;
|
|
|
|
t->right = N.left;
|
|
|
|
|
|
|
|
return t;
|
|
|
|
}
|
|
|
|
|
|
|
|
static Tree * splay_insert(size_t addr, size_t size, Tree * t)
|
|
|
|
/* Insert key start into the tree t, if it is not already there. */
|
2020-01-15 15:53:19 +08:00
|
|
|
/* Return a pointer to the resulting tree. */
|
2019-12-10 15:07:25 +08:00
|
|
|
{
|
|
|
|
Tree * new;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
INCR_COUNT_SPLAY(bound_splay_insert);
|
2019-12-10 15:07:25 +08:00
|
|
|
if (t != NULL) {
|
|
|
|
t = splay(addr,t);
|
|
|
|
if (compare(addr, t->start, t->size)==0) {
|
|
|
|
return t; /* it's already there */
|
|
|
|
}
|
|
|
|
}
|
|
|
|
#if TREE_REUSE
|
|
|
|
if (tree_free_list) {
|
|
|
|
new = tree_free_list;
|
|
|
|
tree_free_list = new->left;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
#endif
|
|
|
|
{
|
2020-05-24 02:02:41 +08:00
|
|
|
new = (Tree *) BOUND_MALLOC (sizeof (Tree));
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
2019-12-13 20:45:09 +08:00
|
|
|
if (new == NULL) {
|
2020-01-15 15:53:19 +08:00
|
|
|
bound_alloc_error("not enough memory for bound checking code");
|
2019-12-13 20:45:09 +08:00
|
|
|
}
|
|
|
|
else {
|
|
|
|
if (t == NULL) {
|
|
|
|
new->left = new->right = NULL;
|
|
|
|
} else if (compare(addr, t->start, t->size) < 0) {
|
|
|
|
new->left = t->left;
|
|
|
|
new->right = t;
|
|
|
|
t->left = NULL;
|
|
|
|
} else {
|
|
|
|
new->right = t->right;
|
|
|
|
new->left = t;
|
|
|
|
t->right = NULL;
|
|
|
|
}
|
|
|
|
new->start = addr;
|
|
|
|
new->size = size;
|
|
|
|
new->type = TCC_TYPE_NONE;
|
|
|
|
new->is_invalid = 0;
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
|
|
|
return new;
|
|
|
|
}
|
|
|
|
|
|
|
|
#define compare_destroy(start,tstart) (start < tstart ? -1 : \
|
|
|
|
start > tstart ? 1 : 0)
|
|
|
|
|
|
|
|
static Tree * splay_delete(size_t addr, Tree *t)
|
|
|
|
/* Deletes addr from the tree if it's there. */
|
2020-01-15 15:53:19 +08:00
|
|
|
/* Return a pointer to the resulting tree. */
|
2019-12-10 15:07:25 +08:00
|
|
|
{
|
|
|
|
Tree * x;
|
|
|
|
|
2020-01-15 15:53:19 +08:00
|
|
|
INCR_COUNT_SPLAY(bound_splay_delete);
|
2019-12-10 15:07:25 +08:00
|
|
|
if (t==NULL) return NULL;
|
|
|
|
t = splay(addr,t);
|
2020-01-15 15:53:19 +08:00
|
|
|
if (compare_destroy(addr, t->start) == 0) { /* found it */
|
2019-12-10 15:07:25 +08:00
|
|
|
if (t->left == NULL) {
|
|
|
|
x = t->right;
|
|
|
|
} else {
|
|
|
|
x = splay(addr, t->left);
|
|
|
|
x->right = t->right;
|
|
|
|
}
|
|
|
|
#if TREE_REUSE
|
|
|
|
t->left = tree_free_list;
|
|
|
|
tree_free_list = t;
|
|
|
|
#else
|
2020-05-24 02:02:41 +08:00
|
|
|
BOUND_FREE(t);
|
2019-12-10 15:07:25 +08:00
|
|
|
#endif
|
|
|
|
return x;
|
|
|
|
} else {
|
2020-01-15 15:53:19 +08:00
|
|
|
return t; /* It wasn't there */
|
2019-12-10 15:07:25 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void splay_printtree(Tree * t, int d)
|
|
|
|
{
|
|
|
|
int i;
|
|
|
|
if (t == NULL) return;
|
|
|
|
splay_printtree(t->right, d+1);
|
|
|
|
for (i=0; i<d; i++) fprintf(stderr," ");
|
2020-01-15 15:53:19 +08:00
|
|
|
fprintf(stderr,"%p(0x%lx:%u:%u)\n",
|
|
|
|
(void *) t->start, (unsigned long) t->size,
|
|
|
|
(unsigned)t->type, (unsigned)t->is_invalid);
|
2019-12-10 15:07:25 +08:00
|
|
|
splay_printtree(t->left, d+1);
|
|
|
|
}
|