- To: commits@xxxxxxxxxx
- Subject: [Commits] r22469 - in /fsf/trunk/libc: ./ math/ ports/ ports/sysdeps/unix/sysv/linux/mips/nptl/ stdlib/ sysdeps/i386/ sysdeps/i386/fpu...
- From: eglibc@xxxxxxxxxx
- Date: Wed, 20 Feb 2013 00:02:39 -0000
Author: eglibc
Date: Wed Feb 20 00:02:37 2013
New Revision: 22469
Log:
Import glibc-mainline for 2013-02-20
Modified:
fsf/trunk/libc/ChangeLog
fsf/trunk/libc/NEWS
fsf/trunk/libc/math/atest-exp.c
fsf/trunk/libc/math/atest-exp2.c
fsf/trunk/libc/math/atest-sincos.c
fsf/trunk/libc/ports/ChangeLog.mips
fsf/trunk/libc/ports/sysdeps/unix/sysv/linux/mips/nptl/lowlevellock.h
fsf/trunk/libc/stdlib/cxa_thread_atexit_impl.c
fsf/trunk/libc/stdlib/strtod_l.c
fsf/trunk/libc/stdlib/tst-tls-atexit-lib.c
fsf/trunk/libc/stdlib/tst-tls-atexit.c
fsf/trunk/libc/sysdeps/i386/add_n.S
fsf/trunk/libc/sysdeps/i386/addmul_1.S
fsf/trunk/libc/sysdeps/i386/bp-asm.h
fsf/trunk/libc/sysdeps/i386/bsd-_setjmp.S
fsf/trunk/libc/sysdeps/i386/bsd-setjmp.S
fsf/trunk/libc/sysdeps/i386/fpu/s_frexp.S
fsf/trunk/libc/sysdeps/i386/fpu/s_frexpf.S
fsf/trunk/libc/sysdeps/i386/fpu/s_frexpl.S
fsf/trunk/libc/sysdeps/i386/fpu/s_remquo.S
fsf/trunk/libc/sysdeps/i386/fpu/s_remquof.S
fsf/trunk/libc/sysdeps/i386/fpu/s_remquol.S
fsf/trunk/libc/sysdeps/i386/i486/strcat.S
fsf/trunk/libc/sysdeps/i386/i486/strlen.S
fsf/trunk/libc/sysdeps/i386/i586/add_n.S
fsf/trunk/libc/sysdeps/i386/i586/addmul_1.S
fsf/trunk/libc/sysdeps/i386/i586/lshift.S
fsf/trunk/libc/sysdeps/i386/i586/memcpy.S
fsf/trunk/libc/sysdeps/i386/i586/memset.S
fsf/trunk/libc/sysdeps/i386/i586/mul_1.S
fsf/trunk/libc/sysdeps/i386/i586/rshift.S
fsf/trunk/libc/sysdeps/i386/i586/strchr.S
fsf/trunk/libc/sysdeps/i386/i586/strcpy.S
fsf/trunk/libc/sysdeps/i386/i586/strlen.S
fsf/trunk/libc/sysdeps/i386/i586/sub_n.S
fsf/trunk/libc/sysdeps/i386/i586/submul_1.S
fsf/trunk/libc/sysdeps/i386/i686/add_n.S
fsf/trunk/libc/sysdeps/i386/i686/memcmp.S
fsf/trunk/libc/sysdeps/i386/i686/memcpy.S
fsf/trunk/libc/sysdeps/i386/i686/memmove.S
fsf/trunk/libc/sysdeps/i386/i686/mempcpy.S
fsf/trunk/libc/sysdeps/i386/i686/memset.S
fsf/trunk/libc/sysdeps/i386/i686/strcmp.S
fsf/trunk/libc/sysdeps/i386/i686/strtok.S
fsf/trunk/libc/sysdeps/i386/lshift.S
fsf/trunk/libc/sysdeps/i386/memchr.S
fsf/trunk/libc/sysdeps/i386/memcmp.S
fsf/trunk/libc/sysdeps/i386/mul_1.S
fsf/trunk/libc/sysdeps/i386/rawmemchr.S
fsf/trunk/libc/sysdeps/i386/rshift.S
fsf/trunk/libc/sysdeps/i386/setjmp.S
fsf/trunk/libc/sysdeps/i386/stpcpy.S
fsf/trunk/libc/sysdeps/i386/stpncpy.S
fsf/trunk/libc/sysdeps/i386/strchr.S
fsf/trunk/libc/sysdeps/i386/strchrnul.S
fsf/trunk/libc/sysdeps/i386/strcspn.S
fsf/trunk/libc/sysdeps/i386/strpbrk.S
fsf/trunk/libc/sysdeps/i386/strrchr.S
fsf/trunk/libc/sysdeps/i386/strspn.S
fsf/trunk/libc/sysdeps/i386/strtok.S
fsf/trunk/libc/sysdeps/i386/sub_n.S
fsf/trunk/libc/sysdeps/i386/submul_1.S
Modified: fsf/trunk/libc/ChangeLog
==============================================================================
--- fsf/trunk/libc/ChangeLog (original)
+++ fsf/trunk/libc/ChangeLog Wed Feb 20 00:02:37 2013
@@ -1,3 +1,124 @@
+2013-02-19 Joseph Myers <joseph@xxxxxxxxxxxxxxxx>
+
+ [BZ #13550]
+ * sysdeps/i386/bp-asm.h [__BOUNDED_POINTERS__] (BOUNDS_VIOLATED):
+ Remove macro.
+ (ENTER): Remove both macro definitions.
+ (LEAVE): Likewise.
+ (CHECK_BOUNDS_LOW): Likewise.
+ (CHECK_BOUNDS_HIGH): Likewise.
+ (CHECK_BOUNDS_BOTH): Likewise.
+ (CHECK_BOUNDS_BOTH_WIDE): Likewise.
+ (RETURN_BOUNDED_POINTER): Likewise.
+ (RETURN_NULL_BOUNDED_POINTER): Likewise.
+ (PUSH_ERRNO_LOCATION_RETURN): Likewise.
+ (POP_ERRNO_LOCATION_RETURN): Likewise.
+ * sysdeps/i386/add_n.S (__mpn_add_n): Do not use removed macros.
+ (__mpn_add_n) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/addmul_1.S (__mpn_addmul_1): Do not use removed
+ macros.
+ (__mpn_addmul_1) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/bsd-_setjmp.S (_setjmp): Do not use removed macros.
+ * sysdeps/i386/bsd-setjmp.S (setjmp): Likewise.
+ * sysdeps/i386/fpu/s_frexp.S (__frexp): Likewise.
+ * sysdeps/i386/fpu/s_frexpf.S (__frexpf): Likewise.
+ * sysdeps/i386/fpu/s_frexpl.S (__frexpl): Likewise.
+ * sysdeps/i386/fpu/s_remquo.S (__remquo): Likewise.
+ * sysdeps/i386/fpu/s_remquof.S (__remquof): Likewise.
+ * sysdeps/i386/fpu/s_remquol.S (__remquol): Likewise.
+ * sysdeps/i386/i486/strcat.S (strcat): Likewise.
+ * sysdeps/i386/i486/strlen.S (strlen): Likewise.
+ * sysdeps/i386/i586/add_n.S (__mpn_add_n): Likewise.
+ (__mpn_add_n) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i586/addmul_1.S (__mpn_addmul_1): Do not use
+ removed macros.
+ (__mpn_addmul_1) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i586/lshift.S (__mpn_lshift): Do not use removed
+ macros.
+ (__mpn_lshift) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i586/memcpy.S (memcpy): Do not use removed macros.
+ * sysdeps/i386/i586/memset.S (memset): Likewise.
+ * sysdeps/i386/i586/mul_1.S (__mpn_mul_1): Likewise.
+ (__mpn_mul_1) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i586/rshift.S (__mpn_rshift): Do not use removed
+ macros.
+ (__mpn_rshift) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i586/strchr.S (strchr): Do not use removed macros.
+ Change uses of L(2) to L(out).
+ * sysdeps/i386/i586/strcpy.S (STRCPY): Do not use removed macros.
+ * sysdeps/i386/i586/strlen.S (strlen): Likewise.
+ * sysdeps/i386/i586/sub_n.S (__mpn_sub_n): Likewise.
+ (__mpn_sub_n) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i586/submul_1.S (__mpn_submul_1): Do not use
+ removed macros.
+ (__mpn_submul_1) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i686/add_n.S (__mpn_add_n): Do not use removed
+ macros.
+ (__mpn_add_n) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i686/memcmp.S (ENTRANCE): Do not use macro ENTER.
+ (RETURN): Do not use macro LEAVE.
+ * sysdeps/i386/i686/memcpy.S (memcpy): Do not use removed macros.
+ * sysdeps/i386/i686/memmove.S (memmove): Likewise.
+ * sysdeps/i386/i686/mempcpy.S (mempcpy): Likewise.
+ * sysdeps/i386/i686/memset.S (memset): Likewise.
+ * sysdeps/i386/i686/strcmp.S (strcmp): Likewise.
+ (strcmp) [!__BOUNDED_POINTERS__]: Make code unconditional.
+ (strcmp) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/i686/strtok.S (save_ptr) [__BOUNDED_POINTERS__]:
+ Likewise.
+ (save_ptr) [!__BOUNDED_POINTERS__]: Make code unconditional.
+ (FUNCTION): Do not use removed macros. Combine labels L(1_1),
+ L(1_2) and L(1_3) into L(1).
+ (FUNCTION) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/lshift.S (__mpn_lshift): Do not use removed macros.
+ (__mpn_lshift) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/memchr.S (__memchr): Do not use removed macros.
+ (__memchr) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/memcmp.S (memcmp): Do not use removed macros.
+ * sysdeps/i386/mul_1.S (__mpn_mul_1): Likewise.
+ (__mpn_mul_1) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/rawmemchr.S (__rawmemchr): Do not use removed
+ macros.
+ * sysdeps/i386/rshift.S (__mpn_rshift): Likewise.
+ (__mpn_rshift) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/setjmp.S (__sigsetjmp): Do not use removed macros.
+ * sysdeps/i386/stpcpy.S (__stpcpy): Likewise.
+ * sysdeps/i386/stpncpy.S (__stpncpy): Likewise.
+ (__stpncpy) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/strchr.S (strchr): Do not use removed macros.
+ * sysdeps/i386/strchrnul.S (__strchrnul): Likewise.
+ * sysdeps/i386/strcspn.S (strcspn): Likewise.
+ * sysdeps/i386/strpbrk.S (strpbrk): Likewise.
+ * sysdeps/i386/strrchr.S (strrchr): Likewise.
+ * sysdeps/i386/strspn.S (strspn): Likewise.
+ * sysdeps/i386/strtok.S (save_ptr) [__BOUNDED_POINTERS__]: Remove
+ conditional code.
+ (save_ptr) [!__BOUNDED_POINTERS__]: Make code unconditional.
+ (FUNCTION) [!__BOUNDED_POINTERS__]: Likewise.
+ (FUNCTION) [__BOUNDED_POINTERS__]: Remove conditional code.
+ (FUNCTION): Do not use removed macros. Combine labels L(1_2) and
+ L(1_3) into L(1_1).
+ * sysdeps/i386/sub_n.S (__mpn_sub_n): Do not use removed macros.
+ (__mpn_sub_n) [__BOUNDED_POINTERS__]: Remove conditional code.
+ * sysdeps/i386/submul_1.S (__mpn_submul_1): Do not use removed
+ macros.
+ (__mpn_submul_1) [__BOUNDED_POINTERS__]: Remove conditional code.
+
+2013-02-19 Jakub Jelinek <jakub@xxxxxxxxxx>
+
+ * stdlib/strtod_l.c (__mpn_lshift_1): Rewritten as function-like
+ macro.
+
+2013-02-19 Siddhesh Poyarekar <siddhesh@xxxxxxxxxx>
+
+ * math/atest-exp.c (exp_mpn): Remove ROUND.
+ * math/atest-exp2.c (exp_mpn): Likewise.
+ * math/atest-sincos.c (sincosx_mpn): Remove ROUND and CHK.
+
+ * stdlib/cxa_thread_atexit_impl.c: Fix Copyright year.
+ * stdlib/tst-tls-atexit-lib.c: Likewise.
+ * stdlib/tst-tls-atexit.c: Likewise.
+
2013-02-18 Mike Frysinger <vapier@xxxxxxxxxx>
* stdlib/stdlib.h (aligned_alloc): Use __attribute_malloc__
Modified: fsf/trunk/libc/NEWS
==============================================================================
--- fsf/trunk/libc/NEWS (original)
+++ fsf/trunk/libc/NEWS Wed Feb 20 00:02:37 2013
@@ -12,6 +12,10 @@
11561, 13951, 14142, 14200, 14317, 14327, 14496, 14920, 14964, 14981,
14982, 14985, 14994, 14996, 15003, 15006, 15020, 15023, 15036, 15054,
15062, 15078.
+
+* Add support for calling C++11 thread_local object destructors on thread
+ and program exit. This needs compiler support for offloading C++11
+ destructor calls to glibc.
Version 2.17
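
[Editor's note] The NEWS entry above describes the interface implemented by stdlib/cxa_thread_atexit_impl.c (whose copyright years are bumped further down in this commit): a C++ front end is expected to turn a thread_local object with a non-trivial destructor into a registration call into libc. Below is a minimal hand-written C sketch of such a registration, assuming the declaration matches the one in cxa_thread_atexit_impl.c; the file-local cookie address is a stand-in for the per-DSO handle that compiler-generated code would normally supply.

  /* Sketch only: a hand-written stand-in for compiler-emitted code.  */
  extern int __cxa_thread_atexit_impl (void (*func) (void *), void *obj,
                                       void *dso_symbol);

  /* Any address inside this object identifies the DSO for the sketch;
     real compiler output passes its usual per-DSO cookie instead.  */
  static char dso_cookie;

  static __thread struct counter { int value; } tls_counter;

  static void
  counter_dtor (void *p)
  {
    ((struct counter *) p)->value = 0;   /* runs on thread/program exit */
  }

  void
  bump_counter (void)
  {
    static __thread int registered;
    if (!registered)
      {
        registered = 1;
        __cxa_thread_atexit_impl (counter_dtor, &tls_counter, &dso_cookie);
      }
    tls_counter.value++;
  }

The tst-tls-atexit tests touched below exercise the other half of the contract: a dlopen'ed DSO must not be unloaded until the TLS destructors it registered have run.
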
Modified: fsf/trunk/libc/math/atest-exp.c
==============================================================================
--- fsf/trunk/libc/math/atest-exp.c (original)
+++ fsf/trunk/libc/math/atest-exp.c Wed Feb 20 00:02:37 2013
@@ -61,7 +61,7 @@
unsigned n;
mp1 xp;
mp2 tmp;
- mp_limb_t chk, round;
+ mp_limb_t chk;
mp1 tol;
memset (xp, 0, sizeof (mp1));
@@ -79,7 +79,7 @@
mpn_mul_n (tmp, xp, x, SZ);
assert (tmp[SZ * 2 - 1] == 0);
if (n > 0)
- round = mpn_divmod_1 (xp, tmp + FRAC / mpbpl, SZ, n);
+ mpn_divmod_1 (xp, tmp + FRAC / mpbpl, SZ, n);
chk = mpn_add_n (ex, ex, xp, SZ);
assert (chk == 0);
n++;
Modified: fsf/trunk/libc/math/atest-exp2.c
==============================================================================
--- fsf/trunk/libc/math/atest-exp2.c (original)
+++ fsf/trunk/libc/math/atest-exp2.c Wed Feb 20 00:02:37 2013
@@ -102,7 +102,7 @@
unsigned int n;
mp1 xp;
mp2 tmp;
- mp_limb_t chk, round;
+ mp_limb_t chk;
mp1 tol;
memset (xp, 0, sizeof (mp1));
@@ -120,7 +120,7 @@
mpn_mul_n (tmp, xp, x, SZ);
assert(tmp[SZ * 2 - 1] == 0);
if (n > 0)
- round = mpn_divmod_1 (xp, tmp + FRAC / mpbpl, SZ, n);
+ mpn_divmod_1 (xp, tmp + FRAC / mpbpl, SZ, n);
chk = mpn_add_n (ex, ex, xp, SZ);
assert (chk == 0);
++n;
Modified: fsf/trunk/libc/math/atest-sincos.c
==============================================================================
--- fsf/trunk/libc/math/atest-sincos.c (original)
+++ fsf/trunk/libc/math/atest-sincos.c Wed Feb 20 00:02:37 2013
@@ -64,7 +64,6 @@
int i;
mp2 s[4], c[4];
mp1 tmp, x;
- mp_limb_t chk, round;
if (ix == NULL)
{
@@ -79,34 +78,38 @@
for (i = 0; i < 1 << N; i++)
{
#define add_shift_mulh(d,x,s1,s2,sh,n) \
- /* d = (n ? -1 : 1) * (s1 + (s2>>sh)) * x / (1>>N); */ \
do { \
if (s2 != NULL) { \
if (sh > 0) { \
assert (sh < mpbpl); \
mpn_lshift (tmp, s1, SZ, sh); \
- chk = (n ? mpn_sub_n : mpn_add_n)(tmp,tmp,s2+FRAC/mpbpl,SZ); \
- } else \
- chk = (n ? mpn_sub_n : mpn_add_n)(tmp,s1,s2+FRAC/mpbpl,SZ); \
- /* assert(chk == 0); */ \
+ if (n) \
+ mpn_sub_n (tmp,tmp,s2+FRAC/mpbpl,SZ); \
+ else \
+ mpn_add_n (tmp,tmp,s2+FRAC/mpbpl,SZ); \
+ } else { \
+ if (n) \
+ mpn_sub_n (tmp,s1,s2+FRAC/mpbpl,SZ); \
+ else \
+ mpn_add_n (tmp,s1,s2+FRAC/mpbpl,SZ); \
+ } \
mpn_mul_n(d,tmp,x,SZ); \
} else \
mpn_mul_n(d,s1,x,SZ); \
- /* assert(d[SZ*2-1] == 0); */ \
assert(N+sh < mpbpl); \
if (N+sh > 0) mpn_rshift(d,d,2*SZ,N+sh); \
} while(0)
#define summ(d,ss,s,n) \
- /* d = ss +/- (s[0]+2*s[1]+2*s[2]+s[3])/6; */ \
do { \
- chk = mpn_add_n(tmp,s[1]+FRAC/mpbpl,s[2]+FRAC/mpbpl,SZ); \
+ mpn_add_n(tmp,s[1]+FRAC/mpbpl,s[2]+FRAC/mpbpl,SZ); \
mpn_lshift(tmp,tmp,SZ,1); \
- chk |= mpn_add_n(tmp,tmp,s[0]+FRAC/mpbpl,SZ); \
- chk |= mpn_add_n(tmp,tmp,s[3]+FRAC/mpbpl,SZ); \
- round = mpn_divmod_1(tmp,tmp,SZ,6); \
- /* chk |= mpn_add_1(tmp,tmp,SZ, (round > 3) ); */ \
- chk |= (n ? mpn_sub_n : mpn_add_n)(d,ss,tmp,SZ); \
- /* assert(chk == 0); */ \
+ mpn_add_n(tmp,tmp,s[0]+FRAC/mpbpl,SZ); \
+ mpn_add_n(tmp,tmp,s[3]+FRAC/mpbpl,SZ); \
+ mpn_divmod_1(tmp,tmp,SZ,6); \
+ if (n) \
+ mpn_sub_n (d,ss,tmp,SZ); \
+ else \
+ mpn_add_n (d,ss,tmp,SZ); \
} while (0)
add_shift_mulh (s[0], x, co, NULL, 0, 0); /* s0 = h * c; */
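
[Editor's note] The three atest-* hunks above share one pattern: round (and, in atest-sincos.c, chk as well) was only ever assigned, never read; the surviving assert on chk in atest-exp.c and atest-exp2.c keeps its own assignment, while the corresponding asserts in atest-sincos.c had long been commented out. The dead assignments are dropped and the (n ? mpn_sub_n : mpn_add_n) (...) selections are spelled out as if/else. Since mpn_divmod_1 stores the quotient and returns only the remainder, and mpn_add_n/mpn_sub_n return the carry/borrow, discarding those return values changes no computed result. A condensed stand-alone sketch of the resulting shape (SZ and the operands are placeholders, not the tests' real definitions):

  #include <gmp.h>

  #define SZ 4

  void
  accumulate_term (mp_limb_t *d, const mp_limb_t *ss, mp_limb_t *tmp,
                   int negate)
  {
    /* Quotient lands in tmp; the returned remainder is ignored, just as
       the dropped `round' variable used to be.  */
    mpn_divmod_1 (tmp, tmp, SZ, 6);

    /* Explicit if/else instead of (negate ? mpn_sub_n : mpn_add_n) (...);
       the carry/borrow return value is likewise ignored.  */
    if (negate)
      mpn_sub_n (d, ss, tmp, SZ);
    else
      mpn_add_n (d, ss, tmp, SZ);
  }
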
Modified: fsf/trunk/libc/ports/ChangeLog.mips
==============================================================================
--- fsf/trunk/libc/ports/ChangeLog.mips (original)
+++ fsf/trunk/libc/ports/ChangeLog.mips Wed Feb 20 00:02:37 2013
@@ -1,3 +1,13 @@
+2013-02-19 Joseph Myers <joseph@xxxxxxxxxxxxxxxx>
+
+ [BZ #14920]
+ * sysdeps/unix/sysv/linux/mips/nptl/lowlevellock.h
+ (FUTEX_WAIT_REQUEUE_PI): Define.
+ (FUTEX_CMP_REQUEUE_PI): Likewise.
+ (lll_futex_wait_requeue_pi): Likewise.
+ (lll_futex_timed_wait_requeue_pi): Likewise.
+ (lll_futex_cmp_requeue_pi): Likewise.
+
2013-02-18 Siddhesh Poyarekar <siddhesh@xxxxxxxxxx>
* sysdeps/unix/sysv/linux/mips/mips32/nptl/libc.abilist: Add
Modified: fsf/trunk/libc/ports/sysdeps/unix/sysv/linux/mips/nptl/lowlevellock.h
==============================================================================
--- fsf/trunk/libc/ports/sysdeps/unix/sysv/linux/mips/nptl/lowlevellock.h (original)
+++ fsf/trunk/libc/ports/sysdeps/unix/sysv/linux/mips/nptl/lowlevellock.h Wed Feb 20 00:02:37 2013
@@ -36,6 +36,8 @@
#define FUTEX_TRYLOCK_PI 8
#define FUTEX_WAIT_BITSET 9
#define FUTEX_WAKE_BITSET 10
+#define FUTEX_WAIT_REQUEUE_PI 11
+#define FUTEX_CMP_REQUEUE_PI 12
#define FUTEX_PRIVATE_FLAG 128
#define FUTEX_CLOCK_REALTIME 256
@@ -141,6 +143,34 @@
INTERNAL_SYSCALL_ERROR_P (__ret, __err); \
})
+/* Priority Inheritance support. */
+#define lll_futex_wait_requeue_pi(futexp, val, mutex, private) \
+ lll_futex_timed_wait_requeue_pi (futexp, val, NULL, 0, mutex, private)
+
+#define lll_futex_timed_wait_requeue_pi(futexp, val, timespec, clockbit, \
+ mutex, private) \
+ ({ \
+ INTERNAL_SYSCALL_DECL (__err); \
+ long int __ret; \
+ int __op = FUTEX_WAIT_REQUEUE_PI | clockbit; \
+ \
+ __ret = INTERNAL_SYSCALL (futex, __err, 5, (futexp), \
+ __lll_private_flag (__op, private), \
+ (val), (timespec), mutex); \
+ INTERNAL_SYSCALL_ERROR_P (__ret, __err) ? -__ret : __ret; \
+ })
+
+#define lll_futex_cmp_requeue_pi(futexp, nr_wake, nr_move, mutex, val, priv) \
+ ({ \
+ INTERNAL_SYSCALL_DECL (__err); \
+ long int __ret; \
+ \
+ __ret = INTERNAL_SYSCALL (futex, __err, 6, (futexp), \
+ __lll_private_flag (FUTEX_CMP_REQUEUE_PI, priv),\
+ (nr_wake), (nr_move), (mutex), (val)); \
+ INTERNAL_SYSCALL_ERROR_P (__ret, __err); \
+ })
+
static inline int __attribute__((always_inline))
__lll_trylock(int *futex)
{
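
[Editor's note] The additions above ([BZ #14920]) give the MIPS lowlevellock.h the same requeue-PI plumbing used elsewhere in NPTL: FUTEX_WAIT_REQUEUE_PI parks a waiter on a condvar futex so the kernel can later move it onto a PI mutex futex, and FUTEX_CMP_REQUEUE_PI performs that move from the waking side, failing if the condvar word no longer holds the expected value. A rough stand-alone sketch of the raw syscalls the new macros wrap, assuming kernel headers recent enough to define the two operations (their values match the 11 and 12 #defined above); the real macros additionally apply __lll_private_flag and go through INTERNAL_SYSCALL:

  #include <linux/futex.h>
  #include <sys/syscall.h>
  #include <unistd.h>
  #include <limits.h>
  #include <stdint.h>
  #include <time.h>

  /* Waiter side, roughly lll_futex_wait_requeue_pi: block on the condvar
     word and let the kernel requeue this thread onto the PI mutex.  */
  long
  wait_then_requeue (uint32_t *cond_futex, uint32_t seen_value,
                     uint32_t *mutex_futex)
  {
    return syscall (SYS_futex, cond_futex, FUTEX_WAIT_REQUEUE_PI,
                    seen_value, (const struct timespec *) 0 /* no timeout */,
                    mutex_futex, 0);
  }

  /* Waking side, roughly lll_futex_cmp_requeue_pi: wake one waiter and
     requeue the rest onto the PI mutex, provided *cond_futex still holds
     EXPECTED.  */
  long
  wake_and_requeue (uint32_t *cond_futex, uint32_t *mutex_futex,
                    uint32_t expected)
  {
    return syscall (SYS_futex, cond_futex, FUTEX_CMP_REQUEUE_PI,
                    1 /* nr_wake */, INT_MAX /* nr_requeue */,
                    mutex_futex, expected);
  }

The timed variant passes a real timespec instead of the null timeout and, via its clockbit argument, can OR FUTEX_CLOCK_REALTIME into the operation, as the macro text above shows.
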
Modified: fsf/trunk/libc/stdlib/cxa_thread_atexit_impl.c
==============================================================================
--- fsf/trunk/libc/stdlib/cxa_thread_atexit_impl.c (original)
+++ fsf/trunk/libc/stdlib/cxa_thread_atexit_impl.c Wed Feb 20 00:02:37 2013
@@ -1,5 +1,5 @@
/* Register destructors for C++ TLS variables declared with thread_local.
- Copyright (C) 2012 Free Software Foundation, Inc.
+ Copyright (C) 2013 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
Modified: fsf/trunk/libc/stdlib/strtod_l.c
==============================================================================
--- fsf/trunk/libc/stdlib/strtod_l.c (original)
+++ fsf/trunk/libc/stdlib/strtod_l.c Wed Feb 20 00:02:37 2013
@@ -444,28 +444,30 @@
/* Shift {PTR, SIZE} COUNT bits to the left, and fill the vacated bits
with the COUNT most significant bits of LIMB.
- Tege doesn't like this function so I have to write it here myself. :)
+ Implemented as a macro, so that __builtin_constant_p works even at -O0.
+
+ Tege doesn't like this macro so I have to write it here myself. :)
--drepper */
-static inline void
-__attribute ((always_inline))
-__mpn_lshift_1 (mp_limb_t *ptr, mp_size_t size, unsigned int count,
- mp_limb_t limb)
-{
- if (__builtin_constant_p (count) && count == BITS_PER_MP_LIMB)
- {
- /* Optimize the case of shifting by exactly a word:
- just copy words, with no actual bit-shifting. */
- mp_size_t i;
- for (i = size - 1; i > 0; --i)
- ptr[i] = ptr[i - 1];
- ptr[0] = limb;
- }
- else
- {
- (void) __mpn_lshift (ptr, ptr, size, count);
- ptr[0] |= limb >> (BITS_PER_MP_LIMB - count);
- }
-}
+#define __mpn_lshift_1(ptr, size, count, limb) \
+ do \
+ { \
+ mp_limb_t *__ptr = (ptr); \
+ if (__builtin_constant_p (count) && count == BITS_PER_MP_LIMB) \
+ { \
+ mp_size_t i; \
+ for (i = (size) - 1; i > 0; --i) \
+ __ptr[i] = __ptr[i - 1]; \
+ __ptr[0] = (limb); \
+ } \
+ else \
+ { \
+ /* We assume count > 0 && count < BITS_PER_MP_LIMB here. */ \
+ unsigned int __count = (count); \
+ (void) __mpn_lshift (__ptr, __ptr, size, __count); \
+ __ptr[0] |= (limb) >> (BITS_PER_MP_LIMB - __count); \
+ } \
+ } \
+ while (0)
#define INTERNAL(x) INTERNAL1(x)
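
[Editor's note] The replacement's own comment gives the motivation: __builtin_constant_p only sees a constant through a function parameter when the compiler propagates constants into the inlined body, which GCC does not do without -O even for an always_inline function, whereas a macro substitutes the argument textually and the test folds at compile time. That keeps call sites passing the constant BITS_PER_MP_LIMB on the word-copy branch instead of reaching the __mpn_lshift branch, which the new comment says assumes 0 < count < BITS_PER_MP_LIMB. A minimal stand-alone sketch of that difference, with its own throwaway BITS_PER_MP_LIMB definition, reflecting typical GCC behaviour rather than anything else this commit documents:

  #include <stdio.h>

  #define BITS_PER_MP_LIMB (8 * (unsigned int) sizeof (unsigned long))

  static inline int
  __attribute__ ((always_inline))
  constant_via_function (unsigned int count)
  {
    /* Without -O, no constant propagation reaches COUNT, so this reports
       0 even when the caller passes a constant expression.  */
    return __builtin_constant_p (count);
  }

  #define CONSTANT_VIA_MACRO(count) __builtin_constant_p (count)

  int
  main (void)
  {
    /* Typically prints "function: 0  macro: 1" at -O0 and
       "function: 1  macro: 1" with -O.  The macro form is what lets
       __mpn_lshift_1 pick its word-copy branch at compile time for
       count == BITS_PER_MP_LIMB, keeping the __mpn_lshift branch within
       its assumed shift-count range.  */
    printf ("function: %d  macro: %d\n",
            constant_via_function (BITS_PER_MP_LIMB),
            CONSTANT_VIA_MACRO (BITS_PER_MP_LIMB));
    return 0;
  }
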
Modified: fsf/trunk/libc/stdlib/tst-tls-atexit-lib.c
==============================================================================
--- fsf/trunk/libc/stdlib/tst-tls-atexit-lib.c (original)
+++ fsf/trunk/libc/stdlib/tst-tls-atexit-lib.c Wed Feb 20 00:02:37 2013
@@ -1,5 +1,5 @@
/* Verify that DSO is unloaded only if its TLS objects are destroyed - the DSO.
- Copyright (C) 2012 Free Software Foundation, Inc.
+ Copyright (C) 2013 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
Modified: fsf/trunk/libc/stdlib/tst-tls-atexit.c
==============================================================================
--- fsf/trunk/libc/stdlib/tst-tls-atexit.c (original)
+++ fsf/trunk/libc/stdlib/tst-tls-atexit.c Wed Feb 20 00:02:37 2013
@@ -1,5 +1,5 @@
/* Verify that DSO is unloaded only if its TLS objects are destroyed.
- Copyright (C) 2012 Free Software Foundation, Inc.
+ Copyright (C) 2013 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
Modified: fsf/trunk/libc/sysdeps/i386/add_n.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/add_n.S (original)
+++ fsf/trunk/libc/sysdeps/i386/add_n.S Wed Feb 20 00:02:37 2013
@@ -30,7 +30,6 @@
.text
ENTRY (BP_SYM (__mpn_add_n))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -43,13 +42,6 @@
cfi_rel_offset (esi, 0)
movl S2(%esp),%edx
movl SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ecx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%edx, S2(%esp), %ecx)
- shrl $2, %ecx
-#endif
movl %ecx,%eax
shrl $3,%ecx /* compute count for unrolled loop */
negl %eax
@@ -117,6 +109,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_add_n))
Modified: fsf/trunk/libc/sysdeps/i386/addmul_1.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/addmul_1.S (original)
+++ fsf/trunk/libc/sysdeps/i386/addmul_1.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (__mpn_addmul_1))
- ENTER
pushl %res_ptr
cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@
movl SIZE(%esp), %sizeP
movl S2LIMB(%esp), %s2_limb
cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
- shll $2, %sizeP /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
- CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
- shrl $2, %sizeP
-#endif
leal (%res_ptr,%sizeP,4), %res_ptr
leal (%s1_ptr,%sizeP,4), %s1_ptr
negl %sizeP
@@ -91,6 +84,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (res_ptr)
- LEAVE
ret
END (BP_SYM (__mpn_addmul_1))
Modified: fsf/trunk/libc/sysdeps/i386/bp-asm.h
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/bp-asm.h (original)
+++ fsf/trunk/libc/sysdeps/i386/bp-asm.h Wed Feb 20 00:02:37 2013
@@ -34,80 +34,11 @@
/* Although the caller pushes the hidden arg, the callee is
responsible for popping it. */
# define RET_PTR ret $RTN_SIZE
-/* Maintain frame pointer chain in leaf assembler functions for the benefit
- of debugging stack traces when bounds violations occur. */
-# define ENTER pushl %ebp; movl %esp, %ebp
-# define LEAVE movl %ebp, %esp; popl %ebp
/* Stack space overhead of procedure-call linkage: return address and
frame pointer. */
# define LINKAGE 8
/* Stack offset of return address after calling ENTER. */
# define PCOFF 4
-
-/* Int 5 is the "bound range" exception also raised by the "bound"
- instruction. */
-# define BOUNDS_VIOLATED int $5
-
-# define CHECK_BOUNDS_LOW(VAL_REG, BP_MEM) \
- cmpl 4+BP_MEM, VAL_REG; \
- jae 0f; /* continue if value >= low */ \
- BOUNDS_VIOLATED; \
- 0:
-
-# define CHECK_BOUNDS_HIGH(VAL_REG, BP_MEM, Jcc) \
- cmpl 8+BP_MEM, VAL_REG; \
- Jcc 0f; /* continue if value < high */ \
- BOUNDS_VIOLATED; \
- 0:
-
-# define CHECK_BOUNDS_BOTH(VAL_REG, BP_MEM) \
- cmpl 4+BP_MEM, VAL_REG; \
- jb 1f; /* die if value < low */ \
- cmpl 8+BP_MEM, VAL_REG; \
- jb 0f; /* continue if value < high */ \
- 1: BOUNDS_VIOLATED; \
- 0:
-
-# define CHECK_BOUNDS_BOTH_WIDE(VAL_REG, BP_MEM, LENGTH) \
- CHECK_BOUNDS_LOW(VAL_REG, BP_MEM); \
- addl LENGTH, VAL_REG; \
- cmpl 8+BP_MEM, VAL_REG; \
- jbe 0f; /* continue if value <= high */ \
- BOUNDS_VIOLATED; \
- 0: subl LENGTH, VAL_REG /* restore value */
-
-/* Take bounds from BP_MEM and affix them to the pointer
- value in %eax, stuffing all into memory at RTN(%esp).
- Use %edx as a scratch register. */
-
-# define RETURN_BOUNDED_POINTER(BP_MEM) \
- movl RTN(%esp), %edx; \
- movl %eax, 0(%edx); \
- movl 4+BP_MEM, %eax; \
- movl %eax, 4(%edx); \
- movl 8+BP_MEM, %eax; \
- movl %eax, 8(%edx)
-
-# define RETURN_NULL_BOUNDED_POINTER \
- movl RTN(%esp), %edx; \
- movl %eax, 0(%edx); \
- movl %eax, 4(%edx); \
- movl %eax, 8(%edx)
-
-/* The caller of __errno_location is responsible for allocating space
- for the three-word BP return-value and passing pushing its address
- as an implicit first argument. */
-# define PUSH_ERRNO_LOCATION_RETURN \
- subl $8, %esp; \
- subl $4, %esp; \
- pushl %esp
-
-/* __errno_location is responsible for popping the implicit first
- argument, but we must pop the space for the BP itself. We also
- dereference the return value in order to dig out the pointer value. */
-# define POP_ERRNO_LOCATION_RETURN \
- popl %eax; \
- addl $8, %esp
# else /* !__BOUNDED_POINTERS__ */
@@ -117,24 +48,10 @@
# define RTN_SIZE 0
/* Use simple return instruction for unbounded pointer values. */
# define RET_PTR ret
-/* Don't maintain frame pointer chain for leaf assembler functions. */
-# define ENTER
-# define LEAVE
/* Stack space overhead of procedure-call linkage: return address only. */
# define LINKAGE 4
/* Stack offset of return address after calling ENTER. */
# define PCOFF 0
-
-# define CHECK_BOUNDS_LOW(VAL_REG, BP_MEM)
-# define CHECK_BOUNDS_HIGH(VAL_REG, BP_MEM, Jcc)
-# define CHECK_BOUNDS_BOTH(VAL_REG, BP_MEM)
-# define CHECK_BOUNDS_BOTH_WIDE(VAL_REG, BP_MEM, LENGTH)
-# define RETURN_BOUNDED_POINTER(BP_MEM)
-
-# define RETURN_NULL_BOUNDED_POINTER
-
-# define PUSH_ERRNO_LOCATION_RETURN
-# define POP_ERRNO_LOCATION_RETURN
# endif /* !__BOUNDED_POINTERS__ */
Modified: fsf/trunk/libc/sysdeps/i386/bsd-_setjmp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/bsd-_setjmp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/bsd-_setjmp.S Wed Feb 20 00:02:37 2013
@@ -31,11 +31,9 @@
#define SIGMSK JMPBUF+PTR_SIZE
ENTRY (BP_SYM (_setjmp))
- ENTER
xorl %eax, %eax
movl JMPBUF(%esp), %edx
- CHECK_BOUNDS_BOTH_WIDE (%edx, JMPBUF(%esp), $(JB_SIZE+4))
/* Save registers. */
movl %ebx, (JB_BX*4)(%edx)
@@ -52,7 +50,6 @@
PTR_MANGLE (%ecx)
#endif
movl %ecx, (JB_PC*4)(%edx)
- LEAVE
movl %ebp, (JB_BP*4)(%edx) /* Save caller's frame pointer. */
movl %eax, JB_SIZE(%edx) /* No signal mask set. */
Modified: fsf/trunk/libc/sysdeps/i386/bsd-setjmp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/bsd-setjmp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/bsd-setjmp.S Wed Feb 20 00:02:37 2013
@@ -34,10 +34,8 @@
/* Note that we have to use a non-exported symbol in the next
jump since otherwise gas will emit it as a jump through the
PLT which is what we cannot use here. */
- ENTER
movl JMPBUF(%esp), %eax
- CHECK_BOUNDS_BOTH_WIDE (%eax, JMPBUF(%esp), $JB_SIZE)
/* Save registers. */
movl %ebx, (JB_BX*4)(%eax)
@@ -54,7 +52,6 @@
PTR_MANGLE (%ecx)
#endif
movl %ecx, (JB_PC*4)(%eax)
- LEAVE /* pop frame pointer to prepare for tail-call. */
movl %ebp, (JB_BP*4)(%eax) /* Save caller's frame pointer. */
/* Call __sigjmp_save. */
Modified: fsf/trunk/libc/sysdeps/i386/fpu/s_frexp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/fpu/s_frexp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/fpu/s_frexp.S Wed Feb 20 00:02:37 2013
@@ -41,7 +41,6 @@
.text
ENTRY (BP_SYM (__frexp))
- ENTER
movl VAL0(%esp), %ecx
movl VAL1(%esp), %eax
@@ -78,11 +77,9 @@
/* Store %ecx in the variable pointed to by the second argument,
get the factor from the stack and return. */
1: movl EXPP(%esp), %eax
- CHECK_BOUNDS_BOTH_WIDE (%eax, EXPP(%esp), $4)
fldl VAL0(%esp)
movl %ecx, (%eax)
- LEAVE
ret
END (BP_SYM (__frexp))
weak_alias (BP_SYM (__frexp), BP_SYM (frexp))
Modified: fsf/trunk/libc/sysdeps/i386/fpu/s_frexpf.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/fpu/s_frexpf.S (original)
+++ fsf/trunk/libc/sysdeps/i386/fpu/s_frexpf.S Wed Feb 20 00:02:37 2013
@@ -40,7 +40,6 @@
.text
ENTRY (BP_SYM (__frexpf))
- ENTER
movl VAL(%esp), %eax
xorl %ecx, %ecx
@@ -75,11 +74,9 @@
/* Store %ecx in the variable pointed to by the second argument,
get the factor from the stack and return. */
1: movl EXPP(%esp), %eax
- CHECK_BOUNDS_BOTH_WIDE (%eax, EXPP(%esp), $4)
flds VAL(%esp)
movl %ecx, (%eax)
- LEAVE
ret
END (BP_SYM (__frexpf))
weak_alias (BP_SYM (__frexpf), BP_SYM (frexpf))
Modified: fsf/trunk/libc/sysdeps/i386/fpu/s_frexpl.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/fpu/s_frexpl.S (original)
+++ fsf/trunk/libc/sysdeps/i386/fpu/s_frexpl.S Wed Feb 20 00:02:37 2013
@@ -42,7 +42,6 @@
.text
ENTRY (BP_SYM (__frexpl))
- ENTER
movl VAL0(%esp), %ecx
movl VAL2(%esp), %eax
@@ -80,11 +79,9 @@
/* Store %ecx in the variable pointed to by the second argument,
get the factor from the stack and return. */
1: movl EXPP(%esp), %eax
- CHECK_BOUNDS_BOTH_WIDE (%eax, EXPP(%esp), $4)
fldt VAL0(%esp)
movl %ecx, (%eax)
- LEAVE
ret
END (BP_SYM (__frexpl))
weak_alias (BP_SYM (__frexpl), BP_SYM (frexpl))
Modified: fsf/trunk/libc/sysdeps/i386/fpu/s_remquo.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/fpu/s_remquo.S (original)
+++ fsf/trunk/libc/sysdeps/i386/fpu/s_remquo.S Wed Feb 20 00:02:37 2013
@@ -15,7 +15,6 @@
.text
ENTRY (BP_SYM (__remquo))
- ENTER
fldl DVSOR(%esp)
fldl DVDND(%esp)
@@ -36,7 +35,6 @@
shrl %cl, %eax
andl $7, %eax
movl QUOP(%esp), %ecx
- CHECK_BOUNDS_BOTH_WIDE (%ecx, QUOP(%esp), $4)
movl DVDND+4(%esp), %edx
xorl DVSOR+4(%esp), %edx
testl $0x80000000, %edx
@@ -44,7 +42,6 @@
negl %eax
1: movl %eax, (%ecx)
- LEAVE
ret
END (BP_SYM (__remquo))
weak_alias (BP_SYM (__remquo), BP_SYM (remquo))
Modified: fsf/trunk/libc/sysdeps/i386/fpu/s_remquof.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/fpu/s_remquof.S (original)
+++ fsf/trunk/libc/sysdeps/i386/fpu/s_remquof.S Wed Feb 20 00:02:37 2013
@@ -15,7 +15,6 @@
.text
ENTRY (BP_SYM (__remquof))
- ENTER
flds DVSOR(%esp)
flds DVDND(%esp)
@@ -36,7 +35,6 @@
shrl %cl, %eax
andl $7, %eax
movl QUOP(%esp), %ecx
- CHECK_BOUNDS_BOTH_WIDE (%ecx, QUOP(%esp), $4)
movl DVDND(%esp), %edx
xorl DVSOR(%esp), %edx
testl $0x80000000, %edx
@@ -44,7 +42,6 @@
negl %eax
1: movl %eax, (%ecx)
- LEAVE
ret
END (BP_SYM (__remquof))
weak_alias (BP_SYM (__remquof), BP_SYM (remquof))
Modified: fsf/trunk/libc/sysdeps/i386/fpu/s_remquol.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/fpu/s_remquol.S (original)
+++ fsf/trunk/libc/sysdeps/i386/fpu/s_remquol.S Wed Feb 20 00:02:37 2013
@@ -15,7 +15,6 @@
.text
ENTRY (BP_SYM (__remquol))
- ENTER
fldt DVSOR(%esp)
fldt DVDND(%esp)
@@ -36,7 +35,6 @@
shrl %cl, %eax
andl $7, %eax
movl QUOP(%esp), %ecx
- CHECK_BOUNDS_BOTH_WIDE (%ecx, QUOP(%esp), $4)
movl DVDND+8(%esp), %edx
xorl DVSOR+8(%esp), %edx
testl $0x8000, %edx
@@ -44,7 +42,6 @@
negl %eax
1: movl %eax, (%ecx)
- LEAVE
ret
END (BP_SYM (__remquol))
weak_alias (BP_SYM (__remquol), BP_SYM (remquol))
Modified: fsf/trunk/libc/sysdeps/i386/i486/strcat.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i486/strcat.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i486/strcat.S Wed Feb 20 00:02:37 2013
@@ -31,15 +31,12 @@
.text
ENTRY (BP_SYM (strcat))
- ENTER
pushl %edi /* Save callee-safe register. */
cfi_adjust_cfa_offset (4)
movl DEST(%esp), %edx
movl SRC(%esp), %ecx
- CHECK_BOUNDS_LOW (%edx, DEST(%esp))
- CHECK_BOUNDS_LOW (%ecx, SRC(%esp))
testb $0xff, (%ecx) /* Is source string empty? */
jz L(8) /* yes => return */
@@ -262,12 +259,10 @@
L(8): /* GKM FIXME: check high bounds */
movl DEST(%esp), %eax /* start address of destination is result */
- RETURN_BOUNDED_POINTER (DEST(%esp))
popl %edi /* restore saved register */
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (strcat))
libc_hidden_builtin_def (strcat)
Modified: fsf/trunk/libc/sysdeps/i386/i486/strlen.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i486/strlen.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i486/strlen.S Wed Feb 20 00:02:37 2013
@@ -28,10 +28,8 @@
.text
ENTRY (BP_SYM (strlen))
- ENTER
movl STR(%esp), %ecx
- CHECK_BOUNDS_LOW (%ecx, STR(%esp))
movl %ecx, %eax /* duplicate it */
andl $3, %ecx /* mask alignment bits */
@@ -129,10 +127,8 @@
jz L(2) /* yes => return pointer */
incl %eax /* increment pointer */
-L(2): CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- subl STR(%esp), %eax /* compute difference to string start */
+L(2): subl STR(%esp), %eax /* compute difference to string start */
- LEAVE
ret
END (BP_SYM (strlen))
libc_hidden_builtin_def (strlen)
Modified: fsf/trunk/libc/sysdeps/i386/i586/add_n.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/add_n.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/add_n.S Wed Feb 20 00:02:37 2013
@@ -30,7 +30,6 @@
.text
ENTRY (BP_SYM (__mpn_add_n))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -48,13 +47,6 @@
movl S2(%esp),%ebx
cfi_rel_offset (ebx, 0)
movl SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ecx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx)
- shrl $2, %ecx
-#endif
movl (%ebx),%ebp
cfi_rel_offset (ebp, 4)
@@ -149,6 +141,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_add_n))
Modified: fsf/trunk/libc/sysdeps/i386/i586/addmul_1.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/addmul_1.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/addmul_1.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (__mpn_addmul_1))
- ENTER
pushl %res_ptr
cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@
movl SIZE(%esp), %size
movl S2LIMB(%esp), %s2_limb
cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
- shll $2, %size /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
- CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
- shrl $2, %size
-#endif
leal (%res_ptr,%size,4), %res_ptr
leal (%s1_ptr,%size,4), %s1_ptr
negl %size
@@ -98,7 +91,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (res_ptr)
- LEAVE
ret
#undef size
END (BP_SYM (__mpn_addmul_1))
Modified: fsf/trunk/libc/sysdeps/i386/i586/lshift.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/lshift.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/lshift.S Wed Feb 20 00:02:37 2013
@@ -29,7 +29,6 @@
.text
ENTRY (BP_SYM (__mpn_lshift))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -48,12 +47,6 @@
movl SIZE(%esp),%ebx
cfi_rel_offset (ebx, 0)
movl CNT(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ebx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ebx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %ebx)
- shrl $2, %ebx
-#endif
/* We can use faster code for shift-by-1 under certain conditions. */
cmp $1,%ecx
@@ -155,7 +148,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
/* We loop from least significant end of the arrays, which is only
@@ -261,6 +253,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_lshift))
Modified: fsf/trunk/libc/sysdeps/i386/i586/memcpy.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/memcpy.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/memcpy.S Wed Feb 20 00:02:37 2013
@@ -42,7 +42,6 @@
END (__memcpy_chk)
#endif
ENTRY (BP_SYM (memcpy))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -54,8 +53,6 @@
movl SRC(%esp), %esi
cfi_rel_offset (esi, 0)
movl LEN(%esp), %ecx
- CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
movl %edi, %eax
/* We need this in any case. */
@@ -127,7 +124,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (memcpy))
#if !MEMPCPY_P
Modified: fsf/trunk/libc/sysdeps/i386/i586/memset.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/memset.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/memset.S Wed Feb 20 00:02:37 2013
@@ -45,7 +45,6 @@
END (__memset_chk)
#endif
ENTRY (BP_SYM (memset))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -53,7 +52,6 @@
movl DEST(%esp), %edi
cfi_rel_offset (edi, 0)
movl LEN(%esp), %edx
- CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %edx)
#if BZERO_P
xorl %eax, %eax /* we fill with 0 */
#else
@@ -111,13 +109,11 @@
#if !BZERO_P
/* Load result (only if used as memset). */
movl DEST(%esp), %eax /* start address of destination is result */
- RETURN_BOUNDED_POINTER (DEST(%esp))
#endif
popl %edi
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
#if BZERO_P
ret
#else
Modified: fsf/trunk/libc/sysdeps/i386/i586/mul_1.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/mul_1.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/mul_1.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (__mpn_mul_1))
- ENTER
pushl %res_ptr
cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@
movl SIZE(%esp), %size
movl S2LIMB(%esp), %s2_limb
cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
- shll $2, %size /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
- CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
- shrl $2, %size
-#endif
leal (%res_ptr,%size,4), %res_ptr
leal (%s1_ptr,%size,4), %s1_ptr
negl %size
@@ -94,7 +87,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (res_ptr)
- LEAVE
ret
#undef size
END (BP_SYM (__mpn_mul_1))
Modified: fsf/trunk/libc/sysdeps/i386/i586/rshift.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/rshift.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/rshift.S Wed Feb 20 00:02:37 2013
@@ -29,7 +29,6 @@
.text
ENTRY (BP_SYM (__mpn_rshift))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -48,12 +47,6 @@
movl SIZE(%esp),%ebx
cfi_rel_offset (ebx, 0)
movl CNT(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ebx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ebx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %ebx)
- shrl $2, %ebx
-#endif
/* We can use faster code for shift-by-1 under certain conditions. */
cmp $1,%ecx
@@ -152,7 +145,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
/* We loop from least significant end of the arrays, which is only
@@ -261,6 +253,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_rshift))
Modified: fsf/trunk/libc/sysdeps/i386/i586/strchr.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/strchr.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/strchr.S Wed Feb 20 00:02:37 2013
@@ -43,7 +43,6 @@
.text
ENTRY (BP_SYM (strchr))
- ENTER
pushl %edi /* Save callee-safe registers. */
cfi_adjust_cfa_offset (-4)
@@ -57,7 +56,6 @@
movl STR(%esp), %eax
movl CHR(%esp), %edx
- CHECK_BOUNDS_LOW (%eax, STR(%esp))
movl %eax, %edi /* duplicate string pointer for later */
cfi_rel_offset (edi, 12)
@@ -82,7 +80,7 @@
jp L(0) /* exactly two bits set */
xorb (%eax), %cl /* is byte the one we are looking for? */
- jz L(2) /* yes => return pointer */
+ jz L(out) /* yes => return pointer */
xorb %dl, %cl /* load single byte and test for NUL */
je L(3) /* yes => return NULL */
@@ -91,7 +89,7 @@
incl %eax
cmpb %cl, %dl /* is byte == C? */
- je L(2) /* aligned => return pointer */
+ je L(out) /* aligned => return pointer */
cmpb $0, %cl /* is byte NUL? */
je L(3) /* yes => return NULL */
@@ -104,7 +102,7 @@
L(0): movb (%eax), %cl /* load single byte */
cmpb %cl, %dl /* is byte == C? */
- je L(2) /* aligned => return pointer */
+ je L(out) /* aligned => return pointer */
cmpb $0, %cl /* is byte NUL? */
je L(3) /* yes => return NULL */
@@ -274,23 +272,21 @@
L(5): subl $4, %eax /* adjust pointer */
testb %bl, %bl /* first byte == C? */
- jz L(2) /* yes => return pointer */
+ jz L(out) /* yes => return pointer */
incl %eax /* increment pointer */
testb %bh, %bh /* second byte == C? */
- jz L(2) /* yes => return pointer */
+ jz L(out) /* yes => return pointer */
shrl $16, %ebx /* make upper bytes accessible */
incl %eax /* increment pointer */
cmp $0, %bl /* third byte == C */
- je L(2) /* yes => return pointer */
-
- incl %eax /* increment pointer */
-
-L(2): CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- RETURN_BOUNDED_POINTER (STR(%esp))
+ je L(out) /* yes => return pointer */
+
+ incl %eax /* increment pointer */
+
L(out): popl %ebp /* restore saved registers */
cfi_adjust_cfa_offset (-4)
cfi_restore (ebp)
@@ -305,7 +301,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
cfi_adjust_cfa_offset (16)
@@ -318,7 +313,7 @@
L(4): subl $4, %eax /* adjust pointer */
cmpb %dl, %cl /* first byte == C? */
- je L(2) /* yes => return pointer */
+ je L(out) /* yes => return pointer */
cmpb $0, %cl /* first byte == NUL? */
je L(3) /* yes => return NULL */
@@ -326,7 +321,7 @@
incl %eax /* increment pointer */
cmpb %dl, %ch /* second byte == C? */
- je L(2) /* yes => return pointer */
+ je L(out) /* yes => return pointer */
cmpb $0, %ch /* second byte == NUL? */
je L(3) /* yes => return NULL */
@@ -335,7 +330,7 @@
incl %eax /* increment pointer */
cmpb %dl, %cl /* third byte == C? */
- je L(2) /* yes => return pointer */
+ je L(out) /* yes => return pointer */
cmpb $0, %cl /* third byte == NUL? */
je L(3) /* yes => return NULL */
@@ -344,10 +339,9 @@
/* The test four the fourth byte is necessary! */
cmpb %dl, %ch /* fourth byte == C? */
- je L(2) /* yes => return pointer */
+ je L(out) /* yes => return pointer */
L(3): xorl %eax, %eax
- RETURN_NULL_BOUNDED_POINTER
jmp L(out)
END (BP_SYM (strchr))
Modified: fsf/trunk/libc/sysdeps/i386/i586/strcpy.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/strcpy.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/strcpy.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (STRCPY))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -48,8 +47,6 @@
cfi_rel_offset (edi, 8)
movl SRC(%esp), %esi
cfi_rel_offset (esi, 4)
- CHECK_BOUNDS_LOW (%edi, DEST(%esp))
- CHECK_BOUNDS_LOW (%esi, SRC(%esp))
xorl %eax, %eax
leal -1(%esi), %ecx
@@ -158,7 +155,6 @@
#else
movl DEST(%esp), %eax
#endif
- RETURN_BOUNDED_POINTER (DEST(%esp))
popl %ebx
cfi_adjust_cfa_offset (-4)
cfi_restore (ebx)
@@ -169,7 +165,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (STRCPY))
#ifndef USE_AS_STPCPY
Modified: fsf/trunk/libc/sysdeps/i386/i586/strlen.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/strlen.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/strlen.S Wed Feb 20 00:02:37 2013
@@ -41,10 +41,8 @@
.text
ENTRY (BP_SYM (strlen))
- ENTER
movl STR(%esp), %eax
- CHECK_BOUNDS_LOW (%eax, STR(%esp))
movl $3, %edx /* load mask (= 3) */
andl %eax, %edx /* separate last two bits of address */
@@ -178,11 +176,9 @@
incl %eax /* increment pointer */
-L(2): CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- subl STR(%esp), %eax /* now compute the length as difference
+L(2): subl STR(%esp), %eax /* now compute the length as difference
between start and terminating NUL
character */
- LEAVE
ret
END (BP_SYM (strlen))
libc_hidden_builtin_def (strlen)
Modified: fsf/trunk/libc/sysdeps/i386/i586/sub_n.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/sub_n.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/sub_n.S Wed Feb 20 00:02:37 2013
@@ -30,7 +30,6 @@
.text
ENTRY (BP_SYM (__mpn_sub_n))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -48,13 +47,6 @@
movl S2(%esp),%ebx
cfi_rel_offset (ebx, 0)
movl SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ecx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx)
- shrl $2, %ecx
-#endif
movl (%ebx),%ebp
cfi_rel_offset (ebp, 4)
@@ -149,6 +141,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_sub_n))
Modified: fsf/trunk/libc/sysdeps/i386/i586/submul_1.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i586/submul_1.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i586/submul_1.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (__mpn_submul_1))
- ENTER
pushl %res_ptr
cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@
movl SIZE(%esp), %size
movl S2LIMB(%esp), %s2_limb
cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
- shll $2, %sizeP /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
- CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
- shrl $2, %sizeP
-#endif
leal (%res_ptr,%size,4), %res_ptr
leal (%s1_ptr,%size,4), %s1_ptr
negl %size
@@ -98,7 +91,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (res_ptr)
- LEAVE
ret
#undef size
END (BP_SYM (__mpn_submul_1))
Modified: fsf/trunk/libc/sysdeps/i386/i686/add_n.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/add_n.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/add_n.S Wed Feb 20 00:02:37 2013
@@ -34,7 +34,6 @@
ret
#endif
ENTRY (BP_SYM (__mpn_add_n))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -47,13 +46,6 @@
cfi_rel_offset (esi, 0)
movl S2(%esp),%edx
movl SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ecx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%edx, S2(%esp), %ecx)
- shrl $2, %ecx
-#endif
movl %ecx,%eax
shrl $3,%ecx /* compute count for unrolled loop */
negl %eax
@@ -116,6 +108,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_add_n))
Modified: fsf/trunk/libc/sysdeps/i386/i686/memcmp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/memcmp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/memcmp.S Wed Feb 20 00:02:37 2013
@@ -26,9 +26,9 @@
#define BLK2 BLK1+PTR_SIZE
#define LEN BLK2+PTR_SIZE
#define ENTRANCE pushl %ebx; cfi_adjust_cfa_offset (4); \
- cfi_rel_offset (ebx, 0); ENTER
+ cfi_rel_offset (ebx, 0)
#define RETURN popl %ebx; cfi_adjust_cfa_offset (-4); \
- cfi_restore (ebx); LEAVE; ret
+ cfi_restore (ebx); ret
/* Load an entry in a jump table into EBX. TABLE is a jump table
with relative offsets. INDEX is a register contains the index
Modified: fsf/trunk/libc/sysdeps/i386/i686/memcpy.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/memcpy.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/memcpy.S Wed Feb 20 00:02:37 2013
@@ -38,7 +38,6 @@
END_CHK (__memcpy_chk)
#endif
ENTRY (BP_SYM (memcpy))
- ENTER
movl %edi, %eax
movl DEST(%esp), %edi
@@ -81,9 +80,7 @@
.Lend: movl %eax, %edi
movl %edx, %esi
movl DEST(%esp), %eax
- RETURN_BOUNDED_POINTER (DEST(%esp))
- LEAVE
RET_PTR
/* When we come here the pointers do not have the same
Modified: fsf/trunk/libc/sysdeps/i386/i686/memmove.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/memmove.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/memmove.S Wed Feb 20 00:02:37 2013
@@ -47,7 +47,6 @@
#endif
ENTRY (BP_SYM (memmove))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -58,8 +57,6 @@
movl %esi, %edx
movl SRC(%esp), %esi
cfi_register (esi, edx)
- CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
movl %edi, %eax
subl %esi, %eax
@@ -79,14 +76,12 @@
cfi_restore (esi)
#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
- RETURN_BOUNDED_POINTER (DEST(%esp))
#endif
popl %edi
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
cfi_adjust_cfa_offset (4)
@@ -113,7 +108,6 @@
cfi_restore (esi)
#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
- RETURN_BOUNDED_POINTER (DEST(%esp))
#endif
cld
@@ -121,7 +115,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (memmove))
#ifndef USE_AS_BCOPY
Modified: fsf/trunk/libc/sysdeps/i386/i686/mempcpy.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/mempcpy.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/mempcpy.S Wed Feb 20 00:02:37 2013
@@ -38,17 +38,14 @@
END_CHK (__mempcpy_chk)
#endif
ENTRY (BP_SYM (__mempcpy))
- ENTER
movl LEN(%esp), %ecx
movl %edi, %eax
cfi_register (edi, eax)
movl DEST(%esp), %edi
- CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
movl %esi, %edx
cfi_register (esi, edx)
movl SRC(%esp), %esi
- CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
cld
shrl $1, %ecx
jnc 1f
@@ -62,9 +59,7 @@
cfi_restore (edi)
movl %edx, %esi
cfi_restore (esi)
- RETURN_BOUNDED_POINTER (DEST(%esp))
- LEAVE
RET_PTR
END (BP_SYM (__mempcpy))
libc_hidden_def (BP_SYM (__mempcpy))
Modified: fsf/trunk/libc/sysdeps/i386/i686/memset.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/memset.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/memset.S Wed Feb 20 00:02:37 2013
@@ -46,14 +46,12 @@
END_CHK (__memset_chk)
#endif
ENTRY (BP_SYM (memset))
- ENTER
cld
pushl %edi
cfi_adjust_cfa_offset (4)
movl DEST(%esp), %edx
movl LEN(%esp), %ecx
- CHECK_BOUNDS_BOTH_WIDE (%edx, DEST(%esp), %ecx)
#if BZERO_P
xorl %eax, %eax /* fill with 0 */
#else
@@ -90,13 +88,11 @@
1:
#if !BZERO_P
movl DEST(%esp), %eax /* start address of destination is result */
- RETURN_BOUNDED_POINTER (DEST(%esp))
#endif
popl %edi
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
#if BZERO_P
ret
#else
Modified: fsf/trunk/libc/sysdeps/i386/i686/strcmp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/strcmp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/strcmp.S Wed Feb 20 00:02:37 2013
@@ -28,12 +28,9 @@
.text
ENTRY (BP_SYM (strcmp))
- ENTER
movl STR1(%esp), %ecx
movl STR2(%esp), %edx
- CHECK_BOUNDS_LOW (%ecx, STR1(%esp))
- CHECK_BOUNDS_LOW (%edx, STR2(%esp))
L(oop): movb (%ecx), %al
cmpb (%edx), %al
@@ -46,26 +43,12 @@
xorl %eax, %eax
/* when strings are equal, pointers rest one beyond
the end of the NUL terminators. */
- CHECK_BOUNDS_HIGH (%ecx, STR1(%esp), jbe)
- CHECK_BOUNDS_HIGH (%edx, STR2(%esp), jbe)
- LEAVE
ret
-#ifndef __BOUNDED_POINTERS__
L(neq): movl $1, %eax
movl $-1, %ecx
cmovbl %ecx, %eax
-#else
-L(neq): movl $1, %eax
- ja L(chk)
- negl %eax
- /* When strings differ, pointers rest on
- the unequal characters. */
-L(chk): CHECK_BOUNDS_HIGH (%ecx, STR1(%esp), jb)
- CHECK_BOUNDS_HIGH (%edx, STR2(%esp), jb)
-#endif
- LEAVE
ret
END (BP_SYM (strcmp))
libc_hidden_builtin_def (strcmp)
Modified: fsf/trunk/libc/sysdeps/i386/i686/strtok.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/i686/strtok.S (original)
+++ fsf/trunk/libc/sysdeps/i386/i686/strtok.S Wed Feb 20 00:02:37 2013
@@ -46,11 +46,7 @@
.type save_ptr, @object
.size save_ptr, 4
save_ptr:
-# if __BOUNDED_POINTERS__
- .space 12
-# else
.space 4
-# endif
# ifdef PIC
# define SAVE_PTR save_ptr@GOTOFF(%ebx)
@@ -81,7 +77,6 @@
#endif
ENTRY (BP_SYM (FUNCTION))
- ENTER
#if !defined USE_AS_STRTOK_R && defined PIC
pushl %ebx /* Save PIC register. */
@@ -127,23 +122,7 @@
cmove %eax, %edx
testl %edx, %edx
jz L(returnNULL)
-#if __BOUNDED_POINTERS__
-# ifdef USE_AS_STRTOK_R
- movl SAVE(%esp), %ecx /* borrow %ecx for a moment */
-# endif
- je L(0)
- /* Save bounds of incoming non-NULL STR into save area. */
- movl 4+STR(%esp), %eax
- movl %eax, 4+SAVE_PTR
- movl 8+STR(%esp), %eax
- movl %eax, 8+SAVE_PTR
-L(0): CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
-# ifdef USE_AS_STRTOK_R
- xorl %ecx, %ecx /* restore %ecx to zero */
-# endif
-#endif
movl DELIM(%esp), %eax /* Get start of delimiter set. */
- CHECK_BOUNDS_LOW (%eax, DELIM(%esp))
/* For understanding the following code remember that %ecx == 0 now.
Although all the following instruction only modify %cl we always
@@ -151,17 +130,17 @@
L(2): movb (%eax), %cl /* get byte from stopset */
testb %cl, %cl /* is NUL char? */
- jz L(1_1) /* yes => start compare loop */
+ jz L(1) /* yes => start compare loop */
movb %cl, (%esp,%ecx) /* set corresponding byte in stopset table */
movb 1(%eax), %cl /* get byte from stopset */
testb $0xff, %cl /* is NUL char? */
- jz L(1_2) /* yes => start compare loop */
+ jz L(1) /* yes => start compare loop */
movb %cl, (%esp,%ecx) /* set corresponding byte in stopset table */
movb 2(%eax), %cl /* get byte from stopset */
testb $0xff, %cl /* is NUL char? */
- jz L(1_3) /* yes => start compare loop */
+ jz L(1) /* yes => start compare loop */
movb %cl, (%esp,%ecx) /* set corresponding byte in stopset table */
movb 3(%eax), %cl /* get byte from stopset */
@@ -170,16 +149,7 @@
testb $0xff, %cl /* is NUL char? */
jnz L(2) /* no => process next dword from stopset */
-#if __BOUNDED_POINTERS__
- jmp L(1_0) /* pointer is correct for bounds check */
-L(1_3): incl %eax /* adjust pointer for bounds check */
-L(1_2): incl %eax /* ditto */
-L(1_1): incl %eax /* ditto */
-L(1_0): CHECK_BOUNDS_HIGH (%eax, DELIM(%esp), jbe)
-#else
-L(1_3):; L(1_2):; L(1_1): /* fall through */
-#endif
- leal -4(%edx), %eax /* prepare loop */
+L(1): leal -4(%edx), %eax /* prepare loop */
/* We use a neat trick for the following loop. Normally we would
have to test for two termination conditions
@@ -253,8 +223,6 @@
movl SAVE(%esp), %ecx
#endif
movl %edx, SAVE_PTR
- CHECK_BOUNDS_HIGH (%edx, SAVE_PTR, jb)
- RETURN_BOUNDED_POINTER (SAVE_PTR)
L(epilogue):
/* Remove the stopset table. */
@@ -265,7 +233,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (ebx)
#endif
- LEAVE
RET_PTR
L(returnNULL):
@@ -274,7 +241,6 @@
movl SAVE(%esp), %ecx
#endif
movl %edx, SAVE_PTR
- RETURN_NULL_BOUNDED_POINTER
jmp L(epilogue)
END (BP_SYM (FUNCTION))
Modified: fsf/trunk/libc/sysdeps/i386/lshift.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/lshift.S (original)
+++ fsf/trunk/libc/sysdeps/i386/lshift.S Wed Feb 20 00:02:37 2013
@@ -29,7 +29,6 @@
.text
ENTRY (BP_SYM (__mpn_lshift))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -44,12 +43,6 @@
cfi_rel_offset (esi, 4)
movl SIZE(%esp),%edx
movl CNT(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %edx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %edx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %edx)
- shrl $2, %edx
-#endif
subl $4,%esi /* adjust s_ptr */
movl (%esi,%edx,4),%ebx /* read most significant limb */
@@ -92,7 +85,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
cfi_restore_state
@@ -109,6 +101,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_lshift))
Modified: fsf/trunk/libc/sysdeps/i386/memchr.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/memchr.S (original)
+++ fsf/trunk/libc/sysdeps/i386/memchr.S Wed Feb 20 00:02:37 2013
@@ -39,7 +39,6 @@
.text
ENTRY (BP_SYM (__memchr))
- ENTER
/* Save callee-safe registers used in this function. */
pushl %esi
@@ -53,7 +52,6 @@
movl CHR(%esp), %edx /* c: byte we are looking for. */
movl LEN(%esp), %esi /* len: length of memory block. */
cfi_rel_offset (esi, 4)
- CHECK_BOUNDS_LOW (%eax, STR(%esp))
/* If my must not test more than three characters test
them one by one. This is especially true for 0. */
@@ -312,23 +310,13 @@
incl %eax /* increment source pointer */
/* No further test needed we we know it is one of the four bytes. */
-L(9):
-#if __BOUNDED_POINTERS__
- CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- /* If RTN pointer is phony, don't copy return value into it. */
- movl RTN(%esp), %ecx
- testl %ecx, %ecx
- jz L(pop)
- RETURN_BOUNDED_POINTER (STR(%esp))
-#endif
-L(pop): popl %edi /* pop saved registers */
+L(9): popl %edi /* pop saved registers */
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
popl %esi
cfi_adjust_cfa_offset (-4)
cfi_restore (esi)
- LEAVE
RET_PTR
END (BP_SYM (__memchr))
Modified: fsf/trunk/libc/sysdeps/i386/memcmp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/memcmp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/memcmp.S Wed Feb 20 00:02:37 2013
@@ -28,7 +28,6 @@
.text
ENTRY (BP_SYM (memcmp))
- ENTER
pushl %esi /* Save callee-safe registers. */
cfi_adjust_cfa_offset (4)
@@ -40,8 +39,6 @@
cfi_rel_offset (esi, 0)
movl BLK2(%esp), %edi
movl LEN(%esp), %ecx
- CHECK_BOUNDS_LOW (%esi, BLK1(%esp))
- CHECK_BOUNDS_LOW (%edi, BLK2(%esp))
cld /* Set direction of comparison. */
@@ -64,15 +61,12 @@
Note that the following operation does not change 0xffffffff. */
orb $1, %al /* Change 0 to 1. */
-L(1): CHECK_BOUNDS_HIGH (%esi, BLK1(%esp), jbe)
- CHECK_BOUNDS_HIGH (%edi, BLK2(%esp), jbe)
- popl %esi /* Restore registers. */
+L(1): popl %esi /* Restore registers. */
cfi_adjust_cfa_offset (-4)
cfi_restore (esi)
movl %edx, %edi
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (memcmp))
Modified: fsf/trunk/libc/sysdeps/i386/mul_1.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/mul_1.S (original)
+++ fsf/trunk/libc/sysdeps/i386/mul_1.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (__mpn_mul_1))
- ENTER
pushl %res_ptr
cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@
movl SIZE(%esp), %size
movl S2LIMB(%esp), %s2_limb
cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
- shll $2, %size /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
- CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
- shrl $2, %size
-#endif
leal (%res_ptr,%size,4), %res_ptr
leal (%s1_ptr,%size,4), %s1_ptr
negl %size
@@ -90,7 +83,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (res_ptr)
- LEAVE
ret
#undef size
END (BP_SYM (__mpn_mul_1))
Modified: fsf/trunk/libc/sysdeps/i386/rawmemchr.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/rawmemchr.S (original)
+++ fsf/trunk/libc/sysdeps/i386/rawmemchr.S Wed Feb 20 00:02:37 2013
@@ -38,7 +38,6 @@
.text
ENTRY (BP_SYM (__rawmemchr))
- ENTER
/* Save callee-safe register used in this function. */
pushl %edi
@@ -48,7 +47,6 @@
/* Load parameters into registers. */
movl STR(%esp), %eax
movl CHR(%esp), %edx
- CHECK_BOUNDS_LOW (%eax, STR(%esp))
/* At the moment %edx contains C. What we need for the
algorithm is C in all bytes of the dword. Avoid
@@ -215,13 +213,10 @@
/* No further test needed since we know it is one of the four bytes. */
L(9):
- CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- RETURN_BOUNDED_POINTER (STR(%esp))
popl %edi /* pop saved register */
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (__rawmemchr))
Modified: fsf/trunk/libc/sysdeps/i386/rshift.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/rshift.S (original)
+++ fsf/trunk/libc/sysdeps/i386/rshift.S Wed Feb 20 00:02:37 2013
@@ -29,7 +29,6 @@
.text
ENTRY (BP_SYM (__mpn_rshift))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -44,12 +43,6 @@
cfi_rel_offset (esi, 4)
movl SIZE(%esp),%edx
movl CNT(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %edx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %edx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %edx)
- shrl $2, %edx
-#endif
leal -4(%edi,%edx,4),%edi
leal (%esi,%edx,4),%esi
negl %edx
@@ -94,7 +87,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
cfi_restore_state
@@ -111,6 +103,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_rshift))
Modified: fsf/trunk/libc/sysdeps/i386/setjmp.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/setjmp.S (original)
+++ fsf/trunk/libc/sysdeps/i386/setjmp.S Wed Feb 20 00:02:37 2013
@@ -28,10 +28,8 @@
#define SIGMSK JMPBUF+PTR_SIZE
ENTRY (BP_SYM (__sigsetjmp))
- ENTER
movl JMPBUF(%esp), %eax
- CHECK_BOUNDS_BOTH_WIDE (%eax, JMPBUF(%esp), $JB_SIZE)
/* Save registers. */
movl %ebx, (JB_BX*4)(%eax)
@@ -48,7 +46,6 @@
PTR_MANGLE (%ecx)
#endif
movl %ecx, (JB_PC*4)(%eax)
- LEAVE /* pop frame pointer to prepare for tail-call. */
movl %ebp, (JB_BP*4)(%eax) /* Save caller's frame pointer. */
#if defined NOT_IN_libc && defined IS_IN_rtld
Modified: fsf/trunk/libc/sysdeps/i386/stpcpy.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/stpcpy.S (original)
+++ fsf/trunk/libc/sysdeps/i386/stpcpy.S Wed Feb 20 00:02:37 2013
@@ -33,12 +33,9 @@
.text
ENTRY (BP_SYM (__stpcpy))
- ENTER
movl DEST(%esp), %eax
movl SRC(%esp), %ecx
- CHECK_BOUNDS_LOW (%eax, DEST(%esp))
- CHECK_BOUNDS_LOW (%ecx, SRC(%esp))
subl %eax, %ecx /* magic: reduce number of loop variants
to one using addressing mode */
@@ -84,10 +81,7 @@
L(4): incl %eax
L(3): incl %eax
L(2):
- CHECK_BOUNDS_HIGH (%eax, DEST(%esp), jb)
- RETURN_BOUNDED_POINTER (DEST(%esp))
- LEAVE
RET_PTR
END (BP_SYM (__stpcpy))
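The "magic" remarked on in the __stpcpy (and __stpncpy) hunks is the subl %eax, %ecx step: record the SRC-to-DEST distance once, then drive the copy loop with a single advancing pointer and let the addressing mode add the offset back on every load. A C sketch of the idea, with an invented name, done bytewise rather than a word at a time, and using uintptr_t arithmetic where the assembly simply subtracts registers:

#include <stdint.h>

char *
stpcpy_sketch (char *dest, const char *src)
{
  /* The subl %eax, %ecx step: one induction variable serves both strings.  */
  uintptr_t off = (uintptr_t) src - (uintptr_t) dest;

  for (;; ++dest)
    {
      char c = *(const char *) ((uintptr_t) dest + off);
      *dest = c;
      if (c == '\0')
        return dest;   /* stpcpy returns a pointer to the terminating NUL */
    }
}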
Modified: fsf/trunk/libc/sysdeps/i386/stpncpy.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/stpncpy.S (original)
+++ fsf/trunk/libc/sysdeps/i386/stpncpy.S Wed Feb 20 00:02:37 2013
@@ -36,7 +36,6 @@
.text
ENTRY (BP_SYM (__stpncpy))
- ENTER
pushl %esi
cfi_adjust_cfa_offset (4)
@@ -45,8 +44,6 @@
movl SRC(%esp), %esi
cfi_rel_offset (esi, 0)
movl LEN(%esp), %ecx
- CHECK_BOUNDS_LOW (%eax, DEST(%esp))
- CHECK_BOUNDS_LOW (%esi, SRC(%esp))
subl %eax, %esi /* magic: reduce number of loop variants
to one using addressing mode */
@@ -141,18 +138,10 @@
L(3): decl %ecx /* all bytes written? */
jnz L(8) /* no, then again */
-L(9):
-#if __BOUNDED_POINTERS__
- addl %eax, %esi /* undo magic: %esi now points beyond end of SRC */
- CHECK_BOUNDS_HIGH (%esi, SRC(%esp), jbe)
- CHECK_BOUNDS_HIGH (%eax, DEST(%esp), jbe)
- RETURN_BOUNDED_POINTER (DEST(%esp))
-#endif
- popl %esi /* restore saved register content */
+L(9): popl %esi /* restore saved register content */
cfi_adjust_cfa_offset (-4)
cfi_restore (esi)
- LEAVE
RET_PTR
END (BP_SYM (__stpncpy))
Modified: fsf/trunk/libc/sysdeps/i386/strchr.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strchr.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strchr.S Wed Feb 20 00:02:37 2013
@@ -31,14 +31,12 @@
.text
ENTRY (BP_SYM (strchr))
- ENTER
pushl %edi /* Save callee-safe registers used here. */
cfi_adjust_cfa_offset (4)
cfi_rel_offset (edi, 0)
movl STR(%esp), %eax
movl CHR(%esp), %edx
- CHECK_BOUNDS_LOW (%eax, STR(%esp))
/* At the moment %edx contains C. What we need for the
algorithm is C in all bytes of the dword. Avoid
@@ -243,12 +241,10 @@
L(2): /* Return NULL. */
xorl %eax, %eax
- RETURN_NULL_BOUNDED_POINTER
popl %edi /* restore saved register content */
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
cfi_adjust_cfa_offset (4)
@@ -285,13 +281,10 @@
incl %eax
L(6):
- CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- RETURN_BOUNDED_POINTER (STR(%esp))
popl %edi /* restore saved register content */
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (strchr))
Modified: fsf/trunk/libc/sysdeps/i386/strchrnul.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strchrnul.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strchrnul.S Wed Feb 20 00:02:37 2013
@@ -32,7 +32,6 @@
.text
ENTRY (BP_SYM (__strchrnul))
- ENTER
pushl %edi /* Save callee-safe registers used here. */
cfi_adjust_cfa_offset (4)
@@ -40,7 +39,6 @@
movl STR(%esp), %eax
movl CHR(%esp), %edx
- CHECK_BOUNDS_LOW (%eax, STR(%esp))
/* At the moment %edx contains CHR. What we need for the
algorithm is CHR in all bytes of the dword. Avoid
@@ -272,13 +270,10 @@
/* It must be in the fourth byte and it cannot be NUL. */
incl %eax
-L(6): CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- RETURN_BOUNDED_POINTER (STR(%esp))
- popl %edi /* restore saved register content */
+L(6): popl %edi /* restore saved register content */
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (__strchrnul))
Modified: fsf/trunk/libc/sysdeps/i386/strcspn.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strcspn.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strcspn.S Wed Feb 20 00:02:37 2013
@@ -31,11 +31,9 @@
.text
ENTRY (BP_SYM (strcspn))
- ENTER
movl STR(%esp), %edx
movl STOP(%esp), %eax
- CHECK_BOUNDS_LOW (%edx, STR(%esp))
/* First we create a table with flags for all possible characters.
For the ASCII (7bit/8bit) or ISO-8859-X character sets which are
@@ -236,11 +234,9 @@
L(4): addl $256, %esp /* remove stopset */
cfi_adjust_cfa_offset (-256)
- CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
subl %edx, %eax /* we have to return the number of valid
characters, so compute distance to first
non-valid character */
- LEAVE
ret
END (BP_SYM (strcspn))
libc_hidden_builtin_def (strcspn)
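strcspn, strspn and strpbrk all open the same way: a 256-entry flag table describing the stop set is built on the stack (the table the addl $256, %esp above tears down), after which the scan of STR is one table lookup per byte. A plain C sketch of strcspn in that style; the name is invented and the real scan is unrolled four bytes per iteration.

#include <stddef.h>

size_t
strcspn_sketch (const char *str, const char *stopset)
{
  unsigned char table[256] = { 0 };

  /* Flag every byte that occurs in the stop set; NUL is always flagged so
     the scan below also stops at the end of STR.  */
  table[0] = 1;
  for (const unsigned char *s = (const unsigned char *) stopset; *s != '\0'; ++s)
    table[*s] = 1;

  const unsigned char *p = (const unsigned char *) str;
  while (!table[*p])
    ++p;

  /* Number of leading bytes that are neither NUL nor in the stop set.  */
  return (size_t) (p - (const unsigned char *) str);
}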
Modified: fsf/trunk/libc/sysdeps/i386/strpbrk.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strpbrk.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strpbrk.S Wed Feb 20 00:02:37 2013
@@ -32,11 +32,9 @@
.text
ENTRY (BP_SYM (strpbrk))
- ENTER
movl STR(%esp), %edx
movl STOP(%esp), %eax
- CHECK_BOUNDS_LOW (%edx, STR(%esp))
/* First we create a table with flags for all possible characters.
For the ASCII (7bit/8bit) or ISO-8859-X character sets which are
@@ -238,18 +236,10 @@
L(4): addl $256, %esp /* remove stopset */
cfi_adjust_cfa_offset (-256)
- CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
orb %cl, %cl /* was last character NUL? */
jnz L(7) /* no => return pointer */
xorl %eax, %eax
- RETURN_NULL_BOUNDED_POINTER
-
- LEAVE
- RET_PTR
-
-L(7): RETURN_BOUNDED_POINTER (STR(%esp))
-
- LEAVE
- RET_PTR
+
+L(7): RET_PTR
END (BP_SYM (strpbrk))
libc_hidden_builtin_def (strpbrk)
Modified: fsf/trunk/libc/sysdeps/i386/strrchr.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strrchr.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strrchr.S Wed Feb 20 00:02:37 2013
@@ -31,7 +31,6 @@
.text
ENTRY (BP_SYM (strrchr))
- ENTER
pushl %edi /* Save callee-safe registers used here. */
cfi_adjust_cfa_offset (4)
@@ -43,7 +42,6 @@
movl STR(%esp), %esi
cfi_rel_offset (esi, 0)
movl CHR(%esp), %ecx
- CHECK_BOUNDS_LOW (%esi, STR(%esp))
/* At the moment %ecx contains C. What we need for the
algorithm is C in all bytes of the dword. Avoid
@@ -324,16 +322,13 @@
jne L(2) /* no => skip */
leal 3(%esi), %eax /* store address as result */
-L(2): CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
- RETURN_BOUNDED_POINTER (STR(%esp))
- popl %esi /* restore saved register content */
+L(2): popl %esi /* restore saved register content */
cfi_adjust_cfa_offset (-4)
cfi_restore (esi)
popl %edi
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
RET_PTR
END (BP_SYM (strrchr))
Modified: fsf/trunk/libc/sysdeps/i386/strspn.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strspn.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strspn.S Wed Feb 20 00:02:37 2013
@@ -31,11 +31,9 @@
.text
ENTRY (BP_SYM (strspn))
- ENTER
movl STR(%esp), %edx
movl SKIP(%esp), %eax
- CHECK_BOUNDS_LOW (%edx, STR(%esp))
/* First we create a table with flags for all possible characters.
For the ASCII (7bit/8bit) or ISO-8859-X character sets which are
@@ -236,11 +234,9 @@
L(4): addl $256, %esp /* remove stopset */
cfi_adjust_cfa_offset (-256)
- CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
subl %edx, %eax /* we have to return the number of valid
characters, so compute distance to first
non-valid character */
- LEAVE
ret
END (BP_SYM (strspn))
libc_hidden_builtin_def (strspn)
Modified: fsf/trunk/libc/sysdeps/i386/strtok.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/strtok.S (original)
+++ fsf/trunk/libc/sysdeps/i386/strtok.S Wed Feb 20 00:02:37 2013
@@ -46,11 +46,7 @@
.type save_ptr, @object
.size save_ptr, 4
save_ptr:
-# if __BOUNDED_POINTERS__
- .space 12
-# else
.space 4
-# endif
# ifdef PIC
# define SAVE_PTR save_ptr@GOTOFF(%ebx)
@@ -69,11 +65,9 @@
.text
ENTRY (BP_SYM (FUNCTION))
- ENTER
movl STR(%esp), %edx
movl DELIM(%esp), %eax
- CHECK_BOUNDS_LOW (%eax, DELIM(%esp))
#if !defined USE_AS_STRTOK_R && defined PIC
pushl %ebx /* Save PIC register. */
@@ -90,22 +84,7 @@
/* If the pointer is NULL we have to use the stored value of
the last run. */
cmpl $0, %edx
-#if __BOUNDED_POINTERS__
- movl SAVE(%esp), %ecx
- je L(0)
- /* Save bounds of incoming non-NULL STR into save area. */
- movl 4+STR(%esp), %eax
- movl %eax, 4+SAVE_PTR
- movl 8+STR(%esp), %eax
- movl %eax, 8+SAVE_PTR
- CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
- jmp L(1)
-L(0): movl SAVE_PTR, %edx
- CHECK_BOUNDS_LOW (%edx, SAVE_PTR)
- jmp L(1)
-#else
jne L(1)
-#endif
#ifdef USE_AS_STRTOK_R
/* The value is stored in the third argument. */
@@ -267,12 +246,12 @@
movb 1(%eax), %cl /* get byte from stopset */
testb $0xff, %cl /* is NUL char? */
- jz L(1_2) /* yes => start compare loop */
+ jz L(1_1) /* yes => start compare loop */
movb %cl, (%esp,%ecx) /* set corresponding byte in stopset table */
movb 2(%eax), %cl /* get byte from stopset */
testb $0xff, %cl /* is NUL char? */
- jz L(1_3) /* yes => start compare loop */
+ jz L(1_1) /* yes => start compare loop */
movb %cl, (%esp,%ecx) /* set corresponding byte in stopset table */
movb 3(%eax), %cl /* get byte from stopset */
@@ -281,16 +260,7 @@
testb $0xff, %cl /* is NUL char? */
jnz L(2) /* no => process next dword from stopset */
-#if __BOUNDED_POINTERS__
- jmp L(1_0) /* pointer is correct for bounds check */
-L(1_3): incl %eax /* adjust pointer for bounds check */
-L(1_2): incl %eax /* ditto */
-L(1_1): incl %eax /* ditto */
-L(1_0): CHECK_BOUNDS_HIGH (%eax, DELIM(%esp), jbe)
-#else
-L(1_3):; L(1_2):; L(1_1): /* fall through */
-#endif
- leal -4(%edx), %eax /* prepare loop */
+L(1_1): leal -4(%edx), %eax /* prepare loop */
/* We use a neat trick for the following loop. Normally we would
have to test for two termination conditions
@@ -370,8 +340,6 @@
movl SAVE(%esp), %ecx
#endif
movl %edx, SAVE_PTR
- CHECK_BOUNDS_HIGH (%edx, SAVE_PTR, jb)
- RETURN_BOUNDED_POINTER (SAVE_PTR)
L(epilogue):
#if !defined USE_AS_STRTOK_R && defined PIC
@@ -379,7 +347,6 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (ebx)
#endif
- LEAVE
RET_PTR
L(returnNULL):
@@ -388,7 +355,6 @@
movl SAVE(%esp), %ecx
#endif
movl %edx, SAVE_PTR
- RETURN_NULL_BOUNDED_POINTER
jmp L(epilogue)
END (BP_SYM (FUNCTION))
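Besides dropping the checks, the strtok change shrinks save_ptr from the 12 bytes needed to hold a bounded pointer together with its two bounds back to a single 4-byte pointer, which is all the state plain strtok carries between calls. A compact C sketch of that state handling; the name is invented and strspn/strcspn stand in for the unrolled table scans of the assembly.

#include <stddef.h>
#include <string.h>

static char *save_ptr;   /* the 4-byte save_ptr object, in C form */

char *
strtok_sketch (char *str, const char *delim)
{
  if (str == NULL)
    str = save_ptr;            /* NULL STR: resume from the stored value */

  str += strspn (str, delim);  /* skip leading delimiters */
  if (*str == '\0')
    {
      save_ptr = str;
      return NULL;             /* the L(returnNULL) path */
    }

  char *token = str;
  str = token + strcspn (token, delim);
  if (*str != '\0')
    *str++ = '\0';             /* terminate the token */
  save_ptr = str;              /* where the next NULL-STR call resumes */
  return token;
}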
Modified: fsf/trunk/libc/sysdeps/i386/sub_n.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/sub_n.S (original)
+++ fsf/trunk/libc/sysdeps/i386/sub_n.S Wed Feb 20 00:02:37 2013
@@ -30,7 +30,6 @@
.text
ENTRY (BP_SYM (__mpn_sub_n))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -43,13 +42,6 @@
cfi_rel_offset (esi, 0)
movl S2(%esp),%edx
movl SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
- shll $2, %ecx /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%edx, S2(%esp), %ecx)
- shrl $2, %ecx
-#endif
movl %ecx,%eax
shrl $3,%ecx /* compute count for unrolled loop */
negl %eax
@@ -117,6 +109,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_sub_n))
Modified: fsf/trunk/libc/sysdeps/i386/submul_1.S
==============================================================================
--- fsf/trunk/libc/sysdeps/i386/submul_1.S (original)
+++ fsf/trunk/libc/sysdeps/i386/submul_1.S Wed Feb 20 00:02:37 2013
@@ -35,7 +35,6 @@
.text
ENTRY (BP_SYM (__mpn_submul_1))
- ENTER
pushl %edi
cfi_adjust_cfa_offset (4)
@@ -54,12 +53,6 @@
movl S1(%esp), %s1_ptr
movl SIZE(%esp), %sizeP
movl S2LIMB(%esp), %s2_limb
-#if __BOUNDED_POINTERS__
- shll $2, %sizeP /* convert limbs to bytes */
- CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
- CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
- shrl $2, %sizeP
-#endif
leal (%res_ptr,%sizeP,4), %res_ptr
leal (%s1_ptr,%sizeP,4), %s1_ptr
negl %sizeP
@@ -91,6 +84,5 @@
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
- LEAVE
ret
END (BP_SYM (__mpn_submul_1))