Port-powerpc archive
Re: Testers needed for GCC patch
On Tue, 7 Mar 2000 16:16:54 +0100
Erik Bertelsen <erik%mediator.uni-c.dk@localhost> wrote:
> Well, not too well -- lots of:
Oops, small goof. Please try this patch instead.
-- Jason R. Thorpe <thorpej%nas.nasa.gov@localhost>
Index: expmed.c
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/expmed.c,v
retrieving revision 1.1.1.2
diff -c -r1.1.1.2 expmed.c
*** expmed.c 1998/08/16 17:37:13 1.1.1.2
--- expmed.c 2000/03/10 01:16:46
***************
*** 54,60 ****
static int sdiv_pow2_cheap, smod_pow2_cheap;
#ifndef SLOW_UNALIGNED_ACCESS
! #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* For compilers that support multiple targets with different word sizes,
--- 54,60 ----
static int sdiv_pow2_cheap, smod_pow2_cheap;
#ifndef SLOW_UNALIGNED_ACCESS
! #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* For compilers that support multiple targets with different word sizes,
***************
*** 270,276 ****
BITPOS is 0 in a REG bigger than a word. */
if (GET_MODE_SIZE (fieldmode) >= UNITS_PER_WORD
&& (GET_CODE (op0) != MEM
! || ! SLOW_UNALIGNED_ACCESS
|| (offset * BITS_PER_UNIT % bitsize == 0
&& align % GET_MODE_SIZE (fieldmode) == 0))
&& bitpos == 0 && bitsize == GET_MODE_BITSIZE (fieldmode))
--- 270,276 ----
BITPOS is 0 in a REG bigger than a word. */
if (GET_MODE_SIZE (fieldmode) >= UNITS_PER_WORD
&& (GET_CODE (op0) != MEM
! || ! SLOW_UNALIGNED_ACCESS (fieldmode, align)
|| (offset * BITS_PER_UNIT % bitsize == 0
&& align % GET_MODE_SIZE (fieldmode) == 0))
&& bitpos == 0 && bitsize == GET_MODE_BITSIZE (fieldmode))
***************
*** 442,448 ****
bestmode = GET_MODE (op0);
if (bestmode == VOIDmode
! || (SLOW_UNALIGNED_ACCESS && GET_MODE_SIZE (bestmode) > align))
goto insv_loses;
/* Adjust address to point to the containing unit of that mode. */
--- 442,449 ----
bestmode = GET_MODE (op0);
if (bestmode == VOIDmode
! || (SLOW_UNALIGNED_ACCESS (bestmode, align)
! && GET_MODE_SIZE (bestmode) > align))
goto insv_loses;
/* Adjust address to point to the containing unit of that mode. */
***************
*** 565,571 ****
int all_zero = 0;
int all_one = 0;
! if (! SLOW_UNALIGNED_ACCESS)
struct_align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
/* There is a case not handled here:
--- 566,572 ----
int all_zero = 0;
int all_one = 0;
! if (! SLOW_UNALIGNED_ACCESS (word_mode, struct_align))
struct_align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
/* There is a case not handled here:
***************
*** 947,953 ****
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
GET_MODE_BITSIZE (GET_MODE (op0))))
|| (GET_CODE (op0) == MEM
! && (! SLOW_UNALIGNED_ACCESS
|| (offset * BITS_PER_UNIT % bitsize == 0
&& align * BITS_PER_UNIT % bitsize == 0))))
&& ((bitsize >= BITS_PER_WORD && bitsize == GET_MODE_BITSIZE (mode)
--- 948,954 ----
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
GET_MODE_BITSIZE (GET_MODE (op0))))
|| (GET_CODE (op0) == MEM
! && (! SLOW_UNALIGNED_ACCESS (mode, align)
|| (offset * BITS_PER_UNIT % bitsize == 0
&& align * BITS_PER_UNIT % bitsize == 0))))
&& ((bitsize >= BITS_PER_WORD && bitsize == GET_MODE_BITSIZE (mode)
***************
*** 1118,1124 ****
bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode
! || (SLOW_UNALIGNED_ACCESS && GET_MODE_SIZE (bestmode) > align))
goto extzv_loses;
/* Compute offset as multiple of this unit,
--- 1119,1126 ----
bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode
! || (SLOW_UNALIGNED_ACCESS (bestmode, align)
! && GET_MODE_SIZE (bestmode) > align))
goto extzv_loses;
/* Compute offset as multiple of this unit,
***************
*** 1254,1260 ****
bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode
! || (SLOW_UNALIGNED_ACCESS && GET_MODE_SIZE (bestmode) > align))
goto extv_loses;
/* Compute offset as multiple of this unit,
--- 1256,1263 ----
bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode
! || (SLOW_UNALIGNED_ACCESS (bestmode, align)
! && GET_MODE_SIZE (bestmode) > align))
goto extv_loses;
/* Compute offset as multiple of this unit,
Index: expr.c
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/expr.c,v
retrieving revision 1.5
diff -c -r1.5 expr.c
*** expr.c 1999/07/29 09:09:18 1.5
--- expr.c 2000/03/10 01:16:52
***************
*** 228,234 ****
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
! #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows. */
--- 228,234 ----
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
! #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows. */
***************
*** 1471,1477 ****
data.to_addr = copy_addr_to_reg (to_addr);
}
! if (! SLOW_UNALIGNED_ACCESS
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
--- 1471,1477 ----
data.to_addr = copy_addr_to_reg (to_addr);
}
! if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
***************
*** 1516,1522 ****
register int n_insns = 0;
int max_size = MOVE_MAX + 1;
! if (! SLOW_UNALIGNED_ACCESS
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
--- 1516,1522 ----
register int n_insns = 0;
int max_size = MOVE_MAX + 1;
! if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
***************
*** 2205,2211 ****
data.to_addr = copy_addr_to_reg (to_addr);
}
! if (! SLOW_UNALIGNED_ACCESS
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
--- 2205,2211 ----
data.to_addr = copy_addr_to_reg (to_addr);
}
! if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
***************
*** 2806,2812 ****
/* Here we avoid the case of a structure whose weak alignment
forces many pushes of a small amount of data,
and such small pushes do rounding that causes trouble. */
! && ((! SLOW_UNALIGNED_ACCESS)
|| align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
|| PUSH_ROUNDING (align) == align)
&& PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
--- 2806,2812 ----
/* Here we avoid the case of a structure whose weak alignment
forces many pushes of a small amount of data,
and such small pushes do rounding that causes trouble. */
! && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
|| align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
|| PUSH_ROUNDING (align) == align)
&& PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
***************
*** 4491,4499 ****
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
! || (SLOW_UNALIGNED_ACCESS
&& align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
! || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
--- 4491,4500 ----
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
! || (SLOW_UNALIGNED_ACCESS (mode, align)
&& align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
! || (SLOW_UNALIGNED_ACCESS (mode, align)
! && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
***************
*** 6326,6332 ****
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
! || (SLOW_UNALIGNED_ACCESS
&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
|| (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
{
--- 6327,6333 ----
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
! || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
|| (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
{
Index: tm.texi
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/tm.texi,v
retrieving revision 1.1.1.3
diff -c -r1.1.1.3 tm.texi
*** tm.texi 1999/04/06 15:08:35 1.1.1.3
--- tm.texi 2000/03/10 01:17:00
***************
*** 4706,4715 ****
and likewise for @code{HImode}.
@findex SLOW_UNALIGNED_ACCESS
! @item SLOW_UNALIGNED_ACCESS
! Define this macro to be the value 1 if unaligned accesses have a cost
! many times greater than aligned accesses, for example if they are
! emulated in a trap handler.
When this macro is non-zero, the compiler will act as if
@code{STRICT_ALIGNMENT} were non-zero when generating code for block
--- 4706,4716 ----
and likewise for @code{HImode}.
@findex SLOW_UNALIGNED_ACCESS
! @item SLOW_UNALIGNED_ACCESS (@var{mode}, @var{alignment})
! Define this macro to be the value 1 if memory accesses described by the
! @var{mode} and @var{alignment} parameters have a cost many times greater
! than aligned accesses, for example if they are emulated in a trap
! handler.
When this macro is non-zero, the compiler will act as if
@code{STRICT_ALIGNMENT} were non-zero when generating code for block
***************
*** 4717,4723 ****
Therefore, do not set this macro non-zero if unaligned accesses only add a
cycle or two to the time for a memory access.
! If the value of this macro is always zero, it need not be defined.
@findex DONT_REDUCE_ADDR
@item DONT_REDUCE_ADDR
--- 4718,4726 ----
Therefore, do not set this macro non-zero if unaligned accesses only add a
cycle or two to the time for a memory access.
! If the value of this macro is always zero, it need not be defined. If
! this macro is defined, it should produce a non-zero value when
! @code{STRICT_ALIGNMENT} is non-zero.
@findex DONT_REDUCE_ADDR
@item DONT_REDUCE_ADDR
Index: config/a29k/a29k.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/a29k/a29k.h,v
retrieving revision 1.1.1.2
diff -c -r1.1.1.2 a29k.h
*** a29k.h 1998/08/16 17:40:43 1.1.1.2
--- a29k.h 2000/03/10 01:17:02
***************
*** 219,225 ****
/* Set this non-zero if unaligned move instructions are extremely slow.
On the 29k, they trap. */
! #define SLOW_UNALIGNED_ACCESS 1
/* Standard register usage. */
--- 219,225 ----
/* Set this non-zero if unaligned move instructions are extremely slow.
On the 29k, they trap. */
! #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 1
/* Standard register usage. */
Index: config/alpha/alpha.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/alpha/alpha.h,v
retrieving revision 1.6
diff -c -r1.6 alpha.h
*** alpha.h 1999/04/06 16:04:14 1.6
--- alpha.h 2000/03/10 01:17:05
***************
*** 510,516 ****
On the Alpha, they trap. */
! #define SLOW_UNALIGNED_ACCESS 1
/* Standard register usage. */
--- 510,516 ----
On the Alpha, they trap. */
! #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 1
/* Standard register usage. */
Index: config/arm/thumb.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/arm/thumb.h,v
retrieving revision 1.1.1.2
diff -c -r1.1.1.2 thumb.h
*** thumb.h 1998/08/16 17:40:56 1.1.1.2
--- thumb.h 2000/03/10 01:17:07
***************
*** 956,962 ****
#define SLOW_BYTE_ACCESS 0
! #define SLOW_UNALIGNED_ACCESS 1
#define NO_FUNCTION_CSE 1
--- 956,962 ----
#define SLOW_BYTE_ACCESS 0
! #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 1
#define NO_FUNCTION_CSE 1
Index: config/gmicro/gmicro.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/gmicro/gmicro.h,v
retrieving revision 1.1.1.2
diff -c -r1.1.1.2 gmicro.h
*** gmicro.h 1998/08/16 17:41:23 1.1.1.2
--- gmicro.h 2000/03/10 01:17:09
***************
*** 168,174 ****
Unaligned data is allowed on Gmicro, though the access is slow. */
#define STRICT_ALIGNMENT 1
! #define SLOW_UNALIGNED_ACCESS 1
/* Make strings word-aligned so strcpy from constants will be faster. */
#define CONSTANT_ALIGNMENT(EXP, ALIGN) \
--- 168,174 ----
Unaligned data is allowed on Gmicro, though the access is slow. */
#define STRICT_ALIGNMENT 1
! #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 1
/* Make strings word-aligned so strcpy from constants will be faster. */
#define CONSTANT_ALIGNMENT(EXP, ALIGN) \
Index: config/i386/i386.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/i386/i386.h,v
retrieving revision 1.1.1.2
diff -c -r1.1.1.2 i386.h
*** i386.h 1998/08/16 17:41:45 1.1.1.2
--- i386.h 2000/03/10 01:17:13
***************
*** 2213,2219 ****
If the value of this macro is always zero, it need not be defined. */
! /* #define SLOW_UNALIGNED_ACCESS 0 */
/* Define this macro to inhibit strength reduction of memory
addresses. (On some machines, such strength reduction seems to do
--- 2213,2219 ----
If the value of this macro is always zero, it need not be defined. */
! /* #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 0 */
/* Define this macro to inhibit strength reduction of memory
addresses. (On some machines, such strength reduction seems to do
Index: config/rs6000/rs6000.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/rs6000/rs6000.h,v
retrieving revision 1.1.1.5
diff -c -r1.1.1.5 rs6000.h
*** rs6000.h 1999/04/06 15:12:34 1.1.1.5
--- rs6000.h 2000/03/10 01:17:17
***************
*** 637,642 ****
--- 637,650 ----
/* Non-zero if move instructions will actually fail to work
when given unaligned data. */
#define STRICT_ALIGNMENT 0
+
+ /* Define this macro to be the value 1 if unaligned accesses have a cost
+ many times greater than aligned accesses, for example if they are
+ emulated in a trap handler. */
+ #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \
+ ((((MODE) == SFmode || (MODE) == DFmode || (MODE) == DImode) \
+ && (ALIGN) < 4) ? 1 : 0)
+
/* Standard register usage. */
Index: config/rs6000/sysv4.h
===================================================================
RCS file: /cvsroot/gnusrc/gnu/dist/gcc/config/rs6000/sysv4.h,v
retrieving revision 1.1.1.2
diff -c -r1.1.1.2 sysv4.h
*** sysv4.h 1998/08/16 17:45:22 1.1.1.2
--- sysv4.h 2000/03/10 01:17:22
***************
*** 400,405 ****
--- 400,414 ----
#undef STRICT_ALIGNMENT
#define STRICT_ALIGNMENT (TARGET_STRICT_ALIGN || TARGET_LITTLE_ENDIAN)
+ /* Define this macro to be the value 1 if unaligned accesses have a cost
+ many times greater than aligned accesses, for example if they are
+ emulated in a trap handler. */
+ #undef SLOW_UNALIGNED_ACCESS
+ #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \
+ ((STRICT_ALIGNMENT \
+ || (((MODE) == SFmode || (MODE) == DFmode || (MODE) == DImode) \
+ && (ALIGN) < 4)) ? 1 : 0)
+
/* Alignment in bits of the stack boundary. Note, in order to allow building
one set of libraries with -mno-eabi instead of eabi libraries and non-eabi
versions, just use 64 as the stack boundary. */
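For anyone who wants to sanity-check the new rs6000 definition without doing a
full compiler build, here is a small standalone sketch (not part of the patch):
it copies the SLOW_UNALIGNED_ACCESS (MODE, ALIGN) logic from rs6000.h above,
using a stand-in enum in place of GCC's real machine modes, and prints the
result for a few mode/alignment combinations.

/* Standalone sketch of the rs6000.h definition above.  The enum is only
   a stand-in for GCC's machine modes; the macro body is copied from the
   patch.  */
#include <stdio.h>

enum machine_mode { SFmode, DFmode, DImode, SImode, HImode };

#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \
  ((((MODE) == SFmode || (MODE) == DFmode || (MODE) == DImode) \
    && (ALIGN) < 4) ? 1 : 0)

int
main (void)
{
  /* Floating-point and 64-bit integer accesses count as slow only when
     the known alignment is below 4 bytes.  */
  printf ("DImode, align 4: %d\n", SLOW_UNALIGNED_ACCESS (DImode, 4));
  printf ("DImode, align 2: %d\n", SLOW_UNALIGNED_ACCESS (DImode, 2));
  /* Ordinary integer modes are never flagged as slow here.  */
  printf ("SImode, align 1: %d\n", SLOW_UNALIGNED_ACCESS (SImode, 1));
  return 0;
}

Only the DImode access with 2-byte alignment should report 1; the other two
cases report 0, which is why ordinary word-aligned PowerPC code is unaffected
by the change.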