/SPARTAN/trunk/include/byteorder.h |
---|
0,0 → 1,52 |
/* |
* Copyright (C) 2005 Jakub Jermar |
* All rights reserved. |
* |
* Redistribution and use in source and binary forms, with or without |
* modification, are permitted provided that the following conditions |
* are met: |
* |
* - Redistributions of source code must retain the above copyright |
* notice, this list of conditions and the following disclaimer. |
* - Redistributions in binary form must reproduce the above copyright |
* notice, this list of conditions and the following disclaimer in the |
* documentation and/or other materials provided with the distribution. |
* - The name of the author may not be used to endorse or promote products |
* derived from this software without specific prior written permission. |
* |
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
*/ |
#ifndef __BYTEORDER_H__ |
#define __BYTEORDER_H__ |
/** Reverse the byte order of a 64-bit value.
 *
 * @param n Value whose bytes should be swapped.
 * @return n with its eight bytes in reverse order.
 */
static inline __u64 __u64_byteorder_swap(__u64 n)
{
	__u64 swapped = 0;
	int i;

	/* Peel bytes off n from least significant upwards; each byte
	 * pushed into swapped ends up mirrored around the middle. */
	for (i = 0; i < 8; i++) {
		swapped = (swapped << 8) | ((n >> (i * 8)) & 0xff);
	}

	return swapped;
}
/** Reverse the byte order of a 32-bit value.
 *
 * @param n Value whose bytes should be swapped.
 * @return n with its four bytes in reverse order.
 */
static inline __u32 __u32_byteorder_swap(__u32 n)
{
	/* Shift first, mask second -- equivalent to the usual
	 * mask-then-shift formulation. */
	return ((n >> 24) & 0xff) |
	       ((n >> 8) & 0xff00) |
	       ((n << 8) & 0xff0000) |
	       ((n << 24) & 0xff000000);
}
#endif |
/SPARTAN/trunk/src/debug/symtab.c |
---|
39,10 → 39,10 |
count_t i; |
for (i=1;symbol_table[i].address_le;++i) { |
if (addr < u64_le2host(symbol_table[i].address_le)) |
if (addr < __u64_le2host(symbol_table[i].address_le)) |
break; |
} |
if (addr >= u64_le2host(symbol_table[i-1].address_le)) |
if (addr >= __u64_le2host(symbol_table[i-1].address_le)) |
return symbol_table[i-1].symbol_name; |
return NULL; |
} |
/SPARTAN/trunk/arch/ppc/include/byteorder.h |
---|
30,7 → 30,14 |
#define __ppc_BYTEORDER_H__ |
#include <arch/types.h> |
#include <byteorder.h> |
/** Convert a little-endian 64-bit value to host byte order.
 *
 * PPC is big-endian, so the conversion is an unconditional byte swap
 * performed by the shared helper from <byteorder.h>.
 *
 * @param n Little-endian 64-bit value.
 * @return Value converted to host endianness.
 */
static inline __u64 __u64_le2host(__u64 n)
{
	__u64 host = __u64_byteorder_swap(n);

	return host;
}
/** Convert little-endian __native to host __native |
* |
* Convert little-endian __native parameter to host endianess. |
40,19 → 47,8 |
* @return Result in host endianess. |
* |
*/ |
static inline __u64 u64_le2host(__u64 n) |
static inline __native __native_le2host(__native n) |
{ |
return ((n & 0xff) << 56) | |
((n & 0xff00) << 40) | |
((n & 0xff0000) << 24) | |
((n & 0xff000000LL) << 8) | |
((n & 0xff00000000LL) >>8) | |
((n & 0xff0000000000LL) >> 24) | |
((n & 0xff000000000000LL) >> 40) | |
((n & 0xff00000000000000LL) >> 56); |
} |
static inline __native native_le2host(__native n) |
{ |
__address v; |
__asm__ volatile ("lwbrx %0, %1, %2\n" : "=r" (v) : "i" (0) , "r" (&n)); |
/SPARTAN/trunk/arch/ia64/include/byteorder.h |
---|
30,7 → 30,7 |
#define __ia64_BYTEORDER_H__ |
/* IA-64 is little-endian */ |
#define native_le2host(n) (n) |
#define u64_le2host(n) (n) |
#define __native_le2host(n) (n) |
#define __u64_le2host(n) (n) |
#endif |
/SPARTAN/trunk/arch/mips/include/byteorder.h |
---|
29,29 → 29,23 |
#ifndef __mips_BYTEORDER_H__ |
#define __mips_BYTEORDER_H__ |
#include <arch/types.h> |
#include <byteorder.h> |
#ifdef BIG_ENDIAN |
/** Convert a little-endian 64-bit value to host byte order.
 *
 * Compiled only in the BIG_ENDIAN configuration of this header, so the
 * conversion is an unconditional byte swap, delegated to the shared
 * helper from <byteorder.h> rather than a duplicated open-coded swap.
 *
 * @param n Little-endian 64-bit value.
 * @return Value converted to host endianness.
 */
static inline __u64 __u64_le2host(__u64 n)
{
	return __u64_byteorder_swap(n);
}
/** Convert a little-endian native-sized value to host byte order.
 *
 * Compiled only in the BIG_ENDIAN configuration of this header, so the
 * conversion is an unconditional byte swap. Fixes the misspelled call
 * __u32_byteroder_swap -> __u32_byteorder_swap, which would not compile.
 *
 * NOTE(review): this assumes __native is 32-bit on this configuration --
 * TODO confirm for any 64-bit MIPS build.
 *
 * @param n Little-endian native-sized value.
 * @return Value converted to host endianness.
 */
static inline __native __native_le2host(__native n)
{
	return __u32_byteorder_swap(n);
}
#else |
# define native_le2host(n) (n) |
# define u64_le2host(n) (n) |
# define __native_le2host(n) (n) |
# define __u64_le2host(n) (n) |
#endif |
#endif |
/SPARTAN/trunk/arch/amd64/include/byteorder.h |
---|
30,7 → 30,7 |
#define __amd64_BYTEORDER_H__ |
/* AMD64 is little-endian */ |
#define native_le2host(n) (n) |
#define u64_le2host(n) (n) |
#define __native_le2host(n) (n) |
#define __u64_le2host(n) (n) |
#endif |
/SPARTAN/trunk/arch/ia32/include/byteorder.h |
---|
30,7 → 30,7 |
#define __ia32_BYTEORDER_H__ |
/* IA-32 is little-endian */ |
#define native_le2host(n) (n) |
#define u64_le2host(n) (n) |
#define __native_le2host(n) (n) |
#define __u64_le2host(n) (n) |
#endif |