ddk: tiny libc and kernel imports library

git-svn-id: svn://kolibrios.org@1408 a494cfbc-eb01-0410-851d-a64ba20cac60
Author: Sergey Semyonov (Serge)
Date:   2010-02-12 20:11:35 +00:00
Parent: 7c0a5de1e7
Commit: 1402c59305
115 changed files with 13459 additions and 655 deletions


@@ -0,0 +1,20 @@
#ifndef _ASM_GENERIC_BITOPS_EXT2_NON_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_EXT2_NON_ATOMIC_H_

#include <asm-generic/bitops/le.h>

#define ext2_set_bit(nr,addr) \
        generic___test_and_set_le_bit((nr),(unsigned long *)(addr))
#define ext2_clear_bit(nr,addr) \
        generic___test_and_clear_le_bit((nr),(unsigned long *)(addr))
#define ext2_test_bit(nr,addr) \
        generic_test_le_bit((nr),(unsigned long *)(addr))

#define ext2_find_first_zero_bit(addr, size) \
        generic_find_first_zero_le_bit((unsigned long *)(addr), (size))
#define ext2_find_next_zero_bit(addr, size, off) \
        generic_find_next_zero_le_bit((unsigned long *)(addr), (size), (off))
#define ext2_find_next_bit(addr, size, off) \
        generic_find_next_le_bit((unsigned long *)(addr), (size), (off))

#endif /* _ASM_GENERIC_BITOPS_EXT2_NON_ATOMIC_H_ */
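
For reference, the ext2_* wrappers keep ext2's on-disk bitmaps in little-endian bit order, so bit nr always lands in byte nr/8, bit nr%8, whatever the host endianness. A minimal user-space sketch of that addressing (demo_set_bit is an illustrative name, not something these headers define):

#include <stdio.h>

/* Illustration only: bit nr of a little-endian bitmap lives in byte nr/8,
 * bit nr%8, which is what the ext2_* wrappers guarantee on any host. */
static int demo_set_bit(int nr, unsigned char *bitmap)
{
        unsigned char mask = 1u << (nr & 7);
        int old = (bitmap[nr >> 3] & mask) != 0;

        bitmap[nr >> 3] |= mask;        /* non-atomic, like ext2_set_bit() */
        return old;                     /* old value, like test-and-set    */
}

int main(void)
{
        unsigned char bitmap[16] = { 0 };

        demo_set_bit(10, bitmap);                       /* byte 1, bit 2 */
        printf("byte 1 = 0x%02x\n", bitmap[1]);         /* prints 0x04   */
        return 0;
}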


@@ -0,0 +1,36 @@
#ifndef _ASM_GENERIC_BITOPS_FLS64_H_
#define _ASM_GENERIC_BITOPS_FLS64_H_

#include <asm/types.h>

/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
#if BITS_PER_LONG == 32
static __always_inline int fls64(__u64 x)
{
        __u32 h = x >> 32;

        if (h)
                return fls(h) + 32;
        return fls(x);
}
#elif BITS_PER_LONG == 64
static __always_inline int fls64(__u64 x)
{
        if (x == 0)
                return 0;
        return __fls(x) + 1;
}
#else
#error BITS_PER_LONG not 32 or 64
#endif

#endif /* _ASM_GENERIC_BITOPS_FLS64_H_ */
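
To make the documented return convention concrete, here is a hedged user-space reference with the same contract (ref_fls64 is an illustrative name; it uses plain shifts rather than the kernel's fls()/__fls() helpers):

#include <stdio.h>
#include <stdint.h>

/* Reference for the contract above: 0 for 0, otherwise the 1-based
 * position of the most significant set bit (64 at most). */
static int ref_fls64(uint64_t x)
{
        int pos = 0;

        while (x) {
                x >>= 1;
                pos++;
        }
        return pos;
}

int main(void)
{
        printf("%d %d %d\n", ref_fls64(0), ref_fls64(1),
               ref_fls64(0x8000000000000000ull));      /* prints: 0 1 64 */
        return 0;
}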


@@ -0,0 +1,11 @@
#ifndef _ASM_GENERIC_BITOPS_HWEIGHT_H_
#define _ASM_GENERIC_BITOPS_HWEIGHT_H_

#include <asm/types.h>

extern unsigned int hweight32(unsigned int w);
extern unsigned int hweight16(unsigned int w);
extern unsigned int hweight8(unsigned int w);
extern unsigned long hweight64(__u64 w);

#endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */
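
This header only declares the Hamming-weight (population count) routines; the implementations live elsewhere in the library. As a sketch of what hweight32() is expected to compute, here is the classic SWAR popcount in plain C (an illustration, not necessarily how this commit implements it):

#include <stdio.h>
#include <stdint.h>

/* Classic SWAR popcount, shown as one plausible way to satisfy the
 * hweight32() declaration; the library's own version may differ. */
static unsigned int ref_hweight32(uint32_t w)
{
        w = w - ((w >> 1) & 0x55555555);
        w = (w & 0x33333333) + ((w >> 2) & 0x33333333);
        w = (w + (w >> 4)) & 0x0f0f0f0f;
        return (w * 0x01010101) >> 24;
}

int main(void)
{
        printf("%u %u %u\n", ref_hweight32(0), ref_hweight32(0xffu),
               ref_hweight32(0xffffffffu));             /* prints: 0 8 32 */
        return 0;
}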


@@ -0,0 +1,57 @@
#ifndef _ASM_GENERIC_BITOPS_LE_H_
#define _ASM_GENERIC_BITOPS_LE_H_

#include <asm/types.h>
#include <asm/byteorder.h>

#define BITOP_WORD(nr)          ((nr) / BITS_PER_LONG)
#define BITOP_LE_SWIZZLE        ((BITS_PER_LONG-1) & ~0x7)

#if defined(__LITTLE_ENDIAN)

#define generic_test_le_bit(nr, addr) test_bit(nr, addr)
#define generic___set_le_bit(nr, addr) __set_bit(nr, addr)
#define generic___clear_le_bit(nr, addr) __clear_bit(nr, addr)

#define generic_test_and_set_le_bit(nr, addr) test_and_set_bit(nr, addr)
#define generic_test_and_clear_le_bit(nr, addr) test_and_clear_bit(nr, addr)

#define generic___test_and_set_le_bit(nr, addr) __test_and_set_bit(nr, addr)
#define generic___test_and_clear_le_bit(nr, addr) __test_and_clear_bit(nr, addr)

#define generic_find_next_zero_le_bit(addr, size, offset) find_next_zero_bit(addr, size, offset)
#define generic_find_next_le_bit(addr, size, offset) \
        find_next_bit(addr, size, offset)

#elif defined(__BIG_ENDIAN)

#define generic_test_le_bit(nr, addr) \
        test_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define generic___set_le_bit(nr, addr) \
        __set_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define generic___clear_le_bit(nr, addr) \
        __clear_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))

#define generic_test_and_set_le_bit(nr, addr) \
        test_and_set_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define generic_test_and_clear_le_bit(nr, addr) \
        test_and_clear_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define generic___test_and_set_le_bit(nr, addr) \
        __test_and_set_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define generic___test_and_clear_le_bit(nr, addr) \
        __test_and_clear_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))

extern unsigned long generic_find_next_zero_le_bit(const unsigned long *addr,
                unsigned long size, unsigned long offset);
extern unsigned long generic_find_next_le_bit(const unsigned long *addr,
                unsigned long size, unsigned long offset);

#else
#error "Please fix <asm/byteorder.h>"
#endif

#define generic_find_first_zero_le_bit(addr, size) \
        generic_find_next_zero_le_bit((addr), (size), 0)

#endif /* _ASM_GENERIC_BITOPS_LE_H_ */
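
The non-obvious piece here is BITOP_LE_SWIZZLE: on a big-endian host the bit number is XORed with it so that bit nr of a little-endian on-disk bitmap is found in the right place inside the native word. A small stand-alone illustration of the arithmetic, assuming a 32-bit big-endian host purely for the example:

#include <stdio.h>

#define BITS_PER_LONG           32
#define BITOP_LE_SWIZZLE        ((BITS_PER_LONG - 1) & ~0x7)    /* 24 here */

int main(void)
{
        /* On a 32-bit big-endian host, little-endian bit nr is reached as
         * native bit (nr ^ 24): the bit-within-byte (low 3 bits) is kept,
         * while the byte position inside the word is mirrored. */
        for (int nr = 0; nr < 32; nr += 8)
                printf("LE bit %2d -> native bit %2d\n",
                       nr, nr ^ BITOP_LE_SWIZZLE);
        return 0;
}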


@@ -0,0 +1,15 @@
#ifndef _ASM_GENERIC_BITOPS_MINIX_H_
#define _ASM_GENERIC_BITOPS_MINIX_H_

#define minix_test_and_set_bit(nr,addr) \
        __test_and_set_bit((nr),(unsigned long *)(addr))
#define minix_set_bit(nr,addr) \
        __set_bit((nr),(unsigned long *)(addr))
#define minix_test_and_clear_bit(nr,addr) \
        __test_and_clear_bit((nr),(unsigned long *)(addr))
#define minix_test_bit(nr,addr) \
        test_bit((nr),(unsigned long *)(addr))
#define minix_find_first_zero_bit(addr,size) \
        find_first_zero_bit((unsigned long *)(addr),(size))

#endif /* _ASM_GENERIC_BITOPS_MINIX_H_ */
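
Unlike the ext2 wrappers, these map straight onto the native-order bit operations; their main consumer is the find-a-free-slot-and-claim-it pattern in bitmap allocators. A self-contained user-space sketch of that pattern (demo_alloc_bit and the layout are illustrative, not taken from this commit):

#include <stdio.h>

#define MAP_BITS 64

/* Stand-in for the scan-then-claim sequence the minix_* wrappers support:
 * find the first clear bit and mark it used (non-atomically). */
static int demo_alloc_bit(unsigned long *map, int nbits)
{
        for (int nr = 0; nr < nbits; nr++) {
                unsigned long mask = 1ul << (nr % (8 * sizeof(unsigned long)));
                unsigned long *word = &map[nr / (8 * sizeof(unsigned long))];

                if (!(*word & mask)) {          /* like minix_test_bit() == 0     */
                        *word |= mask;          /* like minix_test_and_set_bit()  */
                        return nr;
                }
        }
        return -1;                              /* bitmap full */
}

int main(void)
{
        unsigned long map[MAP_BITS / (8 * sizeof(unsigned long)) + 1] = { 0x7ul };

        printf("first free bit: %d\n", demo_alloc_bit(map, MAP_BITS));  /* 3 */
        return 0;
}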


@@ -0,0 +1,31 @@
#ifndef _ASM_GENERIC_BITOPS_SCHED_H_
#define _ASM_GENERIC_BITOPS_SCHED_H_

#include <linux/compiler.h>     /* unlikely() */
#include <asm/types.h>

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 100-bit bitmap. It's guaranteed that at least
 * one of the 100 bits is set.
 */
static inline int sched_find_first_bit(const unsigned long *b)
{
#if BITS_PER_LONG == 64
        if (b[0])
                return __ffs(b[0]);
        return __ffs(b[1]) + 64;
#elif BITS_PER_LONG == 32
        if (b[0])
                return __ffs(b[0]);
        if (b[1])
                return __ffs(b[1]) + 32;
        if (b[2])
                return __ffs(b[2]) + 64;
        return __ffs(b[3]) + 96;
#else
#error BITS_PER_LONG not 32 or 64
#endif
}

#endif /* _ASM_GENERIC_BITOPS_SCHED_H_ */
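
The scan works one word at a time: the first nonzero word contains the answer, and __ffs() (find first set bit in a nonzero word) pinpoints it. A hedged user-space analogue, with __builtin_ctzl() standing in for __ffs(), also shows why the caller must guarantee at least one set bit:

#include <stdio.h>

#define WORD_BITS (8 * (int)sizeof(unsigned long))

/* Analogue of the word-by-word scan above; __builtin_ctzl() stands in for
 * the kernel's __ffs() and, like __ffs(), is undefined for a zero word,
 * hence the "at least one bit is set" guarantee. */
static int demo_find_first_bit(const unsigned long *b, int words)
{
        for (int i = 0; i < words; i++)
                if (b[i])
                        return __builtin_ctzl(b[i]) + i * WORD_BITS;
        return -1;      /* no bit set: the real helper never sees this case */
}

int main(void)
{
        unsigned long prio[4] = { 0, 0x10, 0, 0 };      /* bit 4 of word 1 set */

        printf("%d\n", demo_find_first_bit(prio, 4));   /* prints WORD_BITS + 4 */
        return 0;
}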