Subversion Repositories: HelenOS

Compare Revisions: Rev 2744 → Rev 2745

/trunk/kernel/generic/include/lib/elf.h
336,7 → 336,7
typedef struct elf64_symbol elf_symbol_t;
#endif
 
extern char *elf_error(int rc);
extern char *elf_error(unsigned int rc);
 
#endif
 
/trunk/kernel/generic/include/mm/slab.h
53,7 → 53,7
#define SLAB_INSIDE_SIZE (PAGE_SIZE >> 3)
 
/** Maximum wasted space we allow for cache */
#define SLAB_MAX_BADNESS(cache) ((PAGE_SIZE << (cache)->order) >> 2)
#define SLAB_MAX_BADNESS(cache) (((unsigned int) PAGE_SIZE << (cache)->order) >> 2)
 
/* slab_reclaim constants */
 
99,8 → 99,8
int flags;
 
/* Computed values */
uint8_t order; /**< Order of frames to be allocated */
int objects; /**< Number of objects that fit in */
uint8_t order; /**< Order of frames to be allocated */
unsigned int objects; /**< Number of objects that fit in */
 
/* Statistics */
atomic_t allocated_slabs;
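The SLAB_MAX_BADNESS change above casts PAGE_SIZE to unsigned int before the shift, presumably so that the whole expression stays unsigned and compares cleanly against other unsigned sizes. A minimal sketch of the idea, with an assumed PAGE_SIZE value and the cache argument simplified to a bare order:

#include <stdio.h>

#define PAGE_SIZE 4096  /* assumed value for the sketch */

/* Simplified stand-in for SLAB_MAX_BADNESS: the cast keeps the whole
 * expression unsigned, so comparisons against unsigned sizes are clean. */
#define MAX_BADNESS(order) (((unsigned int) PAGE_SIZE << (order)) >> 2)

int main(void)
{
    unsigned int order = 2;
    unsigned int wasted = 1000;

    if (wasted > MAX_BADNESS(order))
        printf("too much wasted space\n");
    else
        printf("wasted %u <= limit %u\n", wasted, MAX_BADNESS(order));

    return 0;
}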
/trunk/kernel/generic/include/mm/as.h
299,7 → 299,7
extern mem_backend_t elf_backend;
extern mem_backend_t phys_backend;
 
extern int elf_load(elf_header_t *header, as_t *as);
extern unsigned int elf_load(elf_header_t *header, as_t *as);
 
/* Address space area related syscalls. */
extern unative_t sys_as_area_create(uintptr_t address, size_t size, int flags);
/trunk/kernel/generic/src/synch/futex.c
324,7 → 324,7
for (cur = TASK->futexes.leaf_head.next;
cur != &TASK->futexes.leaf_head; cur = cur->next) {
btree_node_t *node;
int i;
unsigned int i;
node = list_get_instance(cur, btree_node_t, leaf_link);
for (i = 0; i < node->keys; i++) {
/trunk/kernel/generic/src/main/main.c
86,7 → 86,7
 
/** Initial user-space tasks */
init_t init = {
0
.cnt = 0
};
 
/** Boot allocations. */
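The init change above replaces a positional zero with a designated initializer. A minimal sketch of the difference, using a made-up struct rather than the kernel's init_t:

#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-in for init_t, not the kernel's definition. */
typedef struct {
    size_t cnt;     /* number of initial tasks */
    void *tasks;
} init_sketch_t;

/* The designated form names the member it sets, so it stays correct even
 * if members are later added or reordered ahead of cnt. */
static init_sketch_t init = {
    .cnt = 0
};

int main(void)
{
    printf("%zu\n", init.cnt);
    return 0;
}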
/trunk/kernel/generic/src/debug/symtab.c
70,7 → 70,7
*/
static char * symtab_search_one(const char *name, int *startpos)
{
int namelen = strlen(name);
unsigned int namelen = strlen(name);
char *curname;
int i,j;
int colonoffset = -1;
/trunk/kernel/generic/src/interrupt/interrupt.c
103,6 → 103,7
/** kconsole cmd - print all exceptions */
static int exc_print_cmd(cmd_arg_t *argv)
{
#if (IVT_ITEMS > 0)
unsigned int i;
char *symbol;
 
138,6 → 139,7
}
spinlock_unlock(&exctbl_lock);
#endif
return 1;
}
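The new #if (IVT_ITEMS > 0) / #endif guard above compiles the listing loop out entirely on configurations whose interrupt vector table is empty; with an unsigned counter, a loop bound of 0 would make the comparison trivially false and typically draws a compiler warning. A minimal sketch of the pattern, with an assumed IVT_ITEMS value:

#include <stdio.h>

#define IVT_ITEMS 0  /* assumed: some architectures expose no IVT entries */

static int exc_print_sketch(void)
{
#if (IVT_ITEMS > 0)
    unsigned int i;

    for (i = 0; i < IVT_ITEMS; i++)
        printf("exception %u\n", i);
#endif
    return 1;
}

int main(void)
{
    return exc_print_sketch() == 1 ? 0 : 1;
}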
/trunk/kernel/generic/src/time/clock.c
137,7 → 137,7
timeout_handler_t f;
void *arg;
count_t missed_clock_ticks = CPU->missed_clock_ticks;
int i;
unsigned int i;
 
/*
* To avoid lock ordering problems,
/trunk/kernel/generic/src/printf/printf_core.c
93,7 → 93,7
static int printf_putnchars(const char * buf, size_t count,
struct printf_spec *ps)
{
return ps->write((void *)buf, count, ps->data);
return ps->write((void *) buf, count, ps->data);
}
 
/** Print a string without adding a newline.
177,8 → 177,8
*
* @return Number of characters printed, negative value on failure.
*/
static int print_string(char *s, int width, int precision, uint64_t flags,
struct printf_spec *ps)
static int print_string(char *s, int width, unsigned int precision,
uint64_t flags, struct printf_spec *ps)
{
int counter = 0;
size_t size;
/trunk/kernel/generic/src/proc/task.c
244,7 → 244,7
{
as_t *as;
as_area_t *a;
int rc;
unsigned int rc;
thread_t *t;
task_t *task;
uspace_arg_t *kernel_uarg;
/trunk/kernel/generic/src/lib/elf.c
69,7 → 69,7
* @param as Created and properly mapped address space
* @return EE_OK on success
*/
int elf_load(elf_header_t *header, as_t * as)
unsigned int elf_load(elf_header_t *header, as_t * as)
{
int i, rc;
 
131,7 → 131,7
*
* @return NULL terminated description of error.
*/
char *elf_error(int rc)
char *elf_error(unsigned int rc)
{
ASSERT(rc < sizeof(error_codes) / sizeof(char *));
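With elf_error now taking an unsigned code, the ASSERT above compares two unsigned quantities (sizeof yields a size_t), avoiding signed/unsigned comparison warnings, and a negative value passed by mistake converts to a large unsigned one that the bound check catches. A minimal sketch of the same lookup, with invented error strings:

#include <assert.h>
#include <stdio.h>

/* Invented strings; the kernel's error_codes table differs. */
static const char *error_codes[] = {
    "no error",
    "invalid image",
    "incompatible image",
};

static const char *elf_error_sketch(unsigned int rc)
{
    /* Both sides of the comparison are unsigned (sizeof yields size_t). */
    assert(rc < sizeof(error_codes) / sizeof(error_codes[0]));
    return error_codes[rc];
}

int main(void)
{
    puts(elf_error_sketch(1));
    return 0;
}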
 
/trunk/kernel/generic/src/lib/memstr.c
59,7 → 59,7
*/
void *_memcpy(void * dst, const void *src, size_t cnt)
{
int i, j;
unsigned int i, j;
if (ALIGN_UP((uintptr_t) src, sizeof(unative_t)) != (uintptr_t) src ||
ALIGN_UP((uintptr_t) dst, sizeof(unative_t)) != (uintptr_t) dst) {
89,7 → 89,7
*/
void _memsetb(uintptr_t dst, size_t cnt, uint8_t x)
{
int i;
unsigned int i;
uint8_t *p = (uint8_t *) dst;
for (i = 0; i < cnt; i++)
108,7 → 108,7
*/
void _memsetw(uintptr_t dst, size_t cnt, uint16_t x)
{
int i;
unsigned int i;
uint16_t *p = (uint16_t *) dst;
for (i = 0; i < cnt; i++)
/trunk/kernel/generic/src/lib/sort.c
96,14 → 96,17
void _qsort(void * data, count_t n, size_t e_size, int (* cmp) (void * a, void * b), void *tmp, void *pivot)
{
if (n > 4) {
int i = 0, j = n - 1;
unsigned int i = 0, j = n - 1;
 
memcpy(pivot, data, e_size);
 
while (1) {
while ((cmp(data + i * e_size, pivot) < 0) && i < n) i++;
while ((cmp(data + j * e_size, pivot) >=0) && j > 0) j--;
if (i<j) {
while ((cmp(data + i * e_size, pivot) < 0) && (i < n))
i++;
while ((cmp(data + j * e_size, pivot) >= 0) && (j > 0))
j--;
if (i < j) {
memcpy(tmp, data + i * e_size, e_size);
memcpy(data + i * e_size, data + j * e_size, e_size);
memcpy(data + j * e_size, tmp, e_size);
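With i and j now unsigned in the partition loop above, the existing j > 0 guard is what keeps the scan from stepping below zero: decrementing an unsigned zero wraps to UINT_MAX rather than going negative. A minimal sketch of that hazard in isolation:

#include <limits.h>
#include <stdio.h>

int main(void)
{
    unsigned int j = 0;

    /* Without the guard, j-- would wrap around instead of becoming -1. */
    if (j > 0)
        j--;
    else
        printf("guard hit; j - 1 would wrap to %u (UINT_MAX = %u)\n",
            j - 1, UINT_MAX);

    return 0;
}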
/trunk/kernel/generic/src/lib/func.c
139,9 → 139,9
*/
int strncmp(const char *src, const char *dst, size_t len)
{
int i;
unsigned int i;
for (i = 0; *src && *dst && i < len; src++, dst++, i++) {
for (i = 0; (*src) && (*dst) && (i < len); src++, dst++, i++) {
if (*src < *dst)
return -1;
if (*src > *dst)
168,7 → 168,7
*/
void strncpy(char *dest, const char *src, size_t len)
{
int i;
unsigned int i;
for (i = 0; i < len; i++) {
if (!(dest[i] = src[i]))
return;
/trunk/kernel/generic/src/mm/slab.c
172,7 → 172,7
void *data;
slab_t *slab;
size_t fsize;
int i;
unsigned int i;
unsigned int zone = 0;
data = frame_alloc_generic(cache->order, FRAME_KA | flags, &zone);
191,8 → 191,8
}
/* Fill in slab structures */
for (i=0; i < (1 << cache->order); i++)
frame_set_parent(ADDR2PFN(KA2PA(data))+i, slab, zone);
for (i = 0; i < ((unsigned int) 1 << cache->order); i++)
frame_set_parent(ADDR2PFN(KA2PA(data)) + i, slab, zone);
 
slab->start = data;
slab->available = cache->objects;
199,7 → 199,7
slab->nextavail = 0;
slab->cache = cache;
 
for (i=0; i<cache->objects;i++)
for (i = 0; i < cache->objects; i++)
*((int *) (slab->start + i*cache->size)) = i+1;
 
atomic_inc(&cache->allocated_slabs);
371,10 → 371,10
static count_t magazine_destroy(slab_cache_t *cache,
slab_magazine_t *mag)
{
int i;
unsigned int i;
count_t frames = 0;
 
for (i=0;i < mag->busy; i++) {
for (i = 0; i < mag->busy; i++) {
frames += slab_obj_destroy(cache, mag->objs[i], NULL);
atomic_dec(&cache->cached_objs);
}
527,7 → 527,7
/* Slab cache functions */
 
/** Return number of objects that fit in certain cache size */
static int comp_objects(slab_cache_t *cache)
static unsigned int comp_objects(slab_cache_t *cache)
{
if (cache->flags & SLAB_CACHE_SLINSIDE)
return ((PAGE_SIZE << cache->order) - sizeof(slab_t)) / cache->size;
536,16 → 536,16
}
 
/** Return wasted space in slab */
static int badness(slab_cache_t *cache)
static unsigned int badness(slab_cache_t *cache)
{
int objects;
int ssize;
unsigned int objects;
unsigned int ssize;
 
objects = comp_objects(cache);
ssize = PAGE_SIZE << cache->order;
if (cache->flags & SLAB_CACHE_SLINSIDE)
ssize -= sizeof(slab_t);
return ssize - objects*cache->size;
return ssize - objects * cache->size;
}
 
/**
553,16 → 553,15
*/
static void make_magcache(slab_cache_t *cache)
{
int i;
unsigned int i;
ASSERT(_slab_initialized >= 2);
 
cache->mag_cache = malloc(sizeof(slab_mag_cache_t)*config.cpu_count,0);
for (i=0; i < config.cpu_count; i++) {
for (i = 0; i < config.cpu_count; i++) {
memsetb((uintptr_t)&cache->mag_cache[i],
sizeof(cache->mag_cache[i]), 0);
spinlock_initialize(&cache->mag_cache[i].lock,
"slab_maglock_cpu");
spinlock_initialize(&cache->mag_cache[i].lock, "slab_maglock_cpu");
}
}
 
654,7 → 653,7
*/
static count_t _slab_reclaim(slab_cache_t *cache, int flags)
{
int i;
unsigned int i;
slab_magazine_t *mag;
count_t frames = 0;
int magcount;
675,7 → 674,7
if (flags & SLAB_RECLAIM_ALL) {
/* Free cpu-bound magazines */
/* Destroy CPU magazines */
for (i=0; i<config.cpu_count; i++) {
for (i = 0; i < config.cpu_count; i++) {
spinlock_lock(&cache->mag_cache[i].lock);
 
mag = cache->mag_cache[i].current;
/trunk/kernel/generic/src/mm/tlb.c
81,7 → 81,7
void tlb_shootdown_start(tlb_invalidate_type_t type, asid_t asid,
uintptr_t page, count_t count)
{
int i;
unsigned int i;
 
CPU->tlb_active = 0;
spinlock_lock(&tlblock);
144,7 → 144,7
asid_t asid;
uintptr_t page;
count_t count;
int i;
unsigned int i;
ASSERT(CPU);
/trunk/kernel/generic/src/mm/backend_anon.c
98,7 → 98,7
ALIGN_DOWN(addr, PAGE_SIZE) - area->base, &leaf);
if (!frame) {
bool allocate = true;
int i;
unsigned int i;
/*
* Zero can be returned as a valid frame address.
193,13 → 193,13
for (cur = area->used_space.leaf_head.next;
cur != &area->used_space.leaf_head; cur = cur->next) {
btree_node_t *node;
int i;
unsigned int i;
node = list_get_instance(cur, btree_node_t, leaf_link);
for (i = 0; i < node->keys; i++) {
uintptr_t base = node->key[i];
count_t count = (count_t) node->value[i];
int j;
unsigned int j;
for (j = 0; j < count; j++) {
pte_t *pte;
/trunk/kernel/generic/src/mm/as.c
431,7 → 431,7
uintptr_t b = node->key[node->keys - 1];
count_t c =
(count_t) node->value[node->keys - 1];
int i = 0;
unsigned int i = 0;
if (overlaps(b, c * PAGE_SIZE, area->base,
pages * PAGE_SIZE)) {
561,7 → 561,7
for (cur = area->used_space.leaf_head.next;
cur != &area->used_space.leaf_head; cur = cur->next) {
btree_node_t *node;
int i;
unsigned int i;
node = list_get_instance(cur, btree_node_t, leaf_link);
for (i = 0; i < node->keys; i++) {
1097,7 → 1097,7
{
as_area_t *a;
btree_node_t *leaf, *lnode;
int i;
unsigned int i;
a = (as_area_t *) btree_search(&as->as_area_btree, va, &leaf);
if (a) {
1155,7 → 1155,7
{
as_area_t *a;
btree_node_t *leaf, *node;
int i;
unsigned int i;
/*
* We don't want any area to have conflicts with NULL page.
1264,7 → 1264,7
{
btree_node_t *leaf, *node;
count_t pages;
int i;
unsigned int i;
 
ASSERT(page == ALIGN_DOWN(page, PAGE_SIZE));
ASSERT(count);
1546,7 → 1546,7
{
btree_node_t *leaf, *node;
count_t pages;
int i;
unsigned int i;
 
ASSERT(page == ALIGN_DOWN(page, PAGE_SIZE));
ASSERT(count);
1734,7 → 1734,7
for (cur = sh_info->pagemap.leaf_head.next;
cur != &sh_info->pagemap.leaf_head; cur = cur->next) {
btree_node_t *node;
int i;
unsigned int i;
node = list_get_instance(cur, btree_node_t, leaf_link);
for (i = 0; i < node->keys; i++)
1795,7 → 1795,7
node = list_get_instance(cur, btree_node_t, leaf_link);
int i;
unsigned int i;
for (i = 0; i < node->keys; i++) {
as_area_t *area = node->value[i];
/trunk/kernel/generic/src/mm/frame.c
120,7 → 120,7
 
static inline int frame_index_valid(zone_t *zone, index_t index)
{
return (index >= 0) && (index < zone->count);
return (index < zone->count);
}
 
/** Compute pfn_t from frame_t pointer & zone pointer */
210,7 → 210,7
spinlock_lock(&zones.lock);
 
if (hint >= zones.count || hint < 0)
if (hint >= zones.count)
hint = 0;
i = hint;
719,7 → 719,7
ipl = interrupts_disable();
spinlock_lock(&zones.lock);
 
if (z1 < 0 || z1 >= zones.count || z2 < 0 || z2 >= zones.count)
if ((z1 >= zones.count) || (z2 >= zones.count))
goto errout;
/* We can join only 2 zones with none existing in between */
if (z2-z1 != 1)
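The frame.c changes above drop checks such as index >= 0, hint < 0 and z1 < 0: the values involved are unsigned, so those comparisons are always true (or always false) and compilers typically flag them as such. The upper-bound test alone is sufficient, as in this minimal sketch with an assumed index_t definition:

#include <stdbool.h>
#include <stdio.h>

typedef unsigned long index_t;  /* assumed; the kernel's typedef is per-arch */

static bool frame_index_valid_sketch(index_t index, index_t count)
{
    /* "index >= 0" would be tautological for an unsigned type. */
    return index < count;
}

int main(void)
{
    printf("%d %d\n",
        frame_index_valid_sketch(3, 8),
        frame_index_valid_sketch(9, 8));
    return 0;
}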
/trunk/kernel/generic/src/mm/backend_elf.c
103,7 → 103,7
frame = (uintptr_t) btree_search(&area->sh_info->pagemap,
ALIGN_DOWN(addr, PAGE_SIZE) - area->base, &leaf);
if (!frame) {
int i;
unsigned int i;
 
/*
* Workaround for valid NULL address.
290,7 → 290,7
mutex_lock(&area->sh_info->lock);
for (cur = &node->leaf_link; cur != &area->used_space.leaf_head;
cur = cur->next) {
int i;
unsigned int i;
node = list_get_instance(cur, btree_node_t, leaf_link);
297,7 → 297,7
for (i = 0; i < node->keys; i++) {
uintptr_t base = node->key[i];
count_t count = (count_t) node->value[i];
int j;
unsigned int j;
/*
* Skip read-only areas of used space that are backed
/trunk/kernel/generic/src/ipc/sysipc.c
164,7 → 164,7
{
int phoneid;
 
if (IPC_GET_RETVAL(answer->data) == EHANGUP) {
if ((native_t) IPC_GET_RETVAL(answer->data) == EHANGUP) {
/* In case of forward, hangup the forwarded phone,
* not the originator
*/
354,7 → 354,7
*/
static void process_answer(call_t *call)
{
if (IPC_GET_RETVAL(call->data) == EHANGUP &&
if (((native_t) IPC_GET_RETVAL(call->data) == EHANGUP) &&
(call->flags & IPC_CALL_FORWARDED))
IPC_SET_RETVAL(call->data, EFORWARD);
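The sysipc.c changes above cast the return value back to the signed native type before comparing it with EHANGUP: the IPC payload is stored in unsigned machine words while error codes are negative, so the comparison only makes sense after converting one side. A minimal sketch with assumed type definitions and a placeholder EHANGUP value:

#include <stdint.h>
#include <stdio.h>

typedef uintptr_t unative_t;  /* unsigned machine word, assumed for the sketch */
typedef intptr_t native_t;    /* signed counterpart */

#define EHANGUP (-12)         /* placeholder value for the sketch */

int main(void)
{
    unative_t retval = (unative_t) EHANGUP;  /* answer word as stored */

    /* Casting back to the signed type lets the negative code compare equal. */
    if ((native_t) retval == EHANGUP)
        printf("hangup detected\n");

    return 0;
}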
 
/trunk/kernel/generic/src/ipc/irq.c
65,7 → 65,7
*/
static void code_execute(call_t *call, irq_code_t *code)
{
int i;
unsigned int i;
unative_t dstval = 0;
if (!code)