123,8 → 123,7 |
} |
} |
|
#if defined CONFIG_TIMEOUT_AVL_TREE || \ |
defined CONFIG_TIMEOUT_EXTAVL_TREE |
#if defined CONFIG_TIMEOUT_AVL_TREE |
|
/** Clock routine |
* |
141,11 → 140,98 |
count_t missed_clock_ticks = CPU->missed_clock_ticks; |
uint64_t *i = &(CPU->timeout_active_tree.base); |
uint64_t absolute_clock_ticks = *i + missed_clock_ticks; |
#if defined CONFIG_TIMEOUT_AVL_TREE |
avltree_node_t *expnode; |
|
/* |
* To avoid lock ordering problems, |
* run all expired timeouts as you visit them. |
*/ |
|
for (; *i <= absolute_clock_ticks; (*i)++) { |
/* |
	 * Basetime is increased by missed clock ticks + 1 !! |
*/ |
|
clock_update_counters(); |
spinlock_lock(&CPU->timeoutlock); |
|
|
/* |
		 * Check whether the first timeout (the one with the smallest key in the tree) has expired. |
		 * If so, perform its callback function and try the next timeout (several timeouts can share the same expiration time). |
*/ |
while ((expnode = avltree_find_min(&CPU->timeout_active_tree)) != NULL) { |
h = avltree_get_instance(expnode,timeout_t,node); |
spinlock_lock(&h->lock); |
if (expnode->key != *i) { |
spinlock_unlock(&h->lock); |
break; |
} |
|
/* |
* Delete minimal key from the tree and repair tree structure in |
* logarithmic time. |
*/ |
avltree_delete_min(&CPU->timeout_active_tree); |
|
f = h->handler; |
arg = h->arg; |
timeout_reinitialize(h); |
spinlock_unlock(&h->lock); |
spinlock_unlock(&CPU->timeoutlock); |
|
f(arg); |
|
spinlock_lock(&CPU->timeoutlock); |
} |
spinlock_unlock(&CPU->timeoutlock); |
} |
|
CPU->missed_clock_ticks = 0; |
|
/* |
* Do CPU usage accounting and find out whether to preempt THREAD. |
*/ |
if (THREAD) { |
uint64_t ticks; |
|
spinlock_lock(&CPU->lock); |
CPU->needs_relink += 1 + missed_clock_ticks; |
spinlock_unlock(&CPU->lock); |
|
spinlock_lock(&THREAD->lock); |
if ((ticks = THREAD->ticks)) { |
if (ticks >= 1 + missed_clock_ticks) |
THREAD->ticks -= 1 + missed_clock_ticks; |
else |
THREAD->ticks = 0; |
} |
spinlock_unlock(&THREAD->lock); |
|
if (!ticks && !PREEMPTION_DISABLED) { |
scheduler(); |
} |
} |
} |
|
#elif defined CONFIG_TIMEOUT_EXTAVL_TREE |
|
/** Clock routine |
* |
* Clock routine executed from clock interrupt handler |
* (assuming interrupts_disable()'d). Runs expired timeouts |
* and preemptive scheduling. |
* |
*/ |
void clock(void) |
{ |
timeout_t *h; |
timeout_handler_t f; |
void *arg; |
count_t missed_clock_ticks = CPU->missed_clock_ticks; |
uint64_t *i = &(CPU->timeout_active_tree.base); |
uint64_t absolute_clock_ticks = *i + missed_clock_ticks; |
extavltree_node_t *expnode; |
#endif |
|
/* |
* To avoid lock ordering problems, |
176,11 → 262,7 |
* Delete first node in the list and repair tree structure in |
* constant time. |
*/ |
#if defined CONFIG_TIMEOUT_AVL_TREE |
avltree_delete_min(&CPU->timeout_active_tree); |
#elif defined CONFIG_TIMEOUT_EXTAVL_TREE |
extavltree_delete_min(&CPU->timeout_active_tree); |
#endif |
|
f = h->handler; |
arg = h->arg; |
233,7 → 315,7 |
*/ |
void clock(void) |
{ |
extavltree_node_t *expnode; |
extavlreltree_node_t *expnode; |
timeout_t *h; |
timeout_handler_t f; |
void *arg; |
253,7 → 335,7 |
* next timeout (more timeouts can have same timeout). |
*/ |
while ((expnode = CPU->timeout_active_tree.head.next) != &(CPU->timeout_active_tree.head)) { |
h = list_get_instance(l, timeout_t, link); |
h = extavlreltree_get_instance(expnode,timeout_t,node); |
spinlock_lock(&h->lock); |
if (expnode->key != 0) { |
expnode->key--; |
265,7 → 347,7 |
* Delete first node in the list and repair tree structure in |
* constant time. Be careful of expnode's key, it must be 0! |
*/ |
extavltree_delete_min(&CPU->timeout_active_tree); |
extavlreltree_delete_min(&CPU->timeout_active_tree); |
|
f = h->handler; |
arg = h->arg; |