Rev 2416 | Rev 2421

Line 121 (Rev 2416) | Line 121 (Rev 2421)

  		} else
  			uptime->useconds += 1000000 / HZ;
  	}
  }
  
- #if defined CONFIG_TIMEOUT_AVL_TREE || \
- 	defined CONFIG_TIMEOUT_EXTAVL_TREE
+ #if defined CONFIG_TIMEOUT_AVL_TREE
  
  /** Clock routine
   *
   * Clock routine executed from clock interrupt handler
   * (assuming interrupts_disable()'d). Runs expired timeouts
Line 139 (Rev 2416) | Line 138 (Rev 2421)

  	timeout_handler_t f;
  	void *arg;
  	count_t missed_clock_ticks = CPU->missed_clock_ticks;
  	uint64_t *i = &(CPU->timeout_active_tree.base);
  	uint64_t absolute_clock_ticks = *i + missed_clock_ticks;
- #if defined CONFIG_TIMEOUT_AVL_TREE
  	avltree_node_t *expnode;
+ 
+ 	/*
+ 	 * To avoid lock ordering problems,
+ 	 * run all expired timeouts as you visit them.
+ 	 */
+ 
+ 	for (; *i <= absolute_clock_ticks; (*i)++) {
+ 		/*
+ 		 * Basetime is encreased by missed clock ticks + 1 !!
+ 		 */
+ 
+ 		clock_update_counters();
+ 		spinlock_lock(&CPU->timeoutlock);
+ 
+ 
+ 		/*
+ 		 * Check whether first timeout (with the smallest key in the tree) time out. If so perform
+ 		 * callback function and try next timeout (more timeouts can have same timeout).
+ 		 */
+ 		while ((expnode = avltree_find_min(&CPU->timeout_active_tree)) != NULL) {
+ 			h = avltree_get_instance(expnode,timeout_t,node);
+ 			spinlock_lock(&h->lock);
+ 			if (expnode->key != *i) {
+ 				spinlock_unlock(&h->lock);
+ 				break;
+ 			}
+ 
+ 			/*
+ 			 * Delete minimal key from the tree and repair tree structure in
+ 			 * logarithmic time.
+ 			 */
+ 			avltree_delete_min(&CPU->timeout_active_tree);
+ 
+ 			f = h->handler;
+ 			arg = h->arg;
+ 			timeout_reinitialize(h);
+ 			spinlock_unlock(&h->lock);
+ 			spinlock_unlock(&CPU->timeoutlock);
+ 
+ 			f(arg);
+ 
+ 			spinlock_lock(&CPU->timeoutlock);
+ 		}
+ 		spinlock_unlock(&CPU->timeoutlock);
+ 	}
+ 
+ 	CPU->missed_clock_ticks = 0;
+ 
+ 	/*
+ 	 * Do CPU usage accounting and find out whether to preempt THREAD.
+ 	 */
+ 	if (THREAD) {
+ 		uint64_t ticks;
+ 
+ 		spinlock_lock(&CPU->lock);
+ 		CPU->needs_relink += 1 + missed_clock_ticks;
+ 		spinlock_unlock(&CPU->lock);
+ 
+ 		spinlock_lock(&THREAD->lock);
+ 		if ((ticks = THREAD->ticks)) {
+ 			if (ticks >= 1 + missed_clock_ticks)
+ 				THREAD->ticks -= 1 + missed_clock_ticks;
+ 			else
+ 				THREAD->ticks = 0;
+ 		}
+ 		spinlock_unlock(&THREAD->lock);
+ 
+ 		if (!ticks && !PREEMPTION_DISABLED) {
+ 			scheduler();
+ 		}
+ 	}
+ }
+ 
  #elif defined CONFIG_TIMEOUT_EXTAVL_TREE
+ 
+ /** Clock routine
+  *
+  * Clock routine executed from clock interrupt handler
+  * (assuming interrupts_disable()'d). Runs expired timeouts
+  * and preemptive scheduling.
+  *
+  */
+ void clock(void)
+ {
+ 	timeout_t *h;
+ 	timeout_handler_t f;
+ 	void *arg;
+ 	count_t missed_clock_ticks = CPU->missed_clock_ticks;
+ 	uint64_t *i = &(CPU->timeout_active_tree.base);
+ 	uint64_t absolute_clock_ticks = *i + missed_clock_ticks;
  	extavltree_node_t *expnode;
- #endif
  
  	/*
  	 * To avoid lock ordering problems,
  	 * run all expired timeouts as you visit them.
  	 */
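Both configurations above drain expired timeouts the same way: take the node with the smallest key, unlink it, and invoke its handler only after the per-timeout lock and CPU->timeoutlock have been released, so a handler that re-arms a timeout cannot run into a lock-ordering deadlock. The following is a minimal user-space sketch of that drain loop under the absolute-key scheme; the sorted list, find_min()/delete_min() helpers, say() handler and run_tick() driver are illustrative stand-ins for this page, not HelenOS APIs, and the spinlocks are omitted.

#include <stdint.h>
#include <stdio.h>

typedef void (*timeout_handler_t)(void *arg);

typedef struct timeout {
	uint64_t key;                 /* absolute tick at which this entry fires */
	timeout_handler_t handler;
	void *arg;
	struct timeout *next;         /* sorted singly linked list stands in for the AVL tree */
} timeout_t;

static timeout_t *queue;          /* smallest key first */

static timeout_t *find_min(void)
{
	return queue;
}

static void delete_min(void)
{
	queue = queue->next;
}

static void say(void *arg)
{
	printf("fired: %s\n", (const char *) arg);
}

/* Fire every timeout whose key equals the current tick.  A real kernel would
 * hold a queue lock here and drop it around f(arg), as the code above does. */
static void run_tick(uint64_t tick)
{
	timeout_t *h;

	while ((h = find_min()) != NULL && h->key == tick) {
		timeout_handler_t f = h->handler;
		void *arg = h->arg;

		delete_min();             /* unlink before running the handler */
		f(arg);
	}
}

int main(void)
{
	timeout_t b = { 6, say, "beta", NULL };
	timeout_t a = { 5, say, "alpha", &b };

	queue = &a;
	for (uint64_t tick = 5; tick <= 6; tick++)
		run_tick(tick);
	return 0;
}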
Line 174 (Rev 2416) | Line 260 (Rev 2421)

  
  		/*
  		 * Delete first node in the list and repair tree structure in
  		 * constant time.
  		 */
- #if defined CONFIG_TIMEOUT_AVL_TREE
- 		avltree_delete_min(&CPU->timeout_active_tree);
- #elif defined CONFIG_TIMEOUT_EXTAVL_TREE
  		extavltree_delete_min(&CPU->timeout_active_tree);
- #endif
  
  		f = h->handler;
  		arg = h->arg;
  		timeout_reinitialize(h);
  		spinlock_unlock(&h->lock);
Line 231 (Rev 2416) | Line 313 (Rev 2421)

   * and preemptive scheduling.
   *
   */
  void clock(void)
  {
- 	extavltree_node_t *expnode;
+ 	extavlreltree_node_t *expnode;
  	timeout_t *h;
  	timeout_handler_t f;
  	void *arg;
  	count_t missed_clock_ticks = CPU->missed_clock_ticks;
  	int i;
Line 251 (Rev 2416) | Line 333 (Rev 2421)

  	/*
  	 * Check whether first timeout in list time out. If so perform callback function and try
  	 * next timeout (more timeouts can have same timeout).
  	 */
  	while ((expnode = CPU->timeout_active_tree.head.next) != &(CPU->timeout_active_tree.head)) {
- 		h = list_get_instance(l, timeout_t, link);
+ 		h = extavlreltree_get_instance(expnode,timeout_t,node);
  		spinlock_lock(&h->lock);
  		if (expnode->key != 0) {
  			expnode->key--;
  			spinlock_unlock(&h->lock);
  			break;
Line 263 (Rev 2416) | Line 345 (Rev 2421)

  
  		/*
  		 * Delete first node in the list and repair tree structure in
  		 * constant time. Be careful of expnode's key, it must be 0!
  		 */
- 		extavltree_delete_min(&CPU->timeout_active_tree);
+ 		extavlreltree_delete_min(&CPU->timeout_active_tree);
  
  		f = h->handler;
  		arg = h->arg;
  		timeout_reinitialize(h);
  		spinlock_unlock(&h->lock);
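The extavlreltree variant renamed in the last hunks keeps relative keys: only the head node's key is inspected, it is decremented once per tick while non-zero, and entries whose key has reached zero are popped from the front in constant time, which is why the comment insists the deleted node's key must be 0. A compact sketch of that delta-queue behaviour follows; the rel_timeout type, head list and tick() driver are illustrative names for this page rather than kernel code, and locking is again left out.

#include <stdint.h>
#include <stdio.h>

typedef struct rel_timeout {
	uint64_t key;                 /* ticks remaining, relative to the predecessor */
	const char *name;
	struct rel_timeout *next;
} rel_timeout_t;

static rel_timeout_t *head;       /* stands in for the tree's expiration-ordered head list */

/* One clock tick: if the head entry still has ticks left, just decrement it;
 * otherwise pop expired entries (key == 0) from the front in constant time. */
static void tick(void)
{
	rel_timeout_t *h;

	while ((h = head) != NULL) {
		if (h->key != 0) {
			h->key--;
			break;
		}
		head = h->next;
		printf("fired: %s\n", h->name);
	}
}

int main(void)
{
	rel_timeout_t second = { 0, "second", NULL };  /* expires on the same tick as "first" */
	rel_timeout_t first  = { 2, "first", &second };

	head = &first;
	for (int i = 0; i < 4; i++)
		tick();
	return 0;
}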