| Line | Rev 823 | Line | Rev 827 |
|---|---|---|---|
| Line 53... | | Line 53... | |
| 53 | * | 53 | * |
| 54 | * Perform actions that need to be | 54 | * Perform actions that need to be |
| 55 | * taken before the newly selected | 55 | * taken before the newly selected |
| 56 | * tread is passed control. | 56 | * tread is passed control. |
| 57 | * | 57 | * |
| - | | 58 | * THREAD->lock is locked on entry |
| - | | 59 | * |
| 58 | */ | 60 | */ |
| 59 | void before_thread_runs(void) | 61 | void before_thread_runs(void) |
| 60 | { | 62 | { |
| 61 | before_thread_runs_arch(); | 63 | before_thread_runs_arch(); |
| 62 | #ifdef CONFIG_FPU_LAZY | 64 | #ifdef CONFIG_FPU_LAZY |
| Line 67... | | Line 69... | |
| 67 | #else | 69 | #else |
| 68 | fpu_enable(); | 70 | fpu_enable(); |
| 69 | if (THREAD->fpu_context_exists) | 71 | if (THREAD->fpu_context_exists) |
| 70 | fpu_context_restore(&(THREAD->saved_fpu_context)); | 72 | fpu_context_restore(&(THREAD->saved_fpu_context)); |
| 71 | else { | 73 | else { |
| 72 | fpu_init(); | 74 | fpu_init(&(THREAD->saved_fpu_context)); |
| 73 | THREAD->fpu_context_exists=1; | 75 | THREAD->fpu_context_exists=1; |
| 74 | } | 76 | } |
| 75 | #endif | 77 | #endif |
| 76 | } | 78 | } |
| 77 | | 79 | |
| 78 | #ifdef CONFIG_FPU_LAZY | 80 | #ifdef CONFIG_FPU_LAZY |
| 79 | void scheduler_fpu_lazy_request(void) | 81 | void scheduler_fpu_lazy_request(void) |
| 80 | { | 82 | { |
| 81 | fpu_enable(); | 83 | fpu_enable(); |
| - | | 84 | spinlock_lock(&CPU->lock); |
| - | | 85 | |
| - | | 86 | /* Save old context */ |
| 82 | if (CPU->fpu_owner != NULL) { | 87 | if (CPU->fpu_owner != NULL) { |
| - | | 88 | spinlock_lock(&CPU->fpu_owner->lock); |
| 83 | fpu_context_save(&CPU->fpu_owner->saved_fpu_context); | 89 | fpu_context_save(&CPU->fpu_owner->saved_fpu_context); |
| 84 | /* don't prevent migration */ | 90 | /* don't prevent migration */ |
| 85 | CPU->fpu_owner->fpu_context_engaged=0; | 91 | CPU->fpu_owner->fpu_context_engaged=0; |
| - | | 92 | spinlock_unlock(&CPU->fpu_owner->lock); |
| 86 | } | 93 | } |
| - | | 94 | |
| - | | 95 | spinlock_lock(&THREAD->lock); |
| 87 | if (THREAD->fpu_context_exists) | 96 | if (THREAD->fpu_context_exists) |
| 88 | fpu_context_restore(&THREAD->saved_fpu_context); | 97 | fpu_context_restore(&THREAD->saved_fpu_context); |
| 89 | else { | 98 | else { |
| 90 | fpu_init(); | 99 | fpu_init(&(THREAD->saved_fpu_context)); |
| 91 | THREAD->fpu_context_exists=1; | 100 | THREAD->fpu_context_exists=1; |
| 92 | } | 101 | } |
| 93 | CPU->fpu_owner=THREAD; | 102 | CPU->fpu_owner=THREAD; |
| 94 | THREAD->fpu_context_engaged = 1; | 103 | THREAD->fpu_context_engaged = 1; |
| - | | 104 | |
| - | | 105 | spinlock_unlock(&THREAD->lock); |
| - | | 106 | spinlock_unlock(&CPU->lock); |
| 95 | } | 107 | } |
| 96 | #endif | 108 | #endif |
| 97 | | 109 | |
| 98 | /** Initialize scheduler | 110 | /** Initialize scheduler |
| 99 | * | 111 | * |