Rev 1571 | Rev 1579 | Go to most recent revision | Only display areas with differences | Ignore whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1571 | Rev 1576 | ||
---|---|---|---|
1 | /* |
1 | /* |
2 | * Copyright (C) 2001-2004 Jakub Jermar |
2 | * Copyright (C) 2001-2004 Jakub Jermar |
3 | * All rights reserved. |
3 | * All rights reserved. |
4 | * |
4 | * |
5 | * Redistribution and use in source and binary forms, with or without |
5 | * Redistribution and use in source and binary forms, with or without |
6 | * modification, are permitted provided that the following conditions |
6 | * modification, are permitted provided that the following conditions |
7 | * are met: |
7 | * are met: |
8 | * |
8 | * |
9 | * - Redistributions of source code must retain the above copyright |
9 | * - Redistributions of source code must retain the above copyright |
10 | * notice, this list of conditions and the following disclaimer. |
10 | * notice, this list of conditions and the following disclaimer. |
11 | * - Redistributions in binary form must reproduce the above copyright |
11 | * - Redistributions in binary form must reproduce the above copyright |
12 | * notice, this list of conditions and the following disclaimer in the |
12 | * notice, this list of conditions and the following disclaimer in the |
13 | * documentation and/or other materials provided with the distribution. |
13 | * documentation and/or other materials provided with the distribution. |
14 | * - The name of the author may not be used to endorse or promote products |
14 | * - The name of the author may not be used to endorse or promote products |
15 | * derived from this software without specific prior written permission. |
15 | * derived from this software without specific prior written permission. |
16 | * |
16 | * |
17 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
17 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
18 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
18 | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
19 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
19 | * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
20 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
20 | * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
21 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
21 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
22 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
22 | * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
23 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
24 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | */ |
27 | */ |
28 | 28 | ||
29 | #ifndef __THREAD_H__ |
29 | #ifndef __THREAD_H__ |
30 | #define __THREAD_H__ |
30 | #define __THREAD_H__ |
31 | 31 | ||
32 | #include <arch/proc/thread.h> |
32 | #include <arch/proc/thread.h> |
33 | #include <synch/spinlock.h> |
33 | #include <synch/spinlock.h> |
34 | #include <arch/context.h> |
34 | #include <arch/context.h> |
35 | #include <fpu_context.h> |
35 | #include <fpu_context.h> |
36 | #include <arch/types.h> |
36 | #include <arch/types.h> |
37 | #include <typedefs.h> |
37 | #include <typedefs.h> |
38 | #include <time/timeout.h> |
38 | #include <time/timeout.h> |
39 | #include <synch/rwlock.h> |
39 | #include <synch/rwlock.h> |
- | 40 | #include <synch/synch.h> |
|
40 | #include <config.h> |
41 | #include <config.h> |
41 | #include <adt/btree.h> |
42 | #include <adt/btree.h> |
42 | #include <adt/list.h> |
43 | #include <adt/list.h> |
43 | #include <mm/slab.h> |
44 | #include <mm/slab.h> |
44 | #include <proc/uarg.h> |
45 | #include <proc/uarg.h> |
45 | 46 | ||
/** Size of each thread's kernel stack; aliases the architecture-wide STACK_SIZE. */
#define THREAD_STACK_SIZE	STACK_SIZE
47 | 48 | ||
/** Possible life-cycle states of a thread. */
enum state {
	/** It is an error if a thread is ever found in this state. */
	Invalid,
	/** The thread is currently executing on some CPU. */
	Running,
	/** The thread is waiting for an event. */
	Sleeping,
	/** The thread is linked in a run queue, waiting to be scheduled. */
	Ready,
	/** The thread was created but has not yet been readied for the first time. */
	Entering,
	/** The thread called thread_exit() and is being torn down. */
	Exiting,
	/** The thread exited without being detached; it awaits a join. */
	Undead
};
57 | 58 | ||
/** Human-readable names for the states above, indexed by state_t. */
extern char *thread_states[];

/* Thread flags stored in thread_t.flags. */
/** Thread is wired to a particular CPU and must not migrate. */
#define X_WIRED		(1<<0)
/** NOTE(review): presumably set when a thread is migrated off its last CPU by
 *  the load balancer — confirm against the scheduler implementation. */
#define X_STOLEN	(1<<1)

/** Maximum length of a thread name, including the terminating NUL. */
#define THREAD_NAME_BUFLEN	20
64 | 65 | ||
/** Thread structure. There is one per thread. */
struct thread {
	link_t rq_link;				/**< Run queue link. */
	link_t wq_link;				/**< Wait queue link. */
	link_t th_link;				/**< Links to threads within containing task. */

	/** Lock protecting thread structure.
	 *
	 * Protects the whole thread structure except list links above.
	 * Must be acquired before T.lock for each T of type task_t.
	 *
	 */
	SPINLOCK_DECLARE(lock);

	char name[THREAD_NAME_BUFLEN];		/**< Symbolic thread name. */

	void (* thread_code)(void *);		/**< Function implementing the thread. */
	void *thread_arg;			/**< Argument passed to thread_code() function. */

	/** From here, the stored context is restored when the thread is scheduled. */
	context_t saved_context;
	/** From here, the stored timeout context is restored when sleep times out. */
	context_t sleep_timeout_context;
	/** From here, the stored interruption context is restored when sleep is interrupted. */
	context_t sleep_interruption_context;

	bool sleep_interruptible;		/**< If true, the thread can be interrupted from sleep. */
	waitq_t *sleep_queue;			/**< Wait queue in which this thread sleeps. */
	timeout_t sleep_timeout;		/**< Timeout used for timeoutable sleeping. */
	volatile int timeout_pending;		/**< Flag signalling sleep timeout in progress. */

	/** True if this thread is executing copy_from_uspace(). False otherwise. */
	bool in_copy_from_uspace;
	/** True if this thread is executing copy_to_uspace(). False otherwise. */
	bool in_copy_to_uspace;

	bool detached;				/**< If true, thread_join_timeout() cannot be used on this thread. */
	waitq_t join_wq;			/**< Waitq for thread_join_timeout(). */

	fpu_context_t *saved_fpu_context;	/**< Save area for the thread's FPU context. */
	int fpu_context_exists;			/**< Nonzero if an FPU context has been saved for this thread. */

	/*
	 * Defined only if thread doesn't run.
	 * It means that fpu context is in CPU that last time executes this thread.
	 * This disables migration.
	 */
	int fpu_context_engaged;

	rwlock_type_t rwlock_holder_type;	/**< NOTE(review): appears to record whether the thread
						     holds a rwlock as reader or writer — confirm in synch/rwlock.c. */

	void (* call_me)(void *);		/**< Function to be called in scheduler before the thread is put asleep. */
	void *call_me_with;			/**< Argument passed to call_me(). */

	state_t state;				/**< Thread's state (see enum state above). */
	int flags;				/**< Thread's flags (X_WIRED, X_STOLEN). */

	cpu_t *cpu;				/**< Thread's CPU. */
	task_t *task;				/**< Containing task. */

	__u64 ticks;				/**< Ticks before preemption. */

	int priority;				/**< Thread's priority. Implemented as index to CPU->rq */
	__u32 tid;				/**< Thread ID. */

	thread_arch_t arch;			/**< Architecture-specific data. */

	__u8 *kstack;				/**< Thread's kernel stack. */
};
134 | 135 | ||
/** Thread list lock.
 *
 * This lock protects all link_t structures chained in threads_head.
 * Must be acquired before T.lock for each T of type thread_t.
 *
 */
extern spinlock_t threads_lock;

extern btree_t threads_btree;		/**< B+tree containing all threads. */
144 | 145 | ||
/** Initialize the threading subsystem. */
extern void thread_init(void);
/** Create a new thread running func(arg) inside task; not yet readied. */
extern thread_t *thread_create(void (* func)(void *), void *arg, task_t *task, int flags, char *name);
/** Make a thread ready to run (place it in a run queue). */
extern void thread_ready(thread_t *t);
/** Terminate the calling thread; does not return. */
extern void thread_exit(void);

/* Architectures may provide thread_create_arch as a macro; otherwise declare it. */
#ifndef thread_create_arch
extern void thread_create_arch(thread_t *t);
#endif

/** Put the calling thread to sleep for sec seconds. */
extern void thread_sleep(__u32 sec);
/** Put the calling thread to sleep for usec microseconds. */
extern void thread_usleep(__u32 usec);

/** Wait for thread t to exit, with no timeout and default synch flags. */
#define thread_join(t)	thread_join_timeout((t), SYNCH_NO_TIMEOUT, SYNCH_FLAGS_NONE)

/** Wait for thread t to exit, for at most usec microseconds. */
extern int thread_join_timeout(thread_t *t, __u32 usec, int flags);
/** Detach thread t so its resources are reclaimed without a join. */
extern void thread_detach(thread_t *t);

/** Register call_me(call_me_with) to be invoked by the scheduler before the thread is put asleep. */
extern void thread_register_call_me(void (* call_me)(void *), void *call_me_with);
/** Print a list of all threads (debugging aid). */
extern void thread_print_list(void);
/** Destroy thread t and free its resources. */
extern void thread_destroy(thread_t *t);
/** Return true if t is a valid, existing thread (threads_lock must be held). */
extern bool thread_exists(thread_t *t);

/* Fpu context slab cache */
extern slab_cache_t *fpu_context_slab;

/** Thread syscall prototypes. */
__native sys_thread_create(uspace_arg_t *uspace_uarg, char *uspace_name);
__native sys_thread_exit(int uspace_status);
171 | 173 | ||
172 | #endif |
174 | #endif |
173 | 175 |