#ifndef _COBALT_KERNEL_SCHED_RT_H
#define _COBALT_KERNEL_SCHED_RT_H

#ifndef _COBALT_KERNEL_SCHED_H
#error "please don't include cobalt/kernel/sched-rt.h directly"
#endif

/* Global priority scale of the Xenomai core scheduling class. */
#define XNSCHED_CORE_MIN_PRIO	0
#define XNSCHED_CORE_MAX_PRIO	259
#define XNSCHED_CORE_NR_PRIO	\
	(XNSCHED_CORE_MAX_PRIO - XNSCHED_CORE_MIN_PRIO + 1)

/* Priority range available to members of the FIFO class. */
#define XNSCHED_FIFO_MIN_PRIO	1
#define XNSCHED_FIFO_MAX_PRIO	256

#if XNSCHED_CORE_NR_PRIO > XNSCHED_CLASS_WEIGHT_FACTOR ||	\
	(defined(CONFIG_XENO_OPT_SCALABLE_SCHED) &&		\
	 XNSCHED_CORE_NR_PRIO > XNSCHED_MLQ_LEVELS)
#error "XNSCHED_MLQ_LEVELS is too low"
#endif

extern struct xnsched_class xnsched_class_rt;
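/*
 * Illustrative note (not in the original header): with the values
 * above, XNSCHED_CORE_NR_PRIO evaluates to 259 - 0 + 1 = 260 levels,
 * and the SCHED_FIFO range [1..256] nests inside the core range
 * [0..259]. The #if check above rejects configurations whose class
 * weight factor or multi-level queue depth (XNSCHED_MLQ_LEVELS)
 * cannot accommodate all 260 levels.
 */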
static inline void __xnsched_rt_requeue(struct xnthread *thread)
{
	xnsched_addq(&thread->sched->rt.runnable, thread);
}
static inline void __xnsched_rt_enqueue(struct xnthread *thread)
{
	xnsched_addq_tail(&thread->sched->rt.runnable, thread);
}
static inline void __xnsched_rt_dequeue(struct xnthread *thread)
{
	xnsched_delq(&thread->sched->rt.runnable, thread);
}
static inline void __xnsched_rt_track_weakness(struct xnthread *thread)
{
	/*
	 * A thread running at priority zero in the RT class is
	 * marked XNWEAK, unless the dedicated WEAK class is compiled
	 * in (CONFIG_XENO_OPT_SCHED_WEAK), in which case weak threads
	 * are tracked by that class instead. Since the effective
	 * priority (cprio) determines the weakness state, this only
	 * applies to non-boosted threads.
	 */
	if (IS_ENABLED(CONFIG_XENO_OPT_SCHED_WEAK) || thread->cprio)
		xnthread_clear_state(thread, XNWEAK);
	else
		xnthread_set_state(thread, XNWEAK);
}
static inline bool __xnsched_rt_setparam(struct xnthread *thread,
					 const union xnsched_policy_param *p)
{
	bool ret = xnsched_set_effective_priority(thread, p->rt.prio);

	if (!xnthread_test_state(thread, XNBOOST))
		__xnsched_rt_track_weakness(thread);

	return ret;
}
static inline void __xnsched_rt_getparam(struct xnthread *thread,
					 union xnsched_policy_param *p)
{
	p->rt.prio = thread->cprio;
}
static inline void __xnsched_rt_trackprio(struct xnthread *thread,
					  const union xnsched_policy_param *p)
{
	if (p)
		thread->cprio = p->rt.prio;
	else {
		thread->cprio = thread->bprio;
		/* Leaving PI/PP boost, so non-boosted by definition. */
		__xnsched_rt_track_weakness(thread);
	}
}
static inline void __xnsched_rt_protectprio(struct xnthread *thread, int prio)
{
	/*
	 * The RT class spans the whole core priority range, so there
	 * is no need to cap the input value.
	 */
	thread->cprio = prio;
}
static inline void __xnsched_rt_forget(struct xnthread *thread)
{
}
static inline int xnsched_rt_init_thread(struct xnthread *thread)
{
	return 0;
}
#ifdef CONFIG_XENO_OPT_SCHED_CLASSES
struct xnthread *xnsched_rt_pick(struct xnsched *sched);
#else
static inline struct xnthread *xnsched_rt_pick(struct xnsched *sched)
{
	return xnsched_getq(&sched->rt.runnable);
}
#endif
void xnsched_rt_tick(struct xnsched *sched);

#endif /* !_COBALT_KERNEL_SCHED_RT_H */
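/*
 * Usage sketch (illustration only, not part of the original header):
 * the scheduler core is expected to call the picking hook whenever it
 * needs the next runnable RT thread, roughly as in:
 *
 *	struct xnthread *next = xnsched_rt_pick(sched);
 *	if (next)
 *		... switch in @next ...
 *
 * and to invoke xnsched_rt_tick(sched) from the per-CPU clock tick so
 * that round-robin threads are rotated within their priority level.
 * How these hooks are wired into struct xnsched_class is defined by
 * the scheduler core, not by this header.
 */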