Lines Matching refs:cp

103 static void	cpu_resched(cpu_t *cp, pri_t tpri);
412 disp_cpu_init(cpu_t *cp)
420 if (cp == cpu0_disp.disp_cpu)
425 cp->cpu_disp = dp;
426 dp->disp_cpu = cp;
429 DISP_LOCK_INIT(&cp->cpu_thread_lock);
444 disp_cpu_fini(cpu_t *cp)
448 disp_kp_free(cp->cpu_disp);
449 if (cp->cpu_disp != &cpu0_disp)
450 kmem_free(cp->cpu_disp, sizeof (disp_t));
510 cpu_t *cp = CPU;
512 volatile int *local_nrunnable = &cp->cpu_disp->disp_nrunnable;
514 if (!(cp->cpu_flags & CPU_OFFLINE)) {
515 if (CP_MAXRUNPRI(cp->cpu_part) >= 0)
518 for (ocp = cp->cpu_next_part; ocp != cp;
559 cpu_t *cp = CPU;
562 CPU_STATS_ADDQ(cp, sys, idlethread, 1);
563 set_idle_cpu(cp->cpu_id); /* arch-dependent hook */
572 cpu_t *cp = CPU;
575 unset_idle_cpu(cp->cpu_id); /* arch-dependent hook */
584 struct cpu *cp = CPU; /* pointer to this CPU */
594 if (cp->cpu_disp->disp_nrunnable == 0) {
612 while (cp->cpu_flags & CPU_QUIESCED)
615 if (cp->cpu_disp->disp_nrunnable != 0) {
619 if (cp->cpu_flags & CPU_OFFLINE)
621 if ((t = disp_getwork(cp)) == NULL) {
622 if (cp->cpu_chosen_level != -1) {
623 disp_t *dp = cp->cpu_disp;
631 kpq = &cp->cpu_part->cp_kp_queue;
633 cp->cpu_chosen_level = -1;
853 cpu_t *cp;
866 cp = CPU; /* now protected against migration */
867 ASSERT(CPU_ON_INTR(cp) == 0); /* not called with PIL > 10 */
868 CPU_STATS_ADDQ(cp, sys, pswitch, 1);
869 CPU_STATS_ADDQ(cp, sys, intrblk, 1);
885 cp = CPU;
887 ASSERT(CPU_ON_INTR(cp) == 0); /* not called with PIL > 10 */
890 cp->cpu_disp_flags &= ~CPU_DISP_DONTSTEAL;
896 pg_ev_thread_swtch(cp, now, t, next);
914 CPU_STATS_ADDQ(cp, sys, pswitch, 1);
915 cp->cpu_last_swtch = t->t_disp_time = ddi_get_lbolt();
947 pg_ev_thread_remain(cp, t);
1034 cpu_t *cp;
1042 cp = CPU;
1044 if (search_disp_queues(cp->cpu_disp, tp))
1046 } while ((cp = cp->cpu_next_onln) != CPU);
1073 cpu_t *cp = CPU;
1081 CPU_STATS_ADDQ(cp, sys, pswitch, 1);
1086 pg_ev_thread_swtch(cp, now, curthread, next);
1089 cp->cpu_disp_flags &= ~CPU_DISP_DONTSTEAL;
1092 cp->cpu_last_swtch = curthread->t_disp_time = ddi_get_lbolt();
1121 cpu_resched(cpu_t *cp, pri_t tpri)
1124 pri_t cpupri = cp->cpu_dispatch_pri;
1129 if (tpri >= upreemptpri && cp->cpu_runrun == 0) {
1130 cp->cpu_runrun = 1;
1131 aston(cp->cpu_dispthread);
1132 if (tpri < kpreemptpri && cp != CPU)
1135 if (tpri >= kpreemptpri && cp->cpu_kprunrun == 0) {
1136 cp->cpu_kprunrun = 1;
1137 if (cp != CPU)
1148 poke_cpu(cp->cpu_id);
1160 #define RUNQ_LEN(cp, pri) ((cp)->cpu_disp->disp_q[pri].dq_sruncnt)
1185 cpu_t *cp;
1212 cp = tp->t_cpu;
1229 cp = disp_lowpri_cpu(tp->t_cpu, tp->t_lpl, tpri, NULL);
1231 cp = disp_lowpri_cpu(tp->t_cpu, tp->t_lpl, tpri,
1234 cp = tp->t_cpu;
1237 if (tp->t_cpupart == cp->cpu_part) {
1243 cp = cmt_balance(tp, cp);
1248 qlen = RUNQ_LEN(cp, tpri);
1256 newcp = cp->cpu_next_part;
1257 } else if ((newcp = cp->cpu_next_lpl) == cp) {
1258 newcp = cp->cpu_next_part;
1264 cpu_t *, cp, cpu_t *, newcp);
1265 cp = newcp;
1272 cp = disp_lowpri_cpu(tp->t_cpupart->cp_cpulist,
1275 ASSERT((cp->cpu_flags & CPU_QUIESCED) == 0);
1283 cp = tp->t_weakbound_cpu ?
1304 dp = cp->cpu_disp;
1309 tpri, cp, tp);
1314 tnf_thread_queue(tp, cp, tpri);
1341 cpu_resched(cp, tpri);
1346 if (self && dp->disp_max_unbound_pri == -1 && cp == CPU) {
1355 cp->cpu_disp_flags |= CPU_DISP_DONTSTEAL;
1359 (*disp_enq_thread)(cp, bound);
1375 cpu_t *cp;
1399 cp = tp->t_cpu;
1405 cp = tp->t_cpu;
1406 if (tp->t_cpupart == cp->cpu_part) {
1418 if ((!LGRP_CONTAINS_CPU(tp->t_lpl->lpl_lgrp, cp)) ||
1419 (cp == cpu_inmotion)) {
1420 cp = disp_lowpri_cpu(tp->t_cpu, tp->t_lpl, tpri,
1421 (tp == curthread) ? cp : NULL);
1422 } else if ((tpri < cp->cpu_disp->disp_maxrunpri) &&
1424 cp = disp_lowpri_cpu(tp->t_cpu, tp->t_lpl, tpri,
1431 cp = disp_lowpri_cpu(tp->t_cpupart->cp_cpulist,
1434 ASSERT((cp->cpu_flags & CPU_QUIESCED) == 0);
1442 cp = tp->t_weakbound_cpu ?
1464 dp = cp->cpu_disp;
1473 tnf_thread_queue(tp, cp, tpri);
1500 cpu_resched(cp, tpri);
1506 cp == CPU) {
1515 cp->cpu_disp_flags |= CPU_DISP_DONTSTEAL;
1519 (*disp_enq_thread)(cp, bound);
1530 cpu_t *cp;
1578 cp = tp->t_cpu;
1579 if (tp->t_cpupart != cp->cpu_part) {
1581 cp = tp->t_cpupart->cp_cpulist;
1583 cp = disp_lowpri_cpu(cp, tp->t_lpl, tp->t_pri, NULL);
1584 disp_lock_enter_high(&cp->cpu_disp->disp_lock);
1585 ASSERT((cp->cpu_flags & CPU_QUIESCED) == 0);
1590 tnf_thread_queue(tp, cp, tpri);
1593 if (cp->cpu_chosen_level < tpri)
1594 cp->cpu_chosen_level = tpri;
1595 cpu_resched(cp, tpri);
1596 disp_lock_exit_high(&cp->cpu_disp->disp_lock);
1597 (*disp_enq_thread)(cp, 0);
1897 disp_getwork(cpu_t *cp)
1914 kpq = &cp->cpu_part->cp_kp_queue;
1936 lpl = lpl_leaf = cp->cpu_lpl;
1947 if (lpl_leaf != cp->cpu_lpl)
1950 ocp = cp->cpu_next_lpl;
1972 if (cp->cpu_disp->disp_nrunnable != 0) {
2059 if (tcp && cp->cpu_disp->disp_nrunnable == 0) {
2188 cpu_t *cp, *tcp;
2198 cp = CPU;
2267 * migration between cp and tcp would incur no performance
2270 if (pg_cmt_can_migrate(cp, tcp))
2296 cpu_t *, tcp, cpu_t *, cp, hrtime_t, rqtime);
2361 tp->t_disp_queue = cp->cpu_disp;
2363 cp->cpu_dispthread = tp; /* protected by spl only */
2364 cp->cpu_dispatch_pri = pri;
2385 disp_lock_enter_high(&cp->cpu_disp->disp_lock);
2386 disp_lock_exit_high(&cp->cpu_disp->disp_lock);
2390 DTRACE_PROBE3(steal, kthread_t *, tp, cpu_t *, tcp, cpu_t *, cp);
2392 thread_onproc(tp, cp); /* set t_state to TS_ONPROC */
2410 disp_bound_common(cpu_t *cp, int threadlistsafe, int flag)
2435 if (tp == cp->cpu_idle_thread)
2442 if (tp == cp->cpu_pause_thread)
2446 (tp->t_bound_cpu == cp ||
2447 tp->t_bind_cpu == cp->cpu_id ||
2448 tp->t_weakbound_cpu == cp)) {
2454 (tp->t_cpupart == cp->cpu_part)) {
2471 disp_bound_threads(cpu_t *cp, int threadlistsafe)
2473 return (disp_bound_common(cp, threadlistsafe, BOUND_CPU));
2481 disp_bound_anythreads(cpu_t *cp, int threadlistsafe)
2483 return (disp_bound_common(cp, threadlistsafe, BOUND_CPU | BOUND_INTR));
2493 disp_bound_partition(cpu_t *cp, int threadlistsafe)
2495 return (disp_bound_common(cp, threadlistsafe, BOUND_PARTITION));
2503 disp_cpu_inactive(cpu_t *cp)
2506 disp_t *dp = cp->cpu_disp;
2537 * cp has already been removed from the list of active cpus
2539 * tp ending up back on cp.
2547 ASSERT(tp->t_cpu != cp);
2589 cpu_t *cp, *cpstart;
2637 cp = cpstart = hint;
2639 cp = cpstart = lpl_leaf->lpl_cpus;
2642 if (cp == curcpu)
2644 else if (cp == cpu_inmotion)
2647 cpupri = cp->cpu_dispatch_pri;
2648 if (cp->cpu_disp->disp_maxrunpri > cpupri)
2649 cpupri = cp->cpu_disp->disp_maxrunpri;
2650 if (cp->cpu_chosen_level > cpupri)
2651 cpupri = cp->cpu_chosen_level;
2654 ASSERT((cp->cpu_flags &
2656 return (cp);
2658 bestcpu = cp;
2661 } while ((cp = cp->cpu_next_lpl) != cpstart);