Lines Matching refs:sop

650 struct sembuf *sop; in perform_atomic_semop_slow() local
659 for (sop = sops; sop < sops + nsops; sop++) { in perform_atomic_semop_slow()
660 int idx = array_index_nospec(sop->sem_num, sma->sem_nsems); in perform_atomic_semop_slow()
662 sem_op = sop->sem_op; in perform_atomic_semop_slow()
674 if (sop->sem_flg & SEM_UNDO) { in perform_atomic_semop_slow()
675 int undo = un->semadj[sop->sem_num] - sem_op; in perform_atomic_semop_slow()
679 un->semadj[sop->sem_num] = undo; in perform_atomic_semop_slow()
685 sop--; in perform_atomic_semop_slow()
687 while (sop >= sops) { in perform_atomic_semop_slow()
688 ipc_update_pid(&sma->sems[sop->sem_num].sempid, pid); in perform_atomic_semop_slow()
689 sop--; in perform_atomic_semop_slow()
699 q->blocking = sop; in perform_atomic_semop_slow()
701 if (sop->sem_flg & IPC_NOWAIT) in perform_atomic_semop_slow()
707 sop--; in perform_atomic_semop_slow()
708 while (sop >= sops) { in perform_atomic_semop_slow()
709 sem_op = sop->sem_op; in perform_atomic_semop_slow()
710 sma->sems[sop->sem_num].semval -= sem_op; in perform_atomic_semop_slow()
711 if (sop->sem_flg & SEM_UNDO) in perform_atomic_semop_slow()
712 un->semadj[sop->sem_num] += sem_op; in perform_atomic_semop_slow()
713 sop--; in perform_atomic_semop_slow()
722 struct sembuf *sop; in perform_atomic_semop() local
740 for (sop = sops; sop < sops + nsops; sop++) { in perform_atomic_semop()
741 int idx = array_index_nospec(sop->sem_num, sma->sem_nsems); in perform_atomic_semop()
744 sem_op = sop->sem_op; in perform_atomic_semop()
757 if (sop->sem_flg & SEM_UNDO) { in perform_atomic_semop()
758 int undo = un->semadj[sop->sem_num] - sem_op; in perform_atomic_semop()
766 for (sop = sops; sop < sops + nsops; sop++) { in perform_atomic_semop()
767 curr = &sma->sems[sop->sem_num]; in perform_atomic_semop()
768 sem_op = sop->sem_op; in perform_atomic_semop()
770 if (sop->sem_flg & SEM_UNDO) { in perform_atomic_semop()
771 int undo = un->semadj[sop->sem_num] - sem_op; in perform_atomic_semop()
773 un->semadj[sop->sem_num] = undo; in perform_atomic_semop()
782 q->blocking = sop; in perform_atomic_semop()
783 return sop->sem_flg & IPC_NOWAIT ? -EAGAIN : 1; in perform_atomic_semop()
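
perform_atomic_semop() is the fast path built on the same cursor: the first loop is a read-only dry run that only checks that no operation would block or push its SEM_UNDO adjustment out of range, and only then does the second loop commit every operation, so no rollback path is needed. A minimal two-pass sketch, again with illustrative names:

    #include <stdio.h>

    struct op {
        unsigned short num;
        short          delta;
    };

    /*
     * Fast-path variant: pass 1 only validates, pass 2 commits.
     * Nothing is modified until every op is known to succeed, so
     * there is no rollback at all.
     */
    static int apply_two_pass(int *vals, struct op *ops, int nops)
    {
        struct op *cur;

        /* Pass 1: dry run, read-only. */
        for (cur = ops; cur < ops + nops; cur++) {
            if (vals[cur->num] + cur->delta < 0)
                return -1;            /* would block: nothing to undo */
        }

        /* Pass 2: every op is known to succeed, commit them all. */
        for (cur = ops; cur < ops + nops; cur++)
            vals[cur->num] += cur->delta;

        return 0;
    }

    int main(void)
    {
        int vals[1] = { 0 };
        /* +1 then -1 on the same counter: fine when applied in order,
         * but the dry run checks the -1 against the unmodified value
         * and rejects the set. */
        struct op ops[] = { { 0, 1 }, { 0, -1 } };

        printf("two-pass result: %d\n", apply_two_pass(vals, ops, 2));
        return 0;
    }

A dry run like this is only trustworthy when no two altering operations target the same semaphore; the kernel tracks that condition separately (dupsop, computed in __do_semtimedop() below) and falls back to the slow path when it holds, which is exactly the pitfall the main() above demonstrates.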
1075 struct sembuf *sop = q->blocking; in check_qop() local
1088 if (sop->sem_num != semnum) in check_qop()
1091 if (count_zero && sop->sem_op == 0) in check_qop()
1093 if (!count_zero && sop->sem_op < 0) in check_qop()
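
check_qop() reads the sembuf pointer that perform_atomic_semop() stored in q->blocking when a task had to sleep: for the GETNCNT/GETZCNT-style counts it asks whether that blocking operation targets the semaphore being queried and whether it is a wait-for-zero (sem_op == 0) or a decrement (sem_op < 0). A hedged sketch of that predicate, with simplified types and a made-up name:

    #include <stdbool.h>

    struct sembuf_like {
        unsigned short sem_num;
        short          sem_op;
    };

    /*
     * Should a sleeping waiter, whose first blocking operation is
     * *blocking, be counted for semaphore 'semnum'?  count_zero selects
     * GETZCNT-style counting (waiters for zero) rather than
     * GETNCNT-style (waiters trying to decrement).
     */
    bool waiter_counts(const struct sembuf_like *blocking,
                       unsigned short semnum, bool count_zero)
    {
        if (blocking->sem_num != semnum)
            return false;    /* blocked on a different semaphore */
        if (count_zero && blocking->sem_op == 0)
            return true;     /* waiting for the value to reach zero */
        if (!count_zero && blocking->sem_op < 0)
            return true;     /* waiting to decrement the value */
        return false;
    }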
1989 struct sembuf *sop; in __do_semtimedop() local
2013 for (sop = sops; sop < sops + nsops; sop++) { in __do_semtimedop()
2014 unsigned long mask = 1ULL << ((sop->sem_num) % BITS_PER_LONG); in __do_semtimedop()
2016 if (sop->sem_num >= max) in __do_semtimedop()
2017 max = sop->sem_num; in __do_semtimedop()
2018 if (sop->sem_flg & SEM_UNDO) in __do_semtimedop()
2029 if (sop->sem_op != 0) { in __do_semtimedop()
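
Before taking any locks, __do_semtimedop() makes one cheap pass over the user-supplied sembuf array: it tracks the largest sem_num, notes whether any operation asked for SEM_UNDO, and hashes each altering operation's sem_num into a single unsigned long so that a repeated target can be flagged and routed to the slow path. The sketch below isolates that bitmask heuristic; it can report false positives when two different sem_nums collide modulo BITS_PER_LONG, which is safe because the flag only selects the more careful path.

    #include <stdbool.h>
    #include <stdio.h>

    #define BITS_PER_LONG (8 * sizeof(unsigned long))

    struct op {
        unsigned short num;
        short          delta;
    };

    /*
     * One pass over the operation array: return true if two altering
     * ops *might* target the same counter.  False positives are
     * possible (nums that collide modulo BITS_PER_LONG), false
     * negatives are not.
     */
    static bool might_have_dup_targets(const struct op *ops, int nops)
    {
        unsigned long dup = 0;
        bool dupsop = false;
        int i;

        for (i = 0; i < nops; i++) {
            unsigned long mask = 1UL << (ops[i].num % BITS_PER_LONG);

            if (dup & mask)
                dupsop = true;   /* possible duplicate target */
            if (ops[i].delta != 0)
                dup |= mask;     /* only altering ops can conflict */
        }
        return dupsop;
    }

    int main(void)
    {
        struct op a[] = { { 0, -1 }, { 1, +1 } }; /* distinct targets */
        struct op b[] = { { 3, +1 }, { 3, -1 } }; /* same target twice */

        printf("%d %d\n", might_have_dup_targets(a, 2),
                          might_have_dup_targets(b, 2));
        return 0;
    }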