Lines matching refs:op — references to the 'struct optimized_kprobe *op' local variable or parameter in the optprobe code of kernel/kprobes.c. Each hit below shows the source line number, the matching line, and the enclosing function (declaration hits are tagged 'local' or 'argument').
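
For context: nearly every hit below recovers the enclosing optimized probe from its embedded 'kp' member with container_of(). The structure involved is defined in include/linux/kprobes.h under CONFIG_OPTPROBES; it is quoted here for orientation, with the field comments paraphrased:

    struct optimized_kprobe {
        struct kprobe kp;                    /* embedded kprobe; hits use container_of(p, ..., kp) */
        struct list_head list;               /* links the probe on the optimizing/unoptimizing/freeing lists */
        struct arch_optimized_insn optinsn;  /* arch-specific detour (jump) instruction buffer */
    };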

439 	struct optimized_kprobe *op;  in free_aggr_kprobe()  local
441 op = container_of(p, struct optimized_kprobe, kp); in free_aggr_kprobe()
442 arch_remove_optimized_kprobe(op); in free_aggr_kprobe()
444 kfree(op); in free_aggr_kprobe()
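
The first cluster (lines 439-444) is free_aggr_kprobe(). A minimal sketch of how the hits fit together, assuming the surrounding lines follow mainline kernel/kprobes.c; the arch_remove_kprobe() call in the middle does not appear above because it takes 'p' rather than 'op':

    /* Free an aggregated/optimized probe: drop arch state, then the struct. */
    static void free_aggr_kprobe(struct kprobe *p)
    {
        struct optimized_kprobe *op;

        op = container_of(p, struct optimized_kprobe, kp);
        arch_remove_optimized_kprobe(op);   /* release the optimized (detour) insn buffer */
        arch_remove_kprobe(p);              /* release the ordinary single-step slot */
        kfree(op);
    }
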
450 struct optimized_kprobe *op; in kprobe_optready() local
453 op = container_of(p, struct optimized_kprobe, kp); in kprobe_optready()
454 return arch_prepared_optinsn(&op->optinsn); in kprobe_optready()
463 struct optimized_kprobe *op; in kprobe_disarmed() local
469 op = container_of(p, struct optimized_kprobe, kp); in kprobe_disarmed()
471 return kprobe_disabled(p) && list_empty(&op->list); in kprobe_disarmed()
477 struct optimized_kprobe *op; in kprobe_queued() local
480 op = container_of(p, struct optimized_kprobe, kp); in kprobe_queued()
481 if (!list_empty(&op->list)) in kprobe_queued()
495 struct optimized_kprobe *op; in get_optimized_kprobe() local
502 op = container_of(p, struct optimized_kprobe, kp); in get_optimized_kprobe()
503 if (arch_within_optimized_kprobe(op, addr)) in get_optimized_kprobe()
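
Lines 450-503 are the read-only helpers: kprobe_optready() asks whether optimized code has been prepared, kprobe_disarmed() and kprobe_queued() inspect the probe's disabled state and work-list membership, and get_optimized_kprobe() checks whether a given address falls inside an already-optimized region via arch_within_optimized_kprobe(op, addr). A sketch of the first one, assuming mainline for the kprobe_aggrprobe() guard (which is not an 'op' hit):

    /* Return true if this aggregated probe has optimized code prepared. */
    static inline int kprobe_optready(struct kprobe *p)
    {
        struct optimized_kprobe *op;

        if (kprobe_aggrprobe(p)) {
            op = container_of(p, struct optimized_kprobe, kp);
            return arch_prepared_optinsn(&op->optinsn);
        }
        return 0;
    }
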
552 struct optimized_kprobe *op, *tmp; in do_unoptimize_kprobes() local
562 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_unoptimize_kprobes()
564 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in do_unoptimize_kprobes()
566 if (kprobe_disabled(&op->kp) && !kprobe_gone(&op->kp)) in do_unoptimize_kprobes()
567 arch_disarm_kprobe(&op->kp); in do_unoptimize_kprobes()
568 if (kprobe_unused(&op->kp)) { in do_unoptimize_kprobes()
574 hlist_del_rcu(&op->kp.hlist); in do_unoptimize_kprobes()
576 list_del_init(&op->list); in do_unoptimize_kprobes()
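
Lines 552-576 are the loop in do_unoptimize_kprobes() that the optimizer worker runs after the arch code has patched the detours back in one batch: each probe on 'freeing_list' loses KPROBE_FLAG_OPTIMIZED, is disarmed if it is disabled but not gone, and is either unhashed (if unused, so it can be reclaimed later) or simply dequeued. Roughly, with the lines between the hits assumed from mainline:

    list_for_each_entry_safe(op, tmp, &freeing_list, list) {
        /* Switching from detour code back to the original breakpoint. */
        op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED;
        /* Disarm probes that are disabled but whose probed code still exists. */
        if (kprobe_disabled(&op->kp) && !kprobe_gone(&op->kp))
            arch_disarm_kprobe(&op->kp);
        if (kprobe_unused(&op->kp)) {
            /* Unused: unhash now; do_free_cleaned_kprobes() frees it later. */
            hlist_del_rcu(&op->kp.hlist);
        } else {
            list_del_init(&op->list);
        }
    }
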
583 struct optimized_kprobe *op, *tmp; in do_free_cleaned_kprobes() local
585 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_free_cleaned_kprobes()
586 list_del_init(&op->list); in do_free_cleaned_kprobes()
587 if (WARN_ON_ONCE(!kprobe_unused(&op->kp))) { in do_free_cleaned_kprobes()
594 free_aggr_kprobe(&op->kp); in do_free_cleaned_kprobes()
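
Lines 583-594 are the companion pass, do_free_cleaned_kprobes(), which runs after the optimizer has waited for tasks to leave the patched area and actually frees what the previous loop unhashed. A sketch under the same assumption:

    static void do_free_cleaned_kprobes(void)
    {
        struct optimized_kprobe *op, *tmp;

        list_for_each_entry_safe(op, tmp, &freeing_list, list) {
            list_del_init(&op->list);
            if (WARN_ON_ONCE(!kprobe_unused(&op->kp))) {
                /* Should not happen: a probe still in use stays on the hash list. */
                continue;
            }
            free_aggr_kprobe(&op->kp);
        }
    }
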
663 bool optprobe_queued_unopt(struct optimized_kprobe *op) in optprobe_queued_unopt() argument
668 if (op == _op) in optprobe_queued_unopt()
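
Lines 663-668 are the small predicate used by the functions below; it simply scans 'unoptimizing_list' for the probe. Only the comparison at line 668 is a hit; the list walk around it is assumed:

    /* Is this probe currently queued for unoptimizing? */
    static bool optprobe_queued_unopt(struct optimized_kprobe *op)
    {
        struct optimized_kprobe *_op;

        list_for_each_entry(_op, &unoptimizing_list, list) {
            if (op == _op)
                return true;
        }
        return false;
    }
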
678 struct optimized_kprobe *op; in optimize_kprobe() local
689 op = container_of(p, struct optimized_kprobe, kp); in optimize_kprobe()
692 if (arch_check_optimized_kprobe(op) < 0) in optimize_kprobe()
696 if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) { in optimize_kprobe()
697 if (optprobe_queued_unopt(op)) { in optimize_kprobe()
699 list_del_init(&op->list); in optimize_kprobe()
703 op->kp.flags |= KPROBE_FLAG_OPTIMIZED; in optimize_kprobe()
709 if (WARN_ON_ONCE(!list_empty(&op->list))) in optimize_kprobe()
712 list_add(&op->list, &optimizing_list); in optimize_kprobe()
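
Lines 678-712 are optimize_kprobe(). After the early checks on 'p' (readiness, global and per-probe enablement, and the presence of a post_handler; none of them reference 'op', so they do not appear above), the function either cancels a pending unoptimization or marks the probe optimized and queues it for the worker. A sketch of that core, with the early checks and the final kick of the optimizer assumed from mainline:

    op = container_of(p, struct optimized_kprobe, kp);

    /* Refuse if another probe sits inside the instructions to be replaced. */
    if (arch_check_optimized_kprobe(op) < 0)
        return;

    if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) {
        /* Already optimized; if it is queued for unoptimizing, just dequeue it. */
        if (optprobe_queued_unopt(op))
            list_del_init(&op->list);
        return;
    }
    op->kp.flags |= KPROBE_FLAG_OPTIMIZED;

    /* A probe on either work list must already carry the OPTIMIZED flag. */
    if (WARN_ON_ONCE(!list_empty(&op->list)))
        return;

    list_add(&op->list, &optimizing_list);
    kick_kprobe_optimizer();    /* assumed from mainline: schedules the delayed optimizer work */

The complementary dequeue logic lives in unoptimize_kprobe(), covered by the next cluster.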
717 static void force_unoptimize_kprobe(struct optimized_kprobe *op) in force_unoptimize_kprobe() argument
720 arch_unoptimize_kprobe(op); in force_unoptimize_kprobe()
721 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in force_unoptimize_kprobe()
727 struct optimized_kprobe *op; in unoptimize_kprobe() local
732 op = container_of(p, struct optimized_kprobe, kp); in unoptimize_kprobe()
736 if (!list_empty(&op->list)) { in unoptimize_kprobe()
737 if (optprobe_queued_unopt(op)) { in unoptimize_kprobe()
744 force_unoptimize_kprobe(op); in unoptimize_kprobe()
745 list_move(&op->list, &freeing_list); in unoptimize_kprobe()
749 list_del_init(&op->list); in unoptimize_kprobe()
750 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in unoptimize_kprobe()
758 force_unoptimize_kprobe(op); in unoptimize_kprobe()
760 list_add(&op->list, &unoptimizing_list); in unoptimize_kprobe()
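
Lines 717-760 are the reverse path. force_unoptimize_kprobe() (lines 717-721) patches the detour out immediately and clears the flag; unoptimize_kprobe() decides, per probe state, whether to dequeue a not-yet-applied optimization, force the patch and hand the probe to 'freeing_list', or queue it for lazy unoptimization. A sketch, assuming mainline for the guards and the non-forced tail:

    static void unoptimize_kprobe(struct kprobe *p, bool force)
    {
        struct optimized_kprobe *op;

        if (!kprobe_aggrprobe(p) || kprobe_disarmed(p))
            return;

        op = container_of(p, struct optimized_kprobe, kp);
        if (!kprobe_optimized(p))
            return;

        if (!list_empty(&op->list)) {
            if (optprobe_queued_unopt(op)) {
                /* Already waiting to be unoptimized. */
                if (force) {
                    /* Patch it back now and hand it to the freeing list. */
                    force_unoptimize_kprobe(op);
                    list_move(&op->list, &freeing_list);
                }
            } else {
                /* Only queued for optimizing: dequeue and drop the flag. */
                list_del_init(&op->list);
                op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED;
            }
            return;
        }

        /* Fully optimized: patch back now, or defer to the optimizer worker. */
        if (force) {
            force_unoptimize_kprobe(op);
        } else {
            list_add(&op->list, &unoptimizing_list);
            kick_kprobe_optimizer();
        }
    }
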
768 struct optimized_kprobe *op; in reuse_unused_kprobe() local
774 op = container_of(ap, struct optimized_kprobe, kp); in reuse_unused_kprobe()
775 WARN_ON_ONCE(list_empty(&op->list)); in reuse_unused_kprobe()
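
Lines 768-775 are reuse_unused_kprobe(): a probe that was unused, and therefore on its way through delayed unoptimizing, is re-enabled and re-optimized instead of being freed. Roughly, with the re-enable and error path assumed from mainline:

    /* Cancel a pending unoptimization so the aggregated probe can be reused. */
    static int reuse_unused_kprobe(struct kprobe *ap)
    {
        struct optimized_kprobe *op;

        /* An unused probe must still be queued for delayed unoptimizing. */
        op = container_of(ap, struct optimized_kprobe, kp);
        WARN_ON_ONCE(list_empty(&op->list));

        ap->flags &= ~KPROBE_FLAG_DISABLED;     /* enable the probe again */
        if (!kprobe_optready(ap))
            return -EINVAL;

        optimize_kprobe(ap);                    /* also dequeues it from op->list */
        return 0;
    }
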
789 struct optimized_kprobe *op; in kill_optimized_kprobe() local
791 op = container_of(p, struct optimized_kprobe, kp); in kill_optimized_kprobe()
792 if (!list_empty(&op->list)) in kill_optimized_kprobe()
794 list_del_init(&op->list); in kill_optimized_kprobe()
795 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in kill_optimized_kprobe()
803 if (optprobe_queued_unopt(op)) in kill_optimized_kprobe()
804 list_move(&op->list, &freeing_list); in kill_optimized_kprobe()
808 arch_remove_optimized_kprobe(op); in kill_optimized_kprobe()
812 void __prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p) in __prepare_optimized_kprobe() argument
815 arch_prepare_optimized_kprobe(op, p); in __prepare_optimized_kprobe()
821 struct optimized_kprobe *op; in prepare_optimized_kprobe() local
823 op = container_of(p, struct optimized_kprobe, kp); in prepare_optimized_kprobe()
824 __prepare_optimized_kprobe(op, p); in prepare_optimized_kprobe()
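
Lines 812-824 are the preparation pair: prepare_optimized_kprobe() just recovers 'op' and calls __prepare_optimized_kprobe(), which asks the architecture to build the detour instructions. Sketch, with the kprobe_ftrace() guard assumed from mainline (ftrace-based probes cannot be jump-optimized this way):

    static void __prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p)
    {
        if (!kprobe_ftrace(p))
            arch_prepare_optimized_kprobe(op, p);
    }

    static void prepare_optimized_kprobe(struct kprobe *p)
    {
        struct optimized_kprobe *op;

        op = container_of(p, struct optimized_kprobe, kp);
        __prepare_optimized_kprobe(op, p);
    }
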
830 struct optimized_kprobe *op; in alloc_aggr_kprobe() local
832 op = kzalloc(sizeof(struct optimized_kprobe), GFP_KERNEL); in alloc_aggr_kprobe()
833 if (!op) in alloc_aggr_kprobe()
836 INIT_LIST_HEAD(&op->list); in alloc_aggr_kprobe()
837 op->kp.addr = p->addr; in alloc_aggr_kprobe()
838 __prepare_optimized_kprobe(op, p); in alloc_aggr_kprobe()
840 return &op->kp; in alloc_aggr_kprobe()
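
Lines 830-840 are alloc_aggr_kprobe(), which builds the optimized_kprobe container for a probe and immediately tries to prepare its detour. Sketch around the hits, with the signature and NULL return assumed:

    static struct kprobe *alloc_aggr_kprobe(struct kprobe *p)
    {
        struct optimized_kprobe *op;

        op = kzalloc(sizeof(struct optimized_kprobe), GFP_KERNEL);
        if (!op)
            return NULL;

        INIT_LIST_HEAD(&op->list);
        op->kp.addr = p->addr;
        __prepare_optimized_kprobe(op, p);

        return &op->kp;     /* callers see only the embedded struct kprobe */
    }
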
852 struct optimized_kprobe *op; in try_to_optimize_kprobe() local
867 op = container_of(ap, struct optimized_kprobe, kp); in try_to_optimize_kprobe()
868 if (!arch_prepared_optinsn(&op->optinsn)) { in try_to_optimize_kprobe()
870 arch_remove_optimized_kprobe(op); in try_to_optimize_kprobe()
871 kfree(op); in try_to_optimize_kprobe()
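
Finally, lines 852-871 are from try_to_optimize_kprobe(): after allocating the aggregated probe it checks whether the architecture actually managed to prepare the detour, and if not, falls back to a plain kprobe by undoing the allocation. The fragment around the hits, roughly (locking and the surrounding registration logic are omitted; 'ap' is the struct kprobe returned by alloc_aggr_kprobe(), and the 'out' label is assumed):

    op = container_of(ap, struct optimized_kprobe, kp);
    if (!arch_prepared_optinsn(&op->optinsn)) {
        /* Preparing the detour failed: fall back to a regular kprobe. */
        arch_remove_optimized_kprobe(op);
        kfree(op);
        goto out;       /* assumed label: unlock and return without optimizing */
    }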