Lines matching refs:intr (dpu_hw_interrupts.c, the drm/msm DPU interrupt code)

Each entry below is the file line number, the matching source line, and the enclosing function; a trailing "argument" or "local" marks how intr is bound at that site.

207 static inline struct dpu_hw_intr_entry *dpu_core_irq_get_entry(struct dpu_hw_intr *intr,  in dpu_core_irq_get_entry()  argument
210 return &intr->irq_tbl[irq_idx]; in dpu_core_irq_get_entry()
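
The lookup at lines 207-210 is the entire mapping from a driver-level irq_idx to its callback slot: one flat array index into irq_tbl, with no locking and bounds left to the caller. A minimal user-space model, with hypothetical intr_entry/intr_ctx types standing in for the kernel's dpu_hw_intr_entry/dpu_hw_intr:

    #include <stddef.h>

    /* Hypothetical stand-ins for dpu_hw_intr_entry / dpu_hw_intr. */
    struct intr_entry { void (*cb)(void *arg); void *arg; };
    struct intr_ctx   { struct intr_entry irq_tbl[128]; };

    /* The whole helper is one array index; validity of irq_idx is the
     * caller's problem, just as in the kernel version. */
    static inline struct intr_entry *get_entry(struct intr_ctx *ctx, int irq_idx)
    {
            return &ctx->irq_tbl[irq_idx];
    }
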
241 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_core_irq() local
249 if (!intr) in dpu_core_irq()
252 spin_lock_irqsave(&intr->irq_lock, irq_flags); in dpu_core_irq()
254 if (!test_bit(reg_idx, &intr->irq_mask)) in dpu_core_irq()
258 irq_status = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].status_off); in dpu_core_irq()
261 enable_mask = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].en_off); in dpu_core_irq()
265 DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off, in dpu_core_irq()
294 spin_unlock_irqrestore(&intr->irq_lock, irq_flags); in dpu_core_irq()
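
The handler at lines 241-294 is the top-level dispatch loop: with irq_lock held it visits every register group flagged in irq_mask, reads the group's status register, ANDs it with the enable register so latched-but-masked sources are ignored, write-1-clears what it is about to service (line 265), and then dispatches per set bit. A compilable user-space sketch of the same pattern; plain memory stands in for MMIO and the spinlock is omitted:

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_REGS     4
    #define IRQS_PER_REG 32

    /* Hypothetical MMIO model: one status/enable pair per register group. */
    struct reg_group { uint32_t status, enable; };

    static struct reg_group regs[NUM_REGS];
    static unsigned long irq_mask = 0xf;        /* groups present on this chip */

    int main(void)
    {
            regs[1].status = 0x5;   /* hardware latched two sources... */
            regs[1].enable = 0x4;   /* ...but only one of them is enabled */

            for (int i = 0; i < NUM_REGS; i++) {
                    if (!(irq_mask & (1UL << i)))
                            continue;                    /* group not in use */

                    uint32_t hits = regs[i].status;      /* read status_off */
                    hits &= regs[i].enable;              /* drop masked sources */
                    if (!hits)
                            continue;

                    regs[i].status &= ~hits;             /* write-1-to-clear model */

                    for (int bit = 0; bit < IRQS_PER_REG; bit++)
                            if (hits & (1U << bit))      /* run the bit's callback */
                                    printf("irq %d fired\n", i * IRQS_PER_REG + bit);
            }
            return 0;
    }

Running this prints "irq 34 fired": the second latched source in group 1 is masked, so only the enabled one is serviced.
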
299 static int dpu_hw_intr_enable_irq_locked(struct dpu_hw_intr *intr, int irq_idx) in dpu_hw_intr_enable_irq_locked() argument
306 if (!intr) in dpu_hw_intr_enable_irq_locked()
320 assert_spin_locked(&intr->irq_lock); in dpu_hw_intr_enable_irq_locked()
323 reg = &intr->intr_set[reg_idx]; in dpu_hw_intr_enable_irq_locked()
329 cache_irq_mask = intr->cache_irq_mask[reg_idx]; in dpu_hw_intr_enable_irq_locked()
337 DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx)); in dpu_hw_intr_enable_irq_locked()
339 DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask); in dpu_hw_intr_enable_irq_locked()
344 intr->cache_irq_mask[reg_idx] = cache_irq_mask; in dpu_hw_intr_enable_irq_locked()
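
Lines 299-344 are the cached-mask enable path: the driver mutates a software copy of the group's enable word rather than doing a read-modify-write on the register, acks any stale latched status through clr_off (line 337) before writing the new mask to en_off (line 339), and only then commits the cache. Clearing first matters: a source that latched while masked would otherwise fire the instant it is unmasked. A self-contained sketch of that ordering (hypothetical reg_group type; the assert_spin_locked() caller contract is assumed, not modeled):

    #include <stdint.h>

    #define IRQS_PER_REG 32
    #define IRQ_BIT(idx) (1U << ((idx) % IRQS_PER_REG))

    /* Hypothetical register group: enable word + write-1-to-clear status. */
    struct reg_group { uint32_t status, enable; };

    static uint32_t cache_irq_mask[4];  /* software copy of each enable word */

    /* Model of the enable path; caller must hold the equivalent of irq_lock. */
    static void enable_irq_locked(struct reg_group *regs, int irq_idx)
    {
            int reg_idx = irq_idx / IRQS_PER_REG;
            uint32_t mask = cache_irq_mask[reg_idx];

            if (mask & IRQ_BIT(irq_idx))
                    return;                             /* already enabled */

            mask |= IRQ_BIT(irq_idx);
            regs[reg_idx].status &= ~IRQ_BIT(irq_idx);  /* 1: ack stale pending */
            regs[reg_idx].enable = mask;                /* 2: enable new mask */
            cache_irq_mask[reg_idx] = mask;             /* 3: commit the cache */
    }
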
354 static int dpu_hw_intr_disable_irq_locked(struct dpu_hw_intr *intr, int irq_idx) in dpu_hw_intr_disable_irq_locked() argument
361 if (!intr) in dpu_hw_intr_disable_irq_locked()
375 assert_spin_locked(&intr->irq_lock); in dpu_hw_intr_disable_irq_locked()
378 reg = &intr->intr_set[reg_idx]; in dpu_hw_intr_disable_irq_locked()
380 cache_irq_mask = intr->cache_irq_mask[reg_idx]; in dpu_hw_intr_disable_irq_locked()
388 DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask); in dpu_hw_intr_disable_irq_locked()
390 DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx)); in dpu_hw_intr_disable_irq_locked()
395 intr->cache_irq_mask[reg_idx] = cache_irq_mask; in dpu_hw_intr_disable_irq_locked()
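
The disable path at lines 354-395 runs the mirror image with the register order inverted: en_off is written first (line 388) and clr_off second (line 390), so the source is masked before its latched status is acked and nothing can re-latch in the gap. Sketch under the same assumptions as above:

    #include <stdint.h>

    #define IRQS_PER_REG 32
    #define IRQ_BIT(idx) (1U << ((idx) % IRQS_PER_REG))

    struct reg_group { uint32_t status, enable; };

    static uint32_t cache_irq_mask[4];  /* software copy of each enable word */

    /* Model of the disable path; caller must hold the equivalent of irq_lock. */
    static void disable_irq_locked(struct reg_group *regs, int irq_idx)
    {
            int reg_idx = irq_idx / IRQS_PER_REG;
            uint32_t mask = cache_irq_mask[reg_idx];

            if (!(mask & IRQ_BIT(irq_idx)))
                    return;                             /* already disabled */

            mask &= ~IRQ_BIT(irq_idx);
            regs[reg_idx].enable = mask;                /* 1: mask the source */
            regs[reg_idx].status &= ~IRQ_BIT(irq_idx);  /* 2: ack latched status */
            cache_irq_mask[reg_idx] = mask;             /* 3: commit the cache */
    }
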
407 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_clear_irqs() local
410 if (!intr) in dpu_clear_irqs()
414 if (test_bit(i, &intr->irq_mask)) in dpu_clear_irqs()
415 DPU_REG_WRITE(&intr->hw, in dpu_clear_irqs()
416 intr->intr_set[i].clr_off, 0xffffffff); in dpu_clear_irqs()
425 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_disable_all_irqs() local
428 if (!intr) in dpu_disable_all_irqs()
432 if (test_bit(i, &intr->irq_mask)) in dpu_disable_all_irqs()
433 DPU_REG_WRITE(&intr->hw, in dpu_disable_all_irqs()
434 intr->intr_set[i].en_off, 0x00000000); in dpu_disable_all_irqs()
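
Lines 407-416 and 425-434 are the two bulk helpers: dpu_clear_irqs() write-1-clears every source in each owned group (0xffffffff to clr_off) and dpu_disable_all_irqs() masks every source (0x00000000 to en_off); both walk the same irq_mask of groups. One combined sketch:

    #include <stdint.h>

    #define NUM_REGS 4

    struct reg_group { uint32_t status, enable; };

    static unsigned long irq_mask = 0x3;        /* groups present on this chip */

    /* Ack every latched source in every owned group (dpu_clear_irqs model). */
    static void clear_all(struct reg_group *regs)
    {
            for (int i = 0; i < NUM_REGS; i++)
                    if (irq_mask & (1UL << i))
                            regs[i].status = 0; /* real hw: 0xffffffff to clr_off */
    }

    /* Mask every source in every owned group (dpu_disable_all_irqs model). */
    static void disable_all(struct reg_group *regs)
    {
            for (int i = 0; i < NUM_REGS; i++)
                    if (irq_mask & (1UL << i))
                            regs[i].enable = 0; /* real hw: 0 to en_off */
    }
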
443 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_core_irq_read() local
448 if (!intr) in dpu_core_irq_read()
456 spin_lock_irqsave(&intr->irq_lock, irq_flags); in dpu_core_irq_read()
459 intr_status = DPU_REG_READ(&intr->hw, in dpu_core_irq_read()
460 intr->intr_set[reg_idx].status_off) & in dpu_core_irq_read()
463 DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off, in dpu_core_irq_read()
469 spin_unlock_irqrestore(&intr->irq_lock, irq_flags); in dpu_core_irq_read()
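
dpu_core_irq_read() (lines 443-469) is the polling counterpart of the handler: under irq_lock it reads the group's status register, masks it down to the single bit of interest (lines 459-460), and write-1-clears exactly what it read when something was pending (line 463). Sketch, with locking again left to the caller:

    #include <stdint.h>

    #define IRQS_PER_REG 32
    #define IRQ_BIT(idx) (1U << ((idx) % IRQS_PER_REG))

    struct reg_group { uint32_t status, enable; };

    /* Poll one source's latched status and ack it if set; the kernel wraps
     * this in spin_lock_irqsave(), omitted in this model. */
    static uint32_t irq_read(struct reg_group *regs, int irq_idx)
    {
            int reg_idx = irq_idx / IRQS_PER_REG;
            uint32_t st = regs[reg_idx].status & IRQ_BIT(irq_idx);

            if (st)
                    regs[reg_idx].status &= ~st; /* real hw: write st to clr_off */

            return st;
    }
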
477 struct dpu_hw_intr *intr; in dpu_hw_intr_init() local
483 intr = kzalloc(sizeof(*intr), GFP_KERNEL); in dpu_hw_intr_init()
484 if (!intr) in dpu_hw_intr_init()
488 intr->intr_set = dpu_intr_set_7xxx; in dpu_hw_intr_init()
490 intr->intr_set = dpu_intr_set_legacy; in dpu_hw_intr_init()
492 intr->hw.blk_addr = addr + m->mdp[0].base; in dpu_hw_intr_init()
494 intr->irq_mask = BIT(MDP_SSPP_TOP0_INTR) | in dpu_hw_intr_init()
503 intr->irq_mask |= BIT(MDP_INTFn_INTR(intf->id)); in dpu_hw_intr_init()
506 intr->irq_mask |= BIT(DPU_IRQ_REG(intf->intr_tear_rd_ptr)); in dpu_hw_intr_init()
509 spin_lock_init(&intr->irq_lock); in dpu_hw_intr_init()
511 return intr; in dpu_hw_intr_init()
514 void dpu_hw_intr_destroy(struct dpu_hw_intr *intr) in dpu_hw_intr_destroy() argument
516 kfree(intr); in dpu_hw_intr_destroy()
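
Lines 477-511 and 514-516 bracket the context's lifetime: dpu_hw_intr_init() allocates it with kzalloc, selects the register-offset table by hardware generation (dpu_intr_set_7xxx vs dpu_intr_set_legacy, lines 488-490), points hw.blk_addr at the mdp[0] base, seeds irq_mask with the always-present MDP_SSPP_TOP0_INTR group plus one bit per interface (and the tear-check read-pointer IRQ group where an interface has one), and initializes irq_lock; dpu_hw_intr_destroy() is a bare kfree(). A user-space model of that shape, with illustrative types and group numbering:

    #include <stdint.h>
    #include <stdlib.h>

    /* Illustrative stand-ins; offsets and group numbering are made up. */
    struct intr_reg { uint32_t clr_off, en_off, status_off; };
    struct hw_intr {
            const struct intr_reg *intr_set;    /* per-generation reg layout */
            unsigned long          irq_mask;    /* register groups in use */
    };

    static const struct intr_reg set_7xxx[4];
    static const struct intr_reg set_legacy[4];

    static struct hw_intr *intr_init(int major_rev, unsigned int nr_intfs)
    {
            struct hw_intr *intr = calloc(1, sizeof(*intr));

            if (!intr)
                    return NULL;

            /* Pick the register-offset table for this hardware generation,
             * as lines 488-490 do with dpu_intr_set_7xxx / _legacy. */
            intr->intr_set = major_rev >= 7 ? set_7xxx : set_legacy;

            /* Always own the top-level group (bit 0 here), then add one
             * group per interface, echoing the MDP_INTFn_INTR() bits. */
            intr->irq_mask = 1UL << 0;
            for (unsigned int i = 0; i < nr_intfs; i++)
                    intr->irq_mask |= 1UL << (1 + i);

            return intr;
    }

    static void intr_destroy(struct hw_intr *intr)
    {
            free(intr);     /* mirrors dpu_hw_intr_destroy()'s bare kfree() */
    }
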