Lines Matching refs:ir

147 static inline u32 mtk_chkdata_reg(struct mtk_ir *ir, u32 i) in mtk_chkdata_reg() argument
149 return ir->data->regs[MTK_CHKDATA_REG] + 4 * i; in mtk_chkdata_reg()
152 static inline u32 mtk_chk_period(struct mtk_ir *ir) in mtk_chk_period() argument
160 val = DIV_ROUND_CLOSEST(clk_get_rate(ir->bus), in mtk_chk_period()
161 USEC_PER_SEC * ir->data->div / MTK_IR_SAMPLE); in mtk_chk_period()
163 dev_dbg(ir->dev, "@pwm clk = \t%lu\n", in mtk_chk_period()
164 clk_get_rate(ir->bus) / ir->data->div); in mtk_chk_period()
165 dev_dbg(ir->dev, "@chkperiod = %08x\n", val); in mtk_chk_period()
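mtk_chk_period() above computes how many ticks of the divided bus clock fit into one sampling window of MTK_IR_SAMPLE microseconds, using DIV_ROUND_CLOSEST so the period is rounded rather than truncated, and the result is what the two dev_dbg() lines report. A minimal standalone sketch of the same arithmetic follows; the bus clock rate, divider and sample width are assumed example values, not taken from the driver.

#include <stdint.h>
#include <stdio.h>

#define USEC_PER_SEC	1000000UL

/* Same rounding behaviour as the kernel's DIV_ROUND_CLOSEST for positive operands */
static uint32_t div_round_closest(uint64_t n, uint64_t d)
{
	return (n + d / 2) / d;
}

int main(void)
{
	uint64_t bus_rate = 26000000;	/* assumed 26 MHz bus clock */
	uint32_t div = 13;		/* assumed internal clock divider */
	uint32_t sample_us = 46;	/* assumed sample width in microseconds */
	uint32_t chkperiod;

	/* ticks of the divided clock per sample window */
	chkperiod = div_round_closest(bus_rate, USEC_PER_SEC * div / sample_us);

	printf("pwm clk = %llu Hz, chkperiod = 0x%08x\n",
	       (unsigned long long)(bus_rate / div), (unsigned int)chkperiod);
	return 0;
}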
170 static void mtk_w32_mask(struct mtk_ir *ir, u32 val, u32 mask, unsigned int reg) in mtk_w32_mask() argument
174 tmp = __raw_readl(ir->base + reg); in mtk_w32_mask()
176 __raw_writel(tmp, ir->base + reg); in mtk_w32_mask()
179 static void mtk_w32(struct mtk_ir *ir, u32 val, unsigned int reg) in mtk_w32() argument
181 __raw_writel(val, ir->base + reg); in mtk_w32()
184 static u32 mtk_r32(struct mtk_ir *ir, unsigned int reg) in mtk_r32() argument
186 return __raw_readl(ir->base + reg); in mtk_r32()
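The refs view only lists the lines of mtk_w32_mask() that mention ir, so the statement that merges val under mask between the __raw_readl() and __raw_writel() shown above is elided. A plausible reconstruction of the full helper is sketched below; the merge line is inferred from the helper's name and arguments rather than copied from the listing.

/*
 * Read-modify-write helper suggested by the two fragments above; the
 * line combining val and mask does not reference "ir", so it does not
 * appear in the refs listing and is inferred here.
 */
static void mtk_w32_mask(struct mtk_ir *ir, u32 val, u32 mask, unsigned int reg)
{
	u32 tmp;

	tmp = __raw_readl(ir->base + reg);	/* current register contents */
	tmp = (tmp & ~mask) | val;		/* clear the field, then set the new bits */
	__raw_writel(tmp, ir->base + reg);	/* write the merged value back */
}

mtk_irq_disable() and mtk_irq_enable() below follow the same pattern through the mtk_r32()/mtk_w32() wrappers, clearing or setting bits in MTK_IRINT_EN_REG.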
189 static inline void mtk_irq_disable(struct mtk_ir *ir, u32 mask) in mtk_irq_disable() argument
193 val = mtk_r32(ir, ir->data->regs[MTK_IRINT_EN_REG]); in mtk_irq_disable()
194 mtk_w32(ir, val & ~mask, ir->data->regs[MTK_IRINT_EN_REG]); in mtk_irq_disable()
197 static inline void mtk_irq_enable(struct mtk_ir *ir, u32 mask) in mtk_irq_enable() argument
201 val = mtk_r32(ir, ir->data->regs[MTK_IRINT_EN_REG]); in mtk_irq_enable()
202 mtk_w32(ir, val | mask, ir->data->regs[MTK_IRINT_EN_REG]); in mtk_irq_enable()
208 struct mtk_ir *ir = dev_id; in mtk_ir_irq() local
228 val = mtk_r32(ir, mtk_chkdata_reg(ir, i)); in mtk_ir_irq()
229 dev_dbg(ir->dev, "@reg%d=0x%08x\n", i, val); in mtk_ir_irq()
236 ir_raw_event_store_with_filter(ir->rc, &rawir); in mtk_ir_irq()
252 ir_raw_event_store_with_filter(ir->rc, &rawir); in mtk_ir_irq()
255 ir_raw_event_handle(ir->rc); in mtk_ir_irq()
261 mtk_w32_mask(ir, 0x1, MTK_IRCLR, ir->data->regs[MTK_IRCLR_REG]); in mtk_ir_irq()
264 mtk_w32_mask(ir, 0x1, MTK_IRINT_CLR, in mtk_ir_irq()
265 ir->data->regs[MTK_IRINT_CLR_REG]); in mtk_ir_irq()
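Taken together, the mtk_ir_irq() fragments outline the receive path: each capture word is read through mtk_chkdata_reg(ir, i), decoded into alternating pulse/space durations that are pushed into the RC core with ir_raw_event_store_with_filter(), the batch is flushed with ir_raw_event_handle(), and the controller and interrupt are then cleared through the two mtk_w32_mask() writes. A sketch of the unpacking loop is below; MTK_CHKDATA_SZ and MTK_WIDTH_MASK are driver constants not visible in this listing, and the packing of four 8-bit sample widths per 32-bit capture word is an assumption for illustration.

struct ir_raw_event rawir = {};
u32 i, j, val;
u8 wid;

for (i = 0; i < MTK_CHKDATA_SZ; i++) {
	val = mtk_r32(ir, mtk_chkdata_reg(ir, i));
	dev_dbg(ir->dev, "@reg%d=0x%08x\n", i, val);

	/* assumed layout: four 8-bit sample widths per capture word */
	for (j = 0; j < 4; j++) {
		wid = (val >> (j * 8)) & MTK_WIDTH_MASK;
		rawir.pulse = !rawir.pulse;	/* samples alternate pulse/space */
		rawir.duration = wid * (MTK_IR_SAMPLE + 1);
		ir_raw_event_store_with_filter(ir->rc, &rawir);
	}
}

ir_raw_event_handle(ir->rc);	/* hand the buffered samples to the raw IR decoders */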
297 struct mtk_ir *ir; in mtk_ir_probe() local
302 ir = devm_kzalloc(dev, sizeof(struct mtk_ir), GFP_KERNEL); in mtk_ir_probe()
303 if (!ir) in mtk_ir_probe()
306 ir->dev = dev; in mtk_ir_probe()
307 ir->data = of_device_get_match_data(dev); in mtk_ir_probe()
309 ir->clk = devm_clk_get(dev, "clk"); in mtk_ir_probe()
310 if (IS_ERR(ir->clk)) { in mtk_ir_probe()
312 return PTR_ERR(ir->clk); in mtk_ir_probe()
315 ir->bus = devm_clk_get(dev, "bus"); in mtk_ir_probe()
316 if (IS_ERR(ir->bus)) { in mtk_ir_probe()
321 ir->bus = ir->clk; in mtk_ir_probe()
324 ir->base = devm_platform_ioremap_resource(pdev, 0); in mtk_ir_probe()
325 if (IS_ERR(ir->base)) in mtk_ir_probe()
326 return PTR_ERR(ir->base); in mtk_ir_probe()
328 ir->rc = devm_rc_allocate_device(dev, RC_DRIVER_IR_RAW); in mtk_ir_probe()
329 if (!ir->rc) { in mtk_ir_probe()
334 ir->rc->priv = ir; in mtk_ir_probe()
335 ir->rc->device_name = MTK_IR_DEV; in mtk_ir_probe()
336 ir->rc->input_phys = MTK_IR_DEV "/input0"; in mtk_ir_probe()
337 ir->rc->input_id.bustype = BUS_HOST; in mtk_ir_probe()
338 ir->rc->input_id.vendor = 0x0001; in mtk_ir_probe()
339 ir->rc->input_id.product = 0x0001; in mtk_ir_probe()
340 ir->rc->input_id.version = 0x0001; in mtk_ir_probe()
342 ir->rc->map_name = map_name ?: RC_MAP_EMPTY; in mtk_ir_probe()
343 ir->rc->dev.parent = dev; in mtk_ir_probe()
344 ir->rc->driver_name = MTK_IR_DEV; in mtk_ir_probe()
345 ir->rc->allowed_protocols = RC_PROTO_BIT_ALL_IR_DECODER; in mtk_ir_probe()
346 ir->rc->rx_resolution = MTK_IR_SAMPLE; in mtk_ir_probe()
347 ir->rc->timeout = MTK_MAX_SAMPLES * (MTK_IR_SAMPLE + 1); in mtk_ir_probe()
349 ret = devm_rc_register_device(dev, ir->rc); in mtk_ir_probe()
355 platform_set_drvdata(pdev, ir); in mtk_ir_probe()
357 ir->irq = platform_get_irq(pdev, 0); in mtk_ir_probe()
358 if (ir->irq < 0) in mtk_ir_probe()
361 if (clk_prepare_enable(ir->clk)) { in mtk_ir_probe()
366 if (clk_prepare_enable(ir->bus)) { in mtk_ir_probe()
376 mtk_irq_disable(ir, MTK_IRINT_EN); in mtk_ir_probe()
378 ret = devm_request_irq(dev, ir->irq, mtk_ir_irq, 0, MTK_IR_DEV, ir); in mtk_ir_probe()
387 val = (mtk_chk_period(ir) << ir->data->fields[MTK_CHK_PERIOD].offset) & in mtk_ir_probe()
388 ir->data->fields[MTK_CHK_PERIOD].mask; in mtk_ir_probe()
389 mtk_w32_mask(ir, val, ir->data->fields[MTK_CHK_PERIOD].mask, in mtk_ir_probe()
390 ir->data->fields[MTK_CHK_PERIOD].reg); in mtk_ir_probe()
396 val = (ir->data->hw_period << ir->data->fields[MTK_HW_PERIOD].offset) & in mtk_ir_probe()
397 ir->data->fields[MTK_HW_PERIOD].mask; in mtk_ir_probe()
398 mtk_w32_mask(ir, val, ir->data->fields[MTK_HW_PERIOD].mask, in mtk_ir_probe()
399 ir->data->fields[MTK_HW_PERIOD].reg); in mtk_ir_probe()
402 mtk_w32_mask(ir, MTK_DG_CNT(1), MTK_DG_CNT_MASK, MTK_IRTHD); in mtk_ir_probe()
405 val = mtk_r32(ir, MTK_CONFIG_HIGH_REG) & ~MTK_OK_COUNT_MASK; in mtk_ir_probe()
406 val |= MTK_OK_COUNT(ir->data->ok_count) | MTK_PWM_EN | MTK_IR_EN; in mtk_ir_probe()
407 mtk_w32(ir, val, MTK_CONFIG_HIGH_REG); in mtk_ir_probe()
409 mtk_irq_enable(ir, MTK_IRINT_EN); in mtk_ir_probe()
417 clk_disable_unprepare(ir->bus); in mtk_ir_probe()
419 clk_disable_unprepare(ir->clk); in mtk_ir_probe()
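The two period writes in the probe fragments above (MTK_CHK_PERIOD and MTK_HW_PERIOD) follow one pattern: shift the value to the field's bit offset, clamp it with the field's mask, then update only those bits through mtk_w32_mask(). The generic shape is sketched below, with field_offset, field_mask and field_reg standing in for ir->data->fields[...].offset, .mask and .reg.

/* Position the value in its field and drop anything outside the mask ... */
u32 field_val = (raw_value << field_offset) & field_mask;

/* ... then touch only that field in the target register. */
mtk_w32_mask(ir, field_val, field_mask, field_reg);

The same masked-write helper also programs the MTK_DG_CNT field of MTK_IRTHD and, in the interrupt handler, clears the controller and the interrupt status.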
426 struct mtk_ir *ir = platform_get_drvdata(pdev); in mtk_ir_remove() local
433 mtk_irq_disable(ir, MTK_IRINT_EN); in mtk_ir_remove()
434 synchronize_irq(ir->irq); in mtk_ir_remove()
436 clk_disable_unprepare(ir->bus); in mtk_ir_remove()
437 clk_disable_unprepare(ir->clk); in mtk_ir_remove()