Lines matching refs: device

35 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv50_gr_units()
46 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, in nv50_gr_object_bind()
73 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv50_gr_chan_bind()
77 nv50_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv50_gr_chan_bind()
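
The chan_bind hits above show how a channel's graphics context is backed: a GPU object of gr->size bytes is allocated and then filled with the generated context image. A minimal sketch of that pattern, assuming the nvkm_gpuobj_new() argument order used here (device, size, align, zero, parent, pgpuobj); the alignment value and error handling are illustrative, not quoted from the file:

/* Sketch only: allocation + fill pattern behind the nv50_gr_chan_bind() hits.
 * The 0x1000 alignment is an assumption, not quoted code. */
static int
example_chan_bind(struct nv50_gr *gr, struct nvkm_gpuobj *parent,
		  struct nvkm_gpuobj **pgpuobj)
{
	struct nvkm_device *device = gr->base.engine.subdev.device;
	int ret = nvkm_gpuobj_new(device, gr->size, 0x1000, true, parent, pgpuobj);
	if (ret == 0)
		nv50_grctx_fill(device, *pgpuobj);  /* write the default context image */
	return ret;
}
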
243 struct nvkm_device *device = subdev->device; in nv50_gr_prop_trap() local
244 u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04); in nv50_gr_prop_trap()
245 u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08); in nv50_gr_prop_trap()
246 u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c); in nv50_gr_prop_trap()
247 u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10); in nv50_gr_prop_trap()
248 u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14); in nv50_gr_prop_trap()
249 u32 e20 = nvkm_rd32(device, ustatus_addr + 0x18); in nv50_gr_prop_trap()
250 u32 e24 = nvkm_rd32(device, ustatus_addr + 0x1c); in nv50_gr_prop_trap()
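
The nv50_gr_prop_trap() hits are a run of consecutive nvkm_rd32() reads at fixed offsets from ustatus_addr, later reported together. A hedged sketch of the same access pattern, with the named locals collapsed into a loop (the nvkm_error() report and function shape are illustrative; only the offsets come from the listing):

/* Sketch: dump the PROP trap registers at ustatus_addr + 0x04 .. + 0x1c. */
static void
example_prop_trap_dump(struct nvkm_subdev *subdev, u32 ustatus_addr)
{
	struct nvkm_device *device = subdev->device;
	u32 regs[7];
	int i;

	for (i = 0; i < 7; i++)
		regs[i] = nvkm_rd32(device, ustatus_addr + 0x04 + i * 0x04);

	nvkm_error(subdev, "PROP trap: %08x %08x %08x %08x %08x %08x %08x\n",
		   regs[0], regs[1], regs[2], regs[3], regs[4], regs[5], regs[6]);
}
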
285 struct nvkm_device *device = subdev->device; in nv50_gr_mp_trap() local
286 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_mp_trap()
294 if (device->chipset < 0xa0) in nv50_gr_mp_trap()
298 mp10 = nvkm_rd32(device, addr + 0x10); in nv50_gr_mp_trap()
299 status = nvkm_rd32(device, addr + 0x14); in nv50_gr_mp_trap()
303 nvkm_rd32(device, addr + 0x20); in nv50_gr_mp_trap()
304 pc = nvkm_rd32(device, addr + 0x24); in nv50_gr_mp_trap()
305 oplow = nvkm_rd32(device, addr + 0x70); in nv50_gr_mp_trap()
306 ophigh = nvkm_rd32(device, addr + 0x74); in nv50_gr_mp_trap()
315 nvkm_wr32(device, addr + 0x10, mp10); in nv50_gr_mp_trap()
316 nvkm_wr32(device, addr + 0x14, 0); in nv50_gr_mp_trap()
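
nv50_gr_mp_trap() walks the enabled-unit mask read from 0x1540 and, for each MP, reads the status, PC and opcode registers at fixed offsets from a per-unit base, then acknowledges the trap by writing mp10 back to +0x10 and zeroing +0x14. A sketch of that read-and-acknowledge sequence for one MP, assuming addr has already been computed for the unit (the per-unit base calculation is chipset dependent and not part of these hits):

/* Sketch: read and acknowledge one MP trap.  'addr' is the per-MP register
 * base; deriving it from the unit index is chipset specific. */
static void
example_mp_trap_ack(struct nvkm_subdev *subdev, u32 addr)
{
	struct nvkm_device *device = subdev->device;
	u32 mp10   = nvkm_rd32(device, addr + 0x10);
	u32 status = nvkm_rd32(device, addr + 0x14);

	if (!status)
		return;

	nvkm_rd32(device, addr + 0x20);  /* bare read, as in the hits above */
	nvkm_error(subdev, "MP trap: status %08x pc %06x op %08x %08x\n",
		   status,
		   nvkm_rd32(device, addr + 0x24),
		   nvkm_rd32(device, addr + 0x70),
		   nvkm_rd32(device, addr + 0x74));

	nvkm_wr32(device, addr + 0x10, mp10);  /* write back to acknowledge */
	nvkm_wr32(device, addr + 0x14, 0);     /* clear status */
}
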
329 struct nvkm_device *device = subdev->device; in nv50_gr_tp_trap() local
330 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_tp_trap()
338 if (device->chipset < 0xa0) in nv50_gr_tp_trap()
342 ustatus = nvkm_rd32(device, ustatus_addr) & 0x7fffffff; in nv50_gr_tp_trap()
352 nvkm_rd32(device, r)); in nv50_gr_tp_trap()
387 nvkm_wr32(device, ustatus_addr, 0xc0000000); in nv50_gr_tp_trap()
399 struct nvkm_device *device = subdev->device; in nv50_gr_trap_handler() local
400 u32 status = nvkm_rd32(device, 0x400108); in nv50_gr_trap_handler()
413 ustatus = nvkm_rd32(device, 0x400804) & 0x7fffffff; in nv50_gr_trap_handler()
418 nvkm_wr32(device, 0x400500, 0x00000000); in nv50_gr_trap_handler()
422 u32 addr = nvkm_rd32(device, 0x400808); in nv50_gr_trap_handler()
425 u32 datal = nvkm_rd32(device, 0x40080c); in nv50_gr_trap_handler()
426 u32 datah = nvkm_rd32(device, 0x400810); in nv50_gr_trap_handler()
427 u32 class = nvkm_rd32(device, 0x400814); in nv50_gr_trap_handler()
428 u32 r848 = nvkm_rd32(device, 0x400848); in nv50_gr_trap_handler()
443 nvkm_wr32(device, 0x400808, 0); in nv50_gr_trap_handler()
444 nvkm_wr32(device, 0x4008e8, nvkm_rd32(device, 0x4008e8) & 3); in nv50_gr_trap_handler()
445 nvkm_wr32(device, 0x400848, 0); in nv50_gr_trap_handler()
450 u32 addr = nvkm_rd32(device, 0x40084c); in nv50_gr_trap_handler()
453 u32 data = nvkm_rd32(device, 0x40085c); in nv50_gr_trap_handler()
454 u32 class = nvkm_rd32(device, 0x400814); in nv50_gr_trap_handler()
468 nvkm_wr32(device, 0x40084c, 0); in nv50_gr_trap_handler()
477 nvkm_wr32(device, 0x400804, 0xc0000000); in nv50_gr_trap_handler()
478 nvkm_wr32(device, 0x400108, 0x001); in nv50_gr_trap_handler()
486 u32 ustatus = nvkm_rd32(device, 0x406800) & 0x7fffffff; in nv50_gr_trap_handler()
493 nvkm_rd32(device, 0x406804), in nv50_gr_trap_handler()
494 nvkm_rd32(device, 0x406808), in nv50_gr_trap_handler()
495 nvkm_rd32(device, 0x40680c), in nv50_gr_trap_handler()
496 nvkm_rd32(device, 0x406810)); in nv50_gr_trap_handler()
500 nvkm_wr32(device, 0x400040, 2); in nv50_gr_trap_handler()
501 nvkm_wr32(device, 0x400040, 0); in nv50_gr_trap_handler()
502 nvkm_wr32(device, 0x406800, 0xc0000000); in nv50_gr_trap_handler()
503 nvkm_wr32(device, 0x400108, 0x002); in nv50_gr_trap_handler()
509 u32 ustatus = nvkm_rd32(device, 0x400c04) & 0x7fffffff; in nv50_gr_trap_handler()
516 nvkm_rd32(device, 0x400c00), in nv50_gr_trap_handler()
517 nvkm_rd32(device, 0x400c08), in nv50_gr_trap_handler()
518 nvkm_rd32(device, 0x400c0c), in nv50_gr_trap_handler()
519 nvkm_rd32(device, 0x400c10)); in nv50_gr_trap_handler()
522 nvkm_wr32(device, 0x400c04, 0xc0000000); in nv50_gr_trap_handler()
523 nvkm_wr32(device, 0x400108, 0x004); in nv50_gr_trap_handler()
529 ustatus = nvkm_rd32(device, 0x401800) & 0x7fffffff; in nv50_gr_trap_handler()
536 nvkm_rd32(device, 0x401804), in nv50_gr_trap_handler()
537 nvkm_rd32(device, 0x401808), in nv50_gr_trap_handler()
538 nvkm_rd32(device, 0x40180c), in nv50_gr_trap_handler()
539 nvkm_rd32(device, 0x401810)); in nv50_gr_trap_handler()
543 nvkm_wr32(device, 0x400040, 0x80); in nv50_gr_trap_handler()
544 nvkm_wr32(device, 0x400040, 0); in nv50_gr_trap_handler()
545 nvkm_wr32(device, 0x401800, 0xc0000000); in nv50_gr_trap_handler()
546 nvkm_wr32(device, 0x400108, 0x008); in nv50_gr_trap_handler()
552 ustatus = nvkm_rd32(device, 0x405018) & 0x7fffffff; in nv50_gr_trap_handler()
560 nvkm_rd32(device, 0x405000), in nv50_gr_trap_handler()
561 nvkm_rd32(device, 0x405004), in nv50_gr_trap_handler()
562 nvkm_rd32(device, 0x405008), in nv50_gr_trap_handler()
563 nvkm_rd32(device, 0x40500c), in nv50_gr_trap_handler()
564 nvkm_rd32(device, 0x405010), in nv50_gr_trap_handler()
565 nvkm_rd32(device, 0x405014), in nv50_gr_trap_handler()
566 nvkm_rd32(device, 0x40501c)); in nv50_gr_trap_handler()
569 nvkm_wr32(device, 0x405018, 0xc0000000); in nv50_gr_trap_handler()
570 nvkm_wr32(device, 0x400108, 0x010); in nv50_gr_trap_handler()
578 ustatus = nvkm_rd32(device, 0x402000) & 0x7fffffff; in nv50_gr_trap_handler()
581 nvkm_wr32(device, 0x402000, 0xc0000000); in nv50_gr_trap_handler()
589 nvkm_wr32(device, 0x400108, 0x040); in nv50_gr_trap_handler()
597 nvkm_wr32(device, 0x400108, 0x080); in nv50_gr_trap_handler()
606 nvkm_wr32(device, 0x400108, 0x100); in nv50_gr_trap_handler()
613 nvkm_wr32(device, 0x400108, status); in nv50_gr_trap_handler()
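
Across nv50_gr_trap_handler() the hits repeat one pattern per trap source: read the unit's ustatus register masked with 0x7fffffff, report the contents, write 0xc0000000 back to re-arm the unit, and acknowledge the source bit in 0x400108. A condensed sketch of that pattern for a single source; ustatus_reg and source_bit stand in for the per-source values listed above:

/* Sketch: handle one PGRAPH trap source.  'ustatus_reg' / 'source_bit' are
 * placeholders for the per-source pairs above (for example 0x406800 / 0x002). */
static void
example_handle_trap_source(struct nvkm_subdev *subdev,
			   u32 ustatus_reg, u32 source_bit)
{
	struct nvkm_device *device = subdev->device;
	u32 ustatus = nvkm_rd32(device, ustatus_reg) & 0x7fffffff;

	if (ustatus)
		nvkm_error(subdev, "trap: ustatus %08x\n", ustatus);

	nvkm_wr32(device, ustatus_reg, 0xc0000000);  /* re-arm the unit */
	nvkm_wr32(device, 0x400108, source_bit);     /* ack this source */
}
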
624 struct nvkm_device *device = subdev->device; in nv50_gr_intr() local
626 u32 stat = nvkm_rd32(device, 0x400100); in nv50_gr_intr()
627 u32 inst = nvkm_rd32(device, 0x40032c) & 0x0fffffff; in nv50_gr_intr()
628 u32 addr = nvkm_rd32(device, 0x400704); in nv50_gr_intr()
631 u32 data = nvkm_rd32(device, 0x400708); in nv50_gr_intr()
632 u32 class = nvkm_rd32(device, 0x400814); in nv50_gr_intr()
647 u32 ecode = nvkm_rd32(device, 0x400110); in nv50_gr_intr()
660 nvkm_wr32(device, 0x400100, stat); in nv50_gr_intr()
661 nvkm_wr32(device, 0x400500, 0x00010001); in nv50_gr_intr()
672 if (nvkm_rd32(device, 0x400824) & (1 << 31)) in nv50_gr_intr()
673 nvkm_wr32(device, 0x400824, nvkm_rd32(device, 0x400824) & ~(1 << 31)); in nv50_gr_intr()
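
The nv50_gr_intr() hits show the top-level interrupt flow: read the pending mask from 0x400100 along with the current instance, method address, data and class, dispatch on the individual bits, then acknowledge by writing the mask back to 0x400100 and re-enabling via 0x400500; the final pair clears bit 31 of 0x400824 if it is set. A sketch of that acknowledge tail, assuming the per-bit handling has already run:

/* Sketch: acknowledge a PGRAPH interrupt, mirroring the final writes in the
 * nv50_gr_intr() hits.  The 0x00010001 value is taken from the listing. */
static void
example_gr_intr_ack(struct nvkm_device *device, u32 stat)
{
	nvkm_wr32(device, 0x400100, stat);        /* ack the handled bits */
	nvkm_wr32(device, 0x400500, 0x00010001);  /* re-enable fetching */

	if (nvkm_rd32(device, 0x400824) & (1 << 31))
		nvkm_wr32(device, 0x400824,
			  nvkm_rd32(device, 0x400824) & ~(1 << 31));
}
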
682 struct nvkm_device *device = gr->base.engine.subdev.device; in nv50_gr_init() local
686 nvkm_wr32(device, 0x40008c, 0x00000004); in nv50_gr_init()
689 nvkm_wr32(device, 0x400804, 0xc0000000); in nv50_gr_init()
690 nvkm_wr32(device, 0x406800, 0xc0000000); in nv50_gr_init()
691 nvkm_wr32(device, 0x400c04, 0xc0000000); in nv50_gr_init()
692 nvkm_wr32(device, 0x401800, 0xc0000000); in nv50_gr_init()
693 nvkm_wr32(device, 0x405018, 0xc0000000); in nv50_gr_init()
694 nvkm_wr32(device, 0x402000, 0xc0000000); in nv50_gr_init()
696 units = nvkm_rd32(device, 0x001540); in nv50_gr_init()
701 if (device->chipset < 0xa0) { in nv50_gr_init()
702 nvkm_wr32(device, 0x408900 + (i << 12), 0xc0000000); in nv50_gr_init()
703 nvkm_wr32(device, 0x408e08 + (i << 12), 0xc0000000); in nv50_gr_init()
704 nvkm_wr32(device, 0x408314 + (i << 12), 0xc0000000); in nv50_gr_init()
706 nvkm_wr32(device, 0x408600 + (i << 11), 0xc0000000); in nv50_gr_init()
707 nvkm_wr32(device, 0x408708 + (i << 11), 0xc0000000); in nv50_gr_init()
708 nvkm_wr32(device, 0x40831c + (i << 11), 0xc0000000); in nv50_gr_init()
712 nvkm_wr32(device, 0x400108, 0xffffffff); in nv50_gr_init()
713 nvkm_wr32(device, 0x400138, 0xffffffff); in nv50_gr_init()
714 nvkm_wr32(device, 0x400100, 0xffffffff); in nv50_gr_init()
715 nvkm_wr32(device, 0x40013c, 0xffffffff); in nv50_gr_init()
716 nvkm_wr32(device, 0x400500, 0x00010001); in nv50_gr_init()
719 ret = nv50_grctx_init(device, &gr->size); in nv50_gr_init()
723 nvkm_wr32(device, 0x400824, 0x00000000); in nv50_gr_init()
724 nvkm_wr32(device, 0x400828, 0x00000000); in nv50_gr_init()
725 nvkm_wr32(device, 0x40082c, 0x00000000); in nv50_gr_init()
726 nvkm_wr32(device, 0x400830, 0x00000000); in nv50_gr_init()
727 nvkm_wr32(device, 0x40032c, 0x00000000); in nv50_gr_init()
728 nvkm_wr32(device, 0x400330, 0x00000000); in nv50_gr_init()
731 switch (device->chipset & 0xf0) { in nv50_gr_init()
735 nvkm_wr32(device, 0x402ca8, 0x00000800); in nv50_gr_init()
739 if (device->chipset == 0xa0 || in nv50_gr_init()
740 device->chipset == 0xaa || in nv50_gr_init()
741 device->chipset == 0xac) { in nv50_gr_init()
742 nvkm_wr32(device, 0x402ca8, 0x00000802); in nv50_gr_init()
744 nvkm_wr32(device, 0x402cc0, 0x00000000); in nv50_gr_init()
745 nvkm_wr32(device, 0x402ca8, 0x00000002); in nv50_gr_init()
753 nvkm_wr32(device, 0x402c20 + (i * 0x10), 0x00000000); in nv50_gr_init()
754 nvkm_wr32(device, 0x402c24 + (i * 0x10), 0x00000000); in nv50_gr_init()
755 nvkm_wr32(device, 0x402c28 + (i * 0x10), 0x00000000); in nv50_gr_init()
756 nvkm_wr32(device, 0x402c2c + (i * 0x10), 0x00000000); in nv50_gr_init()
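
nv50_gr_init() follows the sequence visible above: write 0xc0000000 to the unit trap-enable registers, walk the TP mask from 0x001540 with a chipset-dependent stride (i << 12 before NVA0, i << 11 from NVA0 on), unmask interrupts and traps with 0xffffffff writes, re-enable via 0x400500, run nv50_grctx_init(), zero a block of state registers, and program 0x402ca8/0x402cc0 per chipset. A compressed sketch of the per-TP trap-enable loop; the registers and strides are the ones in the hits, while the iteration bound of 16 is an assumption:

/* Sketch: enable trap reporting per TP using the unit mask at 0x001540. */
static void
example_enable_tp_traps(struct nvkm_device *device)
{
	u32 units = nvkm_rd32(device, 0x001540);
	int i;

	for (i = 0; i < 16; i++) {
		if (!(units & (1 << i)))
			continue;
		if (device->chipset < 0xa0) {
			nvkm_wr32(device, 0x408900 + (i << 12), 0xc0000000);
			nvkm_wr32(device, 0x408e08 + (i << 12), 0xc0000000);
			nvkm_wr32(device, 0x408314 + (i << 12), 0xc0000000);
		} else {
			nvkm_wr32(device, 0x408600 + (i << 11), 0xc0000000);
			nvkm_wr32(device, 0x408708 + (i << 11), 0xc0000000);
			nvkm_wr32(device, 0x40831c + (i << 11), 0xc0000000);
		}
	}
}
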
763 nv50_gr_new_(const struct nvkm_gr_func *func, struct nvkm_device *device, in nv50_gr_new_() argument
773 return nvkm_gr_ctor(func, device, type, inst, true, &gr->base); in nv50_gr_new_()
793 nv50_gr_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_gr **pgr) in nv50_gr_new() argument
795 return nv50_gr_new_(&nv50_gr, device, type, inst, pgr); in nv50_gr_new()
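
The constructor hits show nv50_gr_new() delegating to nv50_gr_new_(), which sets up the nv50_gr object and calls nvkm_gr_ctor() with the function table, device, type and instance. A minimal sketch of that shape; the kzalloc()/-ENOMEM handling and the *pgr assignment are assumptions about the usual nvkm constructor pattern, while the nvkm_gr_ctor() argument order is the one shown above:

/* Sketch: constructor pattern behind the nv50_gr_new_() / nv50_gr_new() hits.
 * Allocation and *pgr assignment are assumed, not quoted code. */
static int
example_gr_new_(const struct nvkm_gr_func *func, struct nvkm_device *device,
		enum nvkm_subdev_type type, int inst, struct nvkm_gr **pgr)
{
	struct nv50_gr *gr;

	if (!(gr = kzalloc(sizeof(*gr), GFP_KERNEL)))
		return -ENOMEM;
	*pgr = &gr->base;

	return nvkm_gr_ctor(func, device, type, inst, true, &gr->base);
}
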