Lines matching refs: device

445 nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value)  in nv04_gr_set_ctx1()  argument
447 int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; in nv04_gr_set_ctx1()
450 tmp = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx1()
453 nvkm_wr32(device, 0x700000 + inst, tmp); in nv04_gr_set_ctx1()
455 nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp); in nv04_gr_set_ctx1()
456 nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp); in nv04_gr_set_ctx1()
460 nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx_val() argument
465 ctx1 = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx_val()
469 tmp = nvkm_rd32(device, 0x70000c + inst); in nv04_gr_set_ctx_val()
472 nvkm_wr32(device, 0x70000c + inst, tmp); in nv04_gr_set_ctx_val()
504 nv04_gr_set_ctx1(device, inst, 0x01000000, valid << 24); in nv04_gr_set_ctx_val()
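
The two groups of lines above come from nv04_gr_set_ctx1() and nv04_gr_set_ctx_val(), which patch selected bits of a graphics object's context words both in instance memory and in PGRAPH's live context registers. A plausible reconstruction of the first helper from the matched fragments, offered as a sketch only (the read-modify-write of tmp between the read and the writes is an assumption, since those lines do not reference device and are not shown):

static void
nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
{
	/* Subchannel of the trapped method, taken from TRAPPED_ADDR bits 15:13. */
	int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	/* Read-modify-write the object's first context word in instance
	 * memory (PRAMIN is visible at BAR0 offset 0x700000 on these GPUs). */
	tmp  = nvkm_rd32(device, 0x700000 + inst);
	tmp &= ~mask;   /* assumed: clear the requested bit field ... */
	tmp |= value;   /* ... and insert the new value                */
	nvkm_wr32(device, 0x700000 + inst, tmp);

	/* Mirror the change into the active context-switch register and the
	 * per-subchannel context cache so PGRAPH picks it up immediately. */
	nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp);
}

The nv04_gr_set_ctx_val() lines follow the same read-modify-write pattern on the word at 0x70000c + inst, then feed the recomputed "valid" flag back through nv04_gr_set_ctx1(device, inst, 0x01000000, valid << 24).
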
508 nv04_gr_mthd_set_operation(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_set_operation() argument
510 u8 class = nvkm_rd32(device, 0x700000) & 0x000000ff; in nv04_gr_mthd_set_operation()
516 nv04_gr_set_ctx1(device, inst, 0x00038000, data << 15); in nv04_gr_mthd_set_operation()
518 nv04_gr_set_ctx_val(device, inst, 0, 0); in nv04_gr_mthd_set_operation()
523 nv04_gr_mthd_surf3d_clip_h(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_surf3d_clip_h() argument
535 nvkm_wr32(device, 0x40053c, min); in nv04_gr_mthd_surf3d_clip_h()
536 nvkm_wr32(device, 0x400544, max); in nv04_gr_mthd_surf3d_clip_h()
541 nv04_gr_mthd_surf3d_clip_v(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_surf3d_clip_v() argument
553 nvkm_wr32(device, 0x400540, min); in nv04_gr_mthd_surf3d_clip_v()
554 nvkm_wr32(device, 0x400548, max); in nv04_gr_mthd_surf3d_clip_v()
559 nv04_gr_mthd_bind_class(struct nvkm_device *device, u32 inst) in nv04_gr_mthd_bind_class() argument
561 return nvkm_rd32(device, 0x700000 + (inst << 4)); in nv04_gr_mthd_bind_class()
565 nv04_gr_mthd_bind_surf2d(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_surf2d() argument
567 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_surf2d()
569 nv04_gr_set_ctx1(device, inst, 0x00004000, 0); in nv04_gr_mthd_bind_surf2d()
570 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0); in nv04_gr_mthd_bind_surf2d()
573 nv04_gr_set_ctx1(device, inst, 0x00004000, 0); in nv04_gr_mthd_bind_surf2d()
574 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000); in nv04_gr_mthd_bind_surf2d()
581 nv04_gr_mthd_bind_surf2d_swzsurf(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_surf2d_swzsurf() argument
583 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_surf2d_swzsurf()
585 nv04_gr_set_ctx1(device, inst, 0x00004000, 0); in nv04_gr_mthd_bind_surf2d_swzsurf()
586 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0); in nv04_gr_mthd_bind_surf2d_swzsurf()
589 nv04_gr_set_ctx1(device, inst, 0x00004000, 0); in nv04_gr_mthd_bind_surf2d_swzsurf()
590 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000); in nv04_gr_mthd_bind_surf2d_swzsurf()
593 nv04_gr_set_ctx1(device, inst, 0x00004000, 0x00004000); in nv04_gr_mthd_bind_surf2d_swzsurf()
594 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000); in nv04_gr_mthd_bind_surf2d_swzsurf()
601 nv01_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data) in nv01_gr_mthd_bind_patt() argument
603 switch (nv04_gr_mthd_bind_class(device, data)) { in nv01_gr_mthd_bind_patt()
605 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0); in nv01_gr_mthd_bind_patt()
608 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000); in nv01_gr_mthd_bind_patt()
615 nv04_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_patt() argument
617 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_patt()
619 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0); in nv04_gr_mthd_bind_patt()
622 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000); in nv04_gr_mthd_bind_patt()
629 nv04_gr_mthd_bind_rop(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_rop() argument
631 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_rop()
633 nv04_gr_set_ctx_val(device, inst, 0x10000000, 0); in nv04_gr_mthd_bind_rop()
636 nv04_gr_set_ctx_val(device, inst, 0x10000000, 0x10000000); in nv04_gr_mthd_bind_rop()
643 nv04_gr_mthd_bind_beta1(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_beta1() argument
645 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_beta1()
647 nv04_gr_set_ctx_val(device, inst, 0x20000000, 0); in nv04_gr_mthd_bind_beta1()
650 nv04_gr_set_ctx_val(device, inst, 0x20000000, 0x20000000); in nv04_gr_mthd_bind_beta1()
657 nv04_gr_mthd_bind_beta4(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_beta4() argument
659 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_beta4()
661 nv04_gr_set_ctx_val(device, inst, 0x40000000, 0); in nv04_gr_mthd_bind_beta4()
664 nv04_gr_set_ctx_val(device, inst, 0x40000000, 0x40000000); in nv04_gr_mthd_bind_beta4()
671 nv04_gr_mthd_bind_surf_dst(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_surf_dst() argument
673 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_surf_dst()
675 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0); in nv04_gr_mthd_bind_surf_dst()
678 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000); in nv04_gr_mthd_bind_surf_dst()
685 nv04_gr_mthd_bind_surf_src(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_surf_src() argument
687 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_surf_src()
689 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0); in nv04_gr_mthd_bind_surf_src()
692 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000); in nv04_gr_mthd_bind_surf_src()
699 nv04_gr_mthd_bind_surf_color(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_surf_color() argument
701 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_surf_color()
703 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0); in nv04_gr_mthd_bind_surf_color()
706 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000); in nv04_gr_mthd_bind_surf_color()
713 nv04_gr_mthd_bind_surf_zeta(struct nvkm_device *device, u32 inst, u32 data) in nv04_gr_mthd_bind_surf_zeta() argument
715 switch (nv04_gr_mthd_bind_class(device, data)) { in nv04_gr_mthd_bind_surf_zeta()
717 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0); in nv04_gr_mthd_bind_surf_zeta()
720 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000); in nv04_gr_mthd_bind_surf_zeta()
727 nv01_gr_mthd_bind_clip(struct nvkm_device *device, u32 inst, u32 data) in nv01_gr_mthd_bind_clip() argument
729 switch (nv04_gr_mthd_bind_class(device, data)) { in nv01_gr_mthd_bind_clip()
731 nv04_gr_set_ctx1(device, inst, 0x2000, 0); in nv01_gr_mthd_bind_clip()
734 nv04_gr_set_ctx1(device, inst, 0x2000, 0x2000); in nv01_gr_mthd_bind_clip()
741 nv01_gr_mthd_bind_chroma(struct nvkm_device *device, u32 inst, u32 data) in nv01_gr_mthd_bind_chroma() argument
743 switch (nv04_gr_mthd_bind_class(device, data)) { in nv01_gr_mthd_bind_chroma()
745 nv04_gr_set_ctx1(device, inst, 0x1000, 0); in nv01_gr_mthd_bind_chroma()
751 nv04_gr_set_ctx1(device, inst, 0x1000, 0x1000); in nv01_gr_mthd_bind_chroma()
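
The nv0x_gr_mthd_bind_* helpers matched above (surf2d, patt, rop, beta1, beta4, surf_dst/src/color/zeta, clip, chroma) all share one shape: nv04_gr_mthd_bind_class() reads the class of the object being bound out of instance memory, and depending on whether that class is the null object or a real one, a single flag is cleared or set with nv04_gr_set_ctx_val() (or nv04_gr_set_ctx1() for clip and chroma). A minimal sketch of the pattern, using the ROP binding as the example; the case values are assumptions, since the class numbers are not part of the matched lines:

static bool
nv04_gr_mthd_bind_rop(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30: /* assumed: null object class -> ROP not bound */
		nv04_gr_set_ctx_val(device, inst, 0x10000000, 0);
		return true;
	case 0x43: /* assumed: ROP object class -> ROP bound */
		nv04_gr_set_ctx_val(device, inst, 0x10000000, 0x10000000);
		return true;
	}
	return false;   /* unknown class: not handled in software */
}

Each helper owns a different flag bit, visible in the matched lines: 0x02000000 for 2D/destination/color surfaces, 0x04000000 for source/zeta surfaces, 0x08000000 for patterns, 0x10000000 for ROPs, 0x20000000/0x40000000 for beta1/beta4, and ctx1 bits 0x2000/0x1000 for clip and chroma. These bits feed the "valid" computation seen in nv04_gr_set_ctx_val() above.
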
758 nv03_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv03_gr_mthd_gdi() argument
770 return func(device, inst, data); in nv03_gr_mthd_gdi()
774 nv04_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_gdi() argument
787 return func(device, inst, data); in nv04_gr_mthd_gdi()
791 nv01_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv01_gr_mthd_blit() argument
806 return func(device, inst, data); in nv01_gr_mthd_blit()
810 nv04_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_blit() argument
825 return func(device, inst, data); in nv04_gr_mthd_blit()
829 nv04_gr_mthd_iifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_iifc() argument
844 return func(device, inst, data); in nv04_gr_mthd_iifc()
848 nv01_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv01_gr_mthd_ifc() argument
862 return func(device, inst, data); in nv01_gr_mthd_ifc()
866 nv04_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_ifc() argument
881 return func(device, inst, data); in nv04_gr_mthd_ifc()
885 nv03_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv03_gr_mthd_sifc() argument
898 return func(device, inst, data); in nv03_gr_mthd_sifc()
902 nv04_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_sifc() argument
916 return func(device, inst, data); in nv04_gr_mthd_sifc()
920 nv03_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv03_gr_mthd_sifm() argument
932 return func(device, inst, data); in nv03_gr_mthd_sifm()
936 nv04_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_sifm() argument
949 return func(device, inst, data); in nv04_gr_mthd_sifm()
953 nv04_gr_mthd_surf3d(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_surf3d() argument
962 return func(device, inst, data); in nv04_gr_mthd_surf3d()
966 nv03_gr_mthd_ttri(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv03_gr_mthd_ttri() argument
976 return func(device, inst, data); in nv03_gr_mthd_ttri()
980 nv01_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv01_gr_mthd_prim() argument
993 return func(device, inst, data); in nv01_gr_mthd_prim()
997 nv04_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd_prim() argument
1011 return func(device, inst, data); in nv04_gr_mthd_prim()
1015 nv04_gr_mthd(struct nvkm_device *device, u32 inst, u32 mthd, u32 data) in nv04_gr_mthd() argument
1018 switch (nvkm_rd32(device, 0x700000 + inst) & 0x000000ff) { in nv04_gr_mthd()
1039 return func(device, inst, mthd, data); in nv04_gr_mthd()
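
nv04_gr_mthd() and the per-object wrappers above (gdi, blit, iifc, ifc, sifc, sifm, surf3d, ttri, prim) form a two-level software-method dispatcher: the outer switch keys on the object's class byte read from instance memory, the inner ones key on the method offset, and both bottom out in the return func(device, inst, ...) calls matched here. A hedged sketch of the outer level; the class numbers are assumptions and only a few entries are shown:

static bool
nv04_gr_mthd(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32, u32);

	/* The object class is the low byte of its first instance word. */
	switch (nvkm_rd32(device, 0x700000 + inst) & 0x000000ff) {
	case 0x1f: func = nv01_gr_mthd_blit; break;   /* assumed class ids */
	case 0x5f: func = nv04_gr_mthd_blit; break;
	case 0x4a: func = nv04_gr_mthd_gdi;  break;
	/* ... one entry per software-emulated object class ... */
	default:
		return false;   /* class has no software methods */
	}
	return func(device, inst, mthd, data);
}
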
1046 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, align, in nv04_gr_object_bind()
1074 struct nvkm_device *device = gr->base.engine.subdev.device; in nv04_gr_channel() local
1076 if (nvkm_rd32(device, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) { in nv04_gr_channel()
1077 int chid = nvkm_rd32(device, NV04_PGRAPH_CTX_USER) >> 24; in nv04_gr_channel()
1087 struct nvkm_device *device = chan->gr->base.engine.subdev.device; in nv04_gr_load_context() local
1091 nvkm_wr32(device, nv04_gr_ctx_regs[i], chan->nv04[i]); in nv04_gr_load_context()
1093 nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10010100); in nv04_gr_load_context()
1094 nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24); in nv04_gr_load_context()
1095 nvkm_mask(device, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000); in nv04_gr_load_context()
1102 struct nvkm_device *device = chan->gr->base.engine.subdev.device; in nv04_gr_unload_context() local
1106 chan->nv04[i] = nvkm_rd32(device, nv04_gr_ctx_regs[i]); in nv04_gr_unload_context()
1108 nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10000000); in nv04_gr_unload_context()
1109 nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000); in nv04_gr_unload_context()
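
nv04_gr_load_context() and nv04_gr_unload_context() above walk the nv04_gr_ctx_regs[] table to restore or save every software-managed PGRAPH register for a channel, then fix up CTX_CONTROL and the channel id in CTX_USER. A sketch of the load side assembled from the matched lines; the loop bounds (ARRAY_SIZE) and the nv04_gr_chan layout are assumptions:

static int
nv04_gr_load_context(struct nv04_gr_chan *chan, int chid)
{
	struct nvkm_device *device = chan->gr->base.engine.subdev.device;
	int i;

	/* Restore the saved register image for this channel. */
	for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
		nvkm_wr32(device, nv04_gr_ctx_regs[i], chan->nv04[i]);

	/* Re-arm context switching and stamp the owning channel id. */
	nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nvkm_mask(device, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}

The unload path is the mirror image: the same registers are read back into chan->nv04[], CTX_CONTROL is dropped to 0x10000000, and CTX_USER is parked on the invalid channel id 0x0f.
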
1116 struct nvkm_device *device = gr->base.engine.subdev.device; in nv04_gr_context_switch() local
1129 chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f; in nv04_gr_context_switch()
1165 struct nvkm_device *device = gr->base.engine.subdev.device; in nv04_gr_chan_fini() local
1169 nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000); in nv04_gr_chan_fini()
1172 nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001); in nv04_gr_chan_fini()
1214 struct nvkm_device *device = subdev->device; in nv04_gr_idle() local
1217 if (device->card_type == NV_40) in nv04_gr_idle()
1220 if (nvkm_msec(device, 2000, in nv04_gr_idle()
1221 if (!(nvkm_rd32(device, NV04_PGRAPH_STATUS) & mask)) in nv04_gr_idle()
1225 nvkm_rd32(device, NV04_PGRAPH_STATUS)); in nv04_gr_idle()
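
nv04_gr_idle() spins on NV04_PGRAPH_STATUS with the nvkm_msec() polling macro, giving the engine two seconds to drain; on NV40-class hardware one status bit is masked out because it never settles. A sketch assembled from the fragments; the NV40 mask bit name and the exact error message are assumptions:

bool
nv04_gr_idle(struct nvkm_gr *gr)
{
	struct nvkm_subdev *subdev = &gr->engine.subdev;
	struct nvkm_device *device = subdev->device;
	u32 mask = 0xffffffff;

	if (device->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;   /* assumed bit name */

	/* nvkm_msec() re-evaluates its body until it breaks or 2000ms pass;
	 * it returns a negative value on timeout. */
	if (nvkm_msec(device, 2000,
		if (!(nvkm_rd32(device, NV04_PGRAPH_STATUS) & mask))
			break;
	) < 0) {
		nvkm_error(subdev, "idle timeout %08x\n",
			   nvkm_rd32(device, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}
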
1276 struct nvkm_device *device = subdev->device; in nv04_gr_intr() local
1277 u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR); in nv04_gr_intr()
1278 u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE); in nv04_gr_intr()
1279 u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS); in nv04_gr_intr()
1280 u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR); in nv04_gr_intr()
1284 u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA); in nv04_gr_intr()
1285 u32 class = nvkm_rd32(device, 0x400180 + subc * 4) & 0xff; in nv04_gr_intr()
1286 u32 inst = (nvkm_rd32(device, 0x40016c) & 0xffff) << 4; in nv04_gr_intr()
1297 if (!nv04_gr_mthd(device, inst, mthd, data)) in nv04_gr_intr()
1303 nvkm_wr32(device, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH); in nv04_gr_intr()
1309 nvkm_wr32(device, NV03_PGRAPH_INTR, stat); in nv04_gr_intr()
1310 nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001); in nv04_gr_intr()
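
nv04_gr_intr() reads the PGRAPH interrupt, source and status registers, decodes the trapped method from TRAPPED_ADDR/TRAPPED_DATA, and for illegal-method traps gives nv04_gr_mthd() a chance to emulate the method in software before the interrupt is acknowledged and the PFIFO pusher is re-enabled. A sketch of just that flow; the handler signature, the mthd field layout in TRAPPED_ADDR, and the elided channel lookup and error reporting are all assumptions:

static void
nv04_gr_intr(struct nvkm_subdev *subdev)   /* assumed signature */
{
	struct nvkm_device *device = subdev->device;
	u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);
	u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);
	u32 subc = (addr >> 13) & 0x07;           /* same decode as set_ctx1 */
	u32 mthd = (addr & 0x00001ffc);           /* assumed field layout */
	u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);
	u32 inst = (nvkm_rd32(device, 0x40016c) & 0xffff) << 4;

	/* NSOURCE/NSTATUS, the trapped channel id ((addr >> 24) & 0x0f), and
	 * the per-subchannel class at 0x400180 + subc * 4 feed the elided
	 * reporting path.  Illegal methods get one shot at software
	 * emulation; a handled method is not reported as an error. */
	nv04_gr_mthd(device, inst, mthd, data);

	/* Acknowledge what was processed and let PFIFO run again. */
	nvkm_wr32(device, NV03_PGRAPH_INTR, stat);
	nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001);
}
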
1331 struct nvkm_device *device = gr->base.engine.subdev.device; in nv04_gr_init() local
1334 nvkm_wr32(device, NV03_PGRAPH_INTR, 0xFFFFFFFF); in nv04_gr_init()
1335 nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); in nv04_gr_init()
1337 nvkm_wr32(device, NV04_PGRAPH_VALID1, 0); in nv04_gr_init()
1338 nvkm_wr32(device, NV04_PGRAPH_VALID2, 0); in nv04_gr_init()
1341 nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x1231c000); in nv04_gr_init()
1344 nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x72111100); in nv04_gr_init()
1347 nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f071); in nv04_gr_init()
1351 nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31); in nv04_gr_init()
1354 nvkm_wr32(device, NV04_PGRAPH_STATE , 0xFFFFFFFF); in nv04_gr_init()
1355 nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL , 0x10000100); in nv04_gr_init()
1356 nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000); in nv04_gr_init()
1359 nvkm_wr32(device, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000); in nv04_gr_init()
1360 nvkm_wr32(device, NV04_PGRAPH_BETA_AND , 0xFFFFFFFF); in nv04_gr_init()
1416 nv04_gr_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_gr **pgr) in nv04_gr_new() argument
1425 return nvkm_gr_ctor(&nv04_gr, device, type, inst, true, &gr->base); in nv04_gr_new()