#define NV03_BOOT_0                                        0x00100000
#    define NV03_BOOT_0_RAM_AMOUNT                         0x00000003
#    define NV03_BOOT_0_RAM_AMOUNT_8MB                     0x00000000
#    define NV03_BOOT_0_RAM_AMOUNT_2MB                     0x00000001
#    define NV03_BOOT_0_RAM_AMOUNT_4MB                     0x00000002
#    define NV03_BOOT_0_RAM_AMOUNT_8MB_SDRAM               0x00000003
#    define NV04_BOOT_0_RAM_AMOUNT_32MB                    0x00000000
#    define NV04_BOOT_0_RAM_AMOUNT_4MB                     0x00000001
#    define NV04_BOOT_0_RAM_AMOUNT_8MB                     0x00000002
#    define NV04_BOOT_0_RAM_AMOUNT_16MB                    0x00000003
#define NV04_FIFO_DATA                                     0x0010020c
#    define NV10_FIFO_DATA_RAM_AMOUNT_MB_MASK              0xfff00000
#    define NV10_FIFO_DATA_RAM_AMOUNT_MB_SHIFT             20

#define NV_RAMIN                                           0x00700000

#define NV_RAMHT_HANDLE_OFFSET                             0
#define NV_RAMHT_CONTEXT_OFFSET                            4
#    define NV_RAMHT_CONTEXT_VALID                         (1<<31)
#    define NV_RAMHT_CONTEXT_CHANNEL_SHIFT                 24
#    define NV_RAMHT_CONTEXT_ENGINE_SHIFT                  16
#    define NV_RAMHT_CONTEXT_ENGINE_SOFTWARE               0
#    define NV_RAMHT_CONTEXT_ENGINE_GRAPHICS               1
#    define NV_RAMHT_CONTEXT_INSTANCE_SHIFT                0
#    define NV40_RAMHT_CONTEXT_CHANNEL_SHIFT               23
#    define NV40_RAMHT_CONTEXT_ENGINE_SHIFT                20
#    define NV40_RAMHT_CONTEXT_INSTANCE_SHIFT              0

/* DMA object defines */
#define NV_DMA_ACCESS_RW                                   0
#define NV_DMA_ACCESS_RO                                   1
#define NV_DMA_ACCESS_WO                                   2
#define NV_DMA_TARGET_VIDMEM                               0
#define NV_DMA_TARGET_PCI                                  2
#define NV_DMA_TARGET_AGP                                  3
/* The following is not a real value used by nvidia cards, it's changed by
 * nouveau_object_dma_create */
#define NV_DMA_TARGET_PCI_NONLINEAR                        8

/* Some object classes we care about in the drm */
#define NV_CLASS_DMA_FROM_MEMORY                           0x00000002
#define NV_CLASS_DMA_TO_MEMORY                             0x00000003
#define NV_CLASS_NULL                                      0x00000030
#define NV_CLASS_DMA_IN_MEMORY                             0x0000003D
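/*
 * Editorial sketch, not part of the original header: one plausible way the
 * NV_RAMHT_CONTEXT_* fields above combine into a hash-table context entry on
 * a pre-NV40 chip.  The ">> 4" scaling of the instance address is an
 * assumption made for illustration only, not something this header specifies:
 *
 *	uint32_t ctx = NV_RAMHT_CONTEXT_VALID |
 *		       (chid << NV_RAMHT_CONTEXT_CHANNEL_SHIFT) |
 *		       (NV_RAMHT_CONTEXT_ENGINE_GRAPHICS <<
 *			NV_RAMHT_CONTEXT_ENGINE_SHIFT) |
 *		       (instance >> 4);
 *
 * NV40 and later move the field positions, so the NV40_RAMHT_CONTEXT_*_SHIFT
 * values would be used there instead.
 */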
#define NV03_USER(i)                                       (0x00800000+(i*NV03_USER_SIZE))
#define NV03_USER__SIZE                                    16
#define NV10_USER__SIZE                                    32
#define NV03_USER_SIZE                                     0x00010000
#define NV03_USER_DMA_PUT(i)                               (0x00800040+(i*NV03_USER_SIZE))
#define NV03_USER_DMA_PUT__SIZE                            16
#define NV10_USER_DMA_PUT__SIZE                            32
#define NV03_USER_DMA_GET(i)                               (0x00800044+(i*NV03_USER_SIZE))
#define NV03_USER_DMA_GET__SIZE                            16
#define NV10_USER_DMA_GET__SIZE                            32
#define NV03_USER_REF_CNT(i)                               (0x00800048+(i*NV03_USER_SIZE))
#define NV03_USER_REF_CNT__SIZE                            16
#define NV10_USER_REF_CNT__SIZE                            32
#define NV40_USER(i)                                       (0x00c00000+(i*NV40_USER_SIZE))
#define NV40_USER_SIZE                                     0x00001000
#define NV40_USER_DMA_PUT(i)                               (0x00c00040+(i*NV40_USER_SIZE))
#define NV40_USER_DMA_PUT__SIZE                            32
#define NV40_USER_DMA_GET(i)                               (0x00c00044+(i*NV40_USER_SIZE))
#define NV40_USER_DMA_GET__SIZE                            32
#define NV40_USER_REF_CNT(i)                               (0x00c00048+(i*NV40_USER_SIZE))
#define NV40_USER_REF_CNT__SIZE                            32
#define NV50_USER(i)                                       (0x00c00000+(i*NV50_USER_SIZE))
#define NV50_USER_SIZE                                     0x00002000
#define NV50_USER_DMA_PUT(i)                               (0x00c00040+(i*NV50_USER_SIZE))
#define NV50_USER_DMA_PUT__SIZE                            128
#define NV50_USER_DMA_GET(i)                               (0x00c00044+(i*NV50_USER_SIZE))
#define NV50_USER_DMA_GET__SIZE                            128
/* XXX: I don't think this actually exists.. */
#define NV50_USER_REF_CNT(i)                               (0x00c00048+(i*NV50_USER_SIZE))
#define NV50_USER_REF_CNT__SIZE                            128

#define NV03_FIFO_SIZE                                     0x8000UL

#define NV03_PMC_BOOT_0                                    0x00000000
#define NV03_PMC_BOOT_1                                    0x00000004
#define NV03_PMC_INTR_0                                    0x00000100
#    define NV_PMC_INTR_0_PFIFO_PENDING                    (1<< 8)
#    define NV_PMC_INTR_0_PGRAPH_PENDING                   (1<<12)
#    define NV_PMC_INTR_0_CRTC0_PENDING                    (1<<24)
#    define NV_PMC_INTR_0_CRTC1_PENDING                    (1<<25)
#    define NV_PMC_INTR_0_CRTCn_PENDING                    (3<<24)
#define NV03_PMC_INTR_EN_0                                 0x00000140
#    define NV_PMC_INTR_EN_0_MASTER_ENABLE                 (1<< 0)
#define NV03_PMC_ENABLE                                    0x00000200
#    define NV_PMC_ENABLE_PFIFO                            (1<< 8)
#    define NV_PMC_ENABLE_PGRAPH                           (1<<12)
/* Disabling the below bit breaks newer (G7X only?) mobile chipsets,
 * the card will hang early on in the X init process.
 */
#    define NV_PMC_ENABLE_UNK13                            (1<<13)
#define NV40_PMC_1700                                      0x00001700
#define NV40_PMC_1704                                      0x00001704
#define NV40_PMC_1708                                      0x00001708
#define NV40_PMC_170C                                      0x0000170C

/* probably PMC ? */
#define NV50_PUNK_BAR0_PRAMIN                              0x00001700
#define NV50_PUNK_BAR_CFG_BASE                             0x00001704
#define NV50_PUNK_BAR_CFG_BASE_VALID                       (1<<30)
#define NV50_PUNK_BAR1_CTXDMA                              0x00001708
#define NV50_PUNK_BAR1_CTXDMA_VALID                        (1<<31)
#define NV50_PUNK_BAR3_CTXDMA                              0x0000170C
#define NV50_PUNK_BAR3_CTXDMA_VALID                        (1<<31)
#define NV50_PUNK_UNK1710                                  0x00001710

#define NV04_PTIMER_INTR_0                                 0x00009100
#define NV04_PTIMER_INTR_EN_0                              0x00009140
#define NV04_PTIMER_NUMERATOR                              0x00009200
#define NV04_PTIMER_DENOMINATOR                            0x00009210
#define NV04_PTIMER_TIME_0                                 0x00009400
#define NV04_PTIMER_TIME_1                                 0x00009410
#define NV04_PTIMER_ALARM_0                                0x00009420

#define NV04_PFB_CFG0                                      0x00100200
#define NV04_PFB_CFG1                                      0x00100204
#define NV40_PFB_020C                                      0x0010020C
#define NV10_PFB_TILE(i)                                   (0x00100240 + (i*16))
#define NV10_PFB_TILE__SIZE                                8
#define NV10_PFB_TLIMIT(i)                                 (0x00100244 + (i*16))
#define NV10_PFB_TSIZE(i)                                  (0x00100248 + (i*16))
#define NV10_PFB_TSTATUS(i)                                (0x0010024C + (i*16))
#define NV10_PFB_CLOSE_PAGE2                               0x0010033C
#define NV40_PFB_TILE(i)                                   (0x00100600 + (i*16))
#define NV40_PFB_TILE__SIZE_0                              12
#define NV40_PFB_TILE__SIZE_1                              15
#define NV40_PFB_TLIMIT(i)                                 (0x00100604 + (i*16))
#define NV40_PFB_TSIZE(i)                                  (0x00100608 + (i*16))
#define NV40_PFB_TSTATUS(i)                                (0x0010060C + (i*16))

#define NV04_PGRAPH_DEBUG_0                                0x00400080
#define NV04_PGRAPH_DEBUG_1                                0x00400084
#define NV04_PGRAPH_DEBUG_2                                0x00400088
#define NV04_PGRAPH_DEBUG_3                                0x0040008c
#define NV10_PGRAPH_DEBUG_4                                0x00400090
#define NV03_PGRAPH_INTR                                   0x00400100
#define NV03_PGRAPH_NSTATUS                                0x00400104
#    define NV04_PGRAPH_NSTATUS_STATE_IN_USE               (1<<11)
#    define NV04_PGRAPH_NSTATUS_INVALID_STATE              (1<<12)
#    define NV04_PGRAPH_NSTATUS_BAD_ARGUMENT               (1<<13)
#    define NV04_PGRAPH_NSTATUS_PROTECTION_FAULT           (1<<14)
#    define NV10_PGRAPH_NSTATUS_STATE_IN_USE               (1<<23)
#    define NV10_PGRAPH_NSTATUS_INVALID_STATE              (1<<24)
#    define NV10_PGRAPH_NSTATUS_BAD_ARGUMENT               (1<<25)
#    define NV10_PGRAPH_NSTATUS_PROTECTION_FAULT           (1<<26)
#define NV03_PGRAPH_NSOURCE                                0x00400108
#    define NV03_PGRAPH_NSOURCE_NOTIFICATION               (1<< 0)
#    define NV03_PGRAPH_NSOURCE_DATA_ERROR                 (1<< 1)
#    define NV03_PGRAPH_NSOURCE_PROTECTION_ERROR           (1<< 2)
#    define NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION            (1<< 3)
#    define NV03_PGRAPH_NSOURCE_LIMIT_COLOR                (1<< 4)
#    define NV03_PGRAPH_NSOURCE_LIMIT_ZETA                 (1<< 5)
#    define NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD               (1<< 6)
#    define NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION           (1<< 7)
#    define NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION           (1<< 8)
#    define NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION           (1<< 9)
#    define NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION            (1<<10)
#    define NV03_PGRAPH_NSOURCE_STATE_INVALID              (1<<11)
#    define NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY              (1<<12)
#    define NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE              (1<<13)
#    define NV03_PGRAPH_NSOURCE_METHOD_CNT                 (1<<14)
#    define NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION           (1<<15)
#    define NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION         (1<<16)
#    define NV03_PGRAPH_NSOURCE_DMA_WIDTH_A                (1<<17)
#    define NV03_PGRAPH_NSOURCE_DMA_WIDTH_B                (1<<18)
#define NV03_PGRAPH_INTR_EN                                0x00400140
#define NV40_PGRAPH_INTR_EN                                0x0040013C
#    define NV_PGRAPH_INTR_NOTIFY                          (1<< 0)
#    define NV_PGRAPH_INTR_MISSING_HW                      (1<< 4)
#    define NV_PGRAPH_INTR_CONTEXT_SWITCH                  (1<<12)
#    define NV_PGRAPH_INTR_BUFFER_NOTIFY                   (1<<16)
#    define NV_PGRAPH_INTR_ERROR                           (1<<20)
#define NV10_PGRAPH_CTX_CONTROL                            0x00400144
#define NV10_PGRAPH_CTX_USER                               0x00400148
#define NV10_PGRAPH_CTX_SWITCH1                            0x0040014C
#define NV10_PGRAPH_CTX_SWITCH2                            0x00400150
#define NV10_PGRAPH_CTX_SWITCH3                            0x00400154
#define NV10_PGRAPH_CTX_SWITCH4                            0x00400158
#define NV10_PGRAPH_CTX_SWITCH5                            0x0040015C
#define NV04_PGRAPH_CTX_SWITCH1                            0x00400160
#define NV10_PGRAPH_CTX_CACHE1                             0x00400160
#define NV04_PGRAPH_CTX_SWITCH2                            0x00400164
#define NV04_PGRAPH_CTX_SWITCH3                            0x00400168
#define NV04_PGRAPH_CTX_SWITCH4                            0x0040016C
#define NV04_PGRAPH_CTX_CONTROL                            0x00400170
#define NV04_PGRAPH_CTX_USER                               0x00400174
#define NV04_PGRAPH_CTX_CACHE1                             0x00400180
#define NV10_PGRAPH_CTX_CACHE2                             0x00400180
#define NV03_PGRAPH_CTX_CONTROL                            0x00400190
#define NV03_PGRAPH_CTX_USER                               0x00400194
#define NV04_PGRAPH_CTX_CACHE2                             0x004001A0
#define NV10_PGRAPH_CTX_CACHE3                             0x004001A0
#define NV04_PGRAPH_CTX_CACHE3                             0x004001C0
#define NV10_PGRAPH_CTX_CACHE4                             0x004001C0
#define NV04_PGRAPH_CTX_CACHE4                             0x004001E0
#define NV10_PGRAPH_CTX_CACHE5                             0x004001E0
#define NV40_PGRAPH_CTXCTL_0304                            0x00400304
#define NV40_PGRAPH_CTXCTL_0304_XFER_CTX                   0x00000001
#define NV40_PGRAPH_CTXCTL_UCODE_STAT                      0x00400308
#define NV40_PGRAPH_CTXCTL_UCODE_STAT_IP_MASK              0xff000000
#define NV40_PGRAPH_CTXCTL_UCODE_STAT_IP_SHIFT             24
#define NV40_PGRAPH_CTXCTL_UCODE_STAT_OP_MASK              0x00ffffff
#define NV40_PGRAPH_CTXCTL_0310                            0x00400310
#define NV40_PGRAPH_CTXCTL_0310_XFER_SAVE                  0x00000020
#define NV40_PGRAPH_CTXCTL_0310_XFER_LOAD                  0x00000040
#define NV40_PGRAPH_CTXCTL_030C                            0x0040030c
#define NV40_PGRAPH_CTXCTL_UCODE_INDEX                     0x00400324
#define NV40_PGRAPH_CTXCTL_UCODE_DATA                      0x00400328
#define NV40_PGRAPH_CTXCTL_CUR                             0x0040032c
#define NV40_PGRAPH_CTXCTL_CUR_LOADED                      0x01000000
#define NV40_PGRAPH_CTXCTL_CUR_INST_MASK                   0x000FFFFF
#define NV03_PGRAPH_ABS_X_RAM                              0x00400400
#define NV03_PGRAPH_ABS_Y_RAM                              0x00400480
#define NV03_PGRAPH_X_MISC                                 0x00400500
#define NV03_PGRAPH_Y_MISC                                 0x00400504
#define NV04_PGRAPH_VALID1                                 0x00400508
#define NV04_PGRAPH_SOURCE_COLOR                           0x0040050C
#define NV04_PGRAPH_MISC24_0                               0x00400510
#define NV03_PGRAPH_XY_LOGIC_MISC0                         0x00400514
#define NV03_PGRAPH_XY_LOGIC_MISC1                         0x00400518
#define NV03_PGRAPH_XY_LOGIC_MISC2                         0x0040051C
#define NV03_PGRAPH_XY_LOGIC_MISC3                         0x00400520
#define NV03_PGRAPH_CLIPX_0                                0x00400524
#define NV03_PGRAPH_CLIPX_1                                0x00400528
#define NV03_PGRAPH_CLIPY_0                                0x0040052C
#define NV03_PGRAPH_CLIPY_1                                0x00400530
#define NV03_PGRAPH_ABS_ICLIP_XMAX                         0x00400534
#define NV03_PGRAPH_ABS_ICLIP_YMAX                         0x00400538
#define NV03_PGRAPH_ABS_UCLIP_XMIN                         0x0040053C
#define NV03_PGRAPH_ABS_UCLIP_YMIN                         0x00400540
#define NV03_PGRAPH_ABS_UCLIP_XMAX                         0x00400544
#define NV03_PGRAPH_ABS_UCLIP_YMAX                         0x00400548
#define NV03_PGRAPH_ABS_UCLIPA_XMIN                        0x00400560
#define NV03_PGRAPH_ABS_UCLIPA_YMIN                        0x00400564
#define NV03_PGRAPH_ABS_UCLIPA_XMAX                        0x00400568
#define NV03_PGRAPH_ABS_UCLIPA_YMAX                        0x0040056C
#define NV04_PGRAPH_MISC24_1                               0x00400570
#define NV04_PGRAPH_MISC24_2                               0x00400574
#define NV04_PGRAPH_VALID2                                 0x00400578
#define NV04_PGRAPH_PASSTHRU_0                             0x0040057C
#define NV04_PGRAPH_PASSTHRU_1                             0x00400580
#define NV04_PGRAPH_PASSTHRU_2                             0x00400584
#define NV10_PGRAPH_DIMX_TEXTURE                           0x00400588
#define NV10_PGRAPH_WDIMX_TEXTURE                          0x0040058C
#define NV04_PGRAPH_COMBINE_0_ALPHA                        0x00400590
#define NV04_PGRAPH_COMBINE_0_COLOR                        0x00400594
#define NV04_PGRAPH_COMBINE_1_ALPHA                        0x00400598
#define NV04_PGRAPH_COMBINE_1_COLOR                        0x0040059C
#define NV04_PGRAPH_FORMAT_0                               0x004005A8
#define NV04_PGRAPH_FORMAT_1                               0x004005AC
#define NV04_PGRAPH_FILTER_0                               0x004005B0
#define NV04_PGRAPH_FILTER_1                               0x004005B4
#define NV03_PGRAPH_MONO_COLOR0                            0x00400600
#define NV04_PGRAPH_ROP3                                   0x00400604
#define NV04_PGRAPH_BETA_AND                               0x00400608
#define NV04_PGRAPH_BETA_PREMULT                           0x0040060C
#define NV04_PGRAPH_LIMIT_VIOL_PIX                         0x00400610
#define NV04_PGRAPH_FORMATS                                0x00400618
#define NV10_PGRAPH_DEBUG_2                                0x00400620
#define NV04_PGRAPH_BOFFSET0                               0x00400640
#define NV04_PGRAPH_BOFFSET1                               0x00400644
#define NV04_PGRAPH_BOFFSET2                               0x00400648
#define NV04_PGRAPH_BOFFSET3                               0x0040064C
#define NV04_PGRAPH_BOFFSET4                               0x00400650
#define NV04_PGRAPH_BOFFSET5                               0x00400654
#define NV04_PGRAPH_BBASE0                                 0x00400658
#define NV04_PGRAPH_BBASE1                                 0x0040065C
#define NV04_PGRAPH_BBASE2                                 0x00400660
#define NV04_PGRAPH_BBASE3                                 0x00400664
#define NV04_PGRAPH_BBASE4                                 0x00400668
#define NV04_PGRAPH_BBASE5                                 0x0040066C
#define NV04_PGRAPH_BPITCH0                                0x00400670
#define NV04_PGRAPH_BPITCH1                                0x00400674
#define NV04_PGRAPH_BPITCH2                                0x00400678
#define NV04_PGRAPH_BPITCH3                                0x0040067C
#define NV04_PGRAPH_BPITCH4                                0x00400680
#define NV04_PGRAPH_BLIMIT0                                0x00400684
#define NV04_PGRAPH_BLIMIT1                                0x00400688
#define NV04_PGRAPH_BLIMIT2                                0x0040068C
#define NV04_PGRAPH_BLIMIT3                                0x00400690
#define NV04_PGRAPH_BLIMIT4                                0x00400694
#define NV04_PGRAPH_BLIMIT5                                0x00400698
#define NV04_PGRAPH_BSWIZZLE2                              0x0040069C
#define NV04_PGRAPH_BSWIZZLE5                              0x004006A0
#define NV03_PGRAPH_STATUS                                 0x004006B0
#define NV04_PGRAPH_STATUS                                 0x00400700
#define NV04_PGRAPH_TRAPPED_ADDR                           0x00400704
#define NV04_PGRAPH_TRAPPED_DATA                           0x00400708
#define NV04_PGRAPH_SURFACE                                0x0040070C
#define NV10_PGRAPH_TRAPPED_DATA_HIGH                      0x0040070C
#define NV04_PGRAPH_STATE                                  0x00400710
#define NV10_PGRAPH_SURFACE                                0x00400710
#define NV04_PGRAPH_NOTIFY                                 0x00400714
#define NV10_PGRAPH_STATE                                  0x00400714
#define NV10_PGRAPH_NOTIFY                                 0x00400718
#define NV04_PGRAPH_FIFO                                   0x00400720
#define NV04_PGRAPH_BPIXEL                                 0x00400724
#define NV10_PGRAPH_RDI_INDEX                              0x00400750
#define NV04_PGRAPH_FFINTFC_ST2                            0x00400754
#define NV10_PGRAPH_RDI_DATA                               0x00400754
#define NV04_PGRAPH_DMA_PITCH                              0x00400760
#define NV10_PGRAPH_FFINTFC_ST2                            0x00400764
#define NV04_PGRAPH_DVD_COLORFMT                           0x00400764
#define NV04_PGRAPH_SCALED_FORMAT                          0x00400768
#define NV10_PGRAPH_DMA_PITCH                              0x00400770
#define NV10_PGRAPH_DVD_COLORFMT                           0x00400774
#define NV10_PGRAPH_SCALED_FORMAT                          0x00400778
#define NV20_PGRAPH_CHANNEL_CTX_TABLE                      0x00400780
#define NV20_PGRAPH_CHANNEL_CTX_POINTER                    0x00400784
#define NV20_PGRAPH_CHANNEL_CTX_XFER                       0x00400788
#define NV20_PGRAPH_CHANNEL_CTX_XFER_LOAD                  0x00000001
#define NV20_PGRAPH_CHANNEL_CTX_XFER_SAVE                  0x00000002
#define NV04_PGRAPH_PATT_COLOR0                            0x00400800
#define NV04_PGRAPH_PATT_COLOR1                            0x00400804
#define NV04_PGRAPH_PATTERN                                0x00400808
#define NV04_PGRAPH_PATTERN_SHAPE                          0x00400810
#define NV04_PGRAPH_CHROMA                                 0x00400814
#define NV04_PGRAPH_CONTROL0                               0x00400818
#define NV04_PGRAPH_CONTROL1                               0x0040081C
#define NV04_PGRAPH_CONTROL2                               0x00400820
#define NV04_PGRAPH_BLEND                                  0x00400824
#define NV04_PGRAPH_STORED_FMT                             0x00400830
#define NV04_PGRAPH_PATT_COLORRAM                          0x00400900
#define NV40_PGRAPH_TILE0(i)                               0x00400900
#define NV40_PGRAPH_TLIMIT0(i)                             0x00400904
#define NV40_PGRAPH_TSIZE0(i)                              0x00400908
#define NV40_PGRAPH_TSTATUS0(i)                            0x0040090C
#define NV10_PGRAPH_TILE(i)                                (0x00400B00 + (i*16))
#define NV10_PGRAPH_TLIMIT(i)                              (0x00400B04 + (i*16))
#define NV10_PGRAPH_TSIZE(i)                               (0x00400B08 + (i*16))
#define NV10_PGRAPH_TSTATUS(i)                             (0x00400B0C + (i*16))
#define NV04_PGRAPH_U_RAM                                  0x00400D00
#define NV47_PGRAPH_TILE0(i)                               0x00400D00
#define NV47_PGRAPH_TLIMIT0(i)                             0x00400D04
#define NV47_PGRAPH_TSIZE0(i)                              0x00400D08
#define NV47_PGRAPH_TSTATUS0(i)                            0x00400D0C
#define NV04_PGRAPH_V_RAM                                  0x00400D40
#define NV04_PGRAPH_W_RAM                                  0x00400D80
#define NV10_PGRAPH_COMBINER0_IN_ALPHA                     0x00400E40
#define NV10_PGRAPH_COMBINER1_IN_ALPHA                     0x00400E44
#define NV10_PGRAPH_COMBINER0_IN_RGB                       0x00400E48
#define NV10_PGRAPH_COMBINER1_IN_RGB                       0x00400E4C
#define NV10_PGRAPH_COMBINER_COLOR0                        0x00400E50
#define NV10_PGRAPH_COMBINER_COLOR1                        0x00400E54
#define NV10_PGRAPH_COMBINER0_OUT_ALPHA                    0x00400E58
#define NV10_PGRAPH_COMBINER1_OUT_ALPHA                    0x00400E5C
#define NV10_PGRAPH_COMBINER0_OUT_RGB                      0x00400E60
#define NV10_PGRAPH_COMBINER1_OUT_RGB                      0x00400E64
#define NV10_PGRAPH_COMBINER_FINAL0                        0x00400E68
#define NV10_PGRAPH_COMBINER_FINAL1                        0x00400E6C
#define NV10_PGRAPH_WINDOWCLIP_HORIZONTAL                  0x00400F00
#define NV10_PGRAPH_WINDOWCLIP_VERTICAL                    0x00400F20
#define NV10_PGRAPH_XFMODE0                                0x00400F40
#define NV10_PGRAPH_XFMODE1                                0x00400F44
#define NV10_PGRAPH_GLOBALSTATE0                           0x00400F48
#define NV10_PGRAPH_GLOBALSTATE1                           0x00400F4C
#define NV10_PGRAPH_PIPE_ADDRESS                           0x00400F50
#define NV10_PGRAPH_PIPE_DATA                              0x00400F54
#define NV04_PGRAPH_DMA_START_0                            0x00401000
#define NV04_PGRAPH_DMA_START_1                            0x00401004
#define NV04_PGRAPH_DMA_LENGTH                             0x00401008
#define NV04_PGRAPH_DMA_MISC                               0x0040100C
#define NV04_PGRAPH_DMA_DATA_0                             0x00401020
#define NV04_PGRAPH_DMA_DATA_1                             0x00401024
#define NV04_PGRAPH_DMA_RM                                 0x00401030
#define NV04_PGRAPH_DMA_A_XLATE_INST                       0x00401040
#define NV04_PGRAPH_DMA_A_CONTROL                          0x00401044
#define NV04_PGRAPH_DMA_A_LIMIT                            0x00401048
#define NV04_PGRAPH_DMA_A_TLB_PTE                          0x0040104C
#define NV04_PGRAPH_DMA_A_TLB_TAG                          0x00401050
#define NV04_PGRAPH_DMA_A_ADJ_OFFSET                       0x00401054
#define NV04_PGRAPH_DMA_A_OFFSET                           0x00401058
#define NV04_PGRAPH_DMA_A_SIZE                             0x0040105C
#define NV04_PGRAPH_DMA_A_Y_SIZE                           0x00401060
#define NV04_PGRAPH_DMA_B_XLATE_INST                       0x00401080
#define NV04_PGRAPH_DMA_B_CONTROL                          0x00401084
#define NV04_PGRAPH_DMA_B_LIMIT                            0x00401088
#define NV04_PGRAPH_DMA_B_TLB_PTE                          0x0040108C
#define NV04_PGRAPH_DMA_B_TLB_TAG                          0x00401090
#define NV04_PGRAPH_DMA_B_ADJ_OFFSET                       0x00401094
#define NV04_PGRAPH_DMA_B_OFFSET                           0x00401098
#define NV04_PGRAPH_DMA_B_SIZE                             0x0040109C
#define NV04_PGRAPH_DMA_B_Y_SIZE                           0x004010A0
#define NV40_PGRAPH_TILE1(i)                               0x00406900
#define NV40_PGRAPH_TLIMIT1(i)                             0x00406904
#define NV40_PGRAPH_TSIZE1(i)                              0x00406908
#define NV40_PGRAPH_TSTATUS1(i)                            0x0040690C
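/*
 * Editorial sketch, not part of the original header: RDI_INDEX/RDI_DATA and
 * PIPE_ADDRESS/PIPE_DATA above are index/data register pairs, so access is
 * typically "write the index, then read or write the data port".  NV_READ and
 * NV_WRITE are assumed here to stand for the driver's MMIO accessors:
 *
 *	NV_WRITE(NV10_PGRAPH_RDI_INDEX, index);
 *	value = NV_READ(NV10_PGRAPH_RDI_DATA);
 */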
/* It's a guess that this works on NV03. Confirmed on NV04, though */
#define NV04_PFIFO_DELAY_0                                 0x00002040
#define NV04_PFIFO_DMA_TIMESLICE                           0x00002044
#define NV04_PFIFO_NEXT_CHANNEL                            0x00002050
#define NV03_PFIFO_INTR_0                                  0x00002100
#define NV03_PFIFO_INTR_EN_0                               0x00002140
#    define NV_PFIFO_INTR_CACHE_ERROR                      (1<< 0)
#    define NV_PFIFO_INTR_RUNOUT                           (1<< 4)
#    define NV_PFIFO_INTR_RUNOUT_OVERFLOW                  (1<< 8)
#    define NV_PFIFO_INTR_DMA_PUSHER                       (1<<12)
#    define NV_PFIFO_INTR_DMA_PT                           (1<<16)
#    define NV_PFIFO_INTR_SEMAPHORE                        (1<<20)
#    define NV_PFIFO_INTR_ACQUIRE_TIMEOUT                  (1<<24)
#define NV03_PFIFO_RAMHT                                   0x00002210
#define NV03_PFIFO_RAMFC                                   0x00002214
#define NV03_PFIFO_RAMRO                                   0x00002218
#define NV40_PFIFO_RAMFC                                   0x00002220
#define NV03_PFIFO_CACHES                                  0x00002500
#define NV04_PFIFO_MODE                                    0x00002504
#define NV04_PFIFO_DMA                                     0x00002508
#define NV04_PFIFO_SIZE                                    0x0000250c
#define NV50_PFIFO_CTX_TABLE(c)                            (0x2600+(c)*4)
#define NV50_PFIFO_CTX_TABLE__SIZE                         128
#define NV50_PFIFO_CTX_TABLE_CHANNEL_ENABLED               (1<<31)
#define NV50_PFIFO_CTX_TABLE_UNK30_BAD                     (1<<30)
#define NV50_PFIFO_CTX_TABLE_INSTANCE_MASK_G80             0x0FFFFFFF
#define NV50_PFIFO_CTX_TABLE_INSTANCE_MASK_G84             0x00FFFFFF
#define NV03_PFIFO_CACHE0_PUSH0                            0x00003000
#define NV03_PFIFO_CACHE0_PULL0                            0x00003040
#define NV04_PFIFO_CACHE0_PULL0                            0x00003050
#define NV04_PFIFO_CACHE0_PULL1                            0x00003054
#define NV03_PFIFO_CACHE1_PUSH0                            0x00003200
#define NV03_PFIFO_CACHE1_PUSH1                            0x00003204
#define NV03_PFIFO_CACHE1_PUSH1_DMA                        (1<<8)
#define NV40_PFIFO_CACHE1_PUSH1_DMA                        (1<<16)
#define NV03_PFIFO_CACHE1_PUSH1_CHID_MASK                  0x0000000f
#define NV10_PFIFO_CACHE1_PUSH1_CHID_MASK                  0x0000001f
#define NV50_PFIFO_CACHE1_PUSH1_CHID_MASK                  0x0000007f
#define NV03_PFIFO_CACHE1_PUT                              0x00003210
#define NV04_PFIFO_CACHE1_DMA_PUSH                         0x00003220
#define NV04_PFIFO_CACHE1_DMA_FETCH                        0x00003224
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_8_BYTES         0x00000000
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_16_BYTES        0x00000008
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_24_BYTES        0x00000010
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_32_BYTES        0x00000018
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_40_BYTES        0x00000020
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_48_BYTES        0x00000028
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_56_BYTES        0x00000030
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_64_BYTES        0x00000038
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_72_BYTES        0x00000040
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_80_BYTES        0x00000048
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_88_BYTES        0x00000050
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_96_BYTES        0x00000058
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_104_BYTES       0x00000060
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_112_BYTES       0x00000068
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_120_BYTES       0x00000070
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_128_BYTES       0x00000078
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_136_BYTES       0x00000080
#    define NV_PFIFO_CACHE1_DMA_FETCH_TRIG_144_BYTES       0x00000088
#    define NV_PFIFO

/*
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Tungsten Graphics,
 *    Erdi Chen,
 *    Thomas Hellstrom.
 */

#include "drmP.h"
#include "drm.h"
#include "via_drm.h"
#include "via_drv.h"
#include "via_3d_reg.h"

#define CMDBUF_ALIGNMENT_SIZE   (0x100)
#define CMDBUF_ALIGNMENT_MASK   (0x0ff)

/* defines for VIA 3D registers */
#define VIA_REG_STATUS          0x400
#define VIA_REG_TRANSET         0x43C
#define VIA_REG_TRANSPACE       0x440

/* VIA_REG_STATUS(0x400): Engine Status */
#define VIA_CMD_RGTR_BUSY       0x00000080	/* Command Regulator is busy */
#define VIA_2D_ENG_BUSY         0x00000001	/* 2D Engine is busy */
#define VIA_3D_ENG_BUSY         0x00000002	/* 3D Engine is busy */
#define VIA_VR_QUEUE_BUSY       0x00020000	/* Virtual Queue is busy */

#define SetReg2DAGP(nReg, nData) {				\
	*((uint32_t *)(vb)) = ((nReg) >> 2) | HALCYON_HEADER1;	\
	*((uint32_t *)(vb) + 1) = (nData);			\
	vb = ((uint32_t *)vb) + 2;				\
	dev_priv->dma_low += 8;					\
}

#define via_flush_write_combine() DRM_MEMORYBARRIER()

#define VIA_OUT_RING_QW(w1, w2)		\
	*vb++ = (w1);			\
	*vb++ = (w2);			\
	dev_priv->dma_low += 8;

static void via_cmdbuf_start(drm_via_private_t *dev_priv);
static void via_cmdbuf_pause(drm_via_private_t *dev_priv);
static void via_cmdbuf_reset(drm_via_private_t *dev_priv);
static void via_cmdbuf_rewind(drm_via_private_t *dev_priv);
static int via_wait_idle(drm_via_private_t *dev_priv);
static void via_pad_cache(drm_via_private_t *dev_priv, int qwords);

/*
 * Free space in command buffer.
 */
static uint32_t via_cmdbuf_space(drm_via_private_t *dev_priv)
{
	uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
	uint32_t hw_addr = *(dev_priv->hw_addr_ptr) - agp_base;

	return ((hw_addr <= dev_priv->dma_low) ?
		(dev_priv->dma_high + hw_addr - dev_priv->dma_low) :
		(hw_addr - dev_priv->dma_low));
}

/*
 * How much does the command regulator lag behind?
 */
static uint32_t via_cmdbuf_lag(drm_via_private_t *dev_priv)
{
	uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
	uint32_t hw_addr = *(dev_priv->hw_addr_ptr) - agp_base;

	return ((hw_addr <= dev_priv->dma_low) ?
		(dev_priv->dma_low - hw_addr) :
		(dev_priv->dma_wrap + dev_priv->dma_low - hw_addr));
}

/*
 * Check that the given size fits in the buffer, otherwise wait.
 */
static inline int via_cmdbuf_wait(drm_via_private_t *dev_priv,
				  unsigned int size)
{
	uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
	uint32_t cur_addr, hw_addr, next_addr;
	volatile uint32_t *hw_addr_ptr;
	uint32_t count;

	hw_addr_ptr = dev_priv->hw_addr_ptr;
	cur_addr = dev_priv->dma_low;
	next_addr = cur_addr + size + 512 * 1024;
	count = 1000000;
	do {
		hw_addr = *hw_addr_ptr - agp_base;
		if (count-- == 0) {
			DRM_ERROR("via_cmdbuf_wait timed out hw %x cur_addr %x next_addr %x\n",
				  hw_addr, cur_addr, next_addr);
			return -1;
		}
	} while ((cur_addr < hw_addr) && (next_addr >= hw_addr));
	return 0;
}
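/*
 * Worked example (editorial, not in the original source): with dma_high ==
 * 0x100000, dma_low == 0xF000 and the regulator's hardware read pointer at
 * 0x8000, via_cmdbuf_space() above returns 0x100000 + 0x8000 - 0xF000 =
 * 0xF9000 bytes of free space, while via_cmdbuf_lag() returns 0xF000 -
 * 0x8000 = 0x7000 bytes that have been queued but not yet consumed.
 */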
/*
 * Checks whether the buffer head has reached the end. Rewinds the ring
 * buffer when necessary.
 *
 * Returns virtual pointer to ring buffer.
 */
static inline uint32_t *via_check_dma(drm_via_private_t *dev_priv,
				      unsigned int size)
{
	if ((dev_priv->dma_low + size + 4 * CMDBUF_ALIGNMENT_SIZE) >
	    dev_priv->dma_high) {
		via_cmdbuf_rewind(dev_priv);
	}
	if (via_cmdbuf_wait(dev_priv, size) != 0) {
		return NULL;
	}

	return (uint32_t *) (dev_priv->dma_ptr + dev_priv->dma_low);
}

int via_dma_cleanup(drm_device_t *dev)
{
	if (dev->dev_private) {
		drm_via_private_t *dev_priv =
			(drm_via_private_t *) dev->dev_private;

		if (dev_priv->ring.virtual_start) {
			via_cmdbuf_reset(dev_priv);

			drm_core_ioremapfree(&dev_priv->ring.map, dev);
			dev_priv->ring.virtual_start = NULL;
		}
	}

	return 0;
}

static int via_initialize(drm_device_t *dev,
			  drm_via_private_t *dev_priv,
			  drm_via_dma_init_t *init)
{
	if (!dev_priv || !dev_priv->mmio) {
		DRM_ERROR("via_dma_init called before via_map_init\n");
		return DRM_ERR(EFAULT);
	}

	if (dev_priv->ring.virtual_start != NULL) {
		DRM_ERROR("%s called again without calling cleanup\n",
			  __FUNCTION__);
		return DRM_ERR(EFAULT);
	}

	if (!dev->agp || !dev->agp->base) {
		DRM_ERROR("%s called with no agp memory available\n",
			  __FUNCTION__);
		return DRM_ERR(EFAULT);
	}

	dev_priv->ring.map.offset = dev->agp->base + init->offset;
	dev_priv->ring.map.size = init->size;
	dev_priv->ring.map.type = 0;
	dev_priv->ring.map.flags = 0;
	dev_priv->ring.map.mtrr = 0;

	drm_core_ioremap(&dev_priv->ring.map, dev);

	if (dev_priv->ring.map.handle == NULL) {
		via_dma_cleanup(dev);
		DRM_ERROR("can not ioremap virtual address for"
			  " ring buffer\n");
		return DRM_ERR(ENOMEM);
	}

	dev_priv->ring.virtual_start = dev_priv->ring.map.handle;

	dev_priv->dma_ptr = dev_priv->ring.virtual_start;
	dev_priv->dma_low = 0;
	dev_priv->dma_high = init->size;
	dev_priv->dma_wrap = init->size;
	dev_priv->dma_offset = init->offset;
	dev_priv->last_pause_ptr = NULL;
	dev_priv->hw_addr_ptr =
		(volatile uint32_t *)((char *)dev_priv->mmio->handle +
				      init->reg_pause_addr);

	via_cmdbuf_start(dev_priv);

	return 0;
}

static int via_dma_init(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_via_private_t *dev_priv = (drm_via_private_t *) dev->dev_private;
	drm_via_dma_init_t init;
	int retcode = 0;

	DRM_COPY_FROM_USER_IOCTL(init, (drm_via_dma_init_t __user *) data,
				 sizeof(init));

	switch (init.func) {
	case VIA_INIT_DMA:
		if (!DRM_SUSER(DRM_CURPROC))
			retcode = DRM_ERR(EPERM);
		else
			retcode = via_initialize(dev, dev_priv, &init);
		break;
	case VIA_CLEANUP_DMA:
		if (!DRM_SUSER(DRM_CURPROC))
			retcode = DRM_ERR(EPERM);
		else
			retcode = via_dma_cleanup(dev);
		break;
	case VIA_DMA_INITIALIZED:
		retcode = (dev_priv->ring.virtual_start != NULL) ?
			0 : DRM_ERR(EFAULT);
		break;
	default:
		retcode = DRM_ERR(EINVAL);
		break;
	}

	return retcode;
}

static int via_dispatch_cmdbuffer(drm_device_t *dev, drm_via_cmdbuffer_t *cmd)
{
	drm_via_private_t *dev_priv;
	uint32_t *vb;
	int ret;

	dev_priv = (drm_via_private_t *) dev->dev_private;

	if (dev_priv->ring.virtual_start == NULL) {
		DRM_ERROR("%s called without initializing AGP ring buffer.\n",
			  __FUNCTION__);
		return DRM_ERR(EFAULT);
	}

	if (cmd->size > VIA_PCI_BUF_SIZE) {
		return DRM_ERR(ENOMEM);
	}

	if (DRM_COPY_FROM_USER(dev_priv->pci_buf, cmd->buf, cmd->size))
		return DRM_ERR(EFAULT);

	/*
	 * Running this function on AGP memory is dead slow. Therefore
	 * we run it on a temporary cacheable system memory buffer and
	 * copy it to AGP memory when ready.
	 */
	if ((ret = via_verify_command_stream((uint32_t *)dev_priv->pci_buf,
					     cmd->size, dev, 1))) {
		return ret;
	}
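	/*
	 * Editorial note: submissions smaller than 0x100 bytes are padded out
	 * by via_pad_cache() further down, so slightly more than 0x100 bytes
	 * of ring space is reserved for them here.
	 */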
	vb = via_check_dma(dev_priv, (cmd->size < 0x100) ? 0x102 : cmd->size);
	if (vb == NULL) {
		return DRM_ERR(EAGAIN);
	}

	memcpy(vb, dev_priv->pci_buf, cmd->size);

	dev_priv->dma_low += cmd->size;

	/*
	 * Small submissions somehow stall the CPU. (AGP cache effects?)
	 * Pad to a greater size.
	 */
	if (cmd->size < 0x100)
		via_pad_cache(dev_priv, (0x100 - cmd->size) >> 3);
	via_cmdbuf_pause(dev_priv);

	return 0;
}

int via_driver_dma_quiescent(drm_device_t *dev)
{
	drm_via_private_t *dev_priv = dev->dev_private;

	if (!via_wait_idle(dev_priv)) {
		return DRM_ERR(EBUSY);
	}
	return 0;
}

static int via_flush_ioctl(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;

	LOCK_TEST_WITH_RETURN(dev, filp);

	return via_driver_dma_quiescent(dev);
}

static int via_cmdbuffer(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_via_cmdbuffer_t cmdbuf;
	int ret;

	LOCK_TEST_WITH_RETURN(dev, filp);

	DRM_COPY_FROM_USER_IOCTL(cmdbuf, (drm_via_cmdbuffer_t __user *) data,
				 sizeof(cmdbuf));

	DRM_DEBUG("via cmdbuffer, buf %p size %lu\n", cmdbuf.buf, cmdbuf.size);

	ret = via_dispatch_cmdbuffer(dev, &cmdbuf);
	if (ret) {
		return ret;
	}

	return 0;
}

static int via_dispatch_pci_cmdbuffer(drm_device_t *dev,
				      drm_via_cmdbuffer_t *cmd)
{
	drm_via_private_t *dev_priv = dev->dev_private;
	int ret;

	if (cmd->size > VIA_PCI_BUF_SIZE) {
		return DRM_ERR(ENOMEM);
	}
	if (DRM_COPY_FROM_USER(dev_priv->pci_buf, cmd->buf, cmd->size))
		return DRM_ERR(EFAULT);

	if ((ret = via_verify_command_stream((uint32_t *)dev_priv->pci_buf,
					     cmd->size, dev, 0))) {
		return ret;
	}

	ret = via_parse_command_stream(dev, (const uint32_t *)dev_priv->pci_buf,
				       cmd->size);
	return ret;
}

static int via_pci_cmdbuffer(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_via_cmdbuffer_t cmdbuf;
	int ret;

	LOCK_TEST_WITH_RETURN(dev, filp);

	DRM_COPY_FROM_USER_IOCTL(cmdbuf, (drm_via_cmdbuffer_t __user *) data,
				 sizeof(cmdbuf));

	DRM_DEBUG("via_pci_cmdbuffer, buf %p size %lu\n", cmdbuf.buf,
		  cmdbuf.size);

	ret = via_dispatch_pci_cmdbuffer(dev, &cmdbuf);
	if (ret) {
		return ret;
	}

	return 0;
}

static inline uint32_t *via_align_buffer(drm_via_private_t *dev_priv,
					 uint32_t *vb, int qw_count)
{
	for (; qw_count > 0; --qw_count) {
		VIA_OUT_RING_QW(HC_DUMMY, HC_DUMMY);
	}
	return vb;
}

/*
 * This function is used internally by ring buffer management code.
 *
 * Returns virtual pointer to ring buffer.
 */
static inline uint32_t *via_get_dma(drm_via_private_t *dev_priv)
{
	return (uint32_t *) (dev_priv->dma_ptr + dev_priv->dma_low);
}

/*
 * Hooks a segment of data into the tail of the ring-buffer by
 * modifying the pause address stored in the buffer itself. If
 * the regulator has already paused, restart it.
 */
static int via_hook_segment(drm_via_private_t *dev_priv,
			    uint32_t pause_addr_hi, uint32_t pause_addr_lo,
			    int no_pci_fire)
{
	int paused, count;
	volatile uint32_t *paused_at = dev_priv->last_pause_ptr;

	via_flush_write_combine();
	while (!*(via_get_dma(dev_priv) - 1));
	*dev_priv->last_pause_ptr = pause_addr_lo;
	via_flush_write_combine();

	/*
	 * The below statement is inserted to really force the flush.
	 * Not sure it is needed.
	 */
	while (!*dev_priv->last_pause_ptr);
	dev_priv->last_pause_ptr = via_get_dma(dev_priv) - 1;
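	/*
	 * Editorial note: last_pause_ptr now tracks the address word of the
	 * pause command most recently written at the ring tail; the next
	 * via_hook_segment() call will patch that word to splice in the
	 * following segment.  The read-back below again forces the
	 * write-combined stores out before the regulator state is examined.
	 */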
	while (!*dev_priv->last_pause_ptr);

	paused = 0;
	count = 20;

	while (!(paused = (VIA_READ(0x41c) & 0x80000000)) && count--);
	if ((count <= 8) && (count >= 0)) {
		uint32_t rgtr, ptr;

		rgtr = *(dev_priv->hw_addr_ptr);
		ptr = ((volatile char *)dev_priv->last_pause_ptr -
		       dev_priv->dma_ptr) +
			dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr +
			4 - CMDBUF_ALIGNMENT_SIZE;
		if (rgtr <= ptr) {
			DRM_ERROR("Command regulator\npaused at count %d, address %x, "
				  "while current pause address is %x.\n"
				  "Please mail this message to "
				  "<unichrome-devel@lists.sourceforge.net>\n",
				  count, rgtr, ptr);
		}
	}

	if (paused && !no_pci_fire) {
		uint32_t rgtr, ptr;
		uint32_t ptr_low;

		count = 1000000;
		while ((VIA_READ(VIA_REG_STATUS) & VIA_CMD_RGTR_BUSY) && count--);

		rgtr = *(dev_priv->hw_addr_ptr);
		ptr = ((volatile char *)paused_at - dev_priv->dma_ptr) +
			dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr + 4;

		ptr_low = (ptr > 3 * CMDBUF_ALIGNMENT_SIZE) ?
			ptr - 3 * CMDBUF_ALIGNMENT_SIZE : 0;
		if (rgtr <= ptr && rgtr >= ptr_low) {
			VIA_WRITE(VIA_REG_TRANSET, (HC_ParaType_PreCR << 16));
			VIA_WRITE(VIA_REG_TRANSPACE, pause_addr_hi);
			VIA_WRITE(VIA_REG_TRANSPACE, pause_addr_lo);
			VIA_READ(VIA_REG_TRANSPACE);
		}
	}

	return paused;
}

static int via_wait_idle(drm_via_private_t *dev_priv)
{
	int count = 10000000;

	while (count-- && (VIA_READ(VIA_REG_STATUS) &
			   (VIA_CMD_RGTR_BUSY | VIA_2D_ENG_BUSY |
			    VIA_3D_ENG_BUSY)))
		;
	return count;
}

static uint32_t *via_align_cmd(drm_via_private_t *dev_priv, uint32_t cmd_type,
			       uint32_t addr, uint32_t *cmd_addr_hi,
			       uint32_t *cmd_addr_lo, int skip_wait)
{
	uint32_t agp_base;
	uint32_t cmd_addr, addr_lo, addr_hi;
	uint32_t *vb;
	uint32_t qw_pad_count;

	if (!skip_wait)
		via_cmdbuf_wait(dev_priv, 2 * CMDBUF_ALIGNMENT_SIZE);

	vb = via_get_dma(dev_priv);
	VIA_OUT_RING_QW(HC_HEADER2 | ((VIA_REG_TRANSET >> 2) << 12) |
			(VIA_REG_TRANSPACE >> 2), HC_ParaType_PreCR << 16);
	agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
	qw_pad_count = (CMDBUF_ALIGNMENT_SIZE >> 3) -
		((dev_priv->dma_low & CMDBUF_ALIGNMENT_MASK) >> 3);

	cmd_addr = (addr) ? addr :
		agp_base + dev_priv->dma_low - 8 + (qw_pad_count << 3);
	addr_lo = ((HC_SubA_HAGPBpL << 24) | (cmd_type & HC_HAGPBpID_MASK) |
		   (cmd_addr & HC_HAGPBpL_MASK));
	addr_hi = ((HC_SubA_HAGPBpH << 24) | (cmd_addr >> 24));

	vb = via_align_buffer(dev_priv, vb, qw_pad_count - 1);
	VIA_OUT_RING_QW(*cmd_addr_hi = addr_hi, *cmd_addr_lo = addr_lo);
	return vb;
}

static void via_cmdbuf_start(drm_via_private_t *dev_priv)
{
	uint32_t pause_addr_lo, pause_addr_hi;
	uint32_t start_addr, start_addr_lo;
	uint32_t end_addr, end_addr_lo;
	uint32_t command;
	uint32_t agp_base;

	dev_priv->dma_low = 0;

	agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
	start_addr = agp_base;
	end_addr = agp_base + dev_priv->dma_high;

	start_addr_lo = ((HC_SubA_HAGPBstL << 24) | (start_addr & 0xFFFFFF));
	end_addr_lo = ((HC_SubA_HAGPBendL << 24) | (end_addr & 0xFFFFFF));
	command = ((HC_SubA_HAGPCMNT << 24) | (start_addr >> 24) |
		   ((end_addr & 0xff000000) >> 16));

	dev_priv->last_pause_ptr =
		via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0,
			      &pause_addr_hi, &pause_addr_lo, 1) - 1;

	via_flush_write_combine();
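	/*
	 * Editorial note: as in via_hook_segment(), the read-back below waits
	 * until the pause command emitted by via_align_cmd() is actually
	 * visible in AGP memory before the regulator registers are programmed.
	 */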
	while (!*dev_priv->last_pause_ptr);

	VIA_WRITE(VIA_REG_TRANSET, (HC_ParaType_PreCR << 16));
	VIA_WRITE(VIA_REG_TRANSPACE, command);
	VIA_WRITE(VIA_REG_TRANSPACE, start_addr_lo);
	VIA_WRITE(VIA_REG_TRANSPACE, end_addr_lo);

	VIA_WRITE(VIA_REG_TRANSPACE, pause_addr_hi);
	VIA_WRITE(VIA_REG_TRANSPACE, pause_addr_lo);

	DRM_WRITEMEMORYBARRIER();

	VIA_WRITE(VIA_REG_TRANSPACE, command | HC_HAGPCMNT_MASK);
	VIA_READ(VIA_REG_TRANSPACE);
}

static void via_pad_cache(drm_via_private_t *dev_priv, int qwords)
{
	uint32_t *vb;

	via_cmdbuf_wait(dev_priv, qwords + 2);
	vb = via_get_dma(dev_priv);
	VIA_OUT_RING_QW(HC_HEADER2, HC_ParaType_NotTex << 16);
	via_align_buffer(dev_priv, vb, qwords);
}

static inline void via_dummy_bitblt(drm_via_private_t *dev_priv)
{
	uint32_t *vb = via_get_dma(dev_priv);

	SetReg2DAGP(0x0C, (0 | (0 << 16)));
	SetReg2DAGP(0x10, 0 | (0 << 16));
	SetReg2DAGP(0x0, 0x1 | 0x2000 | 0xAA000000);
}

static void via_cmdbuf_jump(drm_via_private_t *dev_priv)
{
	uint32_t agp_base;
	uint32_t pause_addr_lo, pause_addr_hi;
	uint32_t jump_addr_lo, jump_addr_hi;
	volatile uint32_t *last_pause_ptr;
	uint32_t dma_low_save1, dma_low_save2;

	agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
	via_align_cmd(dev_priv, HC_HAGPBpID_JUMP, 0, &jump_addr_hi,
		      &jump_addr_lo, 0);

	dev_priv->dma_wrap = dev_priv->dma_low;

	/*
	 * Wrap command buffer to the beginning.
	 */
	dev_priv->dma_low = 0;
	if (via_cmdbuf_wait(dev_priv, CMDBUF_ALIGNMENT_SIZE) != 0) {
		DRM_ERROR("via_cmdbuf_jump failed\n");
	}

	via_dummy_bitblt(dev_priv);
	via_dummy_bitblt(dev_priv);

	last_pause_ptr =
		via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
			      &pause_addr_lo, 0) - 1;
	via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
		      &pause_addr_lo, 0);

	*last_pause_ptr = pause_addr_lo;
	dma_low_save1 = dev_priv->dma_low;

	/*
	 * Now, set a trap that will pause the regulator if it tries to rerun
	 * the old command buffer. (Which may happen if via_hook_segment
	 * detects a command regulator pause and reissues the jump command
	 * over PCI, while the regulator has already taken the jump and
	 * actually paused at the current buffer end). There appears to be no
	 * other way to detect this condition, since the hw_addr_pointer does
	 * not seem to get updated immediately when a jump occurs.
	 */
	last_pause_ptr =
		via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
			      &pause_addr_lo, 0) - 1;
	via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
		      &pause_addr_lo, 0);
	*last_pause_ptr = pause_addr_lo;

	dma_low_save2 = dev_priv->dma_low;
	dev_priv->dma_low = dma_low_save1;
	via_hook_segment(dev_priv, jump_addr_hi, jump_addr_lo, 0);
	dev_priv->dma_low = dma_low_save2;
	via_hook_segment(dev_priv, pause_addr_hi, pause_addr_lo, 0);
}

static void via_cmdbuf_rewind(drm_via_private_t *dev_priv)
{
	via_cmdbuf_jump(dev_priv);
}

static void via_cmdbuf_flush(drm_via_private_t *dev_priv, uint32_t cmd_type)
{
	uint32_t pause_addr_lo, pause_addr_hi;

	via_align_cmd(dev_priv, cmd_type, 0, &pause_addr_hi, &pause_addr_lo, 0);
	via_hook_segment(dev_priv, pause_addr_hi, pause_addr_lo, 0);
}

static void via_cmdbuf_pause(drm_via_private_t *dev_priv)
{
	via_cmdbuf_flush(dev_priv, HC_HAGPBpID_PAUSE);
}

static void via_cmdbuf_reset(drm_via_private_t *dev_priv)
{
	via_cmdbuf_flush(dev_priv, HC_HAGPBpID_STOP);
	via_wait_idle(dev_priv);
}
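/*
 * Editorial summary, not part of the original file: via_cmdbuf_pause() and
 * via_cmdbuf_reset() both go through via_cmdbuf_flush(), which terminates the
 * ring with a PAUSE or STOP command built by via_align_cmd() and then lets
 * via_hook_segment() patch the previously stored pause address so the
 * regulator runs up to the new tail.  via_cmdbuf_rewind()/via_cmdbuf_jump()
 * use the same mechanism with a JUMP command to wrap the buffer.
 */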
/*
 * User interface to the space and lag functions.
 */
static int via_cmdbuf_size(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_via_cmdbuf_size_t d_siz;
	int ret = 0;
	uint32_t tmp_size, count;
	drm_via_private_t *dev_priv;

	DRM_DEBUG("via cmdbuf_size\n");
	LOCK_TEST_WITH_RETURN(dev, filp);

	dev_priv = (drm_via_private_t *) dev->dev_private;

	if (dev_priv->ring.virtual_start == NULL) {
		DRM_ERROR("%s called without initializing AGP ring buffer.\n",
			  __FUNCTION__);
		return DRM_ERR(EFAULT);
	}

	DRM_COPY_FROM_USER_IOCTL(d_siz, (drm_via_cmdbuf_size_t __user *) data,
				 sizeof(d_siz));

	count = 1000000;
	tmp_size = d_siz.size;
	switch (d_siz.func) {
	case VIA_CMDBUF_SPACE:
		while (((tmp_size = via_cmdbuf_space(dev_priv)) < d_siz.size)
		       && count--) {
			if (!d_siz.wait) {
				break;
			}
		}
		if (!count) {
			DRM_ERROR("VIA_CMDBUF_SPACE timed out.\n");
			ret = DRM_ERR(EAGAIN);
		}
		break;
	case VIA_CMDBUF_LAG:
		while (((tmp_size = via_cmdbuf_lag(dev_priv)) > d_siz.size)
		       && count--) {
			if (!d_siz.wait) {
				break;
			}
		}
		if (!count) {
			DRM_ERROR("VIA_CMDBUF_LAG timed out.\n");
			ret = DRM_ERR(EAGAIN);
		}
		break;
	default:
		ret = DRM_ERR(EFAULT);
	}
	d_siz.size = tmp_size;

	DRM_COPY_TO_USER_IOCTL((drm_via_cmdbuf_size_t __user *) data, d_siz,
			       sizeof(d_siz));
	return ret;
}

#ifndef VIA_HAVE_DMABLIT
int via_dma_blit_sync(DRM_IOCTL_ARGS)
{
	DRM_ERROR("PCI DMA BitBlt is not implemented for this system.\n");
	return DRM_ERR(EINVAL);
}

int via_dma_blit(DRM_IOCTL_ARGS)
{
	DRM_ERROR("PCI DMA BitBlt is not implemented for this system.\n");
	return DRM_ERR(EINVAL);
}
#endif

drm_ioctl_desc_t via_ioctls[] = {
	[DRM_IOCTL_NR(DRM_VIA_ALLOCMEM)] = {via_mem_alloc, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_FREEMEM)] = {via_mem_free, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_AGP_INIT)] = {via_agp_init, DRM_AUTH|DRM_MASTER},
	[DRM_IOCTL_NR(DRM_VIA_FB_INIT)] = {via_fb_init, DRM_AUTH|DRM_MASTER},
	[DRM_IOCTL_NR(DRM_VIA_MAP_INIT)] = {via_map_init, DRM_AUTH|DRM_MASTER},
	[DRM_IOCTL_NR(DRM_VIA_DEC_FUTEX)] = {via_decoder_futex, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_DMA_INIT)] = {via_dma_init, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_CMDBUFFER)] = {via_cmdbuffer, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_FLUSH)] = {via_flush_ioctl, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_PCICMD)] = {via_pci_cmdbuffer, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_CMDBUF_SIZE)] = {via_cmdbuf_size, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_WAIT_IRQ)] = {via_wait_irq, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_DMA_BLIT)] = {via_dma_blit, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_VIA_BLIT_SYNC)] = {via_dma_blit_sync, DRM_AUTH}
};

int via_max_ioctl = DRM_ARRAY_SIZE(via_ioctls);
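/*
 * Editorial sketch, not part of the original file: how user space would
 * typically poll for ring space before queueing more commands, assuming the
 * libdrm drmCommandWriteRead() wrapper and the DRM_VIA_CMDBUF_SIZE request
 * handled by via_cmdbuf_size() above:
 *
 *	drm_via_cmdbuf_size_t d;
 *	d.func = VIA_CMDBUF_SPACE;
 *	d.wait = 1;
 *	d.size = bytes_needed;
 *	ret = drmCommandWriteRead(fd, DRM_VIA_CMDBUF_SIZE, &d, sizeof(d));
 *
 * On success, d.size holds the free ring space actually available.
 */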