/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include "drmP.h"
#include "drm.h"
#include "mga_drm.h"
#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */

static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
                               struct drm_clip_rect *box)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        unsigned int pitch = dev_priv->front_pitch;
        DMA_LOCALS;

        BEGIN_DMA(2);

        /* Force reset of DWGCTL on G400 (eliminates clip disable bit).
         */
        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000,
                          MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000);
        }
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

        ADVANCE_DMA();
}

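/* Emit the drawing context registers stored in the SAREA (destination,
 * plane write mask, DWGCTL, alpha/fog state).  The G400 variant adds the
 * dual-texture stage and stencil registers.
 */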
static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);
        DMA_BLOCK(MGA_FCOL, ctx->fcol,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);
        DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
                  MGA_TDUALSTAGE0, ctx->tdualstage0,
                  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);
        DMA_BLOCK(MGA_STENCIL, ctx->stencil,
                  MGA_STENCILCTL, ctx->stencilctl,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

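/* Emit texture unit 0 state, including the WRxx setup registers that
 * carry the texture width and height.
 */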
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);
        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);
        DMA_BLOCK(MGA_WR34, tex->texheight,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

        /* printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
        /*        tex->texctl, tex->texctl2); */

        BEGIN_DMA(6);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);
        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);
        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);
        DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
                  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
                  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_DMAPAD, 0x00000000);
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
        DMA_LOCALS;

        /* printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
        /*        tex->texctl, tex->texctl2); */

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
                                MGA_MAP1_ENABLE |
                                MGA_G400_TC2_MAGIC),
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);
        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);
        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);
        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000,
                  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);
        DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff,
                  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

        ADVANCE_DMA();
}

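/* Load a WARP microcode pipe: suspend the engine, program the vertex
 * layout and setup registers, then restart from the selected pipe's
 * physical address.
 */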
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
                  MGA_WVRTXSZ, 0x00000007,
                  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);
        DMA_BLOCK(MGA_WR25, 0x00000100,
                  MGA_WR34, 0x00000000,
                  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

        /* Padding required due to hardware bug.
         */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
                               MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

        /* printk("mga_g400_emit_pipe %x\n", pipe); */

        BEGIN_DMA(10);

        DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        if (pipe & MGA_T2) {
                DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x1e000000);
        } else {
                if (dev_priv->warp_pipe & MGA_T2) {
                        /* Flush the WARP pipe */
                        DMA_BLOCK(MGA_YDST, 0x00000000,
                                  MGA_FXLEFT, 0x00000000,
                                  MGA_FXRIGHT, 0x00000001,
                                  MGA_DWGCTL, MGA_DWGCTL_FLUSH);
                        DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
                                  MGA_DWGSYNC, 0x00007000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_LEN + MGA_EXEC, 0x00000000);
                        DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
                                                MGA_G400_TC2_MAGIC),
                                  MGA_LEN + MGA_EXEC, 0x00000000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_DMAPAD, 0x00000000);
                }

                DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x18000000);
        }

        DMA_BLOCK(MGA_WFLAG, 0x00000000,
                  MGA_WFLAG1, 0x00000000,
                  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);
        DMA_BLOCK(MGA_WR49, 0x00000000,          /* tex0 */
                  MGA_WR57, 0x00000000,          /* tex0 */
                  MGA_WR53, 0x00000000,          /* tex1 */
                  MGA_WR61, 0x00000000);         /* tex1 */
        DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,   /* tex0 width */
                  MGA_WR62, MGA_G400_WR_MAGIC,   /* tex0 height */
                  MGA_WR52, MGA_G400_WR_MAGIC,   /* tex1 width */
                  MGA_WR60, MGA_G400_WR_MAGIC);  /* tex1 height */

        /* Padding required due to hardware bug */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
                                MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

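/* Emit only the state groups flagged dirty in the SAREA, switching WARP
 * pipes first if the client selected a different one, and clear each
 * dirty flag once the corresponding state has been uploaded.
 */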
static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g200_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g200_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g200_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }
}

static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int multitex = sarea_priv->warp_pipe & MGA_T2;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g400_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g400_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g400_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }

        if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
                mga_g400_emit_tex1(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
        }
}

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        if (ctx->dstorg != dev_priv->front_offset &&
            ctx->dstorg != dev_priv->back_offset) {
                DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
                          ctx->dstorg, dev_priv->front_offset,
                          dev_priv->back_offset);
                ctx->dstorg = 0;
                return -EINVAL;
        }

        return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
        unsigned int org;

        org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

        if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
                DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
                tex->texorg = 0;
                return -EINVAL;
        }

        return 0;
}

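/* Check all state marked dirty in the SAREA; returns true (nonzero) only
 * if everything passed verification.
 */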
static int mga_verify_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int ret = 0;

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (dirty & MGA_UPLOAD_CONTEXT)
                ret |= mga_verify_context(dev_priv);

        if (dirty & MGA_UPLOAD_TEX0)
                ret |= mga_verify_tex(dev_priv, 0);

        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                if (dirty & MGA_UPLOAD_TEX1)
                        ret |= mga_verify_tex(dev_priv, 1);

                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
        } else {
                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
        }

        return (ret == 0);
}

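/* Reject image loads that fall outside the texture region or whose
 * length fails the MGA_ILOAD_MASK alignment check.
 */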
static int mga_verify_iload(drm_mga_private_t *dev_priv,
                            unsigned int dstorg, unsigned int length)
{
        if (dstorg < dev_priv->texture_offset ||
            dstorg + length > (dev_priv->texture_offset +
                               dev_priv->texture_size)) {
                DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
                return -EINVAL;
        }

        if (length & MGA_ILOAD_MASK) {
                DRM_ERROR("*** bad iload length: 0x%x\n",
                          length & MGA_ILOAD_MASK);
                return -EINVAL;
        }

        return 0;
}

static int mga_verify_blit(drm_mga_private_t *dev_priv,
                           unsigned int srcorg, unsigned int dstorg)
{
        if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
            (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
                DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
                return -EINVAL;
        }

        return 0;
}

/* ================================================================
 *
 */

static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(1);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;

                DRM_DEBUG(" from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                if (clear->flags & MGA_FRONT) {
                        BEGIN_DMA(2);
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->front_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);
                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_BACK) {
                        BEGIN_DMA(2);
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->back_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);
                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_DEPTH) {
                        BEGIN_DMA(2);
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->depth_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_depth,
                                  MGA_DSTORG, dev_priv->depth_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);
                        ADVANCE_DMA();
                }
        }

        BEGIN_DMA(1);

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();
}

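/* Copy the back buffer to the front buffer for each cliprect, saving the
 * current primary DMA tail and wrap count in the SAREA as the last-frame
 * marker.
 */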
static void mga_dma_dispatch_swap(struct drm_device *dev)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        sarea_priv->last_frame.head = dev_priv->prim.tail;
        sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
        DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_SRCORG, dev_priv->back_offset,
                  MGA_AR5, dev_priv->front_pitch);
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;
                u32 start = box->y1 * dev_priv->front_pitch;

                DRM_DEBUG(" from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
                          MGA_AR3, start + box->x1,
                          MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                          MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
        }

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();

        DRM_DEBUG("... done.\n");
}

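/* Emit the dirty state, then execute the vertex buffer once per cliprect
 * (or once if there are none); a buffer marked for discard is aged and
 * returned to the freelist.
 */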
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        u32 length = (u32) buf->used;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        if (buf->used) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SECADDRESS, (address |
                                                   MGA_DMA_VERTEX),
                                  MGA_SECEND, ((address + length) |
                                               dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
                                     unsigned int start, unsigned int end)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

        if (start != end) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SETUPADDRESS, address + start,
                                  MGA_SETUPEND, ((address + end) |
                                                 dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

/* This copies a 64-byte aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do checking.
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
                                   unsigned int dstorg, unsigned int length)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
        u32 srcorg =
            buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
        u32 y2;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        y2 = length / 64;

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
        DMA_BLOCK(MGA_DSTORG, dstorg,
                  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);
        DMA_BLOCK(MGA_PITCH, 64,
                  MGA_PLNWT, 0xffffffff,
                  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);
        DMA_BLOCK(MGA_AR0, 63,
                  MGA_AR3, 0,
                  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);
        DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        AGE_BUFFER(buf_priv);

        buf->pending = 0;
        buf->used = 0;
        buf_priv->dispatched = 0;

        mga_freelist_put(dev, buf);

        FLUSH_DMA();
}

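/* Blit between the given source and destination origins, one rectangle
 * per cliprect, applying the requested plane mask and y direction.
 */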
static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        u32 scandir = 0, i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);
        DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
                  MGA_PLNWT, blit->planemask,
                  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);
        DMA_BLOCK(MGA_SGN, scandir,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_AR5, blit->ydir * blit->src_pitch,
                  MGA_PITCH, blit->dst_pitch);

        for (i = 0; i < nbox; i++) {
                int srcx = pbox[i].x1 + blit->delta_sx;
                int srcy = pbox[i].y1 + blit->delta_sy;
                int dstx = pbox[i].x1 + blit->delta_dx;
                int dsty = pbox[i].y1 + blit->delta_dy;
                int h = pbox[i].y2 - pbox[i].y1;
                int w = pbox[i].x2 - pbox[i].x1 - 1;
                int start;

                if (blit->ydir == -1)
                        srcy = blit->height - srcy - 1;

                start = srcy * blit->src_pitch + srcx;

                DMA_BLOCK(MGA_AR0, start + w,
                          MGA_AR3, start,
                          MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
                          MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
        }

        /* Do something to flush AGP?
         */

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();
}

/* ================================================================
 *
 */

static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_clear_t *clear = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_clear(dev, clear);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_swap(dev);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

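/* Validate and dispatch a vertex buffer; on a verification failure a
 * buffer marked for discard is aged and returned to the freelist.
 */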
static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_vertex_t *vertex = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (vertex->idx < 0 || vertex->idx > dma->buf_count)
                return -EINVAL;
        buf = dma->buflist[vertex->idx];
        buf_priv = buf->dev_private;

        buf->used = vertex->used;
        buf_priv->discard = vertex->discard;

        if (!mga_verify_state(dev_priv)) {
                if (vertex->discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_vertex(dev, buf);

        return 0;
}

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_indices_t *indices = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (indices->idx < 0 || indices->idx > dma->buf_count)
                return -EINVAL;

        buf = dma->buflist[indices->idx];
        buf_priv = buf->dev_private;

        buf_priv->discard = indices->discard;

        if (!mga_verify_state(dev_priv)) {
                if (indices->discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

        return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        struct drm_device_dma *dma = dev->dma;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_iload_t *iload = data;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
        if (mga_do_wait_for_idle(dev_priv) < 0) {
                if (MGA_DMA_DEBUG)
                        DRM_INFO("-EBUSY\n");
                return -EBUSY;
        }
#endif
        if (iload->idx < 0 || iload->idx > dma->buf_count)
                return -EINVAL;

        buf = dma->buflist[iload->idx];
        buf_priv = buf->dev_private;

        if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
                mga_freelist_put(dev, buf);
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_blit_t *blit = data;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
                return -EINVAL;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_blit(dev, blit);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

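/* Report driver parameters (currently the IRQ number and the chip type)
 * back to user space.
 */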
static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_getparam_t *param = data;
        int value;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        switch (param->param) {
        case MGA_PARAM_IRQ_NR:
                value = drm_dev_to_irq(dev);
                break;
        case MGA_PARAM_CARD_TYPE:
                value = dev_priv->chipset;
                break;
        default:
                return -EINVAL;
        }

        if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
                DRM_ERROR("copy_to_user\n");
                return -EFAULT;
        }

        return 0;
}

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 *fence = data;
        DMA_LOCALS;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        /* I would normally do this assignment in the declaration of fence,
         * but dev_priv may be NULL.
         */
        *fence = dev_priv->next_fence_to_post;
        dev_priv->next_fence_to_post++;

        BEGIN_DMA(1);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);

        ADVANCE_DMA();

        return 0;
}

static int mga_wait_fence(struct drm_device *dev, void *data,
                          struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 *fence = data;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        mga_driver_fence_wait(dev, fence);

        return 0;
}

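/* All ioctls require DRM authentication; DMA init and bootstrap are
 * additionally restricted to the DRM master with root privileges.
 */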
struct drm_ioctl_desc mga_ioctls[] = {
        DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);