
/*
 * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drm.h"
#include "nouveau_drv.h"
#include "nouveau_util.h"

struct nv10_graph_engine {
	struct nouveau_exec_engine base;
};

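/* Software copy of the PGRAPH pipeline ("PIPE") state.  Each array holds the
 * words read back through NV10_PGRAPH_PIPE_DATA starting at the PIPE_ADDRESS
 * offset it is named after; the array sizes are the block sizes in bytes.
 */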
struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};

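/* MMIO registers that make up the per-channel PGRAPH context on NV10-class
 * chips; they are read out on context unload and written back on load.
 */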
static int nv10_graph_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV10_PGRAPH_CTX_USER,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV10_PGRAPH_SURFACE,
	NV10_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV10_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	0x00400904,
	0x00400908,
	0x0040090c,
	0x00400910,
	0x00400914,
	0x00400918,
	0x0040091c,
	0x00400920,
	0x00400924,
	0x00400928,
	0x0040092c,
	0x00400930,
	0x00400934,
	0x00400938,
	0x0040093c,
	0x00400940,
	0x00400944,
	0x00400948,
	0x0040094c,
	0x00400950,
	0x00400954,
	0x00400958,
	0x0040095c,
	0x00400960,
	0x00400964,
	0x00400968,
	0x0040096c,
	0x00400970,
	0x00400974,
	0x00400978,
	0x0040097c,
	0x00400980,
	0x00400984,
	0x00400988,
	0x0040098c,
	0x00400990,
	0x00400994,
	0x00400998,
	0x0040099c,
	0x004009a0,
	0x004009a4,
	0x004009a8,
	0x004009ac,
	0x004009b0,
	0x004009b4,
	0x004009b8,
	0x004009bc,
	0x004009c0,
	0x004009c4,
	0x004009c8,
	0x004009cc,
	0x004009d0,
	0x004009d4,
	0x004009d8,
	0x004009dc,
	0x004009e0,
	0x004009e4,
	0x004009e8,
	0x004009ec,
	0x004009f0,
	0x004009f4,
	0x004009f8,
	0x004009fc,
	NV04_PGRAPH_PATTERN,	/* 2 values from 0x400808 to 0x40080c */
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	0x00400e70,
	0x00400e74,
	0x00400e78,
	0x00400e7c,
	0x00400e80,
	0x00400e84,
	0x00400e88,
	0x00400e8c,
	0x00400ea0,
	0x00400ea4,
	0x00400ea8,
	0x00400e90,
	0x00400e94,
	0x00400e98,
	0x00400e9c,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
	0x00400f04,
	0x00400f24,
	0x00400f08,
	0x00400f28,
	0x00400f0c,
	0x00400f2c,
	0x00400f10,
	0x00400f30,
	0x00400f14,
	0x00400f34,
	0x00400f18,
	0x00400f38,
	0x00400f1c,
	0x00400f3c,
	NV10_PGRAPH_XFMODE0,
	NV10_PGRAPH_XFMODE1,
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM,	/* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM,	/* 32 values from 0x400480 to 0x4004fc */
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	0x00400550,
	0x00400558,
	0x00400554,
	0x0040055c,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV03_PGRAPH_CLIPX_0,
	NV03_PGRAPH_CLIPX_1,
	NV03_PGRAPH_CLIPY_0,
	NV03_PGRAPH_CLIPY_1,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	0x00400e00,
	0x00400e04,
	0x00400e08,
	0x00400e0c,
	0x00400e10,
	0x00400e14,
	0x00400e18,
	0x00400e1c,
	0x00400e20,
	0x00400e24,
	0x00400e28,
	0x00400e2c,
	0x00400e30,
	0x00400e34,
	0x00400e38,
	0x00400e3c,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	NV03_PGRAPH_X_MISC,
	NV03_PGRAPH_Y_MISC,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
};

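/* Additional context registers that only exist on NV17 and later chipsets,
 * saved and restored alongside the NV10 list above.
 */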
static int nv17_graph_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400eb4,
	0x00400eb8,
	0x00400ebc,
	0x00400ec0,
	0x00400ec4,
	0x00400ec8,
	0x00400ecc,
	0x00400ed0,
	0x00400ed4,
	0x00400ed8,
	0x00400edc,
	0x00400ee0,
	0x00400a00,
	0x00400a04,
};

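/* Per-channel software context: the saved register values, the pipeline
 * snapshot, and the LMA window parameters accumulated by the NV17 Celsius
 * methods 0x1638-0x1644.
 */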
struct graph_state {
	int nv10[ARRAY_SIZE(nv10_graph_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_graph_ctx_regs)];
	struct pipe_state pipe_state;
	uint32_t lma_window[4];
};

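/* PIPE_SAVE/PIPE_RESTORE transfer one block of pipeline state: they program
 * NV10_PGRAPH_PIPE_ADDRESS once and then read or write consecutive words
 * through NV10_PGRAPH_PIPE_DATA (the pipe address appears to auto-increment
 * on each data access).
 */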
#define PIPE_SAVE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)

#define PIPE_RESTORE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)

static void nv10_graph_save_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(dev, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0000, 0x0000);
}

static void nv10_graph_load_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t xfmode0, xfmode1;
	int i;

	nouveau_wait_for_idle(dev);
	/* XXX check haiku comments */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
	nouveau_wait_for_idle(dev);

	/* restore XFMODE */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(dev, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(dev, pipe->pipe_0x0040, 0x0040);
	nouveau_wait_for_idle(dev);
}

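/* Fill a freshly allocated software pipe state with default values, so the
 * first context load starts from a sane pipeline configuration.
 */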
static void nv10_graph_create_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *fifo_pipe_state = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t *fifo_pipe_state_addr;
	int i;
#define PIPE_INIT(addr) \
	do { \
		fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
	} while (0)
#define PIPE_INIT_END(addr) \
	do { \
		uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
				ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
		if (fifo_pipe_state_addr != __end_addr) \
			NV_ERROR(dev, "incomplete pipe init for 0x%x : %p/%p\n", \
				addr, fifo_pipe_state_addr, __end_addr); \
	} while (0)
#define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}

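/* Map a register address to its index in nv10_graph_ctx_regs[] (and, below,
 * nv17_graph_ctx_regs[]); returns -1 if the register is not part of the
 * saved context.
 */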
static int nv10_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++) {
		if (nv10_graph_ctx_regs[i] == reg)
			return i;
	}
	NV_ERROR(dev, "unknown offset nv10_ctx_regs %d\n", reg);
	return -1;
}

static int nv17_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++) {
		if (nv17_graph_ctx_regs[i] == reg)
			return i;
	}
	NV_ERROR(dev, "unknown offset nv17_ctx_regs %d\n", reg);
	return -1;
}

static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel *chan,
				       uint32_t inst)
{
	struct drm_device *dev = chan->dev;
	uint32_t st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	uint32_t ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nv_rd32(dev, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i),
			nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nv_mask(dev, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2,
		0x2c000000 | chan->id << 20 | subchan << 16 | 0x18c);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nv_mask(dev, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nv_wr32(dev, 0x4007a0 + 4 * i, fifo[i]);

	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, st2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, ctx_user);
}

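/* Write a channel's saved PGRAPH state back into the hardware and mark that
 * channel as the current owner in NV10_PGRAPH_CTX_USER.
 */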
static int
nv10_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		nv_wr32(dev, nv10_graph_ctx_regs[i], pgraph_ctx->nv10[i]);

	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			nv_wr32(dev, nv17_graph_ctx_regs[i],
				pgraph_ctx->nv17[i]);
	}

	nv10_graph_load_pipe(chan);
	nv10_graph_load_dma_vtxbuf(chan, (nv_rd32(dev, NV10_PGRAPH_GLOBALSTATE1)
					  & 0xffff));

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, (tmp & 0xffffff) | chan->id << 24);
	tmp = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, tmp & 0xcfffffff);
	return 0;
}

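/* Save the active channel's PGRAPH state into its software context and park
 * CTX_USER on the last FIFO channel id.
 */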
static int
nv10_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_channel *chan;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv10_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		ctx->nv10[i] = nv_rd32(dev, nv10_graph_ctx_regs[i]);

	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			ctx->nv17[i] = nv_rd32(dev, nv17_graph_ctx_regs[i]);
	}

	nv10_graph_save_pipe(chan);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (pfifo->channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	return 0;
}

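/* Handle a CONTEXT_SWITCH interrupt: save the outgoing channel's context and
 * load the context of the channel that triggered the trap.
 */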
static void
nv10_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv10_graph_unload_context(dev);

	/* Load context for next channel */
	chid = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
	chan = dev_priv->channels.ptr[chid];
	if (chan && chan->engctx[NVOBJ_ENGINE_GR])
		nv10_graph_load_context(chan);
}

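/* Seed an initial value for a context register in the software copy; the
 * write is dropped if no usable offset is found for the register.
 */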
#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv10[offset] = val; \
	} while (0)

#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv17[offset] = val; \
	} while (0)

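/* Return the channel that currently owns PGRAPH, or NULL if CTX_USER does
 * not hold a valid channel id.
 */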
struct nouveau_channel *
nv10_graph_channel(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int chid = dev_priv->engine.fifo.channels;

	if (nv_rd32(dev, NV10_PGRAPH_CTX_CONTROL) & 0x00010000)
		chid = nv_rd32(dev, NV10_PGRAPH_CTX_USER) >> 24;

	if (chid >= dev_priv->engine.fifo.channels)
		return NULL;

	return dev_priv->channels.ptr[chid];
}

static int
nv10_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx;

	NV_DEBUG(dev, "nv10_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;
	chan->engctx[engine] = pgraph_ctx;

	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (dev_priv->chipset >= 0x17) {
		/* is it really needed ??? */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
			       nv_rd32(dev, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nv_rd32(dev, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->id << 24);

	nv10_graph_create_pipe(chan);
	return 0;
}

static void
nv10_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Unload the context if it's the currently active one */
	if (nv10_graph_channel(dev) == chan)
		nv10_graph_unload_context(dev);

	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	chan->engctx[engine] = NULL;
	kfree(pgraph_ctx);
}

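/* Program one PGRAPH tiling region (limit, pitch, address) from the
 * device-wide tile configuration.
 */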
static void
nv10_graph_set_tile_region(struct drm_device *dev, int i)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i];

	nv_wr32(dev, NV10_PGRAPH_TLIMIT(i), tile->limit);
	nv_wr32(dev, NV10_PGRAPH_TSIZE(i), tile->pitch);
	nv_wr32(dev, NV10_PGRAPH_TILE(i), tile->addr);
}

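/* Engine init: reset PGRAPH through PMC, clear and enable its interrupts,
 * program the DEBUG registers and the tiling regions, and point CTX_USER
 * at the last channel id so no channel owns the engine yet.
 */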
static int
nv10_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u32 tmp;
	int i;

	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0x55DE0830 |
				      (1<<29) |
				      (1<<31));
	if (dev_priv->chipset >= 0x17) {
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nv_wr32(dev, 0x400a10, 0x3ff3fb6);
		nv_wr32(dev, 0x400838, 0x2f8684);
		nv_wr32(dev, 0x40083c, 0x115f3f);
		nv_wr32(dev, 0x004006b0, 0x40000020);
	} else
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv10_graph_set_tile_region(dev, i);

	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_STATE, 0xFFFFFFFF);

	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);
	return 0;
}

static int
nv10_graph_fini(struct drm_device *dev, int engine, bool suspend)
{
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (!nv_wait(dev, NV04_PGRAPH_STATUS, ~0, 0) && suspend) {
		nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
		return -EBUSY;
	}
	nv10_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}

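/* NV17 Celsius LMA window methods (0x1638-0x1644): the four parameters are
 * buffered in the channel context and, once the last one arrives, pushed
 * into the pipeline at offset 0x6790 while the surrounding pipe and XFMODE
 * state is saved and restored around the update.
 */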
static int
nv17_graph_mthd_lma_window(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct graph_state *ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	struct pipe_state *pipe = &ctx->pipe_state;
	uint32_t pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
	uint32_t xfmode0, xfmode1;
	int i;

	ctx->lma_window[(mthd - 0x1638) / 4] = data;

	if (mthd != 0x1644)
		return 0;

	nouveau_wait_for_idle(dev);

	PIPE_SAVE(dev, pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);

	PIPE_RESTORE(dev, ctx->lma_window, 0x6790);

	nouveau_wait_for_idle(dev);

	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe_0x64c0, 0x64c0);
	PIPE_SAVE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_SAVE(dev, pipe_0x6a80, 0x6a80);

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);

	nouveau_wait_for_idle(dev);

	PIPE_RESTORE(dev, pipe_0x0040, 0x0040);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);

	PIPE_RESTORE(dev, pipe_0x64c0, 0x64c0);
	PIPE_RESTORE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_RESTORE(dev, pipe_0x6a80, 0x6a80);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nouveau_wait_for_idle(dev);

	return 0;
}

static int
nv17_graph_mthd_lma_enable(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_DEBUG_4,
		nv_rd32(dev, NV10_PGRAPH_DEBUG_4) | 0x1 << 8);
	nv_wr32(dev, 0x004006b0,
		nv_rd32(dev, 0x004006b0) | 0x8 << 24);

	return 0;
}

struct nouveau_bitfield nv10_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR,  "ERROR"  },
	{}
};

struct nouveau_bitfield nv10_graph_nstatus[] = {
	{ NV10_PGRAPH_NSTATUS_STATE_IN_USE,     "STATE_IN_USE" },
	{ NV10_PGRAPH_NSTATUS_INVALID_STATE,    "INVALID_STATE" },
	{ NV10_PGRAPH_NSTATUS_BAD_ARGUMENT,     "BAD_ARGUMENT" },
	{ NV10_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}
};

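/* PGRAPH interrupt handler: decode the trapped method, give software method
 * handlers a chance to consume ILLEGAL_MTHD errors, service context-switch
 * requests, and rate-limit logging of whatever remains.
 */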
static void
nv10_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x01f00000) >> 20;
		u32 subc = (addr & 0x00070000) >> 16;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400160 + subc * 4) & 0xfff;
		u32 show = stat;

		if (stat & NV_PGRAPH_INTR_ERROR) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_ERROR;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv10_graph_context_switch(dev);
		}

		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv10_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv10_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}

static void
nv10_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv10_graph_engine *pgraph = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);
	kfree(pgraph);
}

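/* Engine constructor: hook up the PGRAPH engine callbacks, register the
 * interrupt handler on line 12 and declare the supported object classes,
 * including the Celsius 3D class for each generation (0x0056 for chipset
 * 0x10 and earlier, 0x0096 for pre-0x17 chips and 0x1a, 0x0099 otherwise).
 */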
int
nv10_graph_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv10_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	pgraph->base.destroy = nv10_graph_destroy;
	pgraph->base.init = nv10_graph_init;
	pgraph->base.fini = nv10_graph_fini;
	pgraph->base.context_new = nv10_graph_context_new;
	pgraph->base.context_del = nv10_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;
	pgraph->base.set_tile_region = nv10_graph_set_tile_region;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv10_graph_isr);

	/* nvsw */
	NVOBJ_CLASS(dev, 0x506e, SW);
	NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);

	NVOBJ_CLASS(dev, 0x0030, GR); /* null */
	NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */
	NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */
	NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */
	NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */
	NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */
	NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */
	NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */
	NVOBJ_CLASS(dev, 0x0043, GR); /* rop */
	NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */
	NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */
	NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */
	NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */
	NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */
	NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */
	NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */
	NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */

	/* celsius */
	if (dev_priv->chipset <= 0x10) {
		NVOBJ_CLASS(dev, 0x0056, GR);
	} else
	if (dev_priv->chipset < 0x17 || dev_priv->chipset == 0x1a) {
		NVOBJ_CLASS(dev, 0x0096, GR);
	} else {
		NVOBJ_CLASS(dev, 0x0099, GR);
		NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable);
	}

	return 0;
}