nv10.c 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221
  1. /*
  2. * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
  3. * All Rights Reserved.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
  14. * Software.
  15. *
  16. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  19. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  20. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  21. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  22. * DEALINGS IN THE SOFTWARE.
  23. */
  24. #include "nv10.h"
  25. #include "regs.h"
  26. #include <core/client.h>
  27. #include <core/gpuobj.h>
  28. #include <engine/fifo.h>
  29. #include <engine/fifo/chan.h>
  30. #include <subdev/fb.h>
/* Software snapshot of the PGRAPH 3D pipeline state.  Each member holds
 * the 32-bit words read back through NV10_PGRAPH_PIPE_DATA for one
 * PIPE_ADDRESS window; the member name is the window's base address and
 * the array length is the window size in bytes divided by 4.  Saved and
 * restored by nv10_gr_save_pipe()/nv10_gr_load_pipe(). */
struct pipe_state {
	u32 pipe_0x0000[0x040/4];
	u32 pipe_0x0040[0x010/4];
	u32 pipe_0x0200[0x0c0/4];
	u32 pipe_0x4400[0x080/4];
	u32 pipe_0x6400[0x3b0/4];
	u32 pipe_0x6800[0x2f0/4];
	u32 pipe_0x6c00[0x030/4];
	u32 pipe_0x7000[0x130/4];
	u32 pipe_0x7400[0x0c0/4];
	u32 pipe_0x7800[0x0c0/4];
};
/* PGRAPH MMIO registers that make up the software-saved NV10 channel
 * context.  nv10_gr_unload_context() reads them into chan->nv10[] and
 * nv10_gr_load_context() writes them back, in exactly this order;
 * chan->nv10[] is indexed by position in this table (see
 * nv10_gr_ctx_regs_find_offset()).  Do not reorder. */
static int nv10_gr_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV10_PGRAPH_CTX_USER,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV10_PGRAPH_SURFACE,
	NV10_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV10_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	0x00400904,
	0x00400908,
	0x0040090c,
	0x00400910,
	0x00400914,
	0x00400918,
	0x0040091c,
	0x00400920,
	0x00400924,
	0x00400928,
	0x0040092c,
	0x00400930,
	0x00400934,
	0x00400938,
	0x0040093c,
	0x00400940,
	0x00400944,
	0x00400948,
	0x0040094c,
	0x00400950,
	0x00400954,
	0x00400958,
	0x0040095c,
	0x00400960,
	0x00400964,
	0x00400968,
	0x0040096c,
	0x00400970,
	0x00400974,
	0x00400978,
	0x0040097c,
	0x00400980,
	0x00400984,
	0x00400988,
	0x0040098c,
	0x00400990,
	0x00400994,
	0x00400998,
	0x0040099c,
	0x004009a0,
	0x004009a4,
	0x004009a8,
	0x004009ac,
	0x004009b0,
	0x004009b4,
	0x004009b8,
	0x004009bc,
	0x004009c0,
	0x004009c4,
	0x004009c8,
	0x004009cc,
	0x004009d0,
	0x004009d4,
	0x004009d8,
	0x004009dc,
	0x004009e0,
	0x004009e4,
	0x004009e8,
	0x004009ec,
	0x004009f0,
	0x004009f4,
	0x004009f8,
	0x004009fc,
	NV04_PGRAPH_PATTERN, /* 2 values from 0x400808 to 0x40080c */
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	0x00400e70,
	0x00400e74,
	0x00400e78,
	0x00400e7c,
	0x00400e80,
	0x00400e84,
	0x00400e88,
	0x00400e8c,
	0x00400ea0,
	0x00400ea4,
	0x00400ea8,
	0x00400e90,
	0x00400e94,
	0x00400e98,
	0x00400e9c,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
	0x00400f04,
	0x00400f24,
	0x00400f08,
	0x00400f28,
	0x00400f0c,
	0x00400f2c,
	0x00400f10,
	0x00400f30,
	0x00400f14,
	0x00400f34,
	0x00400f18,
	0x00400f38,
	0x00400f1c,
	0x00400f3c,
	NV10_PGRAPH_XFMODE0,
	NV10_PGRAPH_XFMODE1,
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM, /* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM, /* 32 values from 0x400480 to 0x4004fc */
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	0x00400550,
	0x00400558,
	0x00400554,
	0x0040055c,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV03_PGRAPH_CLIPX_0,
	NV03_PGRAPH_CLIPX_1,
	NV03_PGRAPH_CLIPY_0,
	NV03_PGRAPH_CLIPY_1,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	0x00400e00,
	0x00400e04,
	0x00400e08,
	0x00400e0c,
	0x00400e10,
	0x00400e14,
	0x00400e18,
	0x00400e1c,
	0x00400e20,
	0x00400e24,
	0x00400e28,
	0x00400e2c,
	0x00400e30,
	0x00400e34,
	0x00400e38,
	0x00400e3c,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	NV03_PGRAPH_X_MISC,
	NV03_PGRAPH_Y_MISC,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
};
/* Additional context registers saved/restored only on NV11+/NV17+
 * chips (see the card_type/chipset check in nv10_gr_load_context()),
 * stored in chan->nv17[] by table position.  Includes DEBUG_4 and
 * 0x4006b0, which the LMA-enable method also touches. */
static int nv17_gr_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400eb4,
	0x00400eb8,
	0x00400ebc,
	0x00400ec0,
	0x00400ec4,
	0x00400ec8,
	0x00400ecc,
	0x00400ed0,
	0x00400ed4,
	0x00400ed8,
	0x00400edc,
	0x00400ee0,
	0x00400a00,
	0x00400a04,
};
/* Recover the nv10_gr wrapper from an embedded nvkm_gr pointer. */
#define nv10_gr(p) container_of((p), struct nv10_gr, base)

/* Per-device PGRAPH engine state. */
struct nv10_gr {
	struct nvkm_gr base;
	/* one software context per FIFO channel id; lock guards the array */
	struct nv10_gr_chan *chan[32];
	spinlock_t lock;
};
/* Recover the nv10_gr_chan from an embedded nvkm_object pointer. */
#define nv10_gr_chan(p) container_of((p), struct nv10_gr_chan, object)

/* Software-saved PGRAPH context for one channel. */
struct nv10_gr_chan {
	struct nvkm_object object;
	struct nv10_gr *gr;
	int chid;
	/* register values, indexed by position in the ctx_regs tables */
	int nv10[ARRAY_SIZE(nv10_gr_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_gr_ctx_regs)];
	struct pipe_state pipe_state;
	/* last four LMA window method parameters (methods 0x1638-0x1644) */
	u32 lma_window[4];
};
  400. /*******************************************************************************
  401. * Graphics object classes
  402. ******************************************************************************/
/* Read ARRAY_SIZE(state) words of 3D pipeline state starting at pipe
 * address 'addr' into the 'state' array.  NOTE(review): the first macro
 * argument is never expanded -- callers variously pass 'gr' or 'device'
 * there -- and a 'struct nvkm_device *device' must be in scope at the
 * call site. */
#define PIPE_SAVE(gr, state, addr) \
	do { \
		int __i; \
		nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			state[__i] = nvkm_rd32(device, NV10_PGRAPH_PIPE_DATA); \
	} while (0)

/* Write the 'state' array back to pipeline state starting at pipe
 * address 'addr'.  Same caveats as PIPE_SAVE. */
#define PIPE_RESTORE(gr, state, addr) \
	do { \
		int __i; \
		nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)
/* Handle celsius methods 0x1638-0x1644 (LMA window setup).  The four
 * parameters are accumulated in chan->lma_window[]; only the final
 * method (0x1644) triggers the hardware programming sequence below,
 * which pokes the collected window through pipe address 0x6790 while
 * temporarily neutralising XFMODE and selected pipe ranges, then
 * restores everything.  The exact ordering of these register accesses
 * is hardware-mandated -- do not reorder. */
static void
nv17_gr_mthd_lma_window(struct nv10_gr_chan *chan, u32 mthd, u32 data)
{
	struct nvkm_device *device = chan->object.engine->subdev.device;
	struct nvkm_gr *gr = &chan->gr->base;
	struct pipe_state *pipe = &chan->pipe_state;
	u32 pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
	u32 xfmode0, xfmode1;
	int i;

	chan->lma_window[(mthd - 0x1638) / 4] = data;

	/* defer the actual work until all four parameters have arrived */
	if (mthd != 0x1644)
		return;

	nv04_gr_idle(gr);

	PIPE_SAVE(device, pipe_0x0040, 0x0040);
	PIPE_SAVE(device, pipe->pipe_0x0200, 0x0200);

	PIPE_RESTORE(device, chan->lma_window, 0x6790);

	nv04_gr_idle(gr);

	xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0);
	xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1);

	PIPE_SAVE(device, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(device, pipe_0x64c0, 0x64c0);
	PIPE_SAVE(device, pipe_0x6ab0, 0x6ab0);
	PIPE_SAVE(device, pipe_0x6a80, 0x6a80);

	nv04_gr_idle(gr);

	/* load neutral XFMODE and identity-ish pipe values */
	nvkm_wr32(device, NV10_PGRAPH_XFMODE0, 0x10000000);
	nvkm_wr32(device, NV10_PGRAPH_XFMODE1, 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(device, pipe->pipe_0x0200, 0x0200);

	nv04_gr_idle(gr);

	/* restore the saved state */
	PIPE_RESTORE(device, pipe_0x0040, 0x0040);

	nvkm_wr32(device, NV10_PGRAPH_XFMODE0, xfmode0);
	nvkm_wr32(device, NV10_PGRAPH_XFMODE1, xfmode1);

	PIPE_RESTORE(device, pipe_0x64c0, 0x64c0);
	PIPE_RESTORE(device, pipe_0x6ab0, 0x6ab0);
	PIPE_RESTORE(device, pipe_0x6a80, 0x6a80);
	PIPE_RESTORE(device, pipe->pipe_0x4400, 0x4400);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
	nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv04_gr_idle(gr);
}
  469. static void
  470. nv17_gr_mthd_lma_enable(struct nv10_gr_chan *chan, u32 mthd, u32 data)
  471. {
  472. struct nvkm_device *device = chan->object.engine->subdev.device;
  473. struct nvkm_gr *gr = &chan->gr->base;
  474. nv04_gr_idle(gr);
  475. nvkm_mask(device, NV10_PGRAPH_DEBUG_4, 0x00000100, 0x00000100);
  476. nvkm_mask(device, 0x4006b0, 0x08000000, 0x08000000);
  477. }
  478. static bool
  479. nv17_gr_mthd_celcius(struct nv10_gr_chan *chan, u32 mthd, u32 data)
  480. {
  481. void (*func)(struct nv10_gr_chan *, u32, u32);
  482. switch (mthd) {
  483. case 0x1638 ... 0x1644:
  484. func = nv17_gr_mthd_lma_window; break;
  485. case 0x1658: func = nv17_gr_mthd_lma_enable; break;
  486. default:
  487. return false;
  488. }
  489. func(chan, mthd, data);
  490. return true;
  491. }
  492. static bool
  493. nv10_gr_mthd(struct nv10_gr_chan *chan, u8 class, u32 mthd, u32 data)
  494. {
  495. bool (*func)(struct nv10_gr_chan *, u32, u32);
  496. switch (class) {
  497. case 0x99: func = nv17_gr_mthd_celcius; break;
  498. default:
  499. return false;
  500. }
  501. return func(chan, mthd, data);
  502. }
  503. /*******************************************************************************
  504. * PGRAPH context
  505. ******************************************************************************/
  506. static struct nv10_gr_chan *
  507. nv10_gr_channel(struct nv10_gr *gr)
  508. {
  509. struct nvkm_device *device = gr->base.engine.subdev.device;
  510. struct nv10_gr_chan *chan = NULL;
  511. if (nvkm_rd32(device, 0x400144) & 0x00010000) {
  512. int chid = nvkm_rd32(device, 0x400148) >> 24;
  513. if (chid < ARRAY_SIZE(gr->chan))
  514. chan = gr->chan[chid];
  515. }
  516. return chan;
  517. }
/* Save the whole 3D pipeline state of the active channel into
 * chan->pipe_state.  The windows are read in this specific order
 * (0x4400 first, 0x0000/0x0040 last); nv10_gr_load_pipe() restores
 * them in a matching order -- keep the two functions in sync. */
static void
nv10_gr_save_pipe(struct nv10_gr_chan *chan)
{
	struct nv10_gr *gr = chan->gr;
	struct pipe_state *pipe = &chan->pipe_state;
	struct nvkm_device *device = gr->base.engine.subdev.device;

	PIPE_SAVE(gr, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(gr, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(gr, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(gr, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(gr, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(gr, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(gr, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(gr, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(gr, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(gr, pipe->pipe_0x0000, 0x0000);
}
/* Restore the 3D pipeline state saved by nv10_gr_save_pipe().  XFMODE
 * is forced to a neutral value and some pipe ranges are primed with
 * constants before the 0x0200 window is restored, then the real XFMODE
 * and the remaining windows are put back.  The ordering of these
 * accesses is hardware-mandated -- do not reorder. */
static void
nv10_gr_load_pipe(struct nv10_gr_chan *chan)
{
	struct nv10_gr *gr = chan->gr;
	struct pipe_state *pipe = &chan->pipe_state;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	u32 xfmode0, xfmode1;
	int i;

	nv04_gr_idle(&gr->base);
	/* XXX check haiku comments */
	xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0);
	xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1);
	nvkm_wr32(device, NV10_PGRAPH_XFMODE0, 0x10000000);
	nvkm_wr32(device, NV10_PGRAPH_XFMODE1, 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000008);
	PIPE_RESTORE(gr, pipe->pipe_0x0200, 0x0200);
	nv04_gr_idle(&gr->base);

	/* restore XFMODE */
	nvkm_wr32(device, NV10_PGRAPH_XFMODE0, xfmode0);
	nvkm_wr32(device, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(gr, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(gr, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(gr, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(gr, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(gr, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(gr, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(gr, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(gr, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(gr, pipe->pipe_0x0040, 0x0040);
	nv04_gr_idle(&gr->base);
}
/* Fill chan->pipe_state with the default pipeline values a freshly
 * created channel should start from.  The helper macros walk a cursor
 * (pipe_state_addr) through each pipe_0xNNNN array; PIPE_INIT_END
 * verifies that exactly ARRAY_SIZE values were written, catching any
 * miscount in the literal tables below. */
static void
nv10_gr_create_pipe(struct nv10_gr_chan *chan)
{
	struct nv10_gr *gr = chan->gr;
	struct nvkm_subdev *subdev = &gr->base.engine.subdev;
	struct pipe_state *pipe_state = &chan->pipe_state;
	u32 *pipe_state_addr;
	int i;

/* point the write cursor at the start of the given window's array */
#define PIPE_INIT(addr) \
	do { \
		pipe_state_addr = pipe_state->pipe_##addr; \
	} while (0)

/* check the cursor landed exactly at the end of the window's array */
#define PIPE_INIT_END(addr) \
	do { \
		u32 *__end_addr = pipe_state->pipe_##addr + \
				  ARRAY_SIZE(pipe_state->pipe_##addr); \
		if (pipe_state_addr != __end_addr) \
			nvkm_error(subdev, "incomplete pipe init for 0x%x :  %p/%p\n", \
				   addr, pipe_state_addr, __end_addr); \
	} while (0)

#define NV_WRITE_PIPE_INIT(value) *(pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}
  722. static int
  723. nv10_gr_ctx_regs_find_offset(struct nv10_gr *gr, int reg)
  724. {
  725. struct nvkm_subdev *subdev = &gr->base.engine.subdev;
  726. int i;
  727. for (i = 0; i < ARRAY_SIZE(nv10_gr_ctx_regs); i++) {
  728. if (nv10_gr_ctx_regs[i] == reg)
  729. return i;
  730. }
  731. nvkm_error(subdev, "unknown offset nv10_ctx_regs %d\n", reg);
  732. return -1;
  733. }
  734. static int
  735. nv17_gr_ctx_regs_find_offset(struct nv10_gr *gr, int reg)
  736. {
  737. struct nvkm_subdev *subdev = &gr->base.engine.subdev;
  738. int i;
  739. for (i = 0; i < ARRAY_SIZE(nv17_gr_ctx_regs); i++) {
  740. if (nv17_gr_ctx_regs[i] == reg)
  741. return i;
  742. }
  743. nvkm_error(subdev, "unknown offset nv17_ctx_regs %d\n", reg);
  744. return -1;
  745. }
/* Restore the hidden vertex-buffer DMA object binding by injecting the
 * NV10TCL_DMA_VTXBUF method (0x18c) through the PGRAPH FIFO interface,
 * since that state cannot be written back via MMIO.  Saves and fully
 * restores the FIFO and ctx-object state around the injection.  The
 * step ordering here is hardware-mandated -- do not reorder. */
static void
nv10_gr_load_dma_vtxbuf(struct nv10_gr_chan *chan, int chid, u32 inst)
{
	struct nv10_gr *gr = chan->gr;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	u32 st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	u32 ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		int class = nvkm_rd32(device, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	/* nothing to do without a celsius object and a DMA object instance */
	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nvkm_rd32(device, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nvkm_rd32(device, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nvkm_rd32(device, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(i),
			  nvkm_rd32(device, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2,
		  0x2c000000 | chid << 20 | subchan << 16 | 0x18c);
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nvkm_mask(device, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nvkm_wr32(device, 0x4007a0 + 4 * i, fifo[i]);

	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2, st2);
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nvkm_wr32(device, NV10_PGRAPH_CTX_USER, ctx_user);
}
/* Load a channel's software-saved context onto PGRAPH: the register
 * tables first, then the pipeline state, then the hidden vertex-buffer
 * DMA binding, and finally the ctx-control/user registers to make the
 * channel current.  Always returns 0. */
static int
nv10_gr_load_context(struct nv10_gr_chan *chan, int chid)
{
	struct nv10_gr *gr = chan->gr;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	u32 inst;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_gr_ctx_regs); i++)
		nvkm_wr32(device, nv10_gr_ctx_regs[i], chan->nv10[i]);

	/* the extra NV17 registers only exist on newer chips */
	if (device->card_type >= NV_11 && device->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_gr_ctx_regs); i++)
			nvkm_wr32(device, nv17_gr_ctx_regs[i], chan->nv17[i]);
	}

	nv10_gr_load_pipe(chan);

	/* low 16 bits of GLOBALSTATE1 hold the vtxbuf DMA object instance */
	inst = nvkm_rd32(device, NV10_PGRAPH_GLOBALSTATE1) & 0xffff;
	nv10_gr_load_dma_vtxbuf(chan, chid, inst);

	nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nvkm_mask(device, NV10_PGRAPH_FFINTFC_ST2, 0x30000000, 0x00000000);
	return 0;
}
/* Save the live PGRAPH context of @chan into its software copy (nv10[]/
 * nv17[] arrays plus pipe state), then mark PGRAPH as owned by no channel
 * (id 0x1f in CTX_USER).  Caller must hold gr->lock with PGRAPH idle.
 * Always returns 0.
 */
static int
nv10_gr_unload_context(struct nv10_gr_chan *chan)
{
	struct nv10_gr *gr = chan->gr;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	int i;

	/* Snapshot every context register into the channel struct. */
	for (i = 0; i < ARRAY_SIZE(nv10_gr_ctx_regs); i++)
		chan->nv10[i] = nvkm_rd32(device, nv10_gr_ctx_regs[i]);

	/* NV11+ chipsets >= 0x17 carry extra context registers. */
	if (device->card_type >= NV_11 && device->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_gr_ctx_regs); i++)
			chan->nv17[i] = nvkm_rd32(device, nv17_gr_ctx_regs[i]);
	}

	nv10_gr_save_pipe(chan);

	nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	/* 0x1f == "no channel currently owns PGRAPH". */
	nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, 0x1f000000);
	return 0;
}
/* Software context switch, invoked from the CONTEXT_SWITCH interrupt.
 * Saves the currently loaded channel (if any) and loads the channel that
 * triggered the trapped method.  Caller must hold gr->lock.
 */
static void
nv10_gr_context_switch(struct nv10_gr *gr)
{
	struct nvkm_device *device = gr->base.engine.subdev.device;
	struct nv10_gr_chan *prev = NULL;
	struct nv10_gr_chan *next = NULL;
	int chid;

	/* Drain PGRAPH before touching its context. */
	nv04_gr_idle(&gr->base);

	/* If previous context is valid, we need to save it */
	prev = nv10_gr_channel(gr);
	if (prev)
		nv10_gr_unload_context(prev);

	/* load context for next channel — id comes from the trapped address */
	chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
	next = gr->chan[chid];
	if (next)
		nv10_gr_load_context(next, chid);
}
  860. static int
  861. nv10_gr_chan_fini(struct nvkm_object *object, bool suspend)
  862. {
  863. struct nv10_gr_chan *chan = nv10_gr_chan(object);
  864. struct nv10_gr *gr = chan->gr;
  865. struct nvkm_device *device = gr->base.engine.subdev.device;
  866. unsigned long flags;
  867. spin_lock_irqsave(&gr->lock, flags);
  868. nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
  869. if (nv10_gr_channel(gr) == chan)
  870. nv10_gr_unload_context(chan);
  871. nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
  872. spin_unlock_irqrestore(&gr->lock, flags);
  873. return 0;
  874. }
  875. static void *
  876. nv10_gr_chan_dtor(struct nvkm_object *object)
  877. {
  878. struct nv10_gr_chan *chan = nv10_gr_chan(object);
  879. struct nv10_gr *gr = chan->gr;
  880. unsigned long flags;
  881. spin_lock_irqsave(&gr->lock, flags);
  882. gr->chan[chan->chid] = NULL;
  883. spin_unlock_irqrestore(&gr->lock, flags);
  884. return chan;
  885. }
/* Object vtable for a PGRAPH channel context. */
static const struct nvkm_object_func
nv10_gr_chan = {
	.dtor = nv10_gr_chan_dtor,
	.fini = nv10_gr_chan_fini,
};
/* Set the software copy of context register @reg (nv10 list) to @val.
 * NOTE(review): the "> 0" test silently skips offset 0, i.e. the first
 * entry of nv10_gr_ctx_regs; harmless for the registers written below,
 * but confirm ">= 0" was not intended. */
#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_gr_ctx_regs_find_offset(gr, reg); \
	if (offset > 0) \
		chan->nv10[offset] = val; \
	} while (0)
/* Set the software copy of context register @reg (nv17 list) to @val.
 * NOTE(review): like NV_WRITE_CTX, the "> 0" test skips offset 0 — verify
 * no caller targets the first entry of nv17_gr_ctx_regs. */
#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_gr_ctx_regs_find_offset(gr, reg); \
	if (offset > 0) \
		chan->nv17[offset] = val; \
	} while (0)
/* Allocate and initialise a PGRAPH channel context for fifo channel
 * @fifoch.  The software context arrays are seeded with the hardware's
 * default values; the context is only written to hardware later by
 * nv10_gr_load_context().  Returns 0 or -ENOMEM.
 */
int
nv10_gr_chan_new(struct nvkm_gr *base, struct nvkm_fifo_chan *fifoch,
		 const struct nvkm_oclass *oclass, struct nvkm_object **pobject)
{
	struct nv10_gr *gr = nv10_gr(base);
	struct nv10_gr_chan *chan;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	unsigned long flags;

	if (!(chan = kzalloc(sizeof(*chan), GFP_KERNEL)))
		return -ENOMEM;
	nvkm_object_ctor(&nv10_gr_chan, oclass, &chan->object);
	chan->gr = gr;
	chan->chid = fifoch->chid;
	*pobject = &chan->object;

	/* Seed the software context with default register values. */
	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (device->card_type >= NV_11 && device->chipset >= 0x17) {
		/* is it really needed ??? */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
			       nvkm_rd32(device, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nvkm_rd32(device, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	/* Record this channel's id in the saved CTX_USER value. */
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->chid << 24);

	nv10_gr_create_pipe(chan);

	/* Publish the channel for the interrupt handler. */
	spin_lock_irqsave(&gr->lock, flags);
	gr->chan[chan->chid] = chan;
	spin_unlock_irqrestore(&gr->lock, flags);
	return 0;
}
  939. /*******************************************************************************
  940. * PGRAPH engine/subdev functions
  941. ******************************************************************************/
  942. void
  943. nv10_gr_tile(struct nvkm_gr *base, int i, struct nvkm_fb_tile *tile)
  944. {
  945. struct nv10_gr *gr = nv10_gr(base);
  946. struct nvkm_device *device = gr->base.engine.subdev.device;
  947. struct nvkm_fifo *fifo = device->fifo;
  948. unsigned long flags;
  949. nvkm_fifo_pause(fifo, &flags);
  950. nv04_gr_idle(&gr->base);
  951. nvkm_wr32(device, NV10_PGRAPH_TLIMIT(i), tile->limit);
  952. nvkm_wr32(device, NV10_PGRAPH_TSIZE(i), tile->pitch);
  953. nvkm_wr32(device, NV10_PGRAPH_TILE(i), tile->addr);
  954. nvkm_fifo_start(fifo, &flags);
  955. }
/* Bitfield names for decoding NV03_PGRAPH_INTR in error messages. */
const struct nvkm_bitfield nv10_gr_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR, "ERROR" },
	{}
};
/* Bitfield names for decoding NV03_PGRAPH_NSTATUS in error messages. */
const struct nvkm_bitfield nv10_gr_nstatus[] = {
	{ NV10_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
	{ NV10_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
	{ NV10_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
	{ NV10_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}
};
/* PGRAPH interrupt handler.
 *
 * Decodes the trapped method (channel, subchannel, method, data) from
 * TRAPPED_ADDR/TRAPPED_DATA, lets software methods service ILLEGAL_MTHD
 * errors, performs software context switches on CONTEXT_SWITCH interrupts,
 * and logs whatever remains unhandled.
 */
void
nv10_gr_intr(struct nvkm_gr *base)
{
	struct nv10_gr *gr = nv10_gr(base);
	struct nvkm_subdev *subdev = &gr->base.engine.subdev;
	struct nvkm_device *device = subdev->device;
	u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);
	u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);
	u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x01f00000) >> 20;	/* channel id */
	u32 subc = (addr & 0x00070000) >> 16;	/* subchannel */
	u32 mthd = (addr & 0x00001ffc);		/* method offset */
	u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nvkm_rd32(device, 0x400160 + subc * 4) & 0xfff;
	u32 show = stat;	/* bits still unhandled, logged below */
	char msg[128], src[128], sta[128];
	struct nv10_gr_chan *chan;
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	chan = gr->chan[chid];

	if (stat & NV_PGRAPH_INTR_ERROR) {
		/* Try the software method implementation before reporting. */
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			if (!nv10_gr_mthd(chan, class, mthd, data))
				show &= ~NV_PGRAPH_INTR_ERROR;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		/* Ack first, then switch contexts in software. */
		nvkm_wr32(device, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv10_gr_context_switch(gr);
	}

	/* Ack remaining interrupts and let PGRAPH fetch again. */
	nvkm_wr32(device, NV03_PGRAPH_INTR, stat);
	nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nvkm_snprintbf(msg, sizeof(msg), nv10_gr_intr_name, show);
		nvkm_snprintbf(src, sizeof(src), nv04_gr_nsource, nsource);
		nvkm_snprintbf(sta, sizeof(sta), nv10_gr_nstatus, nstatus);
		nvkm_error(subdev, "intr %08x [%s] nsource %08x [%s] "
				   "nstatus %08x [%s] ch %d [%s] subc %d "
				   "class %04x mthd %04x data %08x\n",
			   show, msg, nsource, src, nstatus, sta, chid,
			   chan ? chan->object.client->name : "unknown",
			   subc, class, mthd, data);
	}

	spin_unlock_irqrestore(&gr->lock, flags);
}
/* One-time PGRAPH initialisation: ack/enable interrupts, program the DEBUG
 * registers (magic values from nouveau reverse engineering), clear the
 * subchannel context slots, and mark no channel (0x1f) as current.
 * Always returns 0.
 */
int
nv10_gr_init(struct nvkm_gr *base)
{
	struct nv10_gr *gr = nv10_gr(base);
	struct nvkm_device *device = gr->base.engine.subdev.device;

	/* Ack any pending interrupts, then enable them all. */
	nvkm_wr32(device, NV03_PGRAPH_INTR , 0xFFFFFFFF);
	nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0x55DE0830 | (1 << 29) | (1 << 31));

	/* NV11+ chipsets >= 0x17 need additional setup. */
	if (device->card_type >= NV_11 && device->chipset >= 0x17) {
		nvkm_wr32(device, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nvkm_wr32(device, 0x400a10, 0x03ff3fb6);
		nvkm_wr32(device, 0x400838, 0x002f8684);
		nvkm_wr32(device, 0x40083c, 0x00115f3f);
		nvkm_wr32(device, 0x4006b0, 0x40000020);
	} else {
		nvkm_wr32(device, NV10_PGRAPH_DEBUG_4, 0x00000000);
	}

	/* No context object bound on any of the five subchannels. */
	nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nvkm_wr32(device, NV10_PGRAPH_STATE, 0xFFFFFFFF);
	/* 0x1f == "no channel currently owns PGRAPH". */
	nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, 0x1f000000);
	nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);
	return 0;
}
  1049. int
  1050. nv10_gr_new_(const struct nvkm_gr_func *func, struct nvkm_device *device,
  1051. int index, struct nvkm_gr **pgr)
  1052. {
  1053. struct nv10_gr *gr;
  1054. if (!(gr = kzalloc(sizeof(*gr), GFP_KERNEL)))
  1055. return -ENOMEM;
  1056. spin_lock_init(&gr->lock);
  1057. *pgr = &gr->base;
  1058. return nvkm_gr_ctor(func, device, index, true, &gr->base);
  1059. }
/* Engine function table for NV10: hooks plus the graphics object classes
 * exposed to userspace (class id / nv04 software object pairs). */
static const struct nvkm_gr_func
nv10_gr = {
	.init = nv10_gr_init,
	.intr = nv10_gr_intr,
	.tile = nv10_gr_tile,
	.chan_new = nv10_gr_chan_new,
	.sclass = {
		{ -1, -1, 0x0012, &nv04_gr_object }, /* beta1 */
		{ -1, -1, 0x0019, &nv04_gr_object }, /* clip */
		{ -1, -1, 0x0030, &nv04_gr_object }, /* null */
		{ -1, -1, 0x0039, &nv04_gr_object }, /* m2mf */
		{ -1, -1, 0x0043, &nv04_gr_object }, /* rop */
		{ -1, -1, 0x0044, &nv04_gr_object }, /* pattern */
		{ -1, -1, 0x004a, &nv04_gr_object }, /* gdi */
		{ -1, -1, 0x0052, &nv04_gr_object }, /* swzsurf */
		{ -1, -1, 0x005f, &nv04_gr_object }, /* blit */
		{ -1, -1, 0x0062, &nv04_gr_object }, /* surf2d */
		{ -1, -1, 0x0072, &nv04_gr_object }, /* beta4 */
		{ -1, -1, 0x0089, &nv04_gr_object }, /* sifm */
		{ -1, -1, 0x008a, &nv04_gr_object }, /* ifc */
		{ -1, -1, 0x009f, &nv04_gr_object }, /* blit */
		{ -1, -1, 0x0093, &nv04_gr_object }, /* surf3d */
		{ -1, -1, 0x0094, &nv04_gr_object }, /* ttri */
		{ -1, -1, 0x0095, &nv04_gr_object }, /* mtri */
		{ -1, -1, 0x0056, &nv04_gr_object }, /* celcius */
		{}
	}
};
/* Public entry point: create the NV10 PGRAPH engine with its func table. */
int
nv10_gr_new(struct nvkm_device *device, int index, struct nvkm_gr **pgr)
{
	return nv10_gr_new_(&nv10_gr, device, index, pgr);
}