nv04.c

/*
 * Copyright 2007 Stephane Marchesin
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include "priv.h"
#include "regs.h"

#include <core/client.h>
#include <core/gpuobj.h>
#include <engine/fifo.h>
#include <engine/fifo/chan.h>
#include <subdev/instmem.h>
#include <subdev/timer.h>
static u32
nv04_gr_ctx_regs[] = {
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};
#define nv04_gr(p) container_of((p), struct nv04_gr, base)

struct nv04_gr {
	struct nvkm_gr base;
	struct nv04_gr_chan *chan[16];
	spinlock_t lock;
};

#define nv04_gr_chan(p) container_of((p), struct nv04_gr_chan, object)

struct nv04_gr_chan {
	struct nvkm_object object;
	struct nv04_gr *gr;
	int chid;
	u32 nv04[ARRAY_SIZE(nv04_gr_ctx_regs)];
};
/*******************************************************************************
 * Graphics object classes
 ******************************************************************************/

/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with an object of the proper type, or with the
 * NULL type. It'll only allow rendering using the grobj if all the needed
 * objects are bound. The needed set of objects depends on the selected
 * operation: for example, the rop object is needed by ROP_AND, but not by
 * SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in the grobj. Instead, it'll allow rendering whenever bit
 * 24 is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04 and
 * the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */
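
/*
 * A minimal sketch (not part of the original driver) of decoding the grobj
 * word-0 layout documented above.  The struct and helper names below are
 * hypothetical and exist purely to make the bit positions concrete; the
 * driver itself works on the raw masks directly, as nv04_gr_set_ctx1() and
 * nv04_gr_set_ctx_val() below do.  On NV05, bits 25-30 carry the per-object
 * valid flags instead of just the surf3d bit shown here.
 */
struct nv04_grobj_ctx1 {
	u8 class;		/* bits 0-7: object class */
	bool chroma;		/* bit 12: color key active */
	bool clip;		/* bit 13: clip rect active */
	bool swizzled;		/* bit 14: swizzled destination (buffer 5) */
	u8 op;			/* bits 15-17: 2d operation [patch config] */
	bool patch_valid;	/* bit 24: rendering with this object allowed */
	bool surf3d_valid;	/* bit 25: tex_tri/multitex_tri only (NV04) */
};

static inline void
nv04_grobj_decode_ctx1(u32 ctx1, struct nv04_grobj_ctx1 *f)
{
	f->class        = ctx1 & 0x000000ff;
	f->chroma       = !!(ctx1 & 0x00001000);
	f->clip         = !!(ctx1 & 0x00002000);
	f->swizzled     = !!(ctx1 & 0x00004000);
	f->op           = (ctx1 >> 15) & 0x7;
	f->patch_valid  = !!(ctx1 & 0x01000000);
	f->surf3d_valid = !!(ctx1 & 0x02000000);
}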

/* Update a masked field of grobj word 0 in instmem, and mirror the new
 * value into the live context-switch register and the ctx cache entry for
 * the subchannel that trapped.
 */
static void
nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
{
	int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp  = nvkm_rd32(device, 0x700000 + inst);
	tmp &= ~mask;
	tmp |= value;
	nvkm_wr32(device, 0x700000 + inst, tmp);

	nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp);
}

/* Update the emulated NV05-style valid bits kept in the otherwise-unused
 * last grobj word, then recompute whether all objects required by the
 * current operation are bound and reflect that in patch-valid (bit 24).
 */
static void
nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	ctx1 = nvkm_rd32(device, 0x700000 + inst);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	tmp = nvkm_rd32(device, 0x70000c + inst);
	tmp &= ~mask;
	tmp |= value;
	nvkm_wr32(device, 0x70000c + inst, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_gr_set_ctx1(device, inst, 0x01000000, valid << 24);
}

static bool
nv04_gr_mthd_set_operation(struct nvkm_device *device, u32 inst, u32 data)
{
	u8 class = nvkm_rd32(device, 0x700000 + inst) & 0x000000ff;

	if (data > 5)
		return false;
	/* Old versions of the objects only accept first three operations. */
	if (data > 2 && class < 0x40)
		return false;

	nv04_gr_set_ctx1(device, inst, 0x00038000, data << 15);
	/* changing operation changes set of objects needed for validation */
	nv04_gr_set_ctx_val(device, inst, 0, 0);
	return true;
}

static bool
nv04_gr_mthd_surf3d_clip_h(struct nvkm_device *device, u32 inst, u32 data)
{
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;

	if (min & 0x8000)
		/* too large */
		return false;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;

	max = min + w;
	max &= 0x3ffff;
	nvkm_wr32(device, 0x40053c, min);
	nvkm_wr32(device, 0x400544, max);
	return true;
}

static bool
nv04_gr_mthd_surf3d_clip_v(struct nvkm_device *device, u32 inst, u32 data)
{
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;

	if (min & 0x8000)
		/* too large */
		return false;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;

	max = min + w;
	max &= 0x3ffff;
	nvkm_wr32(device, 0x400540, min);
	nvkm_wr32(device, 0x400548, max);
	return true;
}

static u8
nv04_gr_mthd_bind_class(struct nvkm_device *device, u32 inst)
{
	return nvkm_rd32(device, 0x700000 + (inst << 4));
}

static bool
nv04_gr_mthd_bind_surf2d(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
		return true;
	case 0x42:
		nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_surf2d_swzsurf(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
		return true;
	case 0x42:
		nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
		return true;
	case 0x52:
		nv04_gr_set_ctx1(device, inst, 0x00004000, 0x00004000);
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
		return true;
	}
	return false;
}

static bool
nv01_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x08000000, 0);
		return true;
	case 0x18:
		nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x08000000, 0);
		return true;
	case 0x44:
		nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_rop(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x10000000, 0);
		return true;
	case 0x43:
		nv04_gr_set_ctx_val(device, inst, 0x10000000, 0x10000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_beta1(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x20000000, 0);
		return true;
	case 0x12:
		nv04_gr_set_ctx_val(device, inst, 0x20000000, 0x20000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_beta4(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x40000000, 0);
		return true;
	case 0x72:
		nv04_gr_set_ctx_val(device, inst, 0x40000000, 0x40000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_surf_dst(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
		return true;
	case 0x58:
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_surf_src(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x04000000, 0);
		return true;
	case 0x59:
		nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_surf_color(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
		return true;
	case 0x5a:
		nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
		return true;
	}
	return false;
}

static bool
nv04_gr_mthd_bind_surf_zeta(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx_val(device, inst, 0x04000000, 0);
		return true;
	case 0x5b:
		nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000);
		return true;
	}
	return false;
}

static bool
nv01_gr_mthd_bind_clip(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx1(device, inst, 0x2000, 0);
		return true;
	case 0x19:
		nv04_gr_set_ctx1(device, inst, 0x2000, 0x2000);
		return true;
	}
	return false;
}

static bool
nv01_gr_mthd_bind_chroma(struct nvkm_device *device, u32 inst, u32 data)
{
	switch (nv04_gr_mthd_bind_class(device, data)) {
	case 0x30:
		nv04_gr_set_ctx1(device, inst, 0x1000, 0);
		return true;
	/* Yes, for some reason even the old versions of objects
	 * accept 0x57 and not 0x17. Consistency be damned.
	 */
	case 0x57:
		nv04_gr_set_ctx1(device, inst, 0x1000, 0x1000);
		return true;
	}
	return false;
}

static bool
nv03_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_patt; break;
	case 0x0188: func = nv04_gr_mthd_bind_rop; break;
	case 0x018c: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0190: func = nv04_gr_mthd_bind_surf_dst; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0188: func = nv04_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
	case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv01_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
	case 0x0188: func = nv01_gr_mthd_bind_clip; break;
	case 0x018c: func = nv01_gr_mthd_bind_patt; break;
	case 0x0190: func = nv04_gr_mthd_bind_rop; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0198: func = nv04_gr_mthd_bind_surf_dst; break;
	case 0x019c: func = nv04_gr_mthd_bind_surf_src; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
	case 0x0188: func = nv01_gr_mthd_bind_clip; break;
	case 0x018c: func = nv04_gr_mthd_bind_patt; break;
	case 0x0190: func = nv04_gr_mthd_bind_rop; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0198: func = nv04_gr_mthd_bind_beta4; break;
	case 0x019c: func = nv04_gr_mthd_bind_surf2d; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_iifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0188: func = nv01_gr_mthd_bind_chroma; break;
	case 0x018c: func = nv01_gr_mthd_bind_clip; break;
	case 0x0190: func = nv04_gr_mthd_bind_patt; break;
	case 0x0194: func = nv04_gr_mthd_bind_rop; break;
	case 0x0198: func = nv04_gr_mthd_bind_beta1; break;
	case 0x019c: func = nv04_gr_mthd_bind_beta4; break;
	case 0x01a0: func = nv04_gr_mthd_bind_surf2d_swzsurf; break;
	case 0x03e4: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv01_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
	case 0x0188: func = nv01_gr_mthd_bind_clip; break;
	case 0x018c: func = nv01_gr_mthd_bind_patt; break;
	case 0x0190: func = nv04_gr_mthd_bind_rop; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0198: func = nv04_gr_mthd_bind_surf_dst; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
	case 0x0188: func = nv01_gr_mthd_bind_clip; break;
	case 0x018c: func = nv04_gr_mthd_bind_patt; break;
	case 0x0190: func = nv04_gr_mthd_bind_rop; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0198: func = nv04_gr_mthd_bind_beta4; break;
	case 0x019c: func = nv04_gr_mthd_bind_surf2d; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv03_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
	case 0x0188: func = nv01_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
	case 0x0188: func = nv04_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
	case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv03_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0188: func = nv01_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
	case 0x0304: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0188: func = nv04_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
	case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
	case 0x0304: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_surf3d(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x02f8: func = nv04_gr_mthd_surf3d_clip_h; break;
	case 0x02fc: func = nv04_gr_mthd_surf3d_clip_v; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv03_gr_mthd_ttri(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0188: func = nv01_gr_mthd_bind_clip; break;
	case 0x018c: func = nv04_gr_mthd_bind_surf_color; break;
	case 0x0190: func = nv04_gr_mthd_bind_surf_zeta; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv01_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_clip; break;
	case 0x0188: func = nv01_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32);
	switch (mthd) {
	case 0x0184: func = nv01_gr_mthd_bind_clip; break;
	case 0x0188: func = nv04_gr_mthd_bind_patt; break;
	case 0x018c: func = nv04_gr_mthd_bind_rop; break;
	case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
	case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
	case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
	case 0x02fc: func = nv04_gr_mthd_set_operation; break;
	default:
		return false;
	}
	return func(device, inst, data);
}

static bool
nv04_gr_mthd(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
{
	bool (*func)(struct nvkm_device *, u32, u32, u32);
	switch (nvkm_rd32(device, 0x700000 + inst) & 0x000000ff) {
	case 0x1c ... 0x1e:
		   func = nv01_gr_mthd_prim; break;
	case 0x1f: func = nv01_gr_mthd_blit; break;
	case 0x21: func = nv01_gr_mthd_ifc; break;
	case 0x36: func = nv03_gr_mthd_sifc; break;
	case 0x37: func = nv03_gr_mthd_sifm; break;
	case 0x48: func = nv03_gr_mthd_ttri; break;
	case 0x4a: func = nv04_gr_mthd_gdi; break;
	case 0x4b: func = nv03_gr_mthd_gdi; break;
	case 0x53: func = nv04_gr_mthd_surf3d; break;
	case 0x5c ... 0x5e:
		   func = nv04_gr_mthd_prim; break;
	case 0x5f: func = nv04_gr_mthd_blit; break;
	case 0x60: func = nv04_gr_mthd_iifc; break;
	case 0x61: func = nv04_gr_mthd_ifc; break;
	case 0x76: func = nv04_gr_mthd_sifc; break;
	case 0x77: func = nv04_gr_mthd_sifm; break;
	default:
		return false;
	}
	return func(device, inst, mthd, data);
}

static int
nv04_gr_object_bind(struct nvkm_object *object, struct nvkm_gpuobj *parent,
		    int align, struct nvkm_gpuobj **pgpuobj)
{
	int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, align,
				  false, parent, pgpuobj);
	if (ret == 0) {
		nvkm_kmap(*pgpuobj);
		nvkm_wo32(*pgpuobj, 0x00, object->oclass);
#ifdef __BIG_ENDIAN
		nvkm_mo32(*pgpuobj, 0x00, 0x00080000, 0x00080000);
#endif
		nvkm_wo32(*pgpuobj, 0x04, 0x00000000);
		nvkm_wo32(*pgpuobj, 0x08, 0x00000000);
		nvkm_wo32(*pgpuobj, 0x0c, 0x00000000);
		nvkm_done(*pgpuobj);
	}
	return ret;
}

const struct nvkm_object_func
nv04_gr_object = {
	.bind = nv04_gr_object_bind,
};

/*******************************************************************************
 * PGRAPH context
 ******************************************************************************/

/* Return the channel whose context is currently resident in PGRAPH, if any. */
static struct nv04_gr_chan *
nv04_gr_channel(struct nv04_gr *gr)
{
	struct nvkm_device *device = gr->base.engine.subdev.device;
	struct nv04_gr_chan *chan = NULL;
	if (nvkm_rd32(device, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
		int chid = nvkm_rd32(device, NV04_PGRAPH_CTX_USER) >> 24;
		if (chid < ARRAY_SIZE(gr->chan))
			chan = gr->chan[chid];
	}
	return chan;
}

static int
nv04_gr_load_context(struct nv04_gr_chan *chan, int chid)
{
	struct nvkm_device *device = chan->gr->base.engine.subdev.device;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
		nvkm_wr32(device, nv04_gr_ctx_regs[i], chan->nv04[i]);

	nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nvkm_mask(device, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}

static int
nv04_gr_unload_context(struct nv04_gr_chan *chan)
{
	struct nvkm_device *device = chan->gr->base.engine.subdev.device;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
		chan->nv04[i] = nvkm_rd32(device, nv04_gr_ctx_regs[i]);

	nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
	return 0;
}

static void
nv04_gr_context_switch(struct nv04_gr *gr)
{
	struct nvkm_device *device = gr->base.engine.subdev.device;
	struct nv04_gr_chan *prev = NULL;
	struct nv04_gr_chan *next = NULL;
	int chid;

	nv04_gr_idle(&gr->base);

	/* If previous context is valid, we need to save it */
	prev = nv04_gr_channel(gr);
	if (prev)
		nv04_gr_unload_context(prev);

	/* load context for next channel */
	chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = gr->chan[chid];
	if (next)
		nv04_gr_load_context(next, chid);
}

static u32 *ctx_reg(struct nv04_gr_chan *chan, u32 reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++) {
		if (nv04_gr_ctx_regs[i] == reg)
			return &chan->nv04[i];
	}

	return NULL;
}

static void *
nv04_gr_chan_dtor(struct nvkm_object *object)
{
	struct nv04_gr_chan *chan = nv04_gr_chan(object);
	struct nv04_gr *gr = chan->gr;
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	gr->chan[chan->chid] = NULL;
	spin_unlock_irqrestore(&gr->lock, flags);
	return chan;
}

static int
nv04_gr_chan_fini(struct nvkm_object *object, bool suspend)
{
	struct nv04_gr_chan *chan = nv04_gr_chan(object);
	struct nv04_gr *gr = chan->gr;
	struct nvkm_device *device = gr->base.engine.subdev.device;
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (nv04_gr_channel(gr) == chan)
		nv04_gr_unload_context(chan);
	nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&gr->lock, flags);
	return 0;
}

static const struct nvkm_object_func
nv04_gr_chan = {
	.dtor = nv04_gr_chan_dtor,
	.fini = nv04_gr_chan_fini,
};

static int
nv04_gr_chan_new(struct nvkm_gr *base, struct nvkm_fifo_chan *fifoch,
		 const struct nvkm_oclass *oclass, struct nvkm_object **pobject)
{
	struct nv04_gr *gr = nv04_gr(base);
	struct nv04_gr_chan *chan;
	unsigned long flags;

	if (!(chan = kzalloc(sizeof(*chan), GFP_KERNEL)))
		return -ENOMEM;
	nvkm_object_ctor(&nv04_gr_chan, oclass, &chan->object);
	chan->gr = gr;
	chan->chid = fifoch->chid;
	*pobject = &chan->object;

	*ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	spin_lock_irqsave(&gr->lock, flags);
	gr->chan[chan->chid] = chan;
	spin_unlock_irqrestore(&gr->lock, flags);
	return 0;
}

/*******************************************************************************
 * PGRAPH engine/subdev functions
 ******************************************************************************/

bool
nv04_gr_idle(struct nvkm_gr *gr)
{
	struct nvkm_subdev *subdev = &gr->engine.subdev;
	struct nvkm_device *device = subdev->device;
	u32 mask = 0xffffffff;

	if (device->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;

	if (nvkm_msec(device, 2000,
		if (!(nvkm_rd32(device, NV04_PGRAPH_STATUS) & mask))
			break;
	) < 0) {
		nvkm_error(subdev, "idle timed out with status %08x\n",
			   nvkm_rd32(device, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}

static const struct nvkm_bitfield
nv04_gr_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};

static const struct nvkm_bitfield
nv04_gr_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};

const struct nvkm_bitfield
nv04_gr_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}
};

static void
nv04_gr_intr(struct nvkm_gr *base)
{
	struct nv04_gr *gr = nv04_gr(base);
	struct nvkm_subdev *subdev = &gr->base.engine.subdev;
	struct nvkm_device *device = subdev->device;
	u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);
	u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);
	u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nvkm_rd32(device, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nvkm_rd32(device, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	char msg[128], src[128], sta[128];
	struct nv04_gr_chan *chan;
	unsigned long flags;

	spin_lock_irqsave(&gr->lock, flags);
	chan = gr->chan[chid];

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			if (!nv04_gr_mthd(device, inst, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		nvkm_wr32(device, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_gr_context_switch(gr);
	}

	nvkm_wr32(device, NV03_PGRAPH_INTR, stat);
	nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nvkm_snprintbf(msg, sizeof(msg), nv04_gr_intr_name, show);
		nvkm_snprintbf(src, sizeof(src), nv04_gr_nsource, nsource);
		nvkm_snprintbf(sta, sizeof(sta), nv04_gr_nstatus, nstatus);
		nvkm_error(subdev, "intr %08x [%s] nsource %08x [%s] "
				   "nstatus %08x [%s] ch %d [%s] subc %d "
				   "class %04x mthd %04x data %08x\n",
			   show, msg, nsource, src, nstatus, sta, chid,
			   chan ? chan->object.client->name : "unknown",
			   subc, class, mthd, data);
	}

	spin_unlock_irqrestore(&gr->lock, flags);
}

static int
nv04_gr_init(struct nvkm_gr *base)
{
	struct nv04_gr *gr = nv04_gr(base);
	struct nvkm_device *device = gr->base.engine.subdev.device;

	/* Enable PGRAPH interrupts */
	nvkm_wr32(device, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nvkm_wr32(device, NV04_PGRAPH_VALID1, 0);
	nvkm_wr32(device, NV04_PGRAPH_VALID2, 0);
	/*nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	  nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob, 01 haiku*/
	/*nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/
	/*nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nvkm_wr32(device, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);

	/* These don't belong here, they're part of a per-channel context */
	nvkm_wr32(device, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nvkm_wr32(device, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);
	return 0;
}

static const struct nvkm_gr_func
nv04_gr = {
	.init = nv04_gr_init,
	.intr = nv04_gr_intr,
	.chan_new = nv04_gr_chan_new,
	.sclass = {
		{ -1, -1, 0x0012, &nv04_gr_object }, /* beta1 */
		{ -1, -1, 0x0017, &nv04_gr_object }, /* chroma */
		{ -1, -1, 0x0018, &nv04_gr_object }, /* pattern (nv01) */
		{ -1, -1, 0x0019, &nv04_gr_object }, /* clip */
		{ -1, -1, 0x001c, &nv04_gr_object }, /* line */
		{ -1, -1, 0x001d, &nv04_gr_object }, /* tri */
		{ -1, -1, 0x001e, &nv04_gr_object }, /* rect */
		{ -1, -1, 0x001f, &nv04_gr_object },
		{ -1, -1, 0x0021, &nv04_gr_object },
		{ -1, -1, 0x0030, &nv04_gr_object }, /* null */
		{ -1, -1, 0x0036, &nv04_gr_object },
		{ -1, -1, 0x0037, &nv04_gr_object },
		{ -1, -1, 0x0038, &nv04_gr_object }, /* dvd subpicture */
		{ -1, -1, 0x0039, &nv04_gr_object }, /* m2mf */
		{ -1, -1, 0x0042, &nv04_gr_object }, /* surf2d */
		{ -1, -1, 0x0043, &nv04_gr_object }, /* rop */
		{ -1, -1, 0x0044, &nv04_gr_object }, /* pattern */
		{ -1, -1, 0x0048, &nv04_gr_object },
		{ -1, -1, 0x004a, &nv04_gr_object },
		{ -1, -1, 0x004b, &nv04_gr_object },
		{ -1, -1, 0x0052, &nv04_gr_object }, /* swzsurf */
		{ -1, -1, 0x0053, &nv04_gr_object },
		{ -1, -1, 0x0054, &nv04_gr_object }, /* ttri */
		{ -1, -1, 0x0055, &nv04_gr_object }, /* mtri */
		{ -1, -1, 0x0057, &nv04_gr_object }, /* chroma */
		{ -1, -1, 0x0058, &nv04_gr_object }, /* surf_dst */
		{ -1, -1, 0x0059, &nv04_gr_object }, /* surf_src */
		{ -1, -1, 0x005a, &nv04_gr_object }, /* surf_color */
		{ -1, -1, 0x005b, &nv04_gr_object }, /* surf_zeta */
		{ -1, -1, 0x005c, &nv04_gr_object }, /* line */
		{ -1, -1, 0x005d, &nv04_gr_object }, /* tri */
		{ -1, -1, 0x005e, &nv04_gr_object }, /* rect */
		{ -1, -1, 0x005f, &nv04_gr_object },
		{ -1, -1, 0x0060, &nv04_gr_object },
		{ -1, -1, 0x0061, &nv04_gr_object },
		{ -1, -1, 0x0064, &nv04_gr_object }, /* iifc (nv05) */
		{ -1, -1, 0x0065, &nv04_gr_object }, /* ifc (nv05) */
		{ -1, -1, 0x0066, &nv04_gr_object }, /* sifc (nv05) */
		{ -1, -1, 0x0072, &nv04_gr_object }, /* beta4 */
		{ -1, -1, 0x0076, &nv04_gr_object },
		{ -1, -1, 0x0077, &nv04_gr_object },
		{}
	}
};

int
nv04_gr_new(struct nvkm_device *device, int index, struct nvkm_gr **pgr)
{
	struct nv04_gr *gr;

	if (!(gr = kzalloc(sizeof(*gr), GFP_KERNEL)))
		return -ENOMEM;
	spin_lock_init(&gr->lock);
	*pgr = &gr->base;

	return nvkm_gr_ctor(&nv04_gr, device, index, 0x00001000,
			    true, &gr->base);
}