/* nv50_display.c */
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */
  24. #include <linux/dma-mapping.h>
  25. #include <drm/drmP.h>
  26. #include <drm/drm_crtc_helper.h>
  27. #include "nouveau_drm.h"
  28. #include "nouveau_dma.h"
  29. #include "nouveau_gem.h"
  30. #include "nouveau_connector.h"
  31. #include "nouveau_encoder.h"
  32. #include "nouveau_crtc.h"
  33. #include "nouveau_fence.h"
  34. #include "nv50_display.h"
  35. #include <core/client.h>
  36. #include <core/gpuobj.h>
  37. #include <core/class.h>
  38. #include <subdev/timer.h>
  39. #include <subdev/bar.h>
  40. #include <subdev/fb.h>
  41. #include <subdev/i2c.h>
/* Number of EVO DMA channels managed by this driver. */
#define EVO_DMA_NR 9

/* Logical channel indices: master (core) channel, then per-head
 * flip/overlay channels and overlay-immediate/cursor PIO channels. */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC( 0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)

/* Object handles used when creating display objects; the channel handle
 * encodes the channel type and head index, the push-buffer handle encodes
 * the channel class and index. */
#define EVO_CORE_HANDLE      (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                          \
			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
  58. /******************************************************************************
  59. * EVO channel
  60. *****************************************************************************/
/* Base state for any EVO channel (PIO or DMA). */
struct nv50_chan {
	struct nouveau_object *user;	/* channel object, used for reg access */
	u32 handle;			/* object handle; 0 = not yet created */
};
  65. static int
  66. nv50_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
  67. void *data, u32 size, struct nv50_chan *chan)
  68. {
  69. struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
  70. const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
  71. const u32 handle = EVO_CHAN_HANDLE(bclass, head);
  72. int ret;
  73. ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
  74. oclass, data, size, &chan->user);
  75. if (ret)
  76. return ret;
  77. chan->handle = handle;
  78. return 0;
  79. }
  80. static void
  81. nv50_chan_destroy(struct nouveau_object *core, struct nv50_chan *chan)
  82. {
  83. struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
  84. if (chan->handle)
  85. nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
  86. }
  87. /******************************************************************************
  88. * PIO EVO channel
  89. *****************************************************************************/
/* PIO-mode EVO channel: no push buffer, methods are written directly. */
struct nv50_pioc {
	struct nv50_chan base;
};
/* Tear down a PIO channel (thin wrapper over the base channel destroy). */
static void
nv50_pioc_destroy(struct nouveau_object *core, struct nv50_pioc *pioc)
{
	nv50_chan_destroy(core, &pioc->base);
}
/* Create a PIO channel (thin wrapper over the base channel create). */
static int
nv50_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nv50_pioc *pioc)
{
	return nv50_chan_create(core, bclass, head, data, size, &pioc->base);
}
  104. /******************************************************************************
  105. * DMA EVO channel
  106. *****************************************************************************/
/* DMA-mode EVO channel: commands are written to a pushbuf page in
 * system memory which the display engine fetches. */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* bus address of the pushbuf page */
	u32 *ptr;		/* CPU mapping of the pushbuf page */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
  116. static void
  117. nv50_dmac_destroy(struct nouveau_object *core, struct nv50_dmac *dmac)
  118. {
  119. if (dmac->ptr) {
  120. struct pci_dev *pdev = nv_device(core)->pdev;
  121. pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
  122. }
  123. nv50_chan_destroy(core, &dmac->base);
  124. }
  125. static int
  126. nv50_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
  127. {
  128. struct nouveau_fb *pfb = nouveau_fb(core);
  129. struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
  130. struct nouveau_object *object;
  131. int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
  132. NV_DMA_IN_MEMORY_CLASS,
  133. &(struct nv_dma_class) {
  134. .flags = NV_DMA_TARGET_VRAM |
  135. NV_DMA_ACCESS_RDWR,
  136. .start = 0,
  137. .limit = pfb->ram->size - 1,
  138. .conf0 = NV50_DMA_CONF0_ENABLE |
  139. NV50_DMA_CONF0_PART_256,
  140. }, sizeof(struct nv_dma_class), &object);
  141. if (ret)
  142. return ret;
  143. ret = nouveau_object_new(client, parent, NvEvoFB16,
  144. NV_DMA_IN_MEMORY_CLASS,
  145. &(struct nv_dma_class) {
  146. .flags = NV_DMA_TARGET_VRAM |
  147. NV_DMA_ACCESS_RDWR,
  148. .start = 0,
  149. .limit = pfb->ram->size - 1,
  150. .conf0 = NV50_DMA_CONF0_ENABLE | 0x70 |
  151. NV50_DMA_CONF0_PART_256,
  152. }, sizeof(struct nv_dma_class), &object);
  153. if (ret)
  154. return ret;
  155. ret = nouveau_object_new(client, parent, NvEvoFB32,
  156. NV_DMA_IN_MEMORY_CLASS,
  157. &(struct nv_dma_class) {
  158. .flags = NV_DMA_TARGET_VRAM |
  159. NV_DMA_ACCESS_RDWR,
  160. .start = 0,
  161. .limit = pfb->ram->size - 1,
  162. .conf0 = NV50_DMA_CONF0_ENABLE | 0x7a |
  163. NV50_DMA_CONF0_PART_256,
  164. }, sizeof(struct nv_dma_class), &object);
  165. return ret;
  166. }
  167. static int
  168. nvc0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
  169. {
  170. struct nouveau_fb *pfb = nouveau_fb(core);
  171. struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
  172. struct nouveau_object *object;
  173. int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
  174. NV_DMA_IN_MEMORY_CLASS,
  175. &(struct nv_dma_class) {
  176. .flags = NV_DMA_TARGET_VRAM |
  177. NV_DMA_ACCESS_RDWR,
  178. .start = 0,
  179. .limit = pfb->ram->size - 1,
  180. .conf0 = NVC0_DMA_CONF0_ENABLE,
  181. }, sizeof(struct nv_dma_class), &object);
  182. if (ret)
  183. return ret;
  184. ret = nouveau_object_new(client, parent, NvEvoFB16,
  185. NV_DMA_IN_MEMORY_CLASS,
  186. &(struct nv_dma_class) {
  187. .flags = NV_DMA_TARGET_VRAM |
  188. NV_DMA_ACCESS_RDWR,
  189. .start = 0,
  190. .limit = pfb->ram->size - 1,
  191. .conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
  192. }, sizeof(struct nv_dma_class), &object);
  193. if (ret)
  194. return ret;
  195. ret = nouveau_object_new(client, parent, NvEvoFB32,
  196. NV_DMA_IN_MEMORY_CLASS,
  197. &(struct nv_dma_class) {
  198. .flags = NV_DMA_TARGET_VRAM |
  199. NV_DMA_ACCESS_RDWR,
  200. .start = 0,
  201. .limit = pfb->ram->size - 1,
  202. .conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
  203. }, sizeof(struct nv_dma_class), &object);
  204. return ret;
  205. }
  206. static int
  207. nvd0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
  208. {
  209. struct nouveau_fb *pfb = nouveau_fb(core);
  210. struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
  211. struct nouveau_object *object;
  212. int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
  213. NV_DMA_IN_MEMORY_CLASS,
  214. &(struct nv_dma_class) {
  215. .flags = NV_DMA_TARGET_VRAM |
  216. NV_DMA_ACCESS_RDWR,
  217. .start = 0,
  218. .limit = pfb->ram->size - 1,
  219. .conf0 = NVD0_DMA_CONF0_ENABLE |
  220. NVD0_DMA_CONF0_PAGE_LP,
  221. }, sizeof(struct nv_dma_class), &object);
  222. if (ret)
  223. return ret;
  224. ret = nouveau_object_new(client, parent, NvEvoFB32,
  225. NV_DMA_IN_MEMORY_CLASS,
  226. &(struct nv_dma_class) {
  227. .flags = NV_DMA_TARGET_VRAM |
  228. NV_DMA_ACCESS_RDWR,
  229. .start = 0,
  230. .limit = pfb->ram->size - 1,
  231. .conf0 = NVD0_DMA_CONF0_ENABLE | 0xfe |
  232. NVD0_DMA_CONF0_PAGE_LP,
  233. }, sizeof(struct nv_dma_class), &object);
  234. return ret;
  235. }
/* Create a DMA-mode EVO channel.
 *
 * Allocates a page of coherent system memory for the push buffer, wraps
 * it in a DMA object so the display engine can fetch from it, creates
 * the channel object itself, then instantiates the DMA objects (sync
 * buffer, whole-of-VRAM, and chipset-specific framebuffer objects) that
 * methods submitted on this channel will reference.
 *
 * NOTE(review): on a mid-sequence failure the pushbuf page is not freed
 * here; presumably callers clean up via nv50_dmac_destroy() -- confirm
 * against callers.
 */
static int
nv50_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	u32 pushbuf = *(u32 *)data;	/* pushbuf object handle, first word
					 * of the class-specific args */
	int ret;

	mutex_init(&dmac->lock);

	/* one page of coherent system memory for the push buffer */
	dmac->ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE,
					 &dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	/* DMA object through which the display engine reads the pushbuf */
	ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
				 NV_DMA_FROM_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_PCI_US |
						 NV_DMA_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nv50_chan_create(core, bclass, head, data, size, &dmac->base);
	if (ret)
		return ret;

	/* DMA object covering the shared sync buffer */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* DMA object spanning all of VRAM */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* chipset-specific framebuffer DMA objects */
	if (nv_device(core)->card_type < NV_C0)
		ret = nv50_dmac_create_fbdma(core, dmac->base.handle);
	else
	if (nv_device(core)->card_type < NV_D0)
		ret = nvc0_dmac_create_fbdma(core, dmac->base.handle);
	else
		ret = nvd0_dmac_create_fbdma(core, dmac->base.handle);
	return ret;
}
/* Typed wrappers around the base channel types, one per channel role,
 * so helpers below can be specific about which channel they expect. */
struct nv50_mast {
	struct nv50_dmac base;
};

struct nv50_curs {
	struct nv50_pioc base;
};

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;	/* current semaphore slot offset in the sync bo */
	u32 data;	/* next expected semaphore value for this head */
};

struct nv50_ovly {
	struct nv50_dmac base;
};

struct nv50_oimm {
	struct nv50_pioc base;
};

/* Per-CRTC state: the base CRTC plus that head's private EVO channels
 * and a reference to the currently-scanned-out framebuffer bo. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* Accessors: downcast a drm_crtc to our head / per-head channels. */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv_mclass(nv50_chan(c)->user)	/* channel class/version */
/* Per-device display state. */
struct nv50_disp {
	struct nouveau_object *core;	/* core display engine object */
	struct nv50_mast mast;		/* master (core) EVO channel */

	u32 modeset;			/* NOTE(review): usage not visible in
					 * this chunk -- presumably a pending
					 * modeset mask; confirm elsewhere */

	struct nouveau_bo *sync;	/* shared sync/notify buffer object */
};
  331. static struct nv50_disp *
  332. nv50_disp(struct drm_device *dev)
  333. {
  334. return nouveau_display(dev)->priv;
  335. }
  336. #define nv50_mast(d) (&nv50_disp(d)->mast)
  337. static struct drm_crtc *
  338. nv50_display_crtc_get(struct drm_encoder *encoder)
  339. {
  340. return nouveau_encoder(encoder)->crtc;
  341. }
  342. /******************************************************************************
  343. * EVO channel helpers
  344. *****************************************************************************/
  345. static u32 *
  346. evo_wait(void *evoc, int nr)
  347. {
  348. struct nv50_dmac *dmac = evoc;
  349. u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;
  350. mutex_lock(&dmac->lock);
  351. if (put + nr >= (PAGE_SIZE / 4) - 8) {
  352. dmac->ptr[put] = 0x20000000;
  353. nv_wo32(dmac->base.user, 0x0000, 0x00000000);
  354. if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
  355. mutex_unlock(&dmac->lock);
  356. NV_ERROR(dmac->base.user, "channel stalled\n");
  357. return NULL;
  358. }
  359. put = 0;
  360. }
  361. return dmac->ptr + put;
  362. }
  363. static void
  364. evo_kick(u32 *push, void *evoc)
  365. {
  366. struct nv50_dmac *dmac = evoc;
  367. nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
  368. mutex_unlock(&dmac->lock);
  369. }
  370. #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
  371. #define evo_data(p,d) *((p)++) = (d)
  372. static bool
  373. evo_sync_wait(void *data)
  374. {
  375. if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
  376. return true;
  377. usleep_range(1, 2);
  378. return false;
  379. }
  380. static int
  381. evo_sync(struct drm_device *dev)
  382. {
  383. struct nouveau_device *device = nouveau_dev(dev);
  384. struct nv50_disp *disp = nv50_disp(dev);
  385. struct nv50_mast *mast = nv50_mast(dev);
  386. u32 *push = evo_wait(mast, 8);
  387. if (push) {
  388. nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
  389. evo_mthd(push, 0x0084, 1);
  390. evo_data(push, 0x80000000 | EVO_MAST_NTFY);
  391. evo_mthd(push, 0x0080, 2);
  392. evo_data(push, 0x00000000);
  393. evo_data(push, 0x00000000);
  394. evo_kick(push, mast);
  395. if (nv_wait_cb(device, evo_sync_wait, disp->sync))
  396. return 0;
  397. }
  398. return -EBUSY;
  399. }
  400. /******************************************************************************
  401. * Page flipping channel
  402. *****************************************************************************/
  403. struct nouveau_bo *
  404. nv50_display_crtc_sema(struct drm_device *dev, int crtc)
  405. {
  406. return nv50_disp(dev)->sync;
  407. }
/* Context passed to the flip-completion poll callback. */
struct nv50_display_flip {
	struct nv50_disp *disp;		/* owner of the shared sync bo */
	struct nv50_sync *chan;		/* head's sync channel being waited on */
};
  412. static bool
  413. nv50_display_flip_wait(void *data)
  414. {
  415. struct nv50_display_flip *flip = data;
  416. if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
  417. flip->chan->data)
  418. return true;
  419. usleep_range(1, 2);
  420. return false;
  421. }
  422. void
  423. nv50_display_flip_stop(struct drm_crtc *crtc)
  424. {
  425. struct nouveau_device *device = nouveau_dev(crtc->dev);
  426. struct nv50_display_flip flip = {
  427. .disp = nv50_disp(crtc->dev),
  428. .chan = nv50_sync(crtc),
  429. };
  430. u32 *push;
  431. push = evo_wait(flip.chan, 8);
  432. if (push) {
  433. evo_mthd(push, 0x0084, 1);
  434. evo_data(push, 0x00000000);
  435. evo_mthd(push, 0x0094, 1);
  436. evo_data(push, 0x00000000);
  437. evo_mthd(push, 0x00c0, 1);
  438. evo_data(push, 0x00000000);
  439. evo_mthd(push, 0x0080, 1);
  440. evo_data(push, 0x00000000);
  441. evo_kick(push, flip.chan);
  442. }
  443. nv_wait_cb(device, nv50_display_flip_wait, &flip);
  444. }
/* Queue a page flip to @fb on @crtc's sync EVO channel.
 *
 * When @chan is non-NULL, the flip is ordered against rendering with a
 * semaphore in the shared sync bo: the render channel releases
 * sync->data+1 at the "other" slot (sync->addr ^ 0x10), then acquires
 * sync->data at the current slot; three command encodings are used
 * depending on channel class (<NV84, <NVC0, NVC0+).  When @chan is
 * NULL, the display is synced via evo_sync() instead.
 *
 * Returns 0 on success, -EBUSY if pushbuf space could not be reserved,
 * or a negative error from RING_SPACE().
 */
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;
	int ret;

	/* interval lives in bits 4+; 0x100 presumably means "immediate,
	 * don't wait for vblank" -- confirm against class docs */
	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;

	if (chan == NULL)
		evo_sync(crtc->dev);

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	if (chan && nv_mclass(chan->object) < NV84_CHANNEL_IND_CLASS) {
		/* pre-NV84: semaphore addressed via DMA object + offset */
		ret = RING_SPACE(chan, 8);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
		OUT_RING (chan, NvEvoSema0 + nv_crtc->index);
		OUT_RING (chan, sync->addr ^ 0x10);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
		OUT_RING (chan, sync->data + 1);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
		OUT_RING (chan, sync->addr);
		OUT_RING (chan, sync->data);
	} else
	if (chan && nv_mclass(chan->object) < NVC0_CHANNEL_IND_CLASS) {
		/* NV84..NVC0: 64-bit semaphore addresses in VRAM */
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 12);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
		OUT_RING (chan, chan->vram);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING (chan, sync->data + 1);
		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING (chan, upper_32_bits(addr));
		OUT_RING (chan, lower_32_bits(addr));
		OUT_RING (chan, sync->data);
		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
	} else
	if (chan) {
		/* NVC0+: as above, with explicit yield on the triggers */
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING (chan, sync->data + 1);
		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
				NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING (chan, upper_32_bits(addr));
		OUT_RING (chan, lower_32_bits(addr));
		OUT_RING (chan, sync->data);
		OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
				NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
	}

	if (chan) {
		/* flip to the other semaphore slot for the next flip */
		sync->addr ^= 0x10;
		sync->data++;
		FIRE_RING (chan);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	/* semaphore slot/values the display releases on flip completion */
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->addr);
	evo_data(push, sync->data++);
	evo_data(push, sync->data);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	/* surface description; method base differs pre/post NVD0 */
	if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	/* keep a reference on the bo now being scanned out */
	nouveau_bo_ref(nv_fb->nvbo, &head->image);
	return 0;
}
  560. /******************************************************************************
  561. * CRTC
  562. *****************************************************************************/
/* Program dithering for a CRTC from its connector properties.
 *
 * Mode: explicit connector setting, or (AUTO) dynamic 2x2 when the
 * framebuffer depth exceeds what the sink advertises (bpc * 3 bits per
 * pixel).  Depth: explicit setting, or (AUTO) 8bpc for sinks reporting
 * >= 8 bpc.  When @update, method 0x0080 is also emitted to latch the
 * change (presumably the core-channel UPDATE -- confirm).
 *
 * Always returns 0, even if pushbuf space was unavailable.
 */
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* method offset and per-head stride differ by display class */
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
/* Program the scaler output size for a CRTC according to the
 * connector's scaling mode (NONE/FULL/CENTER/ASPECT), with optional
 * underscan border compensation.  When @update, the current flip is
 * stopped and requeued so the new viewport takes effect.
 *
 * Always returns 0.
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		/* vertical/horizontal ratio, 19 fractional bits */
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit border: shrink ~6.25% plus 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		/* program scaler output size and input (umode) size;
		 * method layout differs pre/post NVD0 */
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}
/* Program colour vibrance and vibrant hue for a head.  Both properties
 * are percentages scaled to a signed 12-bit hardware field; 'adj' rounds
 * positive vibrance values to nearest.  Always returns 0.
 */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			/* 0x0080 kicks a core-channel update to latch state */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
/* Point the head's scanout at framebuffer @fb with panning offset (@x,@y),
 * programming offset, size, pitch, format and DMA handle.  Caches the
 * framebuffer's DMA/tile flags on the crtc for later use by commit().
 * Always returns 0.
 */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* the DMA-handle method only exists from NV84 on */
			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_dma);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_dma);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
/* Enable the hardware cursor on this head, pointing it at the crtc's
 * pre-allocated cursor buffer object.  NV84+ additionally needs the
 * cursor DMA context (NvEvoVRAM) programmed.
 */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		}
		evo_kick(push, mast);
	}
}
/* Disable the hardware cursor on this head; NV84+ also clears the
 * cursor DMA context.  Mirrors nv50_crtc_cursor_show().
 */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}
  817. static void
  818. nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
  819. {
  820. struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
  821. if (show)
  822. nv50_crtc_cursor_show(nv_crtc);
  823. else
  824. nv50_crtc_cursor_hide(nv_crtc);
  825. if (update) {
  826. u32 *push = evo_wait(mast, 2);
  827. if (push) {
  828. evo_mthd(push, 0x0080, 1);
  829. evo_data(push, 0x00000000);
  830. evo_kick(push, mast);
  831. }
  832. }
  833. }
/* Intentionally empty: DPMS for these heads is handled through the
 * prepare()/commit() helper callbacks rather than per-crtc DPMS.
 */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
/* Helper prepare() callback: stop pending page flips, blank the head
 * (disable scanout / clear the framebuffer DMA context) and hide the
 * cursor ahead of a modeset.
 */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 2);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}
/* Helper commit() callback: re-enable scanout with the head's LUT and
 * framebuffer contexts after a modeset, restore cursor visibility and
 * re-arm page flipping.  Counterpart of nv50_crtc_prepare().
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM_LP);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* use the tile flags cached by nv50_crtc_set_image() */
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
}
/* Helper mode_fixup() callback: populate the CRTC timing fields of the
 * adjusted mode, halving vertical timings for interlaced modes.
 */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}
  920. static int
  921. nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
  922. {
  923. struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
  924. struct nv50_head *head = nv50_head(crtc);
  925. int ret;
  926. ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
  927. if (ret == 0) {
  928. if (head->image)
  929. nouveau_bo_unpin(head->image);
  930. nouveau_bo_ref(nvfb->nvbo, &head->image);
  931. }
  932. return ret;
  933. }
/* Helper mode_set() callback: derive raster timings from @mode (scaled
 * for interlace/doublescan), pin the new framebuffer, program the head's
 * clock and timing methods, then apply dither/scale/vibrance/image state
 * without an intermediate update.  Returns 0 or a swap-fbs error.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	/* ilace doubles, vscan halves, the effective vertical line count */
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 *push;
	int ret;

	/* horizontal timings, in pixels, relative to sync start */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, in lines, adjusted for vscan/ilace */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* second-field blanking window for interlaced modes */
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	/* NOTE(review): nv_connector is fetched but never used below —
	 * looks like dead code or a leftover; verify before removing. */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);
	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
  1014. static int
  1015. nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
  1016. struct drm_framebuffer *old_fb)
  1017. {
  1018. struct nouveau_drm *drm = nouveau_drm(crtc->dev);
  1019. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  1020. int ret;
  1021. if (!crtc->fb) {
  1022. NV_DEBUG(drm, "No FB bound\n");
  1023. return 0;
  1024. }
  1025. ret = nv50_crtc_swap_fbs(crtc, old_fb);
  1026. if (ret)
  1027. return ret;
  1028. nv50_display_flip_stop(crtc);
  1029. nv50_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
  1030. nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
  1031. return 0;
  1032. }
/* Atomic (kgdb/panic-path) variant of mode_set_base: no buffer pinning
 * or flip re-arm, just stop flipping and point scanout at @fb.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
  1043. static void
  1044. nv50_crtc_lut_load(struct drm_crtc *crtc)
  1045. {
  1046. struct nv50_disp *disp = nv50_disp(crtc->dev);
  1047. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  1048. void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
  1049. int i;
  1050. for (i = 0; i < 256; i++) {
  1051. u16 r = nv_crtc->lut.r[i] >> 2;
  1052. u16 g = nv_crtc->lut.g[i] >> 2;
  1053. u16 b = nv_crtc->lut.b[i] >> 2;
  1054. if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
  1055. writew(r + 0x0000, lut + (i * 0x08) + 0);
  1056. writew(g + 0x0000, lut + (i * 0x08) + 2);
  1057. writew(b + 0x0000, lut + (i * 0x08) + 4);
  1058. } else {
  1059. writew(r + 0x6000, lut + (i * 0x20) + 0);
  1060. writew(g + 0x6000, lut + (i * 0x20) + 2);
  1061. writew(b + 0x6000, lut + (i * 0x20) + 4);
  1062. }
  1063. }
  1064. }
/* Helper disable() callback: release the pin and reference held on the
 * head's tracked scanout buffer (taken in nv50_crtc_swap_fbs()).
 */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}
/* DRM cursor_set entry point: copy the 64x64 ARGB image from the
 * userspace-supplied GEM object into the crtc's cursor buffer, then
 * toggle cursor visibility if it changed.  @handle == 0 hides the
 * cursor.  Returns 0, -EINVAL for bad dimensions, -ENOENT for a bad
 * handle, or a nouveau_bo_map() error.
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* word-wise copy of the whole 64x64x32bpp image */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	/* visibility is updated even if the copy above failed */
	if (visible != nv_crtc->cursor.visible) {
		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
/* DRM cursor_move entry point: write the packed (y,x) position to the
 * cursor PIO channel, then poke its update register.  Always returns 0.
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}
  1115. static void
  1116. nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
  1117. uint32_t start, uint32_t size)
  1118. {
  1119. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  1120. u32 end = min_t(u32, start + size, 256);
  1121. u32 i;
  1122. for (i = start; i < end; i++) {
  1123. nv_crtc->lut.r[i] = r[i];
  1124. nv_crtc->lut.g[i] = g[i];
  1125. nv_crtc->lut.b[i] = b[i];
  1126. }
  1127. nv50_crtc_lut_load(crtc);
  1128. }
/* Tear down a head: destroy its overlay/immediate/sync/cursor display
 * channels, release the scanout image, cursor and LUT buffer objects,
 * then free the crtc itself.  Also used as the error path for
 * nv50_crtc_create(), so every resource check tolerates partial init.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(disp->core, &head->ovly.base);
	nv50_pioc_destroy(disp->core, &head->oimm.base);
	nv50_dmac_destroy(disp->core, &head->sync.base);
	nv50_pioc_destroy(disp->core, &head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 * disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
/* CRTC helper callbacks for NV50+ display heads. */
static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};
/* Core CRTC callbacks; set_config/page_flip use shared nouveau helpers. */
static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
/* No-op stub: cursor position is programmed via the cursor PIO channel
 * (nv50_crtc_cursor_move), so the generic callback has nothing to do.
 */
static void
nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}
/* No-op stub: the cursor buffer offset is programmed in
 * nv50_crtc_cursor_show(), so the generic callback has nothing to do.
 */
static void
nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
  1183. static int
  1184. nv50_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
  1185. {
  1186. struct nv50_disp *disp = nv50_disp(dev);
  1187. struct nv50_head *head;
  1188. struct drm_crtc *crtc;
  1189. int ret, i;
  1190. head = kzalloc(sizeof(*head), GFP_KERNEL);
  1191. if (!head)
  1192. return -ENOMEM;
  1193. head->base.index = index;
  1194. head->base.set_dither = nv50_crtc_set_dither;
  1195. head->base.set_scale = nv50_crtc_set_scale;
  1196. head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
  1197. head->base.color_vibrance = 50;
  1198. head->base.vibrant_hue = 0;
  1199. head->base.cursor.set_offset = nv50_cursor_set_offset;
  1200. head->base.cursor.set_pos = nv50_cursor_set_pos;
  1201. for (i = 0; i < 256; i++) {
  1202. head->base.lut.r[i] = i << 8;
  1203. head->base.lut.g[i] = i << 8;
  1204. head->base.lut.b[i] = i << 8;
  1205. }
  1206. crtc = &head->base.base;
  1207. drm_crtc_init(dev, crtc, &nv50_crtc_func);
  1208. drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
  1209. drm_mode_crtc_set_gamma_size(crtc, 256);
  1210. ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
  1211. 0, 0x0000, NULL, &head->base.lut.nvbo);
  1212. if (!ret) {
  1213. ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
  1214. if (!ret) {
  1215. ret = nouveau_bo_map(head->base.lut.nvbo);
  1216. if (ret)
  1217. nouveau_bo_unpin(head->base.lut.nvbo);
  1218. }
  1219. if (ret)
  1220. nouveau_bo_ref(NULL, &head->base.lut.nvbo);
  1221. }
  1222. if (ret)
  1223. goto out;
  1224. nv50_crtc_lut_load(crtc);
  1225. /* allocate cursor resources */
  1226. ret = nv50_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
  1227. &(struct nv50_display_curs_class) {
  1228. .head = index,
  1229. }, sizeof(struct nv50_display_curs_class),
  1230. &head->curs.base);
  1231. if (ret)
  1232. goto out;
  1233. ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
  1234. 0, 0x0000, NULL, &head->base.cursor.nvbo);
  1235. if (!ret) {
  1236. ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
  1237. if (!ret) {
  1238. ret = nouveau_bo_map(head->base.cursor.nvbo);
  1239. if (ret)
  1240. nouveau_bo_unpin(head->base.lut.nvbo);
  1241. }
  1242. if (ret)
  1243. nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
  1244. }
  1245. if (ret)
  1246. goto out;
  1247. /* allocate page flip / sync resources */
  1248. ret = nv50_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
  1249. &(struct nv50_display_sync_class) {
  1250. .pushbuf = EVO_PUSH_HANDLE(SYNC, index),
  1251. .head = index,
  1252. }, sizeof(struct nv50_display_sync_class),
  1253. disp->sync->bo.offset, &head->sync.base);
  1254. if (ret)
  1255. goto out;
  1256. head->sync.addr = EVO_FLIP_SEM0(index);
  1257. head->sync.data = 0x00000000;
  1258. /* allocate overlay resources */
  1259. ret = nv50_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
  1260. &(struct nv50_display_oimm_class) {
  1261. .head = index,
  1262. }, sizeof(struct nv50_display_oimm_class),
  1263. &head->oimm.base);
  1264. if (ret)
  1265. goto out;
  1266. ret = nv50_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
  1267. &(struct nv50_display_ovly_class) {
  1268. .pushbuf = EVO_PUSH_HANDLE(OVLY, index),
  1269. .head = index,
  1270. }, sizeof(struct nv50_display_ovly_class),
  1271. disp->sync->bo.offset, &head->ovly.base);
  1272. if (ret)
  1273. goto out;
  1274. out:
  1275. if (ret)
  1276. nv50_crtc_destroy(crtc);
  1277. return ret;
  1278. }
  1279. /******************************************************************************
  1280. * DAC
  1281. *****************************************************************************/
  1282. static void
  1283. nv50_dac_dpms(struct drm_encoder *encoder, int mode)
  1284. {
  1285. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1286. struct nv50_disp *disp = nv50_disp(encoder->dev);
  1287. int or = nv_encoder->or;
  1288. u32 dpms_ctrl;
  1289. dpms_ctrl = 0x00000000;
  1290. if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
  1291. dpms_ctrl |= 0x00000001;
  1292. if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
  1293. dpms_ctrl |= 0x00000004;
  1294. nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
  1295. }
  1296. static bool
  1297. nv50_dac_mode_fixup(struct drm_encoder *encoder,
  1298. const struct drm_display_mode *mode,
  1299. struct drm_display_mode *adjusted_mode)
  1300. {
  1301. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1302. struct nouveau_connector *nv_connector;
  1303. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  1304. if (nv_connector && nv_connector->native_mode) {
  1305. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  1306. int id = adjusted_mode->base.id;
  1307. *adjusted_mode = *nv_connector->native_mode;
  1308. adjusted_mode->base.id = id;
  1309. }
  1310. }
  1311. return true;
  1312. }
/* Intentionally empty: DAC state is fully programmed in mode_set(). */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}
/* DAC mode_set: power the DAC up, then attach it to its crtc with the
 * appropriate sync-polarity (and, on NVD0+, interlace) configuration.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			u32 syncs = 0x00000000;

			/* negative h/v sync polarity flags */
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	/* remember the attached crtc so disconnect() can detach cleanly */
	nv_encoder->crtc = encoder->crtc;
}
/* Detach the DAC from its crtc (if any): blank the head, clear the
 * OR's crtc-ownership mask, and forget the crtc association.  Used as
 * both the helper prepare() and disable() callback.
 */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}

			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
  1379. static enum drm_connector_status
  1380. nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
  1381. {
  1382. struct nv50_disp *disp = nv50_disp(encoder->dev);
  1383. int ret, or = nouveau_encoder(encoder)->or;
  1384. u32 load = nouveau_drm(encoder->dev)->vbios.dactestval;
  1385. if (load == 0)
  1386. load = 340;
  1387. ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
  1388. if (ret || !load)
  1389. return connector_status_disconnected;
  1390. return connector_status_connected;
  1391. }
/* Unregister the encoder from DRM and free it. */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* Encoder helper callbacks for the analog DAC outputs;
 * disconnect doubles as both prepare and disable.
 */
static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};
/* Core encoder callbacks for the analog DAC outputs. */
static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
  1411. static int
  1412. nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
  1413. {
  1414. struct nouveau_drm *drm = nouveau_drm(connector->dev);
  1415. struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
  1416. struct nouveau_encoder *nv_encoder;
  1417. struct drm_encoder *encoder;
  1418. int type = DRM_MODE_ENCODER_DAC;
  1419. nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
  1420. if (!nv_encoder)
  1421. return -ENOMEM;
  1422. nv_encoder->dcb = dcbe;
  1423. nv_encoder->or = ffs(dcbe->or) - 1;
  1424. nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
  1425. encoder = to_drm_encoder(nv_encoder);
  1426. encoder->possible_crtcs = dcbe->heads;
  1427. encoder->possible_clones = 0;
  1428. drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
  1429. drm_encoder_helper_add(encoder, &nv50_dac_hfunc);
  1430. drm_mode_connector_attach_encoder(connector, encoder);
  1431. return 0;
  1432. }
  1433. /******************************************************************************
  1434. * Audio
  1435. *****************************************************************************/
/* If the attached monitor advertises audio support, translate its EDID
 * into an ELD and upload it to the display core for the encoder's OR.
 * NOTE(review): nv_connector is dereferenced without a NULL check —
 * presumably a connector is always attached here; verify against callers.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
			    nv_connector->base.eld,
			    nv_connector->base.eld[2] * 4);
}
/* Disable HDMI/DP audio on the encoder's OR by uploading an empty ELD. */
static void
nv50_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
}
  1457. /******************************************************************************
  1458. * HDMI
  1459. *****************************************************************************/
/* If the attached monitor is HDMI-capable, enable HDMI infoframe/audio
 * transmission for this head+OR, computing the maximum audio packet
 * count that fits in the horizontal blanking period.
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	/* method offset encodes head index (<<3) and OR number */
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* audio packets per hblank: (hblank - rekey - 18) / 32 */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
			    NV84_DISP_SOR_HDMI_PWR_STATE_ON |
			    (max_ac_packet << 16) | rekey);

	nv50_audio_mode_set(encoder, mode);
}
  1482. static void
  1483. nv50_hdmi_disconnect(struct drm_encoder *encoder)
  1484. {
  1485. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1486. struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
  1487. struct nv50_disp *disp = nv50_disp(encoder->dev);
  1488. const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
  1489. nv50_audio_disconnect(encoder);
  1490. nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
  1491. }
  1492. /******************************************************************************
  1493. * SOR
  1494. *****************************************************************************/
  1495. static void
  1496. nv50_sor_dpms(struct drm_encoder *encoder, int mode)
  1497. {
  1498. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1499. struct drm_device *dev = encoder->dev;
  1500. struct nv50_disp *disp = nv50_disp(dev);
  1501. struct drm_encoder *partner;
  1502. int or = nv_encoder->or;
  1503. nv_encoder->last_dpms = mode;
  1504. list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
  1505. struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
  1506. if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
  1507. continue;
  1508. if (nv_partner != nv_encoder &&
  1509. nv_partner->dcb->or == nv_encoder->dcb->or) {
  1510. if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
  1511. return;
  1512. break;
  1513. }
  1514. }
  1515. nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));
  1516. }
  1517. static bool
  1518. nv50_sor_mode_fixup(struct drm_encoder *encoder,
  1519. const struct drm_display_mode *mode,
  1520. struct drm_display_mode *adjusted_mode)
  1521. {
  1522. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1523. struct nouveau_connector *nv_connector;
  1524. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  1525. if (nv_connector && nv_connector->native_mode) {
  1526. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  1527. int id = adjusted_mode->base.id;
  1528. *adjusted_mode = *nv_connector->native_mode;
  1529. adjusted_mode->base.id = id;
  1530. }
  1531. }
  1532. return true;
  1533. }
/*
 * Detach the SOR from the head it's driving: idle the head, clear the
 * SOR's control method through the evo channel, and shut down HDMI.
 * Safe to call when the encoder is already disconnected (crtc == NULL).
 */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;
	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);
		push = evo_wait(mast, 4);
		if (push) {
			/* SOR control method offset/stride differs between
			 * pre-NVD0 (0x0600/0x40) and NVD0+ (0x0200/0x20). */
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0600 + (or * 0x40), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0200 + (or * 0x20), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
		nv50_hdmi_disconnect(encoder);
	}
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;
}
/* Intentionally empty: all SOR state is programmed in nv50_sor_mode_set(). */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
/*
 * Program the SOR for a mode: choose the protocol from the DCB output
 * type, perform output-specific setup (HDMI packets, LVDS script, DP
 * link data rate), power the SOR on, then write the head/SOR pairing
 * through the evo channel.
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 *push, lvds = 0;
	u8 owner = 1 << nv_crtc->index;	/* head-ownership bitmask */
	u8 proto = 0xf;			/* SOR protocol, set per output type below */
	u8 depth = 0x0;			/* colour depth code, used by the DP path */
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			/* link A: single-link below 165MHz, dual-link above */
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;	/* link B */
		}
		nv50_hdmi_mode_set(encoder, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;
		/* Build the LVDS script argument: 0x0100 selects dual-link,
		 * 0x0200 selects a 24-bit interface.  Sources: BIOS straps,
		 * the SPWG EDID byte, or the dual-link transition clock. */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG panels flag dual-link in EDID byte 121 */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds |= 0x0100;
			}
			if (lvds & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds |= 0x0200;
			}
			if (nv_connector->base.display_info.bpc == 8)
				lvds |= 0x0200;
		}
		nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
		break;
	case DCB_OUTPUT_DP:
		/* datarate = pixel clock * bits-per-pixel / 8 */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			/* anything else is treated as 10bpc */
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}
		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;	/* link A */
		else
			proto = 0x9;	/* link B */
		break;
	default:
		BUG_ON(1);
		break;
	}
	nv50_sor_dpms(encoder, DRM_MODE_DPMS_ON);
	push = evo_wait(nv50_mast(dev), 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_CLASS) {
			/* pre-NVD0: one SOR control word carries depth,
			 * protocol, sync polarity and head ownership */
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		} else {
			/* NVD0+: per-head sync/depth methods, then the
			 * per-SOR control method */
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;
			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;
			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 1);
			evo_data(push, owner | (proto << 8));
		}
		evo_kick(push, mast);
	}
	nv_encoder->crtc = encoder->crtc;
}
/* Encoder destructor: unregister from the DRM core, then free the
 * containing nouveau_encoder allocated in nv50_sor_create(). */
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* Helper vtable shared by all SOR encoders (TMDS/LVDS/DP). */
static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
/* Base encoder ops for SORs; only destruction is needed. */
static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
  1688. static int
  1689. nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
  1690. {
  1691. struct nouveau_drm *drm = nouveau_drm(connector->dev);
  1692. struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
  1693. struct nouveau_encoder *nv_encoder;
  1694. struct drm_encoder *encoder;
  1695. int type;
  1696. switch (dcbe->type) {
  1697. case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
  1698. case DCB_OUTPUT_TMDS:
  1699. case DCB_OUTPUT_DP:
  1700. default:
  1701. type = DRM_MODE_ENCODER_TMDS;
  1702. break;
  1703. }
  1704. nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
  1705. if (!nv_encoder)
  1706. return -ENOMEM;
  1707. nv_encoder->dcb = dcbe;
  1708. nv_encoder->or = ffs(dcbe->or) - 1;
  1709. nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
  1710. nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
  1711. encoder = to_drm_encoder(nv_encoder);
  1712. encoder->possible_crtcs = dcbe->heads;
  1713. encoder->possible_clones = 0;
  1714. drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
  1715. drm_encoder_helper_add(encoder, &nv50_sor_hfunc);
  1716. drm_mode_connector_attach_encoder(connector, encoder);
  1717. return 0;
  1718. }
  1719. /******************************************************************************
  1720. * PIOR
  1721. *****************************************************************************/
  1722. static void
  1723. nv50_pior_dpms(struct drm_encoder *encoder, int mode)
  1724. {
  1725. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1726. struct nv50_disp *disp = nv50_disp(encoder->dev);
  1727. u32 mthd = (nv_encoder->dcb->type << 12) | nv_encoder->or;
  1728. u32 ctrl = (mode == DRM_MODE_DPMS_ON);
  1729. nv_call(disp->core, NV50_DISP_PIOR_PWR + mthd, ctrl);
  1730. }
  1731. static bool
  1732. nv50_pior_mode_fixup(struct drm_encoder *encoder,
  1733. const struct drm_display_mode *mode,
  1734. struct drm_display_mode *adjusted_mode)
  1735. {
  1736. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  1737. struct nouveau_connector *nv_connector;
  1738. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  1739. if (nv_connector && nv_connector->native_mode) {
  1740. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  1741. int id = adjusted_mode->base.id;
  1742. *adjusted_mode = *nv_connector->native_mode;
  1743. adjusted_mode->base.id = id;
  1744. }
  1745. }
  1746. adjusted_mode->clock *= 2;
  1747. return true;
  1748. }
/* Intentionally empty: all PIOR state is programmed in nv50_pior_mode_set(). */
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}
/*
 * Program an external (PIOR) encoder: derive the depth code from the
 * connector's bpc, power the PIOR on, then write its control word
 * through the evo channel.  Only the pre-NVD0 path is programmed here;
 * NVD0+ hardware takes the empty branch.
 */
static void
nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		   struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	u8 owner = 1 << nv_crtc->index;	/* head-ownership bitmask */
	u8 proto, depth;
	u32 *push;
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	/* Same depth encoding as the SOR path. */
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case 8: depth = 0x5; break;
	case 6: depth = 0x2; break;
	default: depth = 0x0; break;
	}
	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		/* nv50_pior_create() only allows TMDS/DP on PIORs */
		BUG_ON(1);
		break;
	}
	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* control word: depth, protocol, sync polarity, owner */
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}
		evo_kick(push, mast);
	}
	nv_encoder->crtc = encoder->crtc;
}
/*
 * Detach the PIOR from its head: idle the head and clear the PIOR
 * control method (pre-NVD0 only).  Safe to call when already
 * disconnected (crtc == NULL).
 */
static void
nv50_pior_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;
	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}
	nv_encoder->crtc = NULL;
}
/* Encoder destructor: unregister from the DRM core, then free the
 * containing nouveau_encoder allocated in nv50_pior_create(). */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* Helper vtable for external (PIOR) encoders. */
static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
/* Base encoder ops for PIORs; only destruction is needed. */
static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
  1834. static int
  1835. nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
  1836. {
  1837. struct nouveau_drm *drm = nouveau_drm(connector->dev);
  1838. struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
  1839. struct nouveau_i2c_port *ddc = NULL;
  1840. struct nouveau_encoder *nv_encoder;
  1841. struct drm_encoder *encoder;
  1842. int type;
  1843. switch (dcbe->type) {
  1844. case DCB_OUTPUT_TMDS:
  1845. ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
  1846. type = DRM_MODE_ENCODER_TMDS;
  1847. break;
  1848. case DCB_OUTPUT_DP:
  1849. ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
  1850. type = DRM_MODE_ENCODER_TMDS;
  1851. break;
  1852. default:
  1853. return -ENODEV;
  1854. }
  1855. nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
  1856. if (!nv_encoder)
  1857. return -ENOMEM;
  1858. nv_encoder->dcb = dcbe;
  1859. nv_encoder->or = ffs(dcbe->or) - 1;
  1860. nv_encoder->i2c = ddc;
  1861. encoder = to_drm_encoder(nv_encoder);
  1862. encoder->possible_crtcs = dcbe->heads;
  1863. encoder->possible_clones = 0;
  1864. drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
  1865. drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
  1866. drm_mode_connector_attach_encoder(connector, encoder);
  1867. return 0;
  1868. }
  1869. /******************************************************************************
  1870. * Init
  1871. *****************************************************************************/
/* Nothing to do on fini; teardown is handled by nv50_display_destroy(). */
void
nv50_display_fini(struct drm_device *dev)
{
}
/*
 * Bring the display up (init/resume): re-seed each crtc's semaphore
 * value in the shared sync buffer, then point the core channel's
 * context method 0x0088 at the sync object.
 * Returns 0 on success, -EBUSY if the evo channel has no space.
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_crtc *crtc;
	u32 *push;
	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nv50_sync *sync = nv50_sync(crtc);
		/* restore the crtc's last semaphore value (addr is in bytes) */
		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
	}
	evo_mthd(push, 0x0088, 1);
	evo_data(push, NvEvoSync);
	evo_kick(push, nv50_mast(dev));
	return 0;
}
  1894. void
  1895. nv50_display_destroy(struct drm_device *dev)
  1896. {
  1897. struct nv50_disp *disp = nv50_disp(dev);
  1898. nv50_dmac_destroy(disp->core, &disp->mast.base);
  1899. nouveau_bo_unmap(disp->sync);
  1900. if (disp->sync)
  1901. nouveau_bo_unpin(disp->sync);
  1902. nouveau_bo_ref(NULL, &disp->sync);
  1903. nouveau_display(dev)->priv = NULL;
  1904. kfree(disp);
  1905. }
/*
 * One-time construction of the display engine state: a small shared
 * sync buffer, the master evo channel, one crtc per hardware head, and
 * an encoder+connector per VBIOS DCB entry.  On any failure, whatever
 * was created so far is torn down via nv50_display_destroy().
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;
	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->core = nouveau_display(dev)->core;
	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			/* unwind the pin if mapping failed */
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		/* drop the buffer entirely on any failure above */
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}
	if (ret)
		goto out;
	/* allocate master evo channel */
	ret = nv50_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
			      &(struct nv50_display_mast_class) {
					.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
			      }, sizeof(struct nv50_display_mast_class),
			      disp->sync->bo.offset, &disp->mast.base);
	if (ret)
		goto out;
	/* create crtc objects to represent the hw heads */
	if (nv_mclass(disp->core) >= NVD0_DISP_CLASS)
		crtcs = nv_rd32(device, 0x022448);	/* NVD0+: read head count from hw */
	else
		crtcs = 2;	/* earlier hardware always has two heads */
	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, disp->core, i);
		if (ret)
			goto out;
	}
	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;
		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			/* off-chip outputs are driven through a PIOR */
			ret = nv50_pior_create(connector, dcbe);
		}
		if (ret) {
			/* a bad DCB entry isn't fatal: warn and continue */
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}
	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;
		NV_WARN(drm, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}
out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}