nv50_display.c
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_dp_helper.h>

#include <nvif/class.h>

#include "nouveau_drm.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"

#include "nv50_display.h"

#define EVO_DMA_NR 9

#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))
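/* Channel index layout (as implied by the defines above): the master (core)
 * channel is index 0, then each head gets a base/flip channel at 0x01+head,
 * an overlay at 0x05+head, an overlay immediate at 0x09+head and a cursor
 * at 0x0d+head.  EVO_DMA_NR covers the nine DMA channels (core + 4 base +
 * 4 overlay); the OIMM/CURS channels are PIO. */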
/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;
};
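/* Create a display channel object: query the classes the display engine
 * exposes with nvif_object_sclass(), then walk the caller's preference list
 * (newest first) until a supported class is found.  The channel is mapped
 * on success so its user registers can be accessed directly. */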
static int
nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, struct nv50_chan *chan)
{
	const u32 handle = (oclass[0] << 16) | head;
	u32 sclass[8];
	int ret, i;

	ret = nvif_object_sclass(disp, sclass, ARRAY_SIZE(sclass));
	WARN_ON(ret > ARRAY_SIZE(sclass));
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < ARRAY_SIZE(sclass); i++) {
			if (sclass[i] == oclass[0]) {
				ret = nvif_object_init(disp, NULL, handle,
						       oclass[0], data, size,
						       &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				return ret;
			}
		}
		oclass++;
	}

	return -ENOSYS;
}
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}

/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, struct nv50_pioc *pioc)
{
	return nv50_chan_create(disp, oclass, head, data, size, &pioc->base);
}

/******************************************************************************
 * Cursor Immediate
 *****************************************************************************/

struct nv50_curs {
	struct nv50_pioc base;
	struct nouveau_bo *image;
};

static int
nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const u32 oclass[] = {
		GK104_DISP_CURSOR,
		GF110_DISP_CURSOR,
		GT214_DISP_CURSOR,
		G82_DISP_CURSOR,
		NV50_DISP_CURSOR,
		0
	};

	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
				&curs->base);
}

/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const u32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;

	struct nvif_object sync;
	struct nvif_object vram;

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct pci_dev *pdev = nvkm_device(nvif_device(disp))->pdev;
		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
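/* A DMA channel's push buffer lives in a single page of coherent system
 * memory, which the display engine fetches over PCI through the temporary
 * NV_DMA_FROM_MEMORY object created below.  Two context DMA objects are
 * then attached to the channel itself: handle 0xf0000000 windows the shared
 * sync buffer, and 0xf0000001 spans all of user VRAM (referenced later when
 * programming cursor images and gamma LUTs). */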
static int
nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nvif_device *device = nvif_device(disp);
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = pci_alloc_consistent(nvkm_device(device)->pdev,
					 PAGE_SIZE, &dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(nvif_object(device), NULL,
			       args->pushbuf, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	return ret;
}
/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const u32 oclass[] = {
		GM204_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf,
				&core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

static int
nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf,
		 struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const u32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf,
		 struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const u32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

struct nv50_fbdma {
	struct list_head head;
	struct nvif_object core;
	struct nvif_object base[4];
};

struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct list_head fbdma;

	struct nouveau_bo *sync;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
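/* Every command submission in this file follows the same pattern: evo_wait()
 * reserves space in the channel's push buffer (taking dmac->lock),
 * evo_mthd()/evo_data() append a method header and its parameters, and
 * evo_kick() bumps the PUT pointer and drops the lock.  Roughly:
 *
 *	u32 *push = evo_wait(mast, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	// UPDATE
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, mast);
 *	}
 */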
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (!nvkm_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) {
			mutex_unlock(&dmac->lock);
			nv_error(nvkm_object(&dmac->base.user),
				 "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

#if 1
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
#else
#define evo_mthd(p,m,s) do {						\
	const u32 _m = (m), _s = (s);					\
	printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);		\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)
#define evo_data(p,d) do {						\
	const u32 _d = (d);						\
	printk(KERN_ERR "\t%08x\n", _d);				\
	*((p)++) = _d;							\
} while(0)
#endif
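/* An EVO method header packs the data-dword count into bits 18+ and the
 * method offset into the low bits, followed by that many data dwords.
 * evo_wait() relies on this layout plus the 0x20000000 marker it writes
 * before rewinding PUT to 0 when the page is nearly full.  The #else
 * branch above is a debugging variant that also logs every method and
 * data dword to dmesg. */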
static bool
evo_sync_wait(void *data)
{
	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
		return true;
	usleep_range(1, 2);
	return false;
}

static int
evo_sync(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(nvkm_device(device), evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}

/******************************************************************************
 * Page flipping channel
 *****************************************************************************/
struct nouveau_bo *
nv50_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nv50_disp(dev)->sync;
}

struct nv50_display_flip {
	struct nv50_disp *disp;
	struct nv50_sync *chan;
};

static bool
nv50_display_flip_wait(void *data)
{
	struct nv50_display_flip *flip = data;
	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
					      flip->chan->data)
		return true;
	usleep_range(1, 2);
	return false;
}

void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
	struct nv50_display_flip flip = {
		.disp = nv50_disp(crtc->dev),
		.chan = nv50_sync(crtc),
	};
	u32 *push;

	push = evo_wait(flip.chan, 8);
	if (push) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, flip.chan);
	}

	nv_wait_cb(nvkm_device(device), nv50_display_flip_wait, &flip);
}
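/* Flips are synchronised against rendering through per-head semaphore slots
 * in the shared sync bo.  nv50_display_flip_next() has the render channel
 * release sync->data + 1 into the "other" slot (sync->addr ^ 0x10) once
 * rendering completes, then toggles addr and bumps data, so the two slots
 * (EVO_FLIP_SEM0/SEM1) ping-pong between successive flips without any
 * reallocation; nv50_display_flip_wait() simply polls a slot until the
 * expected value shows up. */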
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;
	int ret;

	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;
	if (chan == NULL)
		evo_sync(crtc->dev);

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) {
		ret = RING_SPACE(chan, 8);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
		OUT_RING  (chan, sync->addr ^ 0x10);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
		OUT_RING  (chan, sync->data + 1);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
		OUT_RING  (chan, sync->addr);
		OUT_RING  (chan, sync->data);
	} else
	if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) {
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 12);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
		OUT_RING  (chan, chan->vram.handle);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING  (chan, sync->data + 1);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr));
		OUT_RING  (chan, lower_32_bits(addr));
		OUT_RING  (chan, sync->data);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
	} else
	if (chan) {
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING  (chan, sync->data + 1);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr));
		OUT_RING  (chan, lower_32_bits(addr));
		OUT_RING  (chan, sync->data);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
	}

	if (chan) {
		sync->addr ^= 0x10;
		sync->data++;
		FIRE_RING (chan);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->addr);
	evo_data(push, sync->data++);
	evo_data(push, sync->data);
	evo_data(push, sync->base.sync.handle);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_handle);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	nouveau_bo_ref(nv_fb->nvbo, &head->image);
	return 0;
}
/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
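/* The scaler maths below works in 13.19 fixed point: an aspect ratio is
 * computed as (a << 19) / b, then multiplied back out and rounded with
 * (x * aspect + aspect / 2) >> 19.  For example, fitting a 1280x1024 user
 * mode onto a 1920x1200 native panel with SCALE_ASPECT hits the oY < oX
 * branch, so oX becomes (1200 * ((1280 << 19) / 1024)) >> 19 = 1500. */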
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}
static int
nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 8);
	if (!push)
		return -ENOMEM;

	evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1);
	evo_data(push, usec);
	evo_kick(push, mast);
	return 0;
}

static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}

static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_handle);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_handle);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	nv_crtc->fb.handle = nvfb->r_handle;
	return 0;
}

static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nv50_curs *curs = nv50_curs(&nv_crtc->base);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, curs->image->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, curs->image->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, curs->image->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}
		evo_kick(push, mast);
	}
}

static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}

static void
nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nv50_curs *curs = nv50_curs(&nv_crtc->base);

	if (show && curs->image)
		nv50_crtc_cursor_show(nv_crtc);
	else
		nv50_crtc_cursor_hide(nv_crtc);

	if (update) {
		u32 *push = evo_wait(mast, 2);
		if (push) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}
}

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}

static int
nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
	struct nv50_head *head = nv50_head(crtc);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret == 0) {
		if (head->image)
			nouveau_bo_unpin(head->image);
		nouveau_bo_ref(nvfb->nvbo, &head->image);
	}

	return ret;
}
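/* nv50_crtc_mode_set() below translates the DRM mode into the hardware's
 * view of the raster: blanking positions are counted from the end of the
 * frame, so e.g. hblanke (blank end) is the sync width plus back porch,
 * and hblanks (blank start) is htotal minus the front porch, minus one.
 * vblankus estimates the vblank duration in microseconds as blank lines
 * times line length, scaled by mode->clock (which is in kHz). */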
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1, vblankus = 0;
	u32 *push;
	int ret;

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;

	/* XXX: Safe underestimate, even "0" works */
	vblankus = (vactive - mode->vdisplay - 2) * hactive;
	vblankus *= 1000;
	vblankus /= mode->clock;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);

	/* G94 only accepts this after setting scale */
	if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
		nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus);

	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->primary->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
	return 0;
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
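/* The gamma LUT lives in a VRAM bo mapped through lut.nvbo.  The 16-bit
 * DRM values are truncated to the hardware's 14-bit range; pre-GF110
 * parts use 8-byte entries, later ones 32-byte entries with a 0x6000
 * bias added to each component. */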
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}

static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_curs *curs = nv50_curs(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem = NULL;
	struct nouveau_bo *nvbo = NULL;
	int ret = 0;

	if (handle) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
	}

	if (ret == 0) {
		if (curs->image)
			nouveau_bo_unpin(curs->image);
		nouveau_bo_ref(nvbo, &curs->image);
	}
	drm_gem_object_unreference_unlocked(gem);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	return ret;
}

static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nvif_wr32(&chan->user, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 * disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	/*XXX: ditto */
	if (head->curs.image)
		nouveau_bo_unpin(head->curs.image);
	nouveau_bo_ref(NULL, &head->curs.image);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};

static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
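/* Each head bundles a cursor channel (PIO), a base/sync channel (DMA), an
 * overlay channel (DMA) and an overlay immediate channel (PIO), plus an
 * 8 KiB VRAM bo for the gamma LUT.  nv50_crtc_create() allocates the lot
 * and, on any failure, tears everything back down via nv50_crtc_destroy(). */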
static int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nv50_crtc_set_dither;
	head->base.set_scale = nv50_crtc_set_scale;
	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nv50_crtc_func);
	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	nv50_crtc_lut_load(crtc);

	/* allocate cursor resources */
	ret = nv50_curs_create(disp->disp, index, &head->curs);
	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset,
			       &head->sync);
	if (ret)
		goto out;

	head->sync.addr = EVO_FLIP_SEM0(index);
	head->sync.data = 0x00000000;

	/* allocate overlay resources */
	ret = nv50_oimm_create(disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_crtc_destroy(crtc);
	return ret;
}
  1280. /******************************************************************************
  1281. * DAC
  1282. *****************************************************************************/
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
static bool
nv50_dac_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}
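
/* Mode programming follows the usual EVO push-buffer pattern: evo_wait()
 * reserves space on the core channel, evo_mthd()/evo_data() queue a
 * method header and its parameters, and evo_kick() submits the batch.
 * Pre-GF110 cores take the head owner mask and sync polarity through a
 * single DAC method; GF110+ split this across a per-head method and a
 * separate OR owner method. */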
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}

static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;
			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
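
/* Analog load detection: write a sense level (the VBIOS dactestval,
 * falling back to 340 when the BIOS provides none) via the DAC_LOAD
 * method and report a monitor as attached if a load is seen. */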
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};

static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Audio
 *****************************************************************************/
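/* HDA audio is configured by handing the connector's ELD (built from
 * the monitor's EDID by drm_edid_to_eld()) to the SOR_HDA_ELD method;
 * the hashm fixup ORs in a head bit so the method addresses the
 * correct OR/head pairing. */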
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht = nv_encoder->dcb->hasht,
		.base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
				   (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
static void
nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
/******************************************************************************
 * HDMI
 *****************************************************************************/
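/* HDMI power-up.  The max_ac_packet value presumably describes how many
 * 32-unit packet slots fit in the horizontal blanking period once the
 * rekey window (56, matching the binary driver and Tegra) and a further
 * 18-unit Tegra-derived constant are subtracted:
 *
 *	max_ac_packet = (htotal - hdisplay - rekey - 18) / 32
 */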
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_mode_set(encoder, mode);
}
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
/******************************************************************************
 * SOR
 *****************************************************************************/
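/* A single OR can back more than one drm_encoder (e.g. the DP and TMDS
 * encoders created for one physical connector), so the DPMS hook scans
 * the encoder list and leaves SOR power alone while a partner encoder
 * on the same OR is still DPMS_ON. */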
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_pwr_v0 pwr;
	} link = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		nvif_mthd(disp->disp, 0, &link, sizeof(link));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}
static bool
nv50_sor_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}
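
/* The SOR control word is cached in nv_encoder->ctrl so owner and
 * protocol bits can be updated read-modify-write without re-reading
 * hardware state; a method is pushed only when the masked update
 * actually changes the cached value. */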
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;

	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_audio_disconnect(encoder, nv_crtc);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}

static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
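
/* Protocol/depth selection per output type: TMDS on link A runs
 * single-link (0x1) below a 165MHz pixel clock and dual-link (0x5)
 * above, link B uses 0x2; LVDS (0x0) additionally fires a VBIOS script
 * whose dual-link (0x0100) and 24-bit (0x0200) flags come from EDID or
 * VBIOS straps; DP picks link A/B (0x8/0x9) and derives dp.datarate
 * from the mode clock and the connector's bits-per-component. */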
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mask, ctrl;
	u8 owner = 1 << nv_crtc->index;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_mode_set(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= GF110_DISP) {
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;
			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * PIOR
 *****************************************************************************/
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
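
/* PIORs drive external encoder chips.  The clock doubling in the
 * mode_fixup below appears to account for the external link running at
 * twice the pixel rate. */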
static bool
nv50_pior_mode_fixup(struct drm_encoder *encoder,
		     const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	adjusted_mode->clock *= 2;
	return true;
}
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}

static void
nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		   struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
static void
nv50_pior_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_i2c_port *ddc = NULL;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Framebuffer
 *****************************************************************************/
static void
nv50_fbdma_fini(struct nv50_fbdma *fbdma)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
		nvif_object_fini(&fbdma->base[i]);
	nvif_object_fini(&fbdma->core);
	list_del(&fbdma->head);
	kfree(fbdma);
}
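
/* Scanout requires a DMA object describing the framebuffer in every
 * EVO channel that can display it: one per head's base channel plus one
 * for the core channel, all created under the same handle (name).  The
 * object's layout arguments differ by chipset generation, hence the
 * nv50/gf100/gf110 union below; an already-created handle is reused. */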
static int
nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	struct __attribute__ ((packed)) {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf110_dma_v0 gf110;
		};
	} args = {};
	struct nv50_fbdma *fbdma;
	struct drm_crtc *crtc;
	u32 size = sizeof(args.base);
	int ret;

	list_for_each_entry(fbdma, &disp->fbdma, head) {
		if (fbdma->core.handle == name)
			return 0;
	}

	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
	if (!fbdma)
		return -ENOMEM;
	list_add(&fbdma->head, &disp->fbdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start = offset;
	args.base.limit = offset + length - 1;

	if (drm->device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		size += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		size += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		size += sizeof(args.gf100);
	} else {
		args.gf110.page = GF110_DMA_V0_PAGE_LP;
		args.gf110.kind = kind;
		size += sizeof(args.gf110);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nv50_head *head = nv50_head(crtc);
		int ret = nvif_object_init(&head->sync.base.base.user, NULL,
					   name, NV_DMA_IN_MEMORY, &args, size,
					   &fbdma->base[head->base.index]);
		if (ret) {
			nv50_fbdma_fini(fbdma);
			return ret;
		}
	}

	ret = nvif_object_init(&mast->base.base.user, NULL, name,
			       NV_DMA_IN_MEMORY, &args, size,
			       &fbdma->core);
	if (ret) {
		nv50_fbdma_fini(fbdma);
		return ret;
	}

	return 0;
}
static void
nv50_fb_dtor(struct drm_framebuffer *fb)
{
}
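
/* Framebuffer constructor: translate DRM depth into the hardware
 * surface format (r_format), encode pitch and tiling per display class
 * (r_pitch), and derive the DMA handle from the tiling kind so buffers
 * with the same memory layout share one nv50_fbdma object. */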
static int
nv50_fb_ctor(struct drm_framebuffer *fb)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_drm *drm = nouveau_drm(fb->dev);
	struct nouveau_bo *nvbo = nv_fb->nvbo;
	struct nv50_disp *disp = nv50_disp(fb->dev);
	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
	u8 tile = nvbo->tile_mode;

	if (drm->device.info.chipset >= 0xc0)
		tile >>= 4; /* yep.. */

	switch (fb->depth) {
	case  8: nv_fb->r_format = 0x1e00; break;
	case 15: nv_fb->r_format = 0xe900; break;
	case 16: nv_fb->r_format = 0xe800; break;
	case 24:
	case 32: nv_fb->r_format = 0xcf00; break;
	case 30: nv_fb->r_format = 0xd100; break;
	default:
		 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
		 return -EINVAL;
	}

	if (disp->disp->oclass < G82_DISP) {
		nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					(fb->pitches[0] | 0x00100000);
		nv_fb->r_format |= kind << 16;
	} else
	if (disp->disp->oclass < GF110_DISP) {
		nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					(fb->pitches[0] | 0x00100000);
	} else {
		nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					(fb->pitches[0] | 0x01000000);
	}

	nv_fb->r_handle = 0xffff0000 | kind;
	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0,
			       drm->device.info.ram_user, kind);
}
/******************************************************************************
 * Init
 *****************************************************************************/
void
nv50_display_fini(struct drm_device *dev)
{
}
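
/* Resume/init: reseed each head's page-flip semaphore in the shared
 * sync buffer, then point what appears to be the core channel's
 * notifier context (method 0x0088) back at the sync buffer's DMA
 * handle. */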
int
nv50_display_init(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nv50_sync *sync = nv50_sync(crtc);
		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
	}

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));
	return 0;
}
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_fbdma *fbdma, *fbtmp;

	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
		nv50_fbdma_fini(fbdma);
	}

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
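
/* Top-level constructor; ordering matters.  The shared 4KiB sync/
 * notifier buffer is created first since the core EVO channel and each
 * head's channels reference its offset.  Head count comes from register
 * 0x022448 on GF110+ and is fixed at two before that.  Encoders and
 * connectors are then built from the VBIOS DCB table, routing on-chip
 * outputs to SOR/DAC and external ones to PIOR; individual encoder
 * failures are logged and skipped rather than aborting. */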
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	INIT_LIST_HEAD(&disp->fbdma);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
	disp->disp = &nouveau_display(dev)->disp;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(device, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}