nv50_display.c

/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>
#include <linux/hdmi.h>

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_edid.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"

#include "nv50_display.h"

#define EVO_DMA_NR 9

#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
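/* A worked example of the sync bo layout, derived from the macros above:
 * channel 0 holds the core-channel notifier at EVO_MAST_NTFY (0x000), and
 * each CRTC c owns the 0x100-byte slot at (c + 1) * 0x100.  For CRTC 0 the
 * flip semaphores therefore live at 0x100/0x110 and the flip notifiers at
 * 0x120/0x130.
 */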
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;
	bool lock_core;
	bool flush_disable;
};

struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	bool flush_disable;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} set;
};

#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
	struct drm_crtc_state state;

	struct {
		u16 iW;
		u16 iH;
		u16 oW;
		u16 oH;
	} view;

	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;
		struct {
			u32 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;
			u16 blank2e;
			u16 blankus;
		} v;
	} mode;

	struct {
		u32 handle;
		u64 offset:40;
	} lut;

	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} core;

	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  layout:1;
		u8  format:1;
	} curs;

	struct {
		u8  depth;
		u8  cpp;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} base;

	struct {
		u8 cpp;
	} ovly;

	struct {
		bool enable:1;
		u8 bits:2;
		u8 mode:4;
	} dither;

	struct {
		struct {
			u16 cos:12;
			u16 sin:12;
		} sat;
	} procamp;

	union {
		struct {
			bool core:1;
			bool curs:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool core:1;
			bool curs:1;
			bool view:1;
			bool mode:1;
			bool base:1;
			bool ovly:1;
			bool dither:1;
			bool procamp:1;
		};
		u16 mask;
	} set;
};

static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}

#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;

	struct drm_rect clip;

	struct {
		u32  handle;
		u16  offset:12;
		bool awaken:1;
	} ntfy;

	struct {
		u32 handle;
		u16 offset:12;
		u32 acquire;
		u32 release;
	} sema;

	struct {
		u8 enable:2;
	} lut;

	struct {
		u8  mode:2;
		u8  interval:4;
		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 w;
		u16 h;
		u32 handle;
		u64 offset;
	} image;

	struct {
		u16 x;
		u16 y;
	} point;

	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
			bool lut:1;
			bool point:1;
		};
		u8 mask;
	} set;
};

/******************************************************************************
 * EVO channel
 *****************************************************************************/
struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};

static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}
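/* Note on the probing loop above: callers pass their oclass[] tables ordered
 * newest-generation-first, so nv50_chan_create() walks that preference list
 * against the classes the display object actually advertises, instantiates
 * the first match, and maps the channel's user registers on success.
 */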
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}

/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/
struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}

/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/
struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/
struct nv50_dmac_ctxdma {
	struct list_head head;
	struct nvif_object object;
};

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;

	struct nvif_object sync;
	struct nvif_object vram;
	struct list_head ctxdma;

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}

static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
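/* The ctxdma handle doubles as a cache key: 0xfb000000 ORed with the
 * framebuffer's memory "kind" (tile_flags bits 15:8), so framebuffers that
 * share a kind reuse one DMA object via the list search above rather than
 * allocating a fresh one per flip.
 */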
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}

static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
				       &dmac->handle, GFP_KERNEL);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	args->pushbuf = nvif_handle(&pushbuf);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dmac->ctxdma);
	return ret;
}
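/* To summarize the channel setup above: the pushbuf is a single page of
 * coherent system memory, exposed to the display engine through a read-only
 * PCI DMA object whose handle is passed in via args->pushbuf.  Two well-known
 * VRAM ctxdmas are then created on the channel itself: 0xf0000000 covering
 * the 4KiB sync area, and 0xf0000001 covering all of user-visible VRAM.
 */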
/******************************************************************************
 * Core
 *****************************************************************************/
struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/
struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct nouveau_bo *sync;

	struct mutex mutex;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
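/* evo_wait() reserves nr words of pushbuf space: register 0x0000 is the PUT
 * pointer and 0x0004 the GET pointer, both in bytes.  When fewer than nr + 8
 * words remain in the page it writes what appears to be a jump-to-offset-0
 * command (0x20000000), resets PUT, and polls for up to two seconds until
 * GET reaches the start of the buffer before handing out a pointer there.
 */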
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

#define evo_mthd(p, m, s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("%04x %d %s\n", _m, _s, __func__);		\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)

#define evo_data(p, d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("\t%08x\n", _d);					\
	*((p)++) = _d;							\
} while(0)
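/* A worked example of the pushbuf encoding above: evo_mthd(push, 0x0080, 1)
 * emits the header (1 << 18) | 0x0080 == 0x00040080 (method 0x0080, one data
 * word), and the following evo_data() supplies that word.  A typical method
 * submission therefore looks like:
 *
 *	u32 *push = evo_wait(chan, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	// used as UPDATE below
 *		evo_data(push, 0x00000000);	// its argument
 *		evo_kick(push, chan);		// advance PUT, drop the lock
 *	}
 */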
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
	const struct nv50_wndw_func *func;
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;
	u16 sema;
	u32 data;
};

struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};

static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	if (asyw->set.ntfy)
		return wndw->func->ntfy_wait_begun(wndw, asyw);
	return 0;
}

static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	if (asyw->clr.sema && (!asyw->set.sema || flush))
		wndw->func->sema_clr(wndw);
	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
		wndw->func->ntfy_clr(wndw);
	if (asyw->clr.image && (!asyw->set.image || flush))
		wndw->func->image_clr(wndw);

	return flush ? wndw->func->update(wndw, interlock) : 0;
}

static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock) {
		asyw->image.mode = 0;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
	if (asyw->set.point) wndw->func->point    (wndw, asyw);

	return wndw->func->update(wndw, interlock);
}
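/* The clr/set split above mirrors a two-phase flush: flush_clr() tears down
 * only the state that isn't about to be reprogrammed anyway (unless flush
 * forces a full teardown), while flush_set() programs the pending state.
 * Both return the value of the window's update() hook, which the caller
 * presumably accumulates as an interlock mask when synchronizing against
 * the core channel.
 */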
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}

static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;

	if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
		asyw->interval = 0;
	else
		asyw->interval = 1;

	if (asyw->image.kind) {
		asyw->image.layout = 0;
		if (drm->client.device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
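/* Framebuffer addressing above: a non-zero memory kind selects the tiled
 * path (layout 0), where the block-size field comes from the bo's tile_mode
 * and the pitch is re-encoded into what appear to be the hardware's
 * tiled-pitch units, while kind 0 means a linear surface (layout 1) with a
 * plain byte pitch.  The mode/interval pair then encodes async flips
 * (mode 1, interval 0) versus vsynced flips (mode 0, interval 1).
 */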
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
		if (ret)
			return ret;
	} else
	if (varm) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		return 0;
	}

	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}

static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nouveau_bo_unpin(fb->nvbo);
}

static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *asyh;
	struct nv50_dmac_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
	if (!asyw->state.fb)
		return 0;

	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret)
		return ret;

	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
	if (IS_ERR(ctxdma)) {
		nouveau_bo_unpin(fb->nvbo);
		return PTR_ERR(ctxdma);
	}

	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
	asyw->image.handle = ctxdma->object.handle;
	asyw->image.offset = fb->nvbo->bo.offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}

static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};

static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}

static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->interval = 1;
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->lut = armw->lut;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}

static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_MODE_ROTATE_0;
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}

static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};

static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}

static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}

static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw,
				       format, nformat, NULL,
				       type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}

/******************************************************************************
 * Cursor plane
 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};

static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}

static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	u32 offset = asyw->image.offset;
	if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
		asyh->curs.handle = handle;
		asyh->curs.offset = offset;
		asyh->set.curs = asyh->curs.visible;
	}
}

static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}

static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}
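/* Hardware cursor constraints enforced above: the image must be an unscaled,
 * square ARGB8888 surface of exactly 32x32 (layout 0) or 64x64 (layout 1)
 * pixels; anything else fails the atomic check with -EINVAL.
 */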
  997. static void *
  998. nv50_curs_dtor(struct nv50_wndw *wndw)
  999. {
  1000. struct nv50_curs *curs = nv50_curs(wndw);
  1001. nvif_object_fini(&curs->chan);
  1002. return curs;
  1003. }
  1004. static const u32
  1005. nv50_curs_format[] = {
  1006. DRM_FORMAT_ARGB8888,
  1007. };
  1008. static const struct nv50_wndw_func
  1009. nv50_curs = {
  1010. .dtor = nv50_curs_dtor,
  1011. .acquire = nv50_curs_acquire,
  1012. .release = nv50_curs_release,
  1013. .prepare = nv50_curs_prepare,
  1014. .point = nv50_curs_point,
  1015. .update = nv50_curs_update,
  1016. };
  1017. static int
  1018. nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
  1019. struct nv50_curs **pcurs)
  1020. {
  1021. static const struct nvif_mclass curses[] = {
  1022. { GK104_DISP_CURSOR, 0 },
  1023. { GF110_DISP_CURSOR, 0 },
  1024. { GT214_DISP_CURSOR, 0 },
  1025. { G82_DISP_CURSOR, 0 },
  1026. { NV50_DISP_CURSOR, 0 },
  1027. {}
  1028. };
  1029. struct nv50_disp_cursor_v0 args = {
  1030. .head = head->base.index,
  1031. };
  1032. struct nv50_disp *disp = nv50_disp(drm->dev);
  1033. struct nv50_curs *curs;
  1034. int cid, ret;
  1035. cid = nvif_mclass(disp->disp, curses);
  1036. if (cid < 0) {
  1037. NV_ERROR(drm, "No supported cursor immediate class\n");
  1038. return cid;
  1039. }
  1040. if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
  1041. return -ENOMEM;
  1042. ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
  1043. "curs", head->base.index, &disp->mast.base,
  1044. nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
  1045. &curs->wndw);
  1046. if (ret) {
  1047. kfree(curs);
  1048. return ret;
  1049. }
  1050. ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
  1051. sizeof(args), &curs->chan);
  1052. if (ret) {
  1053. NV_ERROR(drm, "curs%04x allocation failed: %d\n",
  1054. curses[cid].oclass, ret);
  1055. return ret;
  1056. }
  1057. return 0;
  1058. }
  1059. /******************************************************************************
  1060. * Primary plane
  1061. *****************************************************************************/
  1062. #define nv50_base(p) container_of((p), struct nv50_base, wndw)
  1063. struct nv50_base {
  1064. struct nv50_wndw wndw;
  1065. struct nv50_sync chan;
  1066. int id;
  1067. };
  1068. static int
  1069. nv50_base_notify(struct nvif_notify *notify)
  1070. {
  1071. return NVIF_NOTIFY_KEEP;
  1072. }
  1073. static void
  1074. nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1075. {
  1076. struct nv50_base *base = nv50_base(wndw);
  1077. u32 *push;
  1078. if ((push = evo_wait(&base->chan, 2))) {
  1079. evo_mthd(push, 0x00e0, 1);
  1080. evo_data(push, asyw->lut.enable << 30);
  1081. evo_kick(push, &base->chan);
  1082. }
  1083. }
  1084. static void
  1085. nv50_base_image_clr(struct nv50_wndw *wndw)
  1086. {
  1087. struct nv50_base *base = nv50_base(wndw);
  1088. u32 *push;
  1089. if ((push = evo_wait(&base->chan, 4))) {
  1090. evo_mthd(push, 0x0084, 1);
  1091. evo_data(push, 0x00000000);
  1092. evo_mthd(push, 0x00c0, 1);
  1093. evo_data(push, 0x00000000);
  1094. evo_kick(push, &base->chan);
  1095. }
  1096. }
  1097. static void
  1098. nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1099. {
  1100. struct nv50_base *base = nv50_base(wndw);
  1101. const s32 oclass = base->chan.base.base.user.oclass;
  1102. u32 *push;
  1103. if ((push = evo_wait(&base->chan, 10))) {
  1104. evo_mthd(push, 0x0084, 1);
  1105. evo_data(push, (asyw->image.mode << 8) |
  1106. (asyw->image.interval << 4));
  1107. evo_mthd(push, 0x00c0, 1);
  1108. evo_data(push, asyw->image.handle);
  1109. if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
  1110. evo_mthd(push, 0x0800, 5);
  1111. evo_data(push, asyw->image.offset >> 8);
  1112. evo_data(push, 0x00000000);
  1113. evo_data(push, (asyw->image.h << 16) | asyw->image.w);
  1114. evo_data(push, (asyw->image.layout << 20) |
  1115. asyw->image.pitch |
  1116. asyw->image.block);
  1117. evo_data(push, (asyw->image.kind << 16) |
  1118. (asyw->image.format << 8));
  1119. } else
  1120. if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
  1121. evo_mthd(push, 0x0800, 5);
  1122. evo_data(push, asyw->image.offset >> 8);
  1123. evo_data(push, 0x00000000);
  1124. evo_data(push, (asyw->image.h << 16) | asyw->image.w);
  1125. evo_data(push, (asyw->image.layout << 20) |
  1126. asyw->image.pitch |
  1127. asyw->image.block);
  1128. evo_data(push, asyw->image.format << 8);
  1129. } else {
  1130. evo_mthd(push, 0x0400, 5);
  1131. evo_data(push, asyw->image.offset >> 8);
  1132. evo_data(push, 0x00000000);
  1133. evo_data(push, (asyw->image.h << 16) | asyw->image.w);
  1134. evo_data(push, (asyw->image.layout << 24) |
  1135. asyw->image.pitch |
  1136. asyw->image.block);
  1137. evo_data(push, asyw->image.format << 8);
  1138. }
  1139. evo_kick(push, &base->chan);
  1140. }
  1141. }
  1142. static void
  1143. nv50_base_ntfy_clr(struct nv50_wndw *wndw)
  1144. {
  1145. struct nv50_base *base = nv50_base(wndw);
  1146. u32 *push;
  1147. if ((push = evo_wait(&base->chan, 2))) {
  1148. evo_mthd(push, 0x00a4, 1);
  1149. evo_data(push, 0x00000000);
  1150. evo_kick(push, &base->chan);
  1151. }
  1152. }
  1153. static void
  1154. nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1155. {
  1156. struct nv50_base *base = nv50_base(wndw);
  1157. u32 *push;
  1158. if ((push = evo_wait(&base->chan, 3))) {
  1159. evo_mthd(push, 0x00a0, 2);
  1160. evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
  1161. evo_data(push, asyw->ntfy.handle);
  1162. evo_kick(push, &base->chan);
  1163. }
  1164. }
  1165. static void
  1166. nv50_base_sema_clr(struct nv50_wndw *wndw)
  1167. {
  1168. struct nv50_base *base = nv50_base(wndw);
  1169. u32 *push;
  1170. if ((push = evo_wait(&base->chan, 2))) {
  1171. evo_mthd(push, 0x0094, 1);
  1172. evo_data(push, 0x00000000);
  1173. evo_kick(push, &base->chan);
  1174. }
  1175. }
  1176. static void
  1177. nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1178. {
  1179. struct nv50_base *base = nv50_base(wndw);
  1180. u32 *push;
  1181. if ((push = evo_wait(&base->chan, 5))) {
  1182. evo_mthd(push, 0x0088, 4);
  1183. evo_data(push, asyw->sema.offset);
  1184. evo_data(push, asyw->sema.acquire);
  1185. evo_data(push, asyw->sema.release);
  1186. evo_data(push, asyw->sema.handle);
  1187. evo_kick(push, &base->chan);
  1188. }
  1189. }
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
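
/* Poll the flip notifier in the shared sync buffer until the hardware
 * flags the flip as begun ((status & 0xc0000000) == 0x40000000), giving
 * up after two seconds.
 */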
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}

static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
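
/* Validate plane state for a base window.  Scaling is not supported, so
 * the helper is asked to reject it outright, and the framebuffer format
 * is translated to the hardware surface format code.
 */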
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}

static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};

static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}

/******************************************************************************
 * Head
 *****************************************************************************/
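/* Per-head state is programmed through the core (master) channel.  The
 * method offsets are generation-dependent: pre-GF110 spaces heads 0x400
 * apart, GF110+ spaces them 0x300 apart, hence the class checks in each
 * helper below.
 */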
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}

static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
				asyh->dither.enable);
		evo_kick(push, core);
	}
}

static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}

static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
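
/* Flush pending head-state transitions to the hardware.  The clr
 * variant tears down state being disabled; "y" forces the teardown even
 * when the same state is about to be programmed again.  Note the LUT is
 * tied to the core layer, so both are keyed off clr.core/set.core.
 */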
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}

static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
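
/* Derive head state from connector properties.  Saturation and hue are
 * encoded as signed 12-bit sin/cos coefficients, scaled by 2047/100
 * from property values centred at 100 (vibrance) and 90 (hue).
 */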
static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}

static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;

	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}

	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}

	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}

static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;
	int umode_vdisplay, omode_hdisplay, omode_vdisplay;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	/* For the user-specified mode, we must ignore doublescan and
	 * the like, but honor frame packing.
	 */
	umode_vdisplay = umode->vdisplay;
	if ((umode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
		umode_vdisplay += umode->vtotal;
	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode_vdisplay;
	/* For the output mode, we can just use the stock helper. */
	drm_mode_get_hv_timing(omode, &omode_hdisplay, &omode_vdisplay);
	asyh->view.oW = omode_hdisplay;
	asyh->view.oH = omode_vdisplay;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode_vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}

static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct nv50_head_mode *m = &asyh->mode;
	u32 blankus;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V | CRTC_STEREO_DOUBLE);

	/*
	 * DRM modes are defined in terms of a repeating interval
	 * starting with the active display area.  The hardware modes
	 * are defined in terms of a repeating interval starting one
	 * unit (pixel or line) into the sync pulse.  So, add bias.
	 */
	m->h.active = mode->crtc_htotal;
	m->h.synce  = mode->crtc_hsync_end - mode->crtc_hsync_start - 1;
	m->h.blanke = mode->crtc_hblank_end - mode->crtc_hsync_start - 1;
	m->h.blanks = m->h.blanke + mode->crtc_hdisplay;

	m->v.active = mode->crtc_vtotal;
	m->v.synce  = mode->crtc_vsync_end - mode->crtc_vsync_start - 1;
	m->v.blanke = mode->crtc_vblank_end - mode->crtc_vsync_start - 1;
	m->v.blanks = m->v.blanke + mode->crtc_vdisplay;

	/*XXX: Safe underestimate, even "0" works */
	blankus = (m->v.active - mode->crtc_vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->crtc_clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e =  m->v.active + m->v.blanke;
		m->v.blank2s =  m->v.blank2e + mode->crtc_vdisplay;
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->crtc_clock;

	asyh->set.mode = true;
}
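
/* Master atomic check for a head: find the connector feeding this CRTC,
 * recompute mode/view/dither/procamp state as flagged, then work out
 * core and cursor visibility and the set/clr masks consumed by the
 * flush helpers above.
 */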
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_new_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}

		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->lut.handle = disp->mast.base.vram.handle;
		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->base.cpp = 0;
		asyh->ovly.cpp = 0;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}
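
/* Upload the 256-entry gamma ramp into the LUT buffer.  The entry
 * stride and bias differ by generation: 8-byte entries pre-GF110,
 * 32-byte entries with a 0x6000 bias on GF110 and up.
 */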
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	u16 *r, *g, *b;
	int i;

	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;

	for (i = 0; i < 256; i++) {
		if (disp->disp->oclass < GF110_DISP) {
			writew((*r++ >> 2) + 0x0000, lut + (i * 0x08) + 0);
			writew((*g++ >> 2) + 0x0000, lut + (i * 0x08) + 2);
			writew((*b++ >> 2) + 0x0000, lut + (i * 0x08) + 4);
		} else {
			/* 0x6000 interferes with the 14-bit color??? */
			writew((*r++ >> 2) + 0x6000, lut + (i * 0x20) + 0);
			writew((*g++ >> 2) + 0x6000, lut + (i * 0x20) + 2);
			writew((*b++ >> 2) + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.atomic_check = nv50_head_atomic_check,
};

static int
nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t size,
		    struct drm_modeset_acquire_ctx *ctx)
{
	nv50_head_lut_load(crtc);
	return 0;
}

static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}

static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}
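
/* Local helper to install fresh atomic state on a CRTC, freeing
 * whatever state was previously hanging off it.
 */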
static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}

static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};

static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(&drm->client, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}

/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}

static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}
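
/* Decide the adjusted mode with scaling in mind.  For LVDS/eDP, a mode
 * that didn't come from the EDID is routed through the panel's native
 * timings even when scaling is nominally off.
 */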
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;
			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;
			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;
			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}
		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
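
/* Sense for an analog load on the DAC.  The test value comes from the
 * VBIOS where available, with 340 as the fallback.
 */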
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
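
/* Power up HDMI with AVI/vendor infoframes and audio.  max_ac_packet is
 * derived from the hblank width minus the rekey value and an 18-pixel
 * allowance (constants shared with the binary driver and Tegra).
 */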
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	int ret;
	int size;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode,
						       false);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(disp->disp, 0, &args, size);
	nv50_audio_enable(encoder, mode);
}

/******************************************************************************
 * MST
 *****************************************************************************/
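/* MST object roles: nv50_mstm wraps the DP MST topology manager for one
 * physical output, nv50_mstc is the connector created for each MST
 * port, and nv50_msto is the per-head virtual encoder that drives it.
 */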
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;
	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];
	bool modified;
	bool disabled;
	int links;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;
	struct drm_display_mode *native;
	struct edid *edid;
	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;
	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};

static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}

static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}

static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
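
/* Check that the topology can carry this stream: convert pixel clock
 * and bpp into PBN units and ask the manager for a VCPI slot count.
 */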
  2539. static int
  2540. nv50_msto_atomic_check(struct drm_encoder *encoder,
  2541. struct drm_crtc_state *crtc_state,
  2542. struct drm_connector_state *conn_state)
  2543. {
  2544. struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
  2545. struct nv50_mstm *mstm = mstc->mstm;
  2546. int bpp = conn_state->connector->display_info.bpc * 3;
  2547. int slots;
  2548. mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);
  2549. slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
  2550. if (slots < 0)
  2551. return slots;
  2552. return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
  2553. mstc->native);
  2554. }
  2555. static void
  2556. nv50_msto_enable(struct drm_encoder *encoder)
  2557. {
  2558. struct nv50_head *head = nv50_head(encoder->crtc);
  2559. struct nv50_msto *msto = nv50_msto(encoder);
  2560. struct nv50_mstc *mstc = NULL;
  2561. struct nv50_mstm *mstm = NULL;
  2562. struct drm_connector *connector;
  2563. struct drm_connector_list_iter conn_iter;
  2564. u8 proto, depth;
  2565. int slots;
  2566. bool r;
  2567. drm_connector_list_iter_begin(encoder->dev, &conn_iter);
  2568. drm_for_each_connector_iter(connector, &conn_iter) {
  2569. if (connector->state->best_encoder == &msto->encoder) {
  2570. mstc = nv50_mstc(connector);
  2571. mstm = mstc->mstm;
  2572. break;
  2573. }
  2574. }
  2575. drm_connector_list_iter_end(&conn_iter);
  2576. if (WARN_ON(!mstc))
  2577. return;
  2578. slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
  2579. r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
  2580. WARN_ON(!r);
  2581. if (!mstm->links++)
  2582. nv50_outp_acquire(mstm->outp);
  2583. if (mstm->outp->link & 1)
  2584. proto = 0x8;
  2585. else
  2586. proto = 0x9;
  2587. switch (mstc->connector.display_info.bpc) {
  2588. case 6: depth = 0x2; break;
  2589. case 8: depth = 0x5; break;
  2590. case 10:
  2591. default: depth = 0x6; break;
  2592. }
  2593. mstm->outp->update(mstm->outp, head->base.index,
  2594. &head->base.base.state->adjusted_mode, proto, depth);
  2595. msto->head = head;
  2596. msto->mstc = mstc;
  2597. mstm->modified = true;
  2598. }
  2599. static void
  2600. nv50_msto_disable(struct drm_encoder *encoder)
  2601. {
  2602. struct nv50_msto *msto = nv50_msto(encoder);
  2603. struct nv50_mstc *mstc = msto->mstc;
  2604. struct nv50_mstm *mstm = mstc->mstm;
  2605. if (mstc->port)
  2606. drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
  2607. mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
  2608. mstm->modified = true;
  2609. if (!--mstm->links)
  2610. mstm->disabled = true;
  2611. msto->disabled = true;
  2612. }
  2613. static const struct drm_encoder_helper_funcs
  2614. nv50_msto_help = {
  2615. .disable = nv50_msto_disable,
  2616. .enable = nv50_msto_enable,
  2617. .atomic_check = nv50_msto_atomic_check,
  2618. };
  2619. static void
  2620. nv50_msto_destroy(struct drm_encoder *encoder)
  2621. {
  2622. struct nv50_msto *msto = nv50_msto(encoder);
  2623. drm_encoder_cleanup(&msto->encoder);
  2624. kfree(msto);
  2625. }
  2626. static const struct drm_encoder_funcs
  2627. nv50_msto = {
  2628. .destroy = nv50_msto_destroy,
  2629. };
  2630. static int
  2631. nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
  2632. struct nv50_msto **pmsto)
  2633. {
  2634. struct nv50_msto *msto;
  2635. int ret;
  2636. if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
  2637. return -ENOMEM;
  2638. ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
  2639. DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
  2640. if (ret) {
  2641. kfree(*pmsto);
  2642. *pmsto = NULL;
  2643. return ret;
  2644. }
  2645. drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
  2646. msto->encoder.possible_crtcs = heads;
  2647. return 0;
  2648. }
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
                              struct drm_connector_state *connector_state)
{
        struct nv50_head *head = nv50_head(connector_state->crtc);
        struct nv50_mstc *mstc = nv50_mstc(connector);
        if (mstc->port) {
                struct nv50_mstm *mstm = mstc->mstm;
                return &mstm->msto[head->base.index]->encoder;
        }
        return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
        struct nv50_mstc *mstc = nv50_mstc(connector);
        if (mstc->port) {
                struct nv50_mstm *mstm = mstc->mstm;
                return &mstm->msto[0]->encoder;
        }
        return NULL;
}

static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
                     struct drm_display_mode *mode)
{
        return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
        struct nv50_mstc *mstc = nv50_mstc(connector);
        int ret = 0;

        mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
        drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
        if (mstc->edid) {
                ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
                drm_edid_to_eld(&mstc->connector, mstc->edid);
        }

        if (!mstc->connector.display_info.bpc)
                mstc->connector.display_info.bpc = 8;

        if (mstc->native)
                drm_mode_destroy(mstc->connector.dev, mstc->native);
        mstc->native = nouveau_conn_native_mode(&mstc->connector);
        return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
        .get_modes = nv50_mstc_get_modes,
        .mode_valid = nv50_mstc_mode_valid,
        .best_encoder = nv50_mstc_best_encoder,
        .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};

static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
        struct nv50_mstc *mstc = nv50_mstc(connector);
        if (!mstc->port)
                return connector_status_disconnected;
        return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
        struct nv50_mstc *mstc = nv50_mstc(connector);
        drm_connector_cleanup(&mstc->connector);
        kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
        .reset = nouveau_conn_reset,
        .detect = nv50_mstc_detect,
        .fill_modes = drm_helper_probe_single_connector_modes,
        .destroy = nv50_mstc_destroy,
        .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
        .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
        .atomic_set_property = nouveau_conn_atomic_set_property,
        .atomic_get_property = nouveau_conn_atomic_get_property,
};

static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
              const char *path, struct nv50_mstc **pmstc)
{
        struct drm_device *dev = mstm->outp->base.base.dev;
        struct nv50_mstc *mstc;
        int ret, i;

        if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
                return -ENOMEM;
        mstc->mstm = mstm;
        mstc->port = port;

        ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
                                 DRM_MODE_CONNECTOR_DisplayPort);
        if (ret) {
                kfree(*pmstc);
                *pmstc = NULL;
                return ret;
        }

        drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

        mstc->connector.funcs->reset(&mstc->connector);
        nouveau_conn_attach_properties(&mstc->connector);

        for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
                drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

        drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
        drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
        drm_mode_connector_set_path_property(&mstc->connector, path);
        return 0;
}

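/* MST payload bookkeeping is split across the commit: _prepare() sends the
 * part-1 payload-table updates before the core channel update is kicked,
 * and _cleanup() waits for the allocation change trigger (ACT) and sends
 * the part-2 updates afterwards, i.e. the standard
 * drm_dp_update_payload_part1()/part2() sequence.
 */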
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
        struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
        struct drm_encoder *encoder;
        int ret;

        NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
        ret = drm_dp_check_act_status(&mstm->mgr);

        ret = drm_dp_update_payload_part2(&mstm->mgr);

        drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
                if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
                        struct nv50_msto *msto = nv50_msto(encoder);
                        struct nv50_mstc *mstc = msto->mstc;
                        if (mstc && mstc->mstm == mstm)
                                nv50_msto_cleanup(msto);
                }
        }

        mstm->modified = false;
}

static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
        struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
        struct drm_encoder *encoder;
        int ret;

        NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
        ret = drm_dp_update_payload_part1(&mstm->mgr);

        drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
                if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
                        struct nv50_msto *msto = nv50_msto(encoder);
                        struct nv50_mstc *mstc = msto->mstc;
                        if (mstc && mstc->mstm == mstm)
                                nv50_msto_prepare(msto);
                }
        }

        if (mstm->disabled) {
                if (!mstm->links)
                        nv50_outp_release(mstm->outp);
                mstm->disabled = false;
        }
}

static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
        struct nv50_mstm *mstm = nv50_mstm(mgr);
        drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}

static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
                            struct drm_connector *connector)
{
        struct nouveau_drm *drm = nouveau_drm(connector->dev);
        struct nv50_mstc *mstc = nv50_mstc(connector);

        drm_connector_unregister(&mstc->connector);

        drm_modeset_lock_all(drm->dev);
        drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
        mstc->port = NULL;
        drm_modeset_unlock_all(drm->dev);

        drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
        struct nouveau_drm *drm = nouveau_drm(connector->dev);

        drm_modeset_lock_all(drm->dev);
        drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
        drm_modeset_unlock_all(drm->dev);

        drm_connector_register(connector);
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
                        struct drm_dp_mst_port *port, const char *path)
{
        struct nv50_mstm *mstm = nv50_mstm(mgr);
        struct nv50_mstc *mstc;
        int ret;

        ret = nv50_mstc_new(mstm, port, path, &mstc);
        if (ret) {
                if (mstc)
                        mstc->connector.funcs->destroy(&mstc->connector);
                return NULL;
        }

        return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
        .add_connector = nv50_mstm_add_connector,
        .register_connector = nv50_mstm_register_connector,
        .destroy_connector = nv50_mstm_destroy_connector,
        .hotplug = nv50_mstm_hotplug,
};

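/* Service an MST interrupt: read the ESI (event/sink-IRQ) bytes, let the
 * topology manager process them, and acknowledge by writing the serviced
 * status bytes back.  If the sink stops responding to the 8-byte read,
 * MST mode is simply torn down.
 */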
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
        struct drm_dp_aux *aux = mstm->mgr.aux;
        bool handled = true;
        int ret;
        u8 esi[8] = {};

        while (handled) {
                ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
                if (ret != 8) {
                        drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
                        return;
                }

                drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
                if (!handled)
                        break;

                drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
        }
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
        if (mstm)
                drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}

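/* Flip the sink's DP_MST_EN bit (only meaningful on DPCD 1.2+ sinks), then
 * tell the hardware via the SOR_DP_MST_LINK method whether the link is
 * carrying MST or SST symbols.
 */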
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
        struct nouveau_encoder *outp = mstm->outp;
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_sor_dp_mst_link_v0 mst;
        } args = {
                .base.version = 1,
                .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
                .base.hasht = outp->dcb->hasht,
                .base.hashm = outp->dcb->hashm,
                .mst.state = state,
        };
        struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
        struct nvif_object *disp = &drm->display->disp;
        int ret;

        if (dpcd >= 0x12) {
                ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
                if (ret < 0)
                        return ret;

                dpcd &= ~DP_MST_EN;
                if (state)
                        dpcd |= DP_MST_EN;

                ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
                if (ret < 0)
                        return ret;
        }

        return nvif_mthd(disp, 0, &args, sizeof(args));
}

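/* Probe for MST support and enable/disable the topology manager to match.
 * A rough sketch of the expected usage (hypothetical caller, shown for
 * illustration only):
 *
 *      u8 dpcd[8];
 *
 *      drm_dp_dpcd_read(aux, DP_DPCD_REV, dpcd, sizeof(dpcd));
 *      ret = nv50_mstm_detect(nv_encoder->dp.mstm, dpcd, allow_mst);
 *
 * dpcd[0] must hold the DPCD revision; on a 1.2+ sink DP_MSTM_CAP is read
 * into dpcd[1], and dpcd[0] is downgraded to 0x11 when MST is absent.
 */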
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
        int ret, state = 0;

        if (!mstm)
                return 0;

        if (dpcd[0] >= 0x12) {
                ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
                if (ret < 0)
                        return ret;

                if (!(dpcd[1] & DP_MST_CAP))
                        dpcd[0] = 0x11;
                else
                        state = allow;
        }

        ret = nv50_mstm_enable(mstm, dpcd[0], state);
        if (ret)
                return ret;

        ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
        if (ret)
                return nv50_mstm_enable(mstm, dpcd[0], 0);

        return mstm->mgr.mst_state;
}

static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
        if (mstm && mstm->mgr.mst_state)
                drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
        if (mstm && mstm->mgr.mst_state)
                drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
        struct nv50_mstm *mstm = *pmstm;
        if (mstm) {
                kfree(*pmstm);
                *pmstm = NULL;
        }
}

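/* One MSTO encoder is created per head this OR can drive (max_payloads is
 * the popcount of the DCB heads mask), so any head can later be bound to
 * any MST stream.
 */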
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
              int conn_base_id, struct nv50_mstm **pmstm)
{
        const int max_payloads = hweight8(outp->dcb->heads);
        struct drm_device *dev = outp->base.base.dev;
        struct nv50_mstm *mstm;
        int ret, i;
        u8 dpcd;

        /* This is a workaround for some monitors not functioning
         * correctly in MST mode on initial module load.  I think
         * some bad interaction with the VBIOS may be responsible.
         *
         * A good ol' off and on again seems to work here ;)
         */
        ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
        if (ret >= 0 && dpcd >= 0x12)
                drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

        if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
                return -ENOMEM;
        mstm->outp = outp;
        mstm->mgr.cbs = &nv50_mstm;

        ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
                                           max_payloads, conn_base_id);
        if (ret)
                return ret;

        for (i = 0; i < max_payloads; i++) {
                ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
                                    i, &mstm->msto[i]);
                if (ret)
                        return ret;
        }

        return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
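/* Program the SOR control word.  nv_encoder->ctrl accumulates the
 * owner-head mask in its low bits and the protocol in bits 8+; pre-GF110
 * cores fold sync polarity and depth into the same word (method 0x0600),
 * while GF110+ programs those per-head (method 0x0404) and writes the
 * control word via method 0x0200.
 */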
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
                struct drm_display_mode *mode, u8 proto, u8 depth)
{
        struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
        u32 *push;

        if (!mode) {
                nv_encoder->ctrl &= ~BIT(head);
                if (!(nv_encoder->ctrl & 0x0000000f))
                        nv_encoder->ctrl = 0;
        } else {
                nv_encoder->ctrl |= proto << 8;
                nv_encoder->ctrl |= BIT(head);
        }

        if ((push = evo_wait(core, 6))) {
                if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
                        if (mode) {
                                if (mode->flags & DRM_MODE_FLAG_NHSYNC)
                                        nv_encoder->ctrl |= 0x00001000;
                                if (mode->flags & DRM_MODE_FLAG_NVSYNC)
                                        nv_encoder->ctrl |= 0x00002000;
                                nv_encoder->ctrl |= depth << 16;
                        }
                        evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
                } else {
                        if (mode) {
                                u32 magic = 0x31ec6000 | (head << 25);
                                u32 syncs = 0x00000001;
                                if (mode->flags & DRM_MODE_FLAG_NHSYNC)
                                        syncs |= 0x00000008;
                                if (mode->flags & DRM_MODE_FLAG_NVSYNC)
                                        syncs |= 0x00000010;
                                if (mode->flags & DRM_MODE_FLAG_INTERLACE)
                                        magic |= 0x00000001;

                                evo_mthd(push, 0x0404 + (head * 0x300), 2);
                                evo_data(push, syncs | (depth << 6));
                                evo_data(push, magic);
                        }
                        evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
                }
                evo_data(push, nv_encoder->ctrl);
                evo_kick(push, core);
        }
}

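/* On disable, a DP sink is additionally put into D3 by hand with a
 * read-modify-write of DP_SET_POWER over AUX before the OR is released.
 */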
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

        nv_encoder->crtc = NULL;

        if (nv_crtc) {
                struct nvkm_i2c_aux *aux = nv_encoder->aux;
                u8 pwr;

                if (aux) {
                        int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
                        if (ret == 0) {
                                pwr &= ~DP_SET_POWER_MASK;
                                pwr |=  DP_SET_POWER_D3;
                                nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
                        }
                }

                nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
                nv50_audio_disable(encoder, nv_crtc);
                nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
                nv50_outp_release(nv_encoder);
        }
}

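/* Pick the SOR protocol from the DCB output type: TMDS single vs. dual
 * link (dual only when the pixel clock demands it, the DCB allows it, and
 * the sink isn't HDMI), LVDS with a script selected from VBIOS data and
 * EDID, or DP with the depth derived from the connector's bpc.
 */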
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
        struct {
                struct nv50_disp_mthd_v1 base;
                struct nv50_disp_sor_lvds_script_v0 lvds;
        } lvds = {
                .base.version = 1,
                .base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
                .base.hasht   = nv_encoder->dcb->hasht,
                .base.hashm   = nv_encoder->dcb->hashm,
        };
        struct nv50_disp *disp = nv50_disp(encoder->dev);
        struct drm_device *dev = encoder->dev;
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nouveau_connector *nv_connector;
        struct nvbios *bios = &drm->vbios;
        u8 proto = 0xf;
        u8 depth = 0x0;

        nv_connector = nouveau_encoder_connector_get(nv_encoder);
        nv_encoder->crtc = encoder->crtc;
        nv50_outp_acquire(nv_encoder);

        switch (nv_encoder->dcb->type) {
        case DCB_OUTPUT_TMDS:
                if (nv_encoder->link & 1) {
                        proto = 0x1;
                        /* Only enable dual-link if:
                         *  - Need to (i.e. rate >= 165MHz)
                         *  - DCB says we can
                         *  - Not an HDMI monitor, since there's no dual-link
                         *    on HDMI.
                         */
                        if (mode->clock >= 165000 &&
                            nv_encoder->dcb->duallink_possible &&
                            !drm_detect_hdmi_monitor(nv_connector->edid))
                                proto |= 0x4;
                } else {
                        proto = 0x2;
                }

                nv50_hdmi_enable(&nv_encoder->base.base, mode);
                break;
        case DCB_OUTPUT_LVDS:
                proto = 0x0;

                if (bios->fp_no_ddc) {
                        if (bios->fp.dual_link)
                                lvds.lvds.script |= 0x0100;
                        if (bios->fp.if_is_24bit)
                                lvds.lvds.script |= 0x0200;
                } else {
                        if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
                                if (((u8 *)nv_connector->edid)[121] == 2)
                                        lvds.lvds.script |= 0x0100;
                        } else
                        if (mode->clock >= bios->fp.duallink_transition_clk) {
                                lvds.lvds.script |= 0x0100;
                        }

                        if (lvds.lvds.script & 0x0100) {
                                if (bios->fp.strapless_is_24bit & 2)
                                        lvds.lvds.script |= 0x0200;
                        } else {
                                if (bios->fp.strapless_is_24bit & 1)
                                        lvds.lvds.script |= 0x0200;
                        }

                        if (nv_connector->base.display_info.bpc == 8)
                                lvds.lvds.script |= 0x0200;
                }

                nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
                break;
        case DCB_OUTPUT_DP:
                if (nv_connector->base.display_info.bpc == 6)
                        depth = 0x2;
                else
                if (nv_connector->base.display_info.bpc == 8)
                        depth = 0x5;
                else
                        depth = 0x6;

                if (nv_encoder->link & 1)
                        proto = 0x8;
                else
                        proto = 0x9;

                nv50_audio_enable(encoder, mode);
                break;
        default:
                BUG();
                break;
        }

        nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}

static const struct drm_encoder_helper_funcs
nv50_sor_help = {
        .atomic_check = nv50_outp_atomic_check,
        .enable = nv50_sor_enable,
        .disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        nv50_mstm_del(&nv_encoder->dp.mstm);
        drm_encoder_cleanup(encoder);
        kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
        .destroy = nv50_sor_destroy,
};

static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
        struct nouveau_connector *nv_connector = nouveau_connector(connector);
        struct nouveau_drm *drm = nouveau_drm(connector->dev);
        struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
        struct nouveau_encoder *nv_encoder;
        struct drm_encoder *encoder;
        int type, ret;

        switch (dcbe->type) {
        case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
        case DCB_OUTPUT_TMDS:
        case DCB_OUTPUT_DP:
        default:
                type = DRM_MODE_ENCODER_TMDS;
                break;
        }

        nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
        if (!nv_encoder)
                return -ENOMEM;
        nv_encoder->dcb = dcbe;
        nv_encoder->update = nv50_sor_update;

        encoder = to_drm_encoder(nv_encoder);
        encoder->possible_crtcs = dcbe->heads;
        encoder->possible_clones = 0;
        drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
                         "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
        drm_encoder_helper_add(encoder, &nv50_sor_help);

        drm_mode_connector_attach_encoder(connector, encoder);

        if (dcbe->type == DCB_OUTPUT_DP) {
                struct nv50_disp *disp = nv50_disp(encoder->dev);
                struct nvkm_i2c_aux *aux =
                        nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
                if (aux) {
                        if (disp->disp->oclass < GF110_DISP) {
                                /* HW has no support for address-only
                                 * transactions, so we're required to
                                 * use custom I2C-over-AUX code.
                                 */
                                nv_encoder->i2c = &aux->i2c;
                        } else {
                                nv_encoder->i2c = &nv_connector->aux.ddc;
                        }
                        nv_encoder->aux = aux;
                }

                /*TODO: Use DP Info Table to check for support. */
                if (disp->disp->oclass >= GF110_DISP) {
                        ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
                                            nv_connector->base.base.id,
                                            &nv_encoder->dp.mstm);
                        if (ret)
                                return ret;
                }
        } else {
                struct nvkm_i2c_bus *bus =
                        nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
                if (bus)
                        nv_encoder->i2c = &bus->i2c;
        }

        return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/
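/* PIORs drive external encoders.  The pixel clock is doubled in
 * atomic_check below, presumably because the external link runs at twice
 * the pixel rate; treat that rationale as informed guesswork, the hardware
 * requirement itself is just the fixed *= 2.
 */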
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
                       struct drm_crtc_state *crtc_state,
                       struct drm_connector_state *conn_state)
{
        int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
        if (ret)
                return ret;
        crtc_state->adjusted_mode.clock *= 2;
        return 0;
}

static void
nv50_pior_disable(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nv50_mast *mast = nv50_mast(encoder->dev);
        const int or = nv_encoder->or;
        u32 *push;

        if (nv_encoder->crtc) {
                push = evo_wait(mast, 4);
                if (push) {
                        if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
                                evo_mthd(push, 0x0700 + (or * 0x040), 1);
                                evo_data(push, 0x00000000);
                        }
                        evo_kick(push, mast);
                }
        }

        nv_encoder->crtc = NULL;
        nv50_outp_release(nv_encoder);
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
        struct nv50_mast *mast = nv50_mast(encoder->dev);
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        struct nouveau_connector *nv_connector;
        struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
        u8 owner = 1 << nv_crtc->index;
        u8 proto, depth;
        u32 *push;

        nv50_outp_acquire(nv_encoder);

        nv_connector = nouveau_encoder_connector_get(nv_encoder);
        switch (nv_connector->base.display_info.bpc) {
        case 10: depth = 0x6; break;
        case  8: depth = 0x5; break;
        case  6: depth = 0x2; break;
        default: depth = 0x0; break;
        }

        switch (nv_encoder->dcb->type) {
        case DCB_OUTPUT_TMDS:
        case DCB_OUTPUT_DP:
                proto = 0x0;
                break;
        default:
                BUG();
                break;
        }

        push = evo_wait(mast, 8);
        if (push) {
                if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
                        u32 ctrl = (depth << 16) | (proto << 8) | owner;
                        if (mode->flags & DRM_MODE_FLAG_NHSYNC)
                                ctrl |= 0x00001000;
                        if (mode->flags & DRM_MODE_FLAG_NVSYNC)
                                ctrl |= 0x00002000;
                        evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
                        evo_data(push, ctrl);
                }

                evo_kick(push, mast);
        }

        nv_encoder->crtc = encoder->crtc;
}

static const struct drm_encoder_helper_funcs
nv50_pior_help = {
        .atomic_check = nv50_pior_atomic_check,
        .enable = nv50_pior_enable,
        .disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
        drm_encoder_cleanup(encoder);
        kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
        .destroy = nv50_pior_destroy,
};

static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
        struct nouveau_connector *nv_connector = nouveau_connector(connector);
        struct nouveau_drm *drm = nouveau_drm(connector->dev);
        struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
        struct nvkm_i2c_bus *bus = NULL;
        struct nvkm_i2c_aux *aux = NULL;
        struct i2c_adapter *ddc;
        struct nouveau_encoder *nv_encoder;
        struct drm_encoder *encoder;
        int type;

        switch (dcbe->type) {
        case DCB_OUTPUT_TMDS:
                bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
                ddc  = bus ? &bus->i2c : NULL;
                type = DRM_MODE_ENCODER_TMDS;
                break;
        case DCB_OUTPUT_DP:
                aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
                ddc  = aux ? &nv_connector->aux.ddc : NULL;
                type = DRM_MODE_ENCODER_TMDS;
                break;
        default:
                return -ENODEV;
        }

        nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
        if (!nv_encoder)
                return -ENOMEM;
        nv_encoder->dcb = dcbe;
        nv_encoder->i2c = ddc;
        nv_encoder->aux = aux;

        encoder = to_drm_encoder(nv_encoder);
        encoder->possible_crtcs = dcbe->heads;
        encoder->possible_clones = 0;
        drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
                         "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
        drm_encoder_helper_add(encoder, &nv50_pior_help);

        drm_mode_connector_attach_encoder(connector, encoder);
        return 0;
}

/******************************************************************************
 * Atomic
 *****************************************************************************/
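/* Kick a core channel update.  Any modified MST topologies get their
 * payload tables prepared first, then the update method (0x0080) is
 * submitted with a completion notifier and the driver polls the sync
 * buffer for up to two seconds before reporting an EVO timeout.  MST
 * cleanup (ACT wait, part-2 payloads) runs once the update has landed.
 */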
static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
        struct nv50_disp *disp = nv50_disp(drm->dev);
        struct nv50_dmac *core = &disp->mast.base;
        struct nv50_mstm *mstm;
        struct drm_encoder *encoder;
        u32 *push;

        NV_ATOMIC(drm, "commit core %08x\n", interlock);

        drm_for_each_encoder(encoder, drm->dev) {
                if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
                        mstm = nouveau_encoder(encoder)->dp.mstm;
                        if (mstm && mstm->modified)
                                nv50_mstm_prepare(mstm);
                }
        }

        if ((push = evo_wait(core, 5))) {
                evo_mthd(push, 0x0084, 1);
                evo_data(push, 0x80000000);
                evo_mthd(push, 0x0080, 2);
                evo_data(push, interlock);
                evo_data(push, 0x00000000);
                nouveau_bo_wr32(disp->sync, 0, 0x00000000);
                evo_kick(push, core);
                if (nvif_msec(&drm->client.device, 2000ULL,
                        if (nouveau_bo_rd32(disp->sync, 0))
                                break;
                        usleep_range(1, 2);
                ) < 0)
                        NV_ERROR(drm, "EVO timeout\n");
        }

        drm_for_each_encoder(encoder, drm->dev) {
                if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
                        mstm = nouveau_encoder(encoder)->dp.mstm;
                        if (mstm && mstm->modified)
                                nv50_mstm_cleanup(mstm);
                }
        }
}

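/* Commit-tail ordering: disable heads, planes and output paths first
 * (flushing immediately where an MST encoder requires it), then enable
 * the new output paths, heads and planes, and finally submit either a
 * lightweight kick for legacy cursor updates or a full core update.
 * interlock_core/interlock_chan track whether the core and satellite
 * channels need to be interlocked with that update.
 */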
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
        struct drm_device *dev = state->dev;
        struct drm_crtc_state *new_crtc_state, *old_crtc_state;
        struct drm_crtc *crtc;
        struct drm_plane_state *new_plane_state;
        struct drm_plane *plane;
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nv50_disp *disp = nv50_disp(dev);
        struct nv50_atom *atom = nv50_atom(state);
        struct nv50_outp_atom *outp, *outt;
        u32 interlock_core = 0;
        u32 interlock_chan = 0;
        int i;

        NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
        drm_atomic_helper_wait_for_fences(dev, state, false);
        drm_atomic_helper_wait_for_dependencies(state);
        drm_atomic_helper_update_legacy_modeset_state(dev, state);

        if (atom->lock_core)
                mutex_lock(&disp->mutex);

        /* Disable head(s). */
        for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
                struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
                struct nv50_head *head = nv50_head(crtc);

                NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
                          asyh->clr.mask, asyh->set.mask);
                if (old_crtc_state->active && !new_crtc_state->active)
                        drm_crtc_vblank_off(crtc);

                if (asyh->clr.mask) {
                        nv50_head_flush_clr(head, asyh, atom->flush_disable);
                        interlock_core |= 1;
                }
        }

        /* Disable plane(s). */
        for_each_new_plane_in_state(state, plane, new_plane_state, i) {
                struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
                struct nv50_wndw *wndw = nv50_wndw(plane);

                NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
                          asyw->clr.mask, asyw->set.mask);
                if (!asyw->clr.mask)
                        continue;

                interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
                                                      atom->flush_disable,
                                                      asyw);
        }

        /* Disable output path(s). */
        list_for_each_entry(outp, &atom->outp, head) {
                const struct drm_encoder_helper_funcs *help;
                struct drm_encoder *encoder;

                encoder = outp->encoder;
                help = encoder->helper_private;

                NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
                          outp->clr.mask, outp->set.mask);

                if (outp->clr.mask) {
                        help->disable(encoder);
                        interlock_core |= 1;
                        if (outp->flush_disable) {
                                nv50_disp_atomic_commit_core(drm, interlock_chan);
                                interlock_core = 0;
                                interlock_chan = 0;
                        }
                }
        }

        /* Flush disable. */
        if (interlock_core) {
                if (atom->flush_disable) {
                        nv50_disp_atomic_commit_core(drm, interlock_chan);
                        interlock_core = 0;
                        interlock_chan = 0;
                }
        }

        /* Update output path(s). */
        list_for_each_entry_safe(outp, outt, &atom->outp, head) {
                const struct drm_encoder_helper_funcs *help;
                struct drm_encoder *encoder;

                encoder = outp->encoder;
                help = encoder->helper_private;

                NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
                          outp->set.mask, outp->clr.mask);

                if (outp->set.mask) {
                        help->enable(encoder);
                        interlock_core = 1;
                }

                list_del(&outp->head);
                kfree(outp);
        }

        /* Update head(s). */
        for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
                struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
                struct nv50_head *head = nv50_head(crtc);

                NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
                          asyh->set.mask, asyh->clr.mask);

                if (asyh->set.mask) {
                        nv50_head_flush_set(head, asyh);
                        interlock_core = 1;
                }

                if (new_crtc_state->active) {
                        if (!old_crtc_state->active)
                                drm_crtc_vblank_on(crtc);
                        if (new_crtc_state->event)
                                drm_crtc_vblank_get(crtc);
                }
        }

        /* Update plane(s). */
        for_each_new_plane_in_state(state, plane, new_plane_state, i) {
                struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
                struct nv50_wndw *wndw = nv50_wndw(plane);

                NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
                          asyw->set.mask, asyw->clr.mask);
                if (!asyw->set.mask &&
                    (!asyw->clr.mask || atom->flush_disable))
                        continue;

                interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
        }

        /* Flush update. */
        if (interlock_core) {
                if (!interlock_chan && atom->state.legacy_cursor_update) {
                        u32 *push = evo_wait(&disp->mast, 2);
                        if (push) {
                                evo_mthd(push, 0x0080, 1);
                                evo_data(push, 0x00000000);
                                evo_kick(push, &disp->mast);
                        }
                } else {
                        nv50_disp_atomic_commit_core(drm, interlock_chan);
                }
        }

        if (atom->lock_core)
                mutex_unlock(&disp->mutex);

        /* Wait for HW to signal completion. */
        for_each_new_plane_in_state(state, plane, new_plane_state, i) {
                struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
                struct nv50_wndw *wndw = nv50_wndw(plane);
                int ret = nv50_wndw_wait_armed(wndw, asyw);
                if (ret)
                        NV_ERROR(drm, "%s: timeout\n", plane->name);
        }

        for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
                if (new_crtc_state->event) {
                        unsigned long flags;
                        /* Get correct count/ts if racing with vblank irq */
                        if (new_crtc_state->active)
                                drm_crtc_accurate_vblank_count(crtc);

                        spin_lock_irqsave(&crtc->dev->event_lock, flags);
                        drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
                        spin_unlock_irqrestore(&crtc->dev->event_lock, flags);

                        new_crtc_state->event = NULL;
                        if (new_crtc_state->active)
                                drm_crtc_vblank_put(crtc);
                }
        }

        drm_atomic_helper_commit_hw_done(state);
        drm_atomic_helper_cleanup_planes(dev, state);
        drm_atomic_helper_commit_cleanup_done(state);
        drm_atomic_state_put(state);
}

static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
        struct drm_atomic_state *state =
                container_of(work, typeof(*state), commit_work);
        nv50_disp_atomic_commit_tail(state);
}

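/* Atomic commit entry point.  A runtime-PM reference is taken for as long
 * as any head remains enabled (released again when the last head goes
 * away), planes are prepared, state is swapped, and the tail runs either
 * inline or from the nonblocking commit worker above.
 */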
static int
nv50_disp_atomic_commit(struct drm_device *dev,
                        struct drm_atomic_state *state, bool nonblock)
{
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct nv50_disp *disp = nv50_disp(dev);
        struct drm_plane_state *old_plane_state;
        struct drm_plane *plane;
        struct drm_crtc *crtc;
        bool active = false;
        int ret, i;

        ret = pm_runtime_get_sync(dev->dev);
        if (ret < 0 && ret != -EACCES)
                return ret;

        ret = drm_atomic_helper_setup_commit(state, nonblock);
        if (ret)
                goto done;

        INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

        ret = drm_atomic_helper_prepare_planes(dev, state);
        if (ret)
                goto done;

        if (!nonblock) {
                ret = drm_atomic_helper_wait_for_fences(dev, state, true);
                if (ret)
                        goto err_cleanup;
        }

        ret = drm_atomic_helper_swap_state(state, true);
        if (ret)
                goto err_cleanup;

        for_each_old_plane_in_state(state, plane, old_plane_state, i) {
                struct nv50_wndw_atom *asyw = nv50_wndw_atom(old_plane_state);
                struct nv50_wndw *wndw = nv50_wndw(plane);

                if (asyw->set.image) {
                        asyw->ntfy.handle = wndw->dmac->sync.handle;
                        asyw->ntfy.offset = wndw->ntfy;
                        asyw->ntfy.awaken = false;
                        asyw->set.ntfy = true;
                        nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
                        wndw->ntfy ^= 0x10;
                }
        }

        drm_atomic_state_get(state);

        if (nonblock)
                queue_work(system_unbound_wq, &state->commit_work);
        else
                nv50_disp_atomic_commit_tail(state);

        drm_for_each_crtc(crtc, dev) {
                if (crtc->state->enable) {
                        if (!drm->have_disp_power_ref) {
                                drm->have_disp_power_ref = true;
                                return 0;
                        }
                        active = true;
                        break;
                }
        }

        if (!active && drm->have_disp_power_ref) {
                pm_runtime_put_autosuspend(dev->dev);
                drm->have_disp_power_ref = false;
        }

err_cleanup:
        if (ret)
                drm_atomic_helper_cleanup_planes(dev, state);
done:
        pm_runtime_put_autosuspend(dev->dev);
        return ret;
}

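/* Find or create the per-encoder bookkeeping used to track which output
 * paths an atomic state touches; entries are consumed and freed by the
 * commit tail (or by atomic_state_clear() if the commit never happens).
 */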
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
        struct nv50_outp_atom *outp;

        list_for_each_entry(outp, &atom->outp, head) {
                if (outp->encoder == encoder)
                        return outp;
        }

        outp = kzalloc(sizeof(*outp), GFP_KERNEL);
        if (!outp)
                return ERR_PTR(-ENOMEM);

        list_add(&outp->head, &atom->outp);
        outp->encoder = encoder;
        return outp;
}

static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
                                struct drm_connector_state *old_connector_state)
{
        struct drm_encoder *encoder = old_connector_state->best_encoder;
        struct drm_crtc_state *old_crtc_state, *new_crtc_state;
        struct drm_crtc *crtc;
        struct nv50_outp_atom *outp;

        if (!(crtc = old_connector_state->crtc))
                return 0;

        old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
        new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
        if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
                outp = nv50_disp_outp_atomic_add(atom, encoder);
                if (IS_ERR(outp))
                        return PTR_ERR(outp);

                if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
                        outp->flush_disable = true;
                        atom->flush_disable = true;
                }
                outp->clr.ctrl = true;
                atom->lock_core = true;
        }

        return 0;
}

static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
                                struct drm_connector_state *connector_state)
{
        struct drm_encoder *encoder = connector_state->best_encoder;
        struct drm_crtc_state *new_crtc_state;
        struct drm_crtc *crtc;
        struct nv50_outp_atom *outp;

        if (!(crtc = connector_state->crtc))
                return 0;

        new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
        if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
                outp = nv50_disp_outp_atomic_add(atom, encoder);
                if (IS_ERR(outp))
                        return PTR_ERR(outp);

                outp->set.ctrl = true;
                atom->lock_core = true;
        }

        return 0;
}

static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
        struct nv50_atom *atom = nv50_atom(state);
        struct drm_connector_state *old_connector_state, *new_connector_state;
        struct drm_connector *connector;
        int ret, i;

        ret = drm_atomic_helper_check(dev, state);
        if (ret)
                return ret;

        for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
                ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
                if (ret)
                        return ret;

                ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
                if (ret)
                        return ret;
        }

        return 0;
}

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
        struct nv50_atom *atom = nv50_atom(state);
        struct nv50_outp_atom *outp, *outt;

        list_for_each_entry_safe(outp, outt, &atom->outp, head) {
                list_del(&outp->head);
                kfree(outp);
        }

        drm_atomic_state_default_clear(state);
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
        struct nv50_atom *atom = nv50_atom(state);
        drm_atomic_state_default_release(&atom->state);
        kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
        struct nv50_atom *atom;
        if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
            drm_atomic_state_init(dev, &atom->state) < 0) {
                kfree(atom);
                return NULL;
        }
        INIT_LIST_HEAD(&atom->outp);
        return &atom->state;
}

static const struct drm_mode_config_funcs
nv50_disp_func = {
        .fb_create = nouveau_user_framebuffer_create,
        .output_poll_changed = nouveau_fbcon_output_poll_changed,
        .atomic_check = nv50_disp_atomic_check,
        .atomic_commit = nv50_disp_atomic_commit,
        .atomic_state_alloc = nv50_disp_atomic_state_alloc,
        .atomic_state_clear = nv50_disp_atomic_state_clear,
        .atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/
void
nv50_display_fini(struct drm_device *dev)
{
        struct nouveau_encoder *nv_encoder;
        struct drm_encoder *encoder;
        struct drm_plane *plane;

        drm_for_each_plane(plane, dev) {
                struct nv50_wndw *wndw = nv50_wndw(plane);
                if (plane->funcs != &nv50_wndw)
                        continue;
                nv50_wndw_fini(wndw);
        }

        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
                        nv_encoder = nouveau_encoder(encoder);
                        nv50_mstm_fini(nv_encoder->dp.mstm);
                }
        }
}

int
nv50_display_init(struct drm_device *dev)
{
        struct drm_encoder *encoder;
        struct drm_plane *plane;
        struct drm_crtc *crtc;
        u32 *push;

        push = evo_wait(nv50_mast(dev), 32);
        if (!push)
                return -EBUSY;

        evo_mthd(push, 0x0088, 1);
        evo_data(push, nv50_mast(dev)->base.sync.handle);
        evo_kick(push, nv50_mast(dev));

        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
                        struct nouveau_encoder *nv_encoder =
                                nouveau_encoder(encoder);
                        nv50_mstm_init(nv_encoder->dp.mstm);
                }
        }

        drm_for_each_crtc(crtc, dev) {
                nv50_head_lut_load(crtc);
        }

        drm_for_each_plane(plane, dev) {
                struct nv50_wndw *wndw = nv50_wndw(plane);
                if (plane->funcs != &nv50_wndw)
                        continue;
                nv50_wndw_init(wndw);
        }

        return 0;
}

void
nv50_display_destroy(struct drm_device *dev)
{
        struct nv50_disp *disp = nv50_disp(dev);

        nv50_dmac_destroy(&disp->mast.base, disp->disp);

        nouveau_bo_unmap(disp->sync);
        if (disp->sync)
                nouveau_bo_unpin(disp->sync);
        nouveau_bo_ref(NULL, &disp->sync);

        nouveau_display(dev)->priv = NULL;
        kfree(disp);
}

MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);

int
nv50_display_create(struct drm_device *dev)
{
        struct nvif_device *device = &nouveau_drm(dev)->client.device;
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct dcb_table *dcb = &drm->vbios.dcb;
        struct drm_connector *connector, *tmp;
        struct nv50_disp *disp;
        struct dcb_output *dcbe;
        int crtcs, ret, i;

        disp = kzalloc(sizeof(*disp), GFP_KERNEL);
        if (!disp)
                return -ENOMEM;

        mutex_init(&disp->mutex);

        nouveau_display(dev)->priv = disp;
        nouveau_display(dev)->dtor = nv50_display_destroy;
        nouveau_display(dev)->init = nv50_display_init;
        nouveau_display(dev)->fini = nv50_display_fini;
        disp->disp = &nouveau_display(dev)->disp;
        dev->mode_config.funcs = &nv50_disp_func;
        if (nouveau_atomic)
                dev->driver->driver_features |= DRIVER_ATOMIC;

        /* small shared memory area we use for notifiers and semaphores */
        ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
                             0, 0x0000, NULL, NULL, &disp->sync);
        if (!ret) {
                ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
                if (!ret) {
                        ret = nouveau_bo_map(disp->sync);
                        if (ret)
                                nouveau_bo_unpin(disp->sync);
                }
                if (ret)
                        nouveau_bo_ref(NULL, &disp->sync);
        }

        if (ret)
                goto out;

        /* allocate master evo channel */
        ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
                               &disp->mast);
        if (ret)
                goto out;

        /* create crtc objects to represent the hw heads */
        if (disp->disp->oclass >= GF110_DISP)
                crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
        else
                crtcs = 0x3;

        for (i = 0; i < fls(crtcs); i++) {
                if (!(crtcs & (1 << i)))
                        continue;
                ret = nv50_head_create(dev, i);
                if (ret)
                        goto out;
        }

        /* create encoder/connector objects based on VBIOS DCB table */
        for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
                connector = nouveau_connector_create(dev, dcbe->connector);
                if (IS_ERR(connector))
                        continue;

                if (dcbe->location == DCB_LOC_ON_CHIP) {
                        switch (dcbe->type) {
                        case DCB_OUTPUT_TMDS:
                        case DCB_OUTPUT_LVDS:
                        case DCB_OUTPUT_DP:
                                ret = nv50_sor_create(connector, dcbe);
                                break;
                        case DCB_OUTPUT_ANALOG:
                                ret = nv50_dac_create(connector, dcbe);
                                break;
                        default:
                                ret = -ENODEV;
                                break;
                        }
                } else {
                        ret = nv50_pior_create(connector, dcbe);
                }

                if (ret) {
                        NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
                                dcbe->location, dcbe->type,
                                ffs(dcbe->or) - 1, ret);
                        ret = 0;
                }
        }

        /* cull any connectors we created that don't have an encoder */
        list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
                if (connector->encoder_ids[0])
                        continue;

                NV_WARN(drm, "%s has no encoders, removing\n",
                        connector->name);
                connector->funcs->destroy(connector);
        }

out:
        if (ret)
                nv50_display_destroy(dev);
        return ret;
}