nv50_display.c

/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>
#include <linux/hdmi.h>

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_edid.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"

#define EVO_DMA_NR 9

#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
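
/* Per the EVO_SYNC() macro above, each channel owns a 0x100-byte slice of
 * the shared sync buffer object: slice 0 holds the core (master) notifier,
 * and slice (c + 1) holds the flip semaphores and notifiers for channel c.
 */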
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

struct nv50_atom {
        struct drm_atomic_state state;

        struct list_head outp;
        bool lock_core;
        bool flush_disable;
};

struct nv50_outp_atom {
        struct list_head head;

        struct drm_encoder *encoder;
        bool flush_disable;

        union {
                struct {
                        bool ctrl:1;
                };
                u8 mask;
        } clr;

        union {
                struct {
                        bool ctrl:1;
                };
                u8 mask;
        } set;
};

#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
        struct drm_crtc_state state;

        struct {
                u16 iW;
                u16 iH;
                u16 oW;
                u16 oH;
        } view;

        struct nv50_head_mode {
                bool interlace;
                u32 clock;
                struct {
                        u16 active;
                        u16 synce;
                        u16 blanke;
                        u16 blanks;
                } h;
                struct {
                        u32 active;
                        u16 synce;
                        u16 blanke;
                        u16 blanks;
                        u16 blank2s;
                        u16 blank2e;
                        u16 blankus;
                } v;
        } mode;

        struct {
                bool visible;
                u32 handle;
                u64 offset:40;
                u8 mode:4;
        } lut;

        struct {
                bool visible;
                u32 handle;
                u64 offset:40;
                u8 format;
                u8 kind:7;
                u8 layout:1;
                u8 block:4;
                u32 pitch:20;
                u16 x;
                u16 y;
                u16 w;
                u16 h;
        } core;

        struct {
                bool visible;
                u32 handle;
                u64 offset:40;
                u8 layout:1;
                u8 format:1;
        } curs;

        struct {
                u8 depth;
                u8 cpp;
                u16 x;
                u16 y;
                u16 w;
                u16 h;
        } base;

        struct {
                u8 cpp;
        } ovly;

        struct {
                bool enable:1;
                u8 bits:2;
                u8 mode:4;
        } dither;

        struct {
                struct {
                        u16 cos:12;
                        u16 sin:12;
                } sat;
        } procamp;

        union {
                struct {
                        bool ilut:1;
                        bool core:1;
                        bool curs:1;
                };
                u8 mask;
        } clr;

        union {
                struct {
                        bool ilut:1;
                        bool core:1;
                        bool curs:1;
                        bool view:1;
                        bool mode:1;
                        bool base:1;
                        bool ovly:1;
                        bool dither:1;
                        bool procamp:1;
                };
                u16 mask;
        } set;
};

static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
        struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
        if (IS_ERR(statec))
                return (void *)statec;
        return nv50_head_atom(statec);
}

#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
        struct drm_plane_state state;
        u8 interval;

        struct {
                u32 handle;
                u16 offset:12;
                bool awaken:1;
        } ntfy;

        struct {
                u32 handle;
                u16 offset:12;
                u32 acquire;
                u32 release;
        } sema;

        struct {
                u8 enable:2;
        } lut;

        struct {
                u8 mode:2;
                u8 interval:4;
                u8 format;
                u8 kind:7;
                u8 layout:1;
                u8 block:4;
                u32 pitch:20;
                u16 w;
                u16 h;
                u32 handle;
                u64 offset;
        } image;

        struct {
                u16 x;
                u16 y;
        } point;

        union {
                struct {
                        bool ntfy:1;
                        bool sema:1;
                        bool image:1;
                };
                u8 mask;
        } clr;

        union {
                struct {
                        bool ntfy:1;
                        bool sema:1;
                        bool image:1;
                        bool lut:1;
                        bool point:1;
                };
                u8 mask;
        } set;
};

/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
        struct nvif_object user;
        struct nvif_device *device;
};
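
/* Channel creation walks the caller's class list (ordered newest-first) and
 * binds the first class the display object actually advertises, so a single
 * code path serves every chipset generation from NV50 onwards.
 */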
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
                 const s32 *oclass, u8 head, void *data, u32 size,
                 struct nv50_chan *chan)
{
        struct nvif_sclass *sclass;
        int ret, i, n;

        chan->device = device;

        ret = n = nvif_object_sclass_get(disp, &sclass);
        if (ret < 0)
                return ret;

        while (oclass[0]) {
                for (i = 0; i < n; i++) {
                        if (sclass[i].oclass == oclass[0]) {
                                ret = nvif_object_init(disp, 0, oclass[0],
                                                       data, size, &chan->user);
                                if (ret == 0)
                                        nvif_object_map(&chan->user, NULL, 0);
                                nvif_object_sclass_put(&sclass);
                                return ret;
                        }
                }
                oclass++;
        }

        nvif_object_sclass_put(&sclass);
        return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
        nvif_object_fini(&chan->user);
}

/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
        struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
        nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
                 const s32 *oclass, u8 head, void *data, u32 size,
                 struct nv50_pioc *pioc)
{
        return nv50_chan_create(device, disp, oclass, head, data, size,
                                &pioc->base);
}

/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
        struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
                 int head, struct nv50_oimm *oimm)
{
        struct nv50_disp_cursor_v0 args = {
                .head = head,
        };
        static const s32 oclass[] = {
                GK104_DISP_OVERLAY,
                GF110_DISP_OVERLAY,
                GT214_DISP_OVERLAY,
                G82_DISP_OVERLAY,
                NV50_DISP_OVERLAY,
                0
        };

        return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
                                &oimm->base);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac_ctxdma {
        struct list_head head;
        struct nvif_object object;
};

struct nv50_dmac {
        struct nv50_chan base;
        dma_addr_t handle;
        u32 *ptr;

        struct nvif_object sync;
        struct nvif_object vram;
        struct list_head ctxdma;

        /* Protects against concurrent pushbuf access to this channel, lock is
         * grabbed by evo_wait (if the pushbuf reservation is successful) and
         * dropped again by evo_kick. */
        struct mutex lock;
};

static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
        nvif_object_fini(&ctxdma->object);
        list_del(&ctxdma->head);
        kfree(ctxdma);
}
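
/* Ctxdma objects are keyed by the framebuffer's memory kind (handle
 * 0xfb000000 | kind) and cached on the channel's list, so each distinct
 * kind is created at most once per channel and reused by later
 * framebuffers of the same kind.
 */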
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
        struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
        struct nv50_dmac_ctxdma *ctxdma;
        const u8    kind = fb->nvbo->kind;
        const u32 handle = 0xfb000000 | kind;
        struct {
                struct nv_dma_v0 base;
                union {
                        struct nv50_dma_v0 nv50;
                        struct gf100_dma_v0 gf100;
                        struct gf119_dma_v0 gf119;
                };
        } args = {};
        u32 argc = sizeof(args.base);
        int ret;

        list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
                if (ctxdma->object.handle == handle)
                        return ctxdma;
        }

        if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
                return ERR_PTR(-ENOMEM);
        list_add(&ctxdma->head, &dmac->ctxdma);

        args.base.target = NV_DMA_V0_TARGET_VRAM;
        args.base.access = NV_DMA_V0_ACCESS_RDWR;
        args.base.start  = 0;
        args.base.limit  = drm->client.device.info.ram_user - 1;

        if (drm->client.device.info.chipset < 0x80) {
                args.nv50.part = NV50_DMA_V0_PART_256;
                argc += sizeof(args.nv50);
        } else
        if (drm->client.device.info.chipset < 0xc0) {
                args.nv50.part = NV50_DMA_V0_PART_256;
                args.nv50.kind = kind;
                argc += sizeof(args.nv50);
        } else
        if (drm->client.device.info.chipset < 0xd0) {
                args.gf100.kind = kind;
                argc += sizeof(args.gf100);
        } else {
                args.gf119.page = GF119_DMA_V0_PAGE_LP;
                args.gf119.kind = kind;
                argc += sizeof(args.gf119);
        }

        ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
                               &args, argc, &ctxdma->object);
        if (ret) {
                nv50_dmac_ctxdma_del(ctxdma);
                return ERR_PTR(ret);
        }

        return ctxdma;
}

static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
        struct nvif_device *device = dmac->base.device;
        struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

        list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
                nv50_dmac_ctxdma_del(ctxdma);
        }

        nvif_object_fini(&dmac->vram);
        nvif_object_fini(&dmac->sync);

        nv50_chan_destroy(&dmac->base);

        if (dmac->ptr) {
                struct device *dev = nvxx_device(device)->dev;
                dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
        }
}

static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
                 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
                 struct nv50_dmac *dmac)
{
        struct nv50_disp_core_channel_dma_v0 *args = data;
        struct nvif_object pushbuf;
        int ret;

        mutex_init(&dmac->lock);
        INIT_LIST_HEAD(&dmac->ctxdma);

        dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
                                       &dmac->handle, GFP_KERNEL);
        if (!dmac->ptr)
                return -ENOMEM;

        ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
                               &(struct nv_dma_v0) {
                                        .target = NV_DMA_V0_TARGET_PCI_US,
                                        .access = NV_DMA_V0_ACCESS_RD,
                                        .start = dmac->handle + 0x0000,
                                        .limit = dmac->handle + 0x0fff,
                               }, sizeof(struct nv_dma_v0), &pushbuf);
        if (ret)
                return ret;

        args->pushbuf = nvif_handle(&pushbuf);

        ret = nv50_chan_create(device, disp, oclass, head, data, size,
                               &dmac->base);
        nvif_object_fini(&pushbuf);
        if (ret)
                return ret;

        ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
                               &(struct nv_dma_v0) {
                                        .target = NV_DMA_V0_TARGET_VRAM,
                                        .access = NV_DMA_V0_ACCESS_RDWR,
                                        .start = syncbuf + 0x0000,
                                        .limit = syncbuf + 0x0fff,
                               }, sizeof(struct nv_dma_v0),
                               &dmac->sync);
        if (ret)
                return ret;

        ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
                               &(struct nv_dma_v0) {
                                        .target = NV_DMA_V0_TARGET_VRAM,
                                        .access = NV_DMA_V0_ACCESS_RDWR,
                                        .start = 0,
                                        .limit = device->info.ram_user - 1,
                               }, sizeof(struct nv_dma_v0),
                               &dmac->vram);
        if (ret)
                return ret;

        return ret;
}

/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
        struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
                 u64 syncbuf, struct nv50_mast *core)
{
        struct nv50_disp_core_channel_dma_v0 args = {
                .pushbuf = 0xb0007d00,
        };
        static const s32 oclass[] = {
                GP102_DISP_CORE_CHANNEL_DMA,
                GP100_DISP_CORE_CHANNEL_DMA,
                GM200_DISP_CORE_CHANNEL_DMA,
                GM107_DISP_CORE_CHANNEL_DMA,
                GK110_DISP_CORE_CHANNEL_DMA,
                GK104_DISP_CORE_CHANNEL_DMA,
                GF110_DISP_CORE_CHANNEL_DMA,
                GT214_DISP_CORE_CHANNEL_DMA,
                GT206_DISP_CORE_CHANNEL_DMA,
                GT200_DISP_CORE_CHANNEL_DMA,
                G82_DISP_CORE_CHANNEL_DMA,
                NV50_DISP_CORE_CHANNEL_DMA,
                0
        };

        return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
                                syncbuf, &core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
        struct nv50_dmac base;
        u32 addr;
        u32 data;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
                 int head, u64 syncbuf, struct nv50_sync *base)
{
        struct nv50_disp_base_channel_dma_v0 args = {
                .pushbuf = 0xb0007c00 | head,
                .head = head,
        };
        static const s32 oclass[] = {
                GK110_DISP_BASE_CHANNEL_DMA,
                GK104_DISP_BASE_CHANNEL_DMA,
                GF110_DISP_BASE_CHANNEL_DMA,
                GT214_DISP_BASE_CHANNEL_DMA,
                GT200_DISP_BASE_CHANNEL_DMA,
                G82_DISP_BASE_CHANNEL_DMA,
                NV50_DISP_BASE_CHANNEL_DMA,
                0
        };

        return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
                                syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
        struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
                 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
        struct nv50_disp_overlay_channel_dma_v0 args = {
                .pushbuf = 0xb0007e00 | head,
                .head = head,
        };
        static const s32 oclass[] = {
                GK104_DISP_OVERLAY_CONTROL_DMA,
                GF110_DISP_OVERLAY_CONTROL_DMA,
                GT214_DISP_OVERLAY_CHANNEL_DMA,
                GT200_DISP_OVERLAY_CHANNEL_DMA,
                G82_DISP_OVERLAY_CHANNEL_DMA,
                NV50_DISP_OVERLAY_CHANNEL_DMA,
                0
        };

        return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
                                syncbuf, &ovly->base);
}

struct nv50_head {
        struct nouveau_crtc base;
        struct {
                struct nouveau_bo *nvbo[2];
                int next;
        } lut;
        struct nv50_ovly ovly;
        struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

struct nv50_disp {
        struct nvif_object *disp;
        struct nv50_mast mast;

        struct nouveau_bo *sync;

        struct mutex mutex;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
        return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static u32 *
evo_wait(void *evoc, int nr)
{
        struct nv50_dmac *dmac = evoc;
        struct nvif_device *device = dmac->base.device;
        u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

        mutex_lock(&dmac->lock);
        if (put + nr >= (PAGE_SIZE / 4) - 8) {
                dmac->ptr[put] = 0x20000000;

                nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
                if (nvif_msec(device, 2000,
                        if (!nvif_rd32(&dmac->base.user, 0x0004))
                                break;
                ) < 0) {
                        mutex_unlock(&dmac->lock);
                        pr_err("nouveau: evo channel stalled\n");
                        return NULL;
                }

                put = 0;
        }

        return dmac->ptr + put;
}

static void
evo_kick(u32 *push, void *evoc)
{
        struct nv50_dmac *dmac = evoc;
        nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
        mutex_unlock(&dmac->lock);
}

#define evo_mthd(p, m, s) do {                                          \
        const u32 _m = (m), _s = (s);                                   \
        if (drm_debug & DRM_UT_KMS)                                     \
                pr_err("%04x %d %s\n", _m, _s, __func__);               \
        *((p)++) = ((_s << 18) | _m);                                   \
} while(0)

#define evo_data(p, d) do {                                             \
        const u32 _d = (d);                                             \
        if (drm_debug & DRM_UT_KMS)                                     \
                pr_err("\t%08x\n", _d);                                 \
        *((p)++) = _d;                                                  \
} while(0)
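
/* Push-buffer usage pattern (this sketch mirrors nv50_base_update() later
 * in this file): evo_wait() reserves space and takes dmac->lock on success,
 * but may return NULL if the channel stalls, so every emit sequence must be
 * guarded by the NULL check; evo_mthd()/evo_data() then write a method
 * header and its data words; evo_kick() advances PUT and drops the lock:
 *
 *      u32 *push = evo_wait(&base->chan, 2);
 *      if (push) {
 *              evo_mthd(push, 0x0080, 1);
 *              evo_data(push, interlock);
 *              evo_kick(push, &base->chan);
 *      }
 */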
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
        const struct nv50_wndw_func *func;
        struct nv50_dmac *dmac;

        struct drm_plane plane;

        struct nvif_notify notify;
        u16 ntfy;
        u16 sema;
        u32 data;
};

struct nv50_wndw_func {
        void *(*dtor)(struct nv50_wndw *);
        int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
                       struct nv50_head_atom *asyh);
        void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
                        struct nv50_head_atom *asyh);
        void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
                        struct nv50_wndw_atom *asyw);

        void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
        void (*sema_clr)(struct nv50_wndw *);
        void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
        void (*ntfy_clr)(struct nv50_wndw *);
        int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
        void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
        void (*image_clr)(struct nv50_wndw *);
        void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
        void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

        u32 (*update)(struct nv50_wndw *, u32 interlock);
};

static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
        if (asyw->set.ntfy)
                return wndw->func->ntfy_wait_begun(wndw, asyw);
        return 0;
}

static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
                    struct nv50_wndw_atom *asyw)
{
        if (asyw->clr.sema && (!asyw->set.sema || flush))
                wndw->func->sema_clr(wndw);
        if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
                wndw->func->ntfy_clr(wndw);
        if (asyw->clr.image && (!asyw->set.image || flush))
                wndw->func->image_clr(wndw);

        return flush ? wndw->func->update(wndw, interlock) : 0;
}
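
/* When flush_set() below is asked for an interlocked update, it forces the
 * image back to mode 0 with a one-frame interval before emitting state,
 * tying the window update to the core channel update instead of letting an
 * async flip complete on its own.
 */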
static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
                    struct nv50_wndw_atom *asyw)
{
        if (interlock) {
                asyw->image.mode = 0;
                asyw->image.interval = 1;
        }

        if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
        if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
        if (asyw->set.image) wndw->func->image_set(wndw, asyw);
        if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
        if (asyw->set.point) wndw->func->point    (wndw, asyw);

        return wndw->func->update(wndw, interlock);
}

static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
                               struct nv50_wndw_atom *asyw,
                               struct nv50_head_atom *asyh)
{
        struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
        NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
        wndw->func->release(wndw, asyw, asyh);
        asyw->ntfy.handle = 0;
        asyw->sema.handle = 0;
}

static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
                               struct nv50_wndw_atom *asyw,
                               struct nv50_head_atom *asyh)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
        struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
        int ret;

        NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);

        asyw->image.w = fb->base.width;
        asyw->image.h = fb->base.height;
        asyw->image.kind = fb->nvbo->kind;

        if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
                asyw->interval = 0;
        else
                asyw->interval = 1;

        if (asyw->image.kind) {
                asyw->image.layout = 0;
                if (drm->client.device.info.chipset >= 0xc0)
                        asyw->image.block = fb->nvbo->mode >> 4;
                else
                        asyw->image.block = fb->nvbo->mode;
                asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
        } else {
                asyw->image.layout = 1;
                asyw->image.block  = 0;
                asyw->image.pitch  = fb->base.pitches[0];
        }

        ret = wndw->func->acquire(wndw, asyw, asyh);
        if (ret)
                return ret;

        if (asyw->set.image) {
                if (!(asyw->image.mode = asyw->interval ? 0 : 1))
                        asyw->image.interval = asyw->interval;
                else
                        asyw->image.interval = 0;
        }

        return 0;
}

static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
        struct nouveau_drm *drm = nouveau_drm(plane->dev);
        struct nv50_wndw *wndw = nv50_wndw(plane);
        struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
        struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
        struct nv50_head_atom *harm = NULL, *asyh = NULL;
        bool varm = false, asyv = false, asym = false;
        int ret;

        NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
        if (asyw->state.crtc) {
                asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
                if (IS_ERR(asyh))
                        return PTR_ERR(asyh);
                asym = drm_atomic_crtc_needs_modeset(&asyh->state);
                asyv = asyh->state.active;
        }

        if (armw->state.crtc) {
                harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
                if (IS_ERR(harm))
                        return PTR_ERR(harm);
                varm = harm->state.crtc->state->active;
        }

        if (asyv) {
                asyw->point.x = asyw->state.crtc_x;
                asyw->point.y = asyw->state.crtc_y;
                if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
                        asyw->set.point = true;

                ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
                if (ret)
                        return ret;
        } else
        if (varm) {
                nv50_wndw_atomic_check_release(wndw, asyw, harm);
        } else {
                return 0;
        }

        if (!asyv || asym) {
                asyw->clr.ntfy = armw->ntfy.handle != 0;
                asyw->clr.sema = armw->sema.handle != 0;
                if (wndw->func->image_clr)
                        asyw->clr.image = armw->image.handle != 0;
                asyw->set.lut = wndw->func->lut && asyv;
        }

        return 0;
}

static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
        struct nouveau_drm *drm = nouveau_drm(plane->dev);

        NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
        if (!old_state->fb)
                return;

        nouveau_bo_unpin(fb->nvbo);
}

static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
        struct nouveau_drm *drm = nouveau_drm(plane->dev);
        struct nv50_wndw *wndw = nv50_wndw(plane);
        struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
        struct nv50_head_atom *asyh;
        struct nv50_dmac_ctxdma *ctxdma;
        int ret;

        NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
        if (!asyw->state.fb)
                return 0;

        ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
        if (ret)
                return ret;

        ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
        if (IS_ERR(ctxdma)) {
                nouveau_bo_unpin(fb->nvbo);
                return PTR_ERR(ctxdma);
        }

        asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
        asyw->image.handle = ctxdma->object.handle;
        asyw->image.offset = fb->nvbo->bo.offset;

        if (wndw->func->prepare) {
                asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
                if (IS_ERR(asyh))
                        return PTR_ERR(asyh);

                wndw->func->prepare(wndw, asyh, asyw);
        }

        return 0;
}

static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
        .prepare_fb = nv50_wndw_prepare_fb,
        .cleanup_fb = nv50_wndw_cleanup_fb,
        .atomic_check = nv50_wndw_atomic_check,
};

static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
                               struct drm_plane_state *state)
{
        struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
        __drm_atomic_helper_plane_destroy_state(&asyw->state);
        kfree(asyw);
}

static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
        struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
        struct nv50_wndw_atom *asyw;
        if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
                return NULL;
        __drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
        asyw->interval = 1;
        asyw->sema = armw->sema;
        asyw->ntfy = armw->ntfy;
        asyw->image = armw->image;
        asyw->point = armw->point;
        asyw->lut = armw->lut;
        asyw->clr.mask = 0;
        asyw->set.mask = 0;
        return &asyw->state;
}

static void
nv50_wndw_reset(struct drm_plane *plane)
{
        struct nv50_wndw_atom *asyw;

        if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
                return;

        if (plane->state)
                plane->funcs->atomic_destroy_state(plane, plane->state);
        plane->state = &asyw->state;
        plane->state->plane = plane;
        plane->state->rotation = DRM_MODE_ROTATE_0;
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
        struct nv50_wndw *wndw = nv50_wndw(plane);
        void *data;
        nvif_notify_fini(&wndw->notify);
        data = wndw->func->dtor(wndw);
        drm_plane_cleanup(&wndw->plane);
        kfree(data);
}

static const struct drm_plane_funcs
nv50_wndw = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = nv50_wndw_destroy,
        .reset = nv50_wndw_reset,
        .atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
        .atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};

static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
        nvif_notify_put(&wndw->notify);
}

static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
        nvif_notify_get(&wndw->notify);
}

static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
               enum drm_plane_type type, const char *name, int index,
               struct nv50_dmac *dmac, const u32 *format, int nformat,
               struct nv50_wndw *wndw)
{
        int ret;

        wndw->func = func;
        wndw->dmac = dmac;

        ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw,
                                       format, nformat, NULL,
                                       type, "%s-%d", name, index);
        if (ret)
                return ret;

        drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
        return 0;
}

/******************************************************************************
 * Cursor plane
 *****************************************************************************/
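/* The cursor is driven through a PIO immediate channel: position and update
 * requests below are plain nvif_wr32() register writes, not EVO push-buffer
 * methods.
 */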
  936. #define nv50_curs(p) container_of((p), struct nv50_curs, wndw)
  937. struct nv50_curs {
  938. struct nv50_wndw wndw;
  939. struct nvif_object chan;
  940. };
  941. static u32
  942. nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
  943. {
  944. struct nv50_curs *curs = nv50_curs(wndw);
  945. nvif_wr32(&curs->chan, 0x0080, 0x00000000);
  946. return 0;
  947. }
  948. static void
  949. nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  950. {
  951. struct nv50_curs *curs = nv50_curs(wndw);
  952. nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
  953. }
  954. static void
  955. nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
  956. struct nv50_wndw_atom *asyw)
  957. {
  958. u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
  959. u32 offset = asyw->image.offset;
  960. if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
  961. asyh->curs.handle = handle;
  962. asyh->curs.offset = offset;
  963. asyh->set.curs = asyh->curs.visible;
  964. }
  965. }
  966. static void
  967. nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
  968. struct nv50_head_atom *asyh)
  969. {
  970. asyh->curs.visible = false;
  971. }
  972. static int
  973. nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
  974. struct nv50_head_atom *asyh)
  975. {
  976. int ret;
  977. ret = drm_atomic_helper_check_plane_state(&asyw->state, &asyh->state,
  978. DRM_PLANE_HELPER_NO_SCALING,
  979. DRM_PLANE_HELPER_NO_SCALING,
  980. true, true);
  981. asyh->curs.visible = asyw->state.visible;
  982. if (ret || !asyh->curs.visible)
  983. return ret;
  984. switch (asyw->state.fb->width) {
  985. case 32: asyh->curs.layout = 0; break;
  986. case 64: asyh->curs.layout = 1; break;
  987. default:
  988. return -EINVAL;
  989. }
  990. if (asyw->state.fb->width != asyw->state.fb->height)
  991. return -EINVAL;
  992. switch (asyw->state.fb->format->format) {
  993. case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
  994. default:
  995. WARN_ON(1);
  996. return -EINVAL;
  997. }
  998. return 0;
  999. }
  1000. static void *
  1001. nv50_curs_dtor(struct nv50_wndw *wndw)
  1002. {
  1003. struct nv50_curs *curs = nv50_curs(wndw);
  1004. nvif_object_fini(&curs->chan);
  1005. return curs;
  1006. }
  1007. static const u32
  1008. nv50_curs_format[] = {
  1009. DRM_FORMAT_ARGB8888,
  1010. };
  1011. static const struct nv50_wndw_func
  1012. nv50_curs = {
  1013. .dtor = nv50_curs_dtor,
  1014. .acquire = nv50_curs_acquire,
  1015. .release = nv50_curs_release,
  1016. .prepare = nv50_curs_prepare,
  1017. .point = nv50_curs_point,
  1018. .update = nv50_curs_update,
  1019. };
  1020. static int
  1021. nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
  1022. struct nv50_curs **pcurs)
  1023. {
  1024. static const struct nvif_mclass curses[] = {
  1025. { GK104_DISP_CURSOR, 0 },
  1026. { GF110_DISP_CURSOR, 0 },
  1027. { GT214_DISP_CURSOR, 0 },
  1028. { G82_DISP_CURSOR, 0 },
  1029. { NV50_DISP_CURSOR, 0 },
  1030. {}
  1031. };
  1032. struct nv50_disp_cursor_v0 args = {
  1033. .head = head->base.index,
  1034. };
  1035. struct nv50_disp *disp = nv50_disp(drm->dev);
  1036. struct nv50_curs *curs;
  1037. int cid, ret;
  1038. cid = nvif_mclass(disp->disp, curses);
  1039. if (cid < 0) {
  1040. NV_ERROR(drm, "No supported cursor immediate class\n");
  1041. return cid;
  1042. }
  1043. if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
  1044. return -ENOMEM;
  1045. ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
  1046. "curs", head->base.index, &disp->mast.base,
  1047. nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
  1048. &curs->wndw);
  1049. if (ret) {
  1050. kfree(curs);
  1051. return ret;
  1052. }
  1053. ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
  1054. sizeof(args), &curs->chan);
  1055. if (ret) {
  1056. NV_ERROR(drm, "curs%04x allocation failed: %d\n",
  1057. curses[cid].oclass, ret);
  1058. return ret;
  1059. }
  1060. return 0;
  1061. }
  1062. /******************************************************************************
  1063. * Primary plane
  1064. *****************************************************************************/
  1065. #define nv50_base(p) container_of((p), struct nv50_base, wndw)
  1066. struct nv50_base {
  1067. struct nv50_wndw wndw;
  1068. struct nv50_sync chan;
  1069. int id;
  1070. };
  1071. static int
  1072. nv50_base_notify(struct nvif_notify *notify)
  1073. {
  1074. return NVIF_NOTIFY_KEEP;
  1075. }
  1076. static void
  1077. nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1078. {
  1079. struct nv50_base *base = nv50_base(wndw);
  1080. u32 *push;
  1081. if ((push = evo_wait(&base->chan, 2))) {
  1082. evo_mthd(push, 0x00e0, 1);
  1083. evo_data(push, asyw->lut.enable << 30);
  1084. evo_kick(push, &base->chan);
  1085. }
  1086. }
  1087. static void
  1088. nv50_base_image_clr(struct nv50_wndw *wndw)
  1089. {
  1090. struct nv50_base *base = nv50_base(wndw);
  1091. u32 *push;
  1092. if ((push = evo_wait(&base->chan, 4))) {
  1093. evo_mthd(push, 0x0084, 1);
  1094. evo_data(push, 0x00000000);
  1095. evo_mthd(push, 0x00c0, 1);
  1096. evo_data(push, 0x00000000);
  1097. evo_kick(push, &base->chan);
  1098. }
  1099. }
  1100. static void
  1101. nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1102. {
  1103. struct nv50_base *base = nv50_base(wndw);
  1104. const s32 oclass = base->chan.base.base.user.oclass;
  1105. u32 *push;
  1106. if ((push = evo_wait(&base->chan, 10))) {
  1107. evo_mthd(push, 0x0084, 1);
  1108. evo_data(push, (asyw->image.mode << 8) |
  1109. (asyw->image.interval << 4));
  1110. evo_mthd(push, 0x00c0, 1);
  1111. evo_data(push, asyw->image.handle);
  1112. if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
  1113. evo_mthd(push, 0x0800, 5);
  1114. evo_data(push, asyw->image.offset >> 8);
  1115. evo_data(push, 0x00000000);
  1116. evo_data(push, (asyw->image.h << 16) | asyw->image.w);
  1117. evo_data(push, (asyw->image.layout << 20) |
  1118. asyw->image.pitch |
  1119. asyw->image.block);
  1120. evo_data(push, (asyw->image.kind << 16) |
  1121. (asyw->image.format << 8));
  1122. } else
  1123. if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
  1124. evo_mthd(push, 0x0800, 5);
  1125. evo_data(push, asyw->image.offset >> 8);
  1126. evo_data(push, 0x00000000);
  1127. evo_data(push, (asyw->image.h << 16) | asyw->image.w);
  1128. evo_data(push, (asyw->image.layout << 20) |
  1129. asyw->image.pitch |
  1130. asyw->image.block);
  1131. evo_data(push, asyw->image.format << 8);
  1132. } else {
  1133. evo_mthd(push, 0x0400, 5);
  1134. evo_data(push, asyw->image.offset >> 8);
  1135. evo_data(push, 0x00000000);
  1136. evo_data(push, (asyw->image.h << 16) | asyw->image.w);
  1137. evo_data(push, (asyw->image.layout << 24) |
  1138. asyw->image.pitch |
  1139. asyw->image.block);
  1140. evo_data(push, asyw->image.format << 8);
  1141. }
  1142. evo_kick(push, &base->chan);
  1143. }
  1144. }
  1145. static void
  1146. nv50_base_ntfy_clr(struct nv50_wndw *wndw)
  1147. {
  1148. struct nv50_base *base = nv50_base(wndw);
  1149. u32 *push;
  1150. if ((push = evo_wait(&base->chan, 2))) {
  1151. evo_mthd(push, 0x00a4, 1);
  1152. evo_data(push, 0x00000000);
  1153. evo_kick(push, &base->chan);
  1154. }
  1155. }
  1156. static void
  1157. nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  1158. {
  1159. struct nv50_base *base = nv50_base(wndw);
  1160. u32 *push;
  1161. if ((push = evo_wait(&base->chan, 3))) {
  1162. evo_mthd(push, 0x00a0, 2);
  1163. evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
  1164. evo_data(push, asyw->ntfy.handle);
  1165. evo_kick(push, &base->chan);
  1166. }
  1167. }
static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}

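/* Kick an UPDATE (method 0x0080) on the base channel and return the bit
 * this channel contributes to the core channel's interlock mask; the
 * per-head spacing of those bits shrank from 8 to 4 on GF110+.
 */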
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);
	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}

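/* Poll the flip notifier in the shared sync buffer for up to 2s, waiting
 * for the status field to report "begun" (0x40000000 in the top bits).
 */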
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}

static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}

static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;
	if (!fb->format->depth)
		return -EINVAL;
	ret = drm_atomic_helper_check_plane_state(&asyw->state, &asyh->state,
						  DRM_PLANE_HELPER_NO_SCALING,
						  DRM_PLANE_HELPER_NO_SCALING,
						  false, true);
	if (ret)
		return ret;
	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;
	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}
	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}

static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};

static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;
	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;
	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}
	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;
	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}

/******************************************************************************
 * Head
 *****************************************************************************/
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}

static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
				asyh->dither.enable);
		evo_kick(push, core);
	}
}

static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;
	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;
	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

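/* Program (or disable) the hardware cursor for a head.  The method
 * offsets, and whether an explicit cursor ctxdma handle must also be
 * written, differ across the original NV50, G82..GT21x and GF110+ core
 * channel classes, hence the three cases below.
 */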
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}

static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
					asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
					asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
					asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

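/* Fill a LUT buffer from a DRM gamma blob.  LORES/HIRES (modes 0/1) want
 * 11-bit entries shifted into place, while INTERPOLATE_257_UNITY_RANGE
 * (mode 7) wants 14-bit entries with a 0x6000 bias; either way each of
 * the 256 entries is written as 16-bit R/G/B words, 8 bytes apart.
 */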
static void
nv50_head_lut_load(struct drm_property_blob *blob, int mode,
		   struct nouveau_bo *nvbo)
{
	struct drm_color_lut *in = (struct drm_color_lut *)blob->data;
	void __iomem *lut = (u8 *)nvbo_kmap_obj_iovirtual(nvbo);
	const int size = blob->length / sizeof(*in);
	int bits, shift, i;
	u16 zero, r, g, b;
	/* This can't happen.. But it shuts the compiler up. */
	if (WARN_ON(size != 256))
		return;
	switch (mode) {
	case 0: /* LORES. */
	case 1: /* HIRES. */
		bits = 11;
		shift = 3;
		zero = 0x0000;
		break;
	case 7: /* INTERPOLATE_257_UNITY_RANGE. */
		bits = 14;
		shift = 0;
		zero = 0x6000;
		break;
	default:
		WARN_ON(1);
		return;
	}
	for (i = 0; i < size; i++) {
		r = (drm_color_lut_extract(in[i].  red, bits) + zero) << shift;
		g = (drm_color_lut_extract(in[i].green, bits) + zero) << shift;
		b = (drm_color_lut_extract(in[i]. blue, bits) + zero) << shift;
		writew(r, lut + (i * 0x08) + 0);
		writew(g, lut + (i * 0x08) + 2);
		writew(b, lut + (i * 0x08) + 4);
	}
	/* INTERPOLATE modes require a "next" entry to interpolate with,
	 * so we replicate the last entry to deal with this for now.
	 */
	writew(r, lut + (i * 0x08) + 0);
	writew(g, lut + (i * 0x08) + 2);
	writew(b, lut + (i * 0x08) + 4);
}

static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0x80000000 | asyh->lut.mode << 30);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0x80000000 | asyh->lut.mode << 30);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x80000000 | asyh->lut.mode << 24);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}

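/* Program the head's raster timings.  The pre-GF110 layout packs the
 * pixel clock (kHz) into method 0x0804 and includes an estimated vblank
 * period in microseconds; GF110+ uses a different method block and takes
 * the clock in Hz (m->clock * 1000).
 */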
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.ilut && (!asyh->set.ilut || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}

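/* Apply all pending head state to the core channel.  The input LUT is
 * double-buffered in a pair of BOs (head->lut.nvbo[]), so each load goes
 * to the "next" buffer before the hardware is pointed at it.
 */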
static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.ilut   ) {
		struct nouveau_bo *nvbo = head->lut.nvbo[head->lut.next];
		struct drm_property_blob *blob = asyh->state.gamma_lut;
		if (blob)
			nv50_head_lut_load(blob, asyh->lut.mode, nvbo);
		asyh->lut.offset = nvbo->bo.offset;
		head->lut.next ^= 1;
		nv50_head_lut_set(head, asyh);
	}
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}

static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}

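/* Resolve AUTO dithering against the sink's bpc, then split the packed
 * property value into the enable/bits/mode fields that nv50_head_dither()
 * reassembles into the hardware word.
 */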
static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;
	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}
	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}
	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}

static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;
	int umode_vdisplay, omode_hdisplay, omode_vdisplay;
	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;
	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}
	/* For the user-specified mode, we must ignore doublescan and
	 * the like, but honor frame packing.
	 */
	umode_vdisplay = umode->vdisplay;
	if ((umode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
		umode_vdisplay += umode->vtotal;
	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode_vdisplay;
	/* For the output mode, we can just use the stock helper. */
	drm_mode_get_hv_timing(omode, &omode_hdisplay, &omode_vdisplay);
	asyh->view.oW = omode_hdisplay;
	asyh->view.oH = omode_vdisplay;
	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;
		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else	asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else	asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}
	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode_vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}
	asyh->set.view = true;
}

static void
nv50_head_atomic_check_lut(struct nv50_head *head,
			   struct nv50_head_atom *armh,
			   struct nv50_head_atom *asyh)
{
	struct nv50_disp *disp = nv50_disp(head->base.base.dev);
	/* An I8 surface without an input LUT makes no sense, and
	 * EVO will throw an error if you try.
	 *
	 * Legacy clients actually cause this due to the order in
	 * which they call ioctls, so we will enable the LUT with
	 * whatever contents the buffer already contains to avoid
	 * triggering the error check.
	 */
	if (!asyh->state.gamma_lut && asyh->base.cpp != 1) {
		asyh->lut.handle = 0;
		asyh->clr.ilut = armh->lut.visible;
		return;
	}
	if (disp->disp->oclass < GF110_DISP) {
		asyh->lut.mode = (asyh->base.cpp == 1) ? 0 : 1;
		asyh->set.ilut = true;
	} else {
		asyh->lut.mode = 7;
		asyh->set.ilut = asyh->state.color_mgmt_changed;
	}
	asyh->lut.handle = disp->mast.base.vram.handle;
}

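/* Convert the adjusted DRM mode into hardware raster timings.  As a
 * worked example (not from the original source): CEA 1080p60 has
 * crtc_htotal=2200, crtc_hsync_start=2008, crtc_hsync_end=2052 and
 * crtc_hdisplay=1920, giving h.active=2200, h.synce=43, h.blanke=191
 * and h.blanks=2111 with the biasing described below.
 */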
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct nv50_head_mode *m = &asyh->mode;
	u32 blankus;
	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V | CRTC_STEREO_DOUBLE);
	/*
	 * DRM modes are defined in terms of a repeating interval
	 * starting with the active display area.  The hardware modes
	 * are defined in terms of a repeating interval starting one
	 * unit (pixel or line) into the sync pulse.  So, add bias.
	 */
	m->h.active = mode->crtc_htotal;
	m->h.synce  = mode->crtc_hsync_end - mode->crtc_hsync_start - 1;
	m->h.blanke = mode->crtc_hblank_end - mode->crtc_hsync_start - 1;
	m->h.blanks = m->h.blanke + mode->crtc_hdisplay;
	m->v.active = mode->crtc_vtotal;
	m->v.synce  = mode->crtc_vsync_end - mode->crtc_vsync_start - 1;
	m->v.blanke = mode->crtc_vblank_end - mode->crtc_vsync_start - 1;
	m->v.blanks = m->v.blanke + mode->crtc_vdisplay;
	/*XXX: Safe underestimate, even "0" works */
	blankus = (m->v.active - mode->crtc_vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->crtc_clock;
	m->v.blankus = blankus;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e =  m->v.active + m->v.blanke;
		m->v.blank2s =  m->v.blank2e + mode->crtc_vdisplay;
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->crtc_clock;
	asyh->set.mode = true;
}

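/* Validate a head's atomic state: pull in the connector state for this
 * CRTC, recompute mode/LUT/view/dither/procamp state as required, size
 * the core channel's view of the surface, and diff armed (armh) against
 * pending (asyh) state into the set/clr masks used by the flush stage.
 */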
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;
	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_new_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}
		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}
		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);
		if (asyh->state.color_mgmt_changed ||
		    asyh->base.cpp != armh->base.cpp)
			nv50_head_atomic_check_lut(head, armh, asyh);
		asyh->lut.visible = asyh->lut.handle != 0;
		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}
		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible) ||
		    (asyh->core.visible = asyh->lut.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->lut.visible = false;
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->base.cpp = 0;
		asyh->ovly.cpp = 0;
	}
	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}
		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.ilut = armh->lut.visible;
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.ilut = asyh->lut.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}
	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.atomic_check = nv50_head_atomic_check,
};

static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}

static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}

static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;
	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;
	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}

static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	int i;
	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	for (i = 0; i < ARRAY_SIZE(head->lut.nvbo); i++)
		nouveau_bo_unmap_unpin_unref(&head->lut.nvbo[i]);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = drm_atomic_helper_legacy_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};

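/* Create a CRTC: a base (primary) and cursor plane, the legacy 256-entry
 * gamma table, a pair of LUT buffers in VRAM, and the overlay immediate
 * and overlay DMA channels that belong to this head.
 */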
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;
	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;
	head->base.index = index;
	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}
	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);
	for (i = 0; i < ARRAY_SIZE(head->lut.nvbo); i++) {
		ret = nouveau_bo_new_pin_map(&drm->client, 1025 * 8, 0x100,
					     TTM_PL_FLAG_VRAM,
					     &head->lut.nvbo[i]);
		if (ret)
			goto out;
	}
	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;
	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;
out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}

/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}

static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;
	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}
	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}

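/* Decide which mode the hardware should actually scan out.  For LVDS/eDP
 * panels whose current mode didn't come from EDID (and whenever the user
 * asked for scaling), the panel's native mode replaces the user mode and
 * the head's scaler bridges the difference.
 */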
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;
	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}
	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}
	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;
	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;
	nv50_outp_acquire(nv_encoder);
	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;
			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;
			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;
			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}
		evo_kick(push, mast);
	}
	nv_encoder->crtc = encoder->crtc;
}

static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;
	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;
	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;
	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;
	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;
	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);
	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};
	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};
	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	int ret;
	int size;
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;
	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode,
						       false);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}
	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
							  &nv_connector->base, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;
	size = sizeof(args.base)
	     + sizeof(args.pwr)
	     + args.pwr.avi_infoframe_length
	     + args.pwr.vendor_infoframe_length;
	nvif_mthd(disp->disp, 0, &args, size);
	nv50_audio_enable(encoder, mode);
}

/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;
	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];
	bool modified;
	bool disabled;
	int links;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;
	struct drm_display_mode *native;
	struct edid *edid;
	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;
	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};

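/* Look up the DP MST payload assigned to this stream's VCPI in the
 * topology manager's payload table, logging the whole table first to
 * make allocation problems easier to debug.
 */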
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;
	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}
	return NULL;
}

static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}

static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};
	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}
	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}

static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;
	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);
	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}

static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto, depth;
	int slots;
	bool r;
	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);
	if (WARN_ON(!mstc))
		return;
	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
	WARN_ON(!r);
	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp);
	if (mstm->outp->link & 1)
		proto = 0x8;
	else
		proto = 0x9;
	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}
	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);
	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}

static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}

  2660. static const struct drm_encoder_helper_funcs
  2661. nv50_msto_help = {
  2662. .disable = nv50_msto_disable,
  2663. .enable = nv50_msto_enable,
  2664. .atomic_check = nv50_msto_atomic_check,
  2665. };
  2666. static void
  2667. nv50_msto_destroy(struct drm_encoder *encoder)
  2668. {
  2669. struct nv50_msto *msto = nv50_msto(encoder);
  2670. drm_encoder_cleanup(&msto->encoder);
  2671. kfree(msto);
  2672. }
  2673. static const struct drm_encoder_funcs
  2674. nv50_msto = {
  2675. .destroy = nv50_msto_destroy,
  2676. };
  2677. static int
  2678. nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
  2679. struct nv50_msto **pmsto)
  2680. {
  2681. struct nv50_msto *msto;
  2682. int ret;
  2683. if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
  2684. return -ENOMEM;
  2685. ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
  2686. DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
  2687. if (ret) {
  2688. kfree(*pmsto);
  2689. *pmsto = NULL;
  2690. return ret;
  2691. }
  2692. drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
  2693. msto->encoder.possible_crtcs = heads;
  2694. return 0;
  2695. }
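/* Each head gets its own MST stream encoder, so the best encoder for a
 * connector state is simply the msto indexed by the target head.  The
 * legacy (non-atomic) hook below has no head to key on and falls back
 * to msto[0].
 */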
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[head->base.index]->encoder;
	}
	return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[0]->encoder;
	}
	return NULL;
}

static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid)
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};

static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};

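/* Create the DRM connector for a newly-discovered MST port and attach
 * it to every stream encoder on the parent output, along with the
 * standard path/tile properties MST-aware userspace expects.
 */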
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}

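/* The prepare/cleanup pair below brackets a display commit: prepare
 * sends the part-1 payload table update and programs each stream's
 * VCPI before the modeset; cleanup waits for ACT and sends the part-2
 * update afterwards.  Note that the return values of the payload
 * helpers are collected into 'ret' but never acted upon here.
 */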
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}

static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}

static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}

static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};

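/* Handle an MST sideband interrupt: keep reading and acknowledging the
 * ESI (event status indicator) registers until the topology manager
 * reports nothing left to handle.  A short DPCD read is treated as the
 * device having gone away, and MST is switched off.
 */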
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	if (!aux)
		return;

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}

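/* Flip the sink's DP_MST_EN bit (DPCD 1.2+ only) to match the
 * requested state, then tell the display engine about the MST link via
 * the NVIF SOR_DP_MST_LINK method.  The dpcd argument is the sink's
 * DPCD revision on entry and is reused as a scratch byte for the
 * DP_MSTM_CTRL read-modify-write.
 */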
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

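/* Probe whether the sink supports MST (DP_MST_CAP on DPCD 1.2+) and,
 * if the caller allows it, bring the topology manager up.  When MST is
 * absent, the DPCD revision in the caller's buffer is downgraded to
 * 0x11; if the topology manager fails to start, the sink is put back
 * into SST mode.
 */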
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}

static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		kfree(*pmstm);
		*pmstm = NULL;
	}
}

static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/

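/* Track which heads this SOR drives in nv_encoder->ctrl and push the
 * control method to the core channel.  Pre-GF110 cores take sync
 * polarity and depth in the control word itself; GF110+ programs them
 * per-head via the 0x0404 method, together with what looks like a
 * magic routing word derived from the head index.
 */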
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}

static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
		nv50_outp_release(nv_encoder);
	}
}

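/* Bring the SOR up for whatever the DCB says it drives: pick the
 * protocol (TMDS single/dual link, LVDS, or one of the two DP
 * sublinks), derive the LVDS script flags from the VBIOS flat-panel
 * tables or the SPWG EDID byte, and kick the per-type extras (HDMI
 * infoframes, DP audio) before pushing the mode to the hardware.
 */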
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;
	nv50_outp_acquire(nv_encoder);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate >= 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}

static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nv50_disp *disp = nv50_disp(encoder->dev);
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			if (disp->disp->oclass < GF110_DISP) {
				/* HW has no support for address-only
				 * transactions, so we're required to
				 * use custom I2C-over-AUX code.
				 */
				nv_encoder->i2c = &aux->i2c;
			} else {
				nv_encoder->i2c = &nv_connector->aux.ddc;
			}
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (disp->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/

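/* PIORs drive external (off-chip) TMDS/DP encoders.  Judging by the
 * doubling below, the link to the external encoder runs at twice the
 * pixel rate, so the adjusted mode clock is scaled after the common
 * output checks and the rest of the driver treats the doubled value as
 * the real dotclock.
 */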
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}

static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};

static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &nv_connector->aux.ddc : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Atomic
 *****************************************************************************/

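/* Flush the core channel: prepare any modified MST topologies, submit
 * the update method with the supplied interlock mask, then poll the
 * shared semaphore buffer (for up to two seconds) for completion
 * before running the MST cleanup half.
 */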
static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->client.device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}

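/* The commit tail runs in the usual atomic-helper order: disable
 * heads, planes and output paths first (flushing the core channel
 * early when an MST disable demands it), then enable/update outputs,
 * heads and planes, flush once more, and finally wait for the windows
 * to arm and deliver any pending vblank events.
 */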
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *new_crtc_state, *old_crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);
		if (old_crtc_state->active && !new_crtc_state->active)
			drm_crtc_vblank_off(crtc);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}

		if (new_crtc_state->active) {
			if (!old_crtc_state->active)
				drm_crtc_vblank_on(crtc);
			if (new_crtc_state->event)
				drm_crtc_vblank_get(crtc);
		}
	}

	/* Update plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			if (new_crtc_state->active)
				drm_crtc_accurate_vblank_count(crtc);

			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);

			new_crtc_state->event = NULL;
			if (new_crtc_state->active)
				drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}

static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}

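/* Top-level commit entry point.  Besides the standard helper dance
 * (setup, prepare planes, swap state, run the tail directly or from a
 * worker), this also arms per-window completion notifiers and holds a
 * runtime-PM reference for as long as any CRTC remains enabled; note
 * the early return that deliberately keeps that reference alive.
 */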
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

err_cleanup:
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}

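/* Track which encoders a commit touches.  The clr/set checks below add
 * an entry whenever a connector's old or new CRTC needs a full
 * modeset, so the commit tail knows which output paths to disable and
 * re-enable, and whether the core channel lock must be held.
 */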
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector_state *old_connector_state)
{
	struct drm_encoder *encoder = old_connector_state->best_encoder;
	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = old_connector_state->crtc))
		return 0;

	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *old_connector_state, *new_connector_state;
	struct drm_connector *connector;
	int ret, i;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
		if (ret)
			return ret;
	}

	return 0;
}

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = drm_fb_helper_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/

void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}

void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);

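/* Main constructor: wire up the display hooks, allocate the shared
 * notifier/semaphore buffer and the master EVO channel, create a head
 * per hardware CRTC, then walk the VBIOS DCB table building SOR/DAC/
 * PIOR encoders and culling any connector that ends up without one.
 */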
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	dev->driver->driver_features |= DRIVER_PREFER_XBGR_30BPP;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
	else
		crtcs = 0x3;

	for (i = 0; i < fls(crtcs); i++) {
		if (!(crtcs & (1 << i)))
			continue;
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}