nv50_display.c

/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"

#include "nv50_display.h"
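
/* EVO channel indices: one core (master) channel, then for each head a
 * base (page-flip), overlay, overlay-immediate and cursor channel, as the
 * 0x01/0x05/0x09/0x0d strides below suggest.
 */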
#define EVO_DMA_NR 9

#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
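
/* Worked example of the layout above: the master notifier sits at offset
 * 0x000 of the sync bo, and each head c then owns the 0x100-byte slot at
 * (c + 1) * 0x100 holding its two flip semaphores and two flip notifiers.
 */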

/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;
	bool lock_core;
	bool flush_disable;
};

struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	bool flush_disable;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} set;
};
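
/* Note the clr/set idiom used throughout this file: an anonymous bitfield
 * struct overlaid on a mask in a union, so callers can flag individual
 * updates by name (set.ctrl = true) while commit code tests or zeroes the
 * whole batch at once through the aliasing mask member.
 */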

#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
	struct drm_crtc_state state;

	struct {
		u16 iW;
		u16 iH;
		u16 oW;
		u16 oH;
	} view;

	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;
		struct {
			u32 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;
			u16 blank2e;
			u16 blankus;
		} v;
	} mode;

	struct {
		u32 handle;
		u64 offset:40;
	} lut;

	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} core;

	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  layout:1;
		u8  format:1;
	} curs;

	struct {
		u8  depth;
		u8  cpp;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} base;

	struct {
		u8 cpp;
	} ovly;

	struct {
		bool enable:1;
		u8 bits:2;
		u8 mode:4;
	} dither;

	struct {
		struct {
			u16 cos:12;
			u16 sin:12;
		} sat;
	} procamp;

	union {
		struct {
			bool core:1;
			bool curs:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool core:1;
			bool curs:1;
			bool view:1;
			bool mode:1;
			bool base:1;
			bool ovly:1;
			bool dither:1;
			bool procamp:1;
		};
		u16 mask;
	} set;
};
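
/* Fetch (or create) the atomic state for @crtc from the in-flight atomic
 * transaction, downcast to our subclass.  ERR_PTR() values from the DRM
 * core are passed straight through via the (void *) cast.
 */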
static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}

#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;

	struct drm_rect clip;

	struct {
		u32  handle;
		u16  offset:12;
		bool awaken:1;
	} ntfy;

	struct {
		u32 handle;
		u16 offset:12;
		u32 acquire;
		u32 release;
	} sema;

	struct {
		u8 enable:2;
	} lut;

	struct {
		u8  mode:2;
		u8  interval:4;

		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 w;
		u16 h;

		u32 handle;
		u64 offset;
	} image;

	struct {
		u16 x;
		u16 y;
	} point;

	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
			bool lut:1;
			bool point:1;
		};
		u8 mask;
	} set;
};

/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
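
/* The oclass[] lists passed in here are ordered newest-generation-first;
 * we probe the classes the display object exposes and instantiate the
 * first match, so one code path serves every chipset back to NV50.
 */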
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}

/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}

/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac_ctxdma {
	struct list_head head;
	struct nvif_object object;
};

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;

	struct nvif_object sync;
	struct nvif_object vram;
	struct list_head ctxdma;

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
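
/* The ctxdma list in nv50_dmac is a cache of DMA context objects, keyed
 * by the handle 0xfb000000 | kind built in nv50_dmac_ctxdma_new(), so
 * framebuffers sharing a memory kind also share one ctxdma per channel.
 */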
static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}

static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->device.info.ram_user - 1;

	if (drm->device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}

static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
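
/* DMA channel setup: one page of coherent memory becomes the push buffer,
 * advertised to the display engine via a pushbuf DMA object that is only
 * needed for the handle lookup during channel creation (it is fini'd again
 * right after), plus persistent DMA objects for the sync buffer and VRAM.
 */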
static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
				       &dmac->handle, GFP_KERNEL);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	args->pushbuf = nvif_handle(&pushbuf);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dmac->ctxdma);
	return ret;
}

/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct nouveau_bo *sync;

	struct mutex mutex;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
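
/* A minimal sketch of the submission pattern used by every call site in
 * this file: reserve space, emit a method header plus its data words, and
 * kick the channel (which also drops the lock taken by evo_wait):
 *
 *	u32 *push = evo_wait(chan, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, chan);
 *	}
 */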
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;
		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			printk(KERN_ERR "nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

#define evo_mthd(p,m,s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);	\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)

#define evo_data(p,d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		printk(KERN_ERR "\t%08x\n", _d);			\
	*((p)++) = _d;							\
} while(0)

/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
	const struct nv50_wndw_func *func;
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;
	u16 sema;
	u32 data;
};

struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
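
/* Hook flow: atomic_check fills in the atom's set/clr masks, the flush
 * helpers below translate those masks into per-window method calls
 * (sema/ntfy/image/lut/point), and update() kicks the result, returning
 * interlock bits for the caller where applicable.
 */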
static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	if (asyw->set.ntfy)
		return wndw->func->ntfy_wait_begun(wndw, asyw);
	return 0;
}

static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	if (asyw->clr.sema && (!asyw->set.sema || flush))
		wndw->func->sema_clr(wndw);
	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
		wndw->func->ntfy_clr(wndw);
	if (asyw->clr.image && (!asyw->set.image || flush))
		wndw->func->image_clr(wndw);

	return flush ? wndw->func->update(wndw, interlock) : 0;
}

static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock) {
		asyw->image.mode = 0;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
	if (asyw->set.point) wndw->func->point    (wndw, asyw);

	return wndw->func->update(wndw, interlock);
}

static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}

static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	if (asyw->image.kind) {
		asyw->image.layout = 0;
		if (drm->device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
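
/* Shorthand used below: armw/harm are the currently-armed window/head
 * state, asyw/asyh the pending state being checked; varm/asyv track
 * visibility of the armed vs. pending state, and asym flags that the
 * head needs a full modeset.
 */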
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		if (!varm || asym || armw->state.fb != asyw->state.fb) {
			ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
			if (ret)
				return ret;
		}
	} else
	if (varm) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		return 0;
	}

	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}

static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nouveau_bo_unpin(fb->nvbo);
}

static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *asyh;
	struct nv50_dmac_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
	if (!asyw->state.fb)
		return 0;

	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret)
		return ret;

	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
	if (IS_ERR(ctxdma)) {
		nouveau_bo_unpin(fb->nvbo);
		return PTR_ERR(ctxdma);
	}

	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
	asyw->image.handle = ctxdma->object.handle;
	asyw->image.offset = fb->nvbo->bo.offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}

static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};

static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	dma_fence_put(asyw->state.fence);
	kfree(asyw);
}

static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->state.fence = NULL;
	asyw->interval = 1;
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->lut = armw->lut;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}

static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_ROTATE_0;
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}

static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.set_property = drm_atomic_helper_plane_set_property,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};

static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}

static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}

static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
				       nformat, type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}

/******************************************************************************
 * Cursor plane
 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};

static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}

static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	asyh->curs.handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	asyh->curs.offset = asyw->image.offset;
	asyh->set.curs = asyh->curs.visible;
}

static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}

static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}

static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}

static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};

static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};

static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{ G82_DISP_CURSOR, 0 },
		{ NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}

/******************************************************************************
 * Primary plane
 *****************************************************************************/
#define nv50_base(p) container_of((p), struct nv50_base, wndw)

struct nv50_base {
	struct nv50_wndw wndw;
	struct nv50_sync chan;
	int id;
};

static int
nv50_base_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}

static void
nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, asyw->lut.enable << 30);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_image_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 4))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		} else {
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
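
/* update() kicks the accumulated state with an UPDATE method; the return
 * value is this channel's contribution to the core-channel interlock mask
 * (the per-channel bit position is shifted differently pre/post-GF110),
 * consumed by the commit code elsewhere in this file.
 */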
  1184. static u32
  1185. nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
  1186. {
  1187. struct nv50_base *base = nv50_base(wndw);
  1188. u32 *push;
  1189. if (!(push = evo_wait(&base->chan, 2)))
  1190. return 0;
  1191. evo_mthd(push, 0x0080, 1);
  1192. evo_data(push, interlock);
  1193. evo_kick(push, &base->chan);
  1194. if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
  1195. return interlock ? 2 << (base->id * 8) : 0;
  1196. return interlock ? 2 << (base->id * 4) : 0;
  1197. }
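/* The value returned above is the interlock flag for this base channel,
 * which the caller feeds into the core channel's update so both updates
 * apply atomically.  Judging by the shifts, pre-GF110 cores keep eight
 * interlock bits per head while GF110+ keep four; that reading is inferred
 * from the code, not from documentation.
 */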
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);

	if (nvif_msec(&drm->device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;

	return 0;
}
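/* nvif_msec() polls (for up to two seconds here) the notifier word that the
 * hardware writes into the shared sync buffer; 0x40000000 in the top two
 * bits marks the flip as begun.  The bit meaning is inferred from this poll
 * loop rather than from hardware documentation.
 */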
static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}

static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
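/* DRM plane source coordinates are 16.16 fixed point, hence the ">> 16"
 * above to recover integer pixels; e.g. src.x1 == 0x00140000 means x == 20.
 */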
static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};

static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
/******************************************************************************
 * Head
 *****************************************************************************/
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}

static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			        asyh->dither.enable);
		evo_kick(push, core);
	}
}

static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
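/* In both the ovly and base bounds functions above, bits 8..10 of the
 * "bounds" word appear to encode a bytes-per-pixel class (2 -> 0x1,
 * 4 -> 0x3, 8 -> 0x5) and bit 0 an enable; this reading is inferred from
 * the switch statements, not from hardware documentation.
 */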
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}

static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
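/* A note on the pitch encoding above: "(asyh->core.pitch >> 8) << 8" masks
 * the pitch to a 256-byte multiple before it is OR'd into the surface word.
 * The core pitch is computed as ALIGN(w, 64) * 4, so e.g. a 1920-wide
 * surface yields 7680 bytes (0x1e00), already a clean multiple.
 */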
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}

static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}

static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}
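/* Worked example: the saturation coefficients look like 12-bit signed
 * values where +/-2047 spans +/-1.0 (an inference from the scaling here,
 * not from docs).  With color_vibrance = 150 and vibrant_hue = 90:
 *   vib = 50, adj = 50, sat.cos = (50 * 2047 + 50) / 100 = 1024 (~0.5)
 *   hue = 0,            sat.sin = (0 * 2047) / 100       = 0
 * Negative results wrap into the 12-bit field via the "& 0xfff" mask.
 */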
static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;

	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}

	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}

	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}
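/* The combined "mode" word above packs the enable into bit 0, the dither
 * depth into bits 1..2 and the dither mode into bits 3 and up, mirroring
 * the shifts nv50_head_dither() uses to reassemble the register value.
 */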
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode->vdisplay;
	asyh->view.oW = omode->hdisplay;
	asyh->view.oH = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		asyh->view.oH *= 2;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
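/* The aspect math above uses fixed point scaled by 2^19: r is a ratio, and
 * "(x * r + r / 2) >> 19" is a rounded multiply.  For example, fitting a
 * 1920x1080 source to a 1280-wide output gives
 *   r  = (1080 << 19) / 1920 = 294912           (i.e. 0.5625)
 *   oH = (1280 * 294912 + 147456) >> 19 = 720
 */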
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	u32 ilace   = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan   = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hbackp  =  mode->htotal - mode->hsync_end;
	u32 vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	u32 hfrontp =  mode->hsync_start - mode->hdisplay;
	u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	struct nv50_head_mode *m = &asyh->mode;

	m->h.active = mode->htotal;
	m->h.synce  = mode->hsync_end - mode->hsync_start - 1;
	m->h.blanke = m->h.synce + hbackp;
	m->h.blanks = mode->htotal - hfrontp - 1;

	m->v.active = mode->vtotal * vscan / ilace;
	m->v.synce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	m->v.blanke = m->v.synce + vbackp;
	m->v.blanks = m->v.active - vfrontp - 1;

	/*XXX: Safe underestimate, even "0" works */
	m->v.blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
	m->v.blankus *= 1000;
	m->v.blankus /= mode->clock;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e =  m->v.active + m->v.synce + vbackp;
		m->v.blank2s =  m->v.blank2e + (mode->vdisplay * vscan / ilace);
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->clock;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
	asyh->set.mode = true;
}
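/* Worked example of the vblank-duration estimate above, for a 1920x1080@60
 * mode (htotal 2200, vtotal 1125, clock 148500 kHz):
 *   blankus = (1125 - 1080 - 2) * 2200 * 1000 / 148500 ~= 637 us
 * which deliberately undershoots the true ~667 us vblank period.
 */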
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			/* Head is being enabled: flag everything for
			 * (re)programming.  Guard against a missing connector
			 * state to avoid a NULL dereference.
			 */
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}
		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->lut.handle = disp->mast.base.vram.handle;
		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->base.cpp = 0;
		asyh->ovly.cpp = 0;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
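/* The ">> 2" above narrows DRM's 16-bit gamma entries to the 14-bit range
 * the hardware LUT uses; GF110+ additionally spaces entries 0x20 bytes
 * apart and biases each component by 0x6000.  Those stride and bias values
 * are taken at face value from the writes, not from documentation.
 */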
static int
nv50_head_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	WARN_ON(1);
	return 0;
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.mode_set_base_atomic = nv50_head_mode_set_base_atomic,
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};

/* This is identical to the version in the atomic helpers, except that
 * it supports non-vblanked ("async") page flips.
 */
static int
nv50_head_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		    struct drm_pending_vblank_event *event, u32 flags)
{
	struct drm_plane *plane = crtc->primary;
	struct drm_atomic_state *state;
	struct drm_plane_state *plane_state;
	struct drm_crtc_state *crtc_state;
	int ret = 0;

	state = drm_atomic_state_alloc(plane->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = drm_modeset_legacy_acquire_ctx(crtc);
retry:
	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto fail;
	}
	crtc_state->event = event;

	plane_state = drm_atomic_get_plane_state(state, plane);
	if (IS_ERR(plane_state)) {
		ret = PTR_ERR(plane_state);
		goto fail;
	}

	ret = drm_atomic_set_crtc_for_plane(plane_state, crtc);
	if (ret != 0)
		goto fail;
	drm_atomic_set_fb_for_plane(plane_state, fb);

	/* Make sure we don't accidentally do a full modeset. */
	state->allow_modeset = false;
	if (!crtc_state->active) {
		DRM_DEBUG_ATOMIC("[CRTC:%d] disabled, rejecting legacy flip\n",
				 crtc->base.id);
		ret = -EINVAL;
		goto fail;
	}

	if (flags & DRM_MODE_PAGE_FLIP_ASYNC)
		nv50_wndw_atom(plane_state)->interval = 0;

	ret = drm_atomic_nonblocking_commit(state);
fail:
	if (ret == -EDEADLK)
		goto backoff;

	drm_atomic_state_put(state);
	return ret;

backoff:
	drm_atomic_state_clear(state);
	drm_atomic_legacy_backoff(state);

	/*
	 * Someone might have exchanged the framebuffer while we dropped locks
	 * in the backoff code. We need to fix up the fb refcount tracking the
	 * core does for us.
	 */
	plane->old_fb = plane->fb;

	goto retry;
}
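/* "Async" here simply means the flip is queued with interval = 0, so the
 * base channel applies it without waiting for vblank; everything else is
 * the stock retry/backoff pattern used by legacy ioctls built on atomic.
 */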
static int
nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 i;

	for (i = 0; i < size; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_head_lut_load(crtc);
	return 0;
}

static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}

static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}

static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}

static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = nv50_head_page_flip,
	.set_property = drm_atomic_helper_crtc_set_property,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};

static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}
/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.dpms = nv50_dac_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* constant used by both the binary driver and Tegra */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_enable(encoder, mode);
}
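/* Worked example of the max_ac_packet computation above, for 1920x1080@60
 * (htotal = 2200): horizontal blanking is 2200 - 1920 = 280 pixels;
 * subtracting the 56-pixel rekey window and the 18-pixel constant leaves
 * 206, so max_ac_packet = 206 / 32 = 6 packets per scanline.
 */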
/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];
	bool modified;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;
	struct edid *edid;

	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};

static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}

static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}

static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}

static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;

	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}
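/* PBN ("payload bandwidth number") is DP MST's link-bandwidth unit: the
 * helper derives it from the pixel clock and bits per pixel (bpc * 3
 * colour components), and drm_dp_find_vcpi_slots() translates it into the
 * number of MTP time slots the stream would need on the link, failing the
 * check early when the topology cannot carry the mode.
 */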
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	u8 proto, depth;
	int slots;
	bool r;

	drm_for_each_connector(connector, encoder->dev) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}

	if (WARN_ON(!mstc))
		return;

	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, &slots);
	WARN_ON(!r);

	if (mstm->outp->dcb->sorconf.link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}

static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	msto->disabled = true;
}

static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};

static int
nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
	      struct nv50_msto **pmsto)
{
	struct nv50_msto *msto;
	int ret;

	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
		return -ENOMEM;

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
	if (ret) {
		kfree(*pmsto);
		*pmsto = NULL;
		return ret;
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	msto->encoder.possible_crtcs = heads;
	return 0;
}

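/* There is one stream encoder (msto) per head, so an MST connector
 * simply picks the msto matching the head it is being routed to.  The
 * legacy best_encoder hook has no CRTC to key off, so it can only
 * return the first stream encoder.
 */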
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[head->base.index]->encoder;
	}
	return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[0]->encoder;
	}
	return NULL;
}

static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid) {
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
		drm_edid_to_eld(&mstc->connector, mstc->edid);
	}

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};

static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
	.dpms = drm_atomic_helper_connector_dpms,
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = drm_atomic_helper_connector_set_property,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};

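/* Create a connector for a newly-discovered MST port.  It is attached
 * to every stream encoder (any head may end up driving it), and gains
 * the standard path/tile properties so userspace can identify the
 * port's position in the branch topology.
 */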
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	/* Test each slot (msto[i]), not the array itself, so the loop
	 * stops at the first unallocated stream encoder.
	 */
	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}

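/* Payload-table updates are split around the core channel flush in
 * nv50_disp_atomic_commit_core(): nv50_mstm_prepare() writes the DPCD
 * payload allocations (part 1) before the flush, and nv50_mstm_cleanup()
 * waits for ACT status and sends the part 2 messages afterwards.
 */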
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}

static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}
}

static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}

static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};

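/* Service an MST interrupt: keep reading the ESI (event status
 * indicator) registers and feeding them to the topology manager until
 * it reports nothing left to handle, acknowledging each pass by writing
 * the serviced events back.  A short read is taken to mean the device
 * has gone away, so MST mode is torn down.
 */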
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}

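/* Flip the sink in or out of MST mode.  For DPCD 1.2+ sinks the
 * DP_MSTM_CTRL register is updated first; the NVIF method then has the
 * display core (de)configure the SOR's link for MST.  Note that the
 * dpcd parameter is reused as scratch space for the readback.
 */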
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}

static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		kfree(*pmstm);
		*pmstm = NULL;
	}
}

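/* One topology manager exists per DP SOR, with one stream encoder per
 * head the output can drive, so any head can be assigned to any MST
 * stream.  max_payloads is therefore bounded by the head count rather
 * than by whatever the branch device could theoretically carry.
 */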
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev->dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

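/* Program the core channel's SOR control method.  The ctrl word packs
 * the owning-head mask into the low bits and the protocol into bits
 * 8..15; pre-GF110 it also carries sync polarity and colour depth,
 * while GF110+ moved those into the per-head method 0x0404 written
 * below.
 */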
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}

static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
	}
}

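/* Bring the SOR up for the new mode, choosing the protocol from the
 * DCB output type: single/dual-link TMDS (dual-link only when the
 * pixel rate needs it, DCB allows it and the sink isn't HDMI), LVDS
 * with a script selected from VBIOS flat-panel data, or DP on sublink
 * A/B with the colour depth taken from the connector's bpc.
 */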
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}

static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.dpms = nv50_sor_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

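/* The doubled clock here presumably accounts for how PIORs (external
 * encoders) are fed by the display core; it is what the ctrl method
 * below expects, though the exact hardware rationale isn't spelled out
 * anywhere in this file.
 */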
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}

static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG_ON(1);
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.dpms = nv50_pior_dpms,
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};

static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Atomic
 *****************************************************************************/
static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}

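/* The commit tail walks the new state across the hardware in a fixed
 * order: tear down heads, planes and output paths that are going away
 * (flushing the core channel early where a full disable is required,
 * e.g. for MST), then enable/update output paths, heads and planes,
 * flush once more, and finally wait for the per-channel notifiers to
 * signal completion.  interlock_core/interlock_chan accumulate which
 * channels need their updates tied to the core flush.
 */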
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);

		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}
	}

	/* Update plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);

		if (!asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			drm_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			crtc->state->event = NULL;
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}

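/* Nonblocking commits run the tail from an unbound workqueue, blocking
 * commits call it directly.  Either way, a runtime PM reference is
 * kept for as long as any head remains enabled.
 */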
static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}

static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto done;
	}

	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return ret;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}

static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

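/* Track encoders whose control methods must be cleared and/or set by
 * this commit: a connector whose current CRTC is active and needs a
 * modeset contributes a clr (MST encoders additionally force a full
 * flush-disable), while the new connector state contributes a set.
 */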
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector *connector)
{
	struct drm_encoder *encoder = connector->state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector->state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *connector_state;
	struct drm_connector *connector;
	int ret, i;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_connector_in_state(state, connector, connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, connector);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
		if (ret)
			return ret;
	}

	return 0;
}

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;

	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}

	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/
void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			const struct drm_encoder_helper_funcs *help;
			struct nouveau_encoder *nv_encoder;

			nv_encoder = nouveau_encoder(encoder);
			help = encoder->helper_private;
			if (help && help->dpms)
				help->dpms(encoder, DRM_MODE_DPMS_ON);

			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_crtc(crtc, dev) {
		nv50_head_lut_load(crtc);
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}

void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);

int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}