atombios_encoders.c 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "amdgpu_display.h"
  32. #include "atom.h"
  33. #include "atombios_encoders.h"
  34. #include "atombios_dp.h"
  35. #include <linux/backlight.h>
  36. #include "bif/bif_4_1_d.h"
  37. u8
  38. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  39. {
  40. u8 backlight_level;
  41. u32 bios_2_scratch;
  42. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  43. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  44. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  45. return backlight_level;
  46. }
  47. void
  48. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  49. u8 backlight_level)
  50. {
  51. u32 bios_2_scratch;
  52. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  53. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  54. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  55. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  56. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  57. }
  58. u8
  59. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  60. {
  61. struct drm_device *dev = amdgpu_encoder->base.dev;
  62. struct amdgpu_device *adev = dev->dev_private;
  63. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  64. return 0;
  65. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  66. }
/**
 * amdgpu_atombios_encoder_set_backlight_level - program a panel backlight level
 * @amdgpu_encoder: encoder driving the LCD panel
 * @level: requested backlight level (0 switches the backlight off)
 *
 * Stores @level in the BIOS scratch register and, for LCD panels on DIG
 * encoders, issues the transmitter actions that actually apply it.
 * Does nothing when the firmware says the GPU does not own the backlight.
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder_atom_dig *dig;

	/* backlight is not GPU-controlled: nothing we can do */
	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		/* remember the level in the BIOS scratch register */
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				/* level 0: just turn the backlight off */
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
						ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				/* set the brightness first, then make sure the
				 * backlight is actually on */
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
						ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
						ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}
  103. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  104. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  105. {
  106. u8 level;
  107. /* Convert brightness to hardware level */
  108. if (bd->props.brightness < 0)
  109. level = 0;
  110. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  111. level = AMDGPU_MAX_BL_LEVEL;
  112. else
  113. level = bd->props.brightness;
  114. return level;
  115. }
  116. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  117. {
  118. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  119. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  120. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  121. amdgpu_atombios_encoder_backlight_level(bd));
  122. return 0;
  123. }
  124. static int
  125. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  126. {
  127. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  128. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  129. struct drm_device *dev = amdgpu_encoder->base.dev;
  130. struct amdgpu_device *adev = dev->dev_private;
  131. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  132. }
/* Backlight class hooks: reads come from the BIOS scratch register,
 * writes are forwarded to the DIG transmitter. */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  137. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  138. struct drm_connector *drm_connector)
  139. {
  140. struct drm_device *dev = amdgpu_encoder->base.dev;
  141. struct amdgpu_device *adev = dev->dev_private;
  142. struct backlight_device *bd;
  143. struct backlight_properties props;
  144. struct amdgpu_backlight_privdata *pdata;
  145. struct amdgpu_encoder_atom_dig *dig;
  146. u8 backlight_level;
  147. char bl_name[16];
  148. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  149. * so don't register a backlight device
  150. */
  151. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  152. (adev->pdev->device == 0x6741))
  153. return;
  154. if (!amdgpu_encoder->enc_priv)
  155. return;
  156. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  157. return;
  158. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  159. if (!pdata) {
  160. DRM_ERROR("Memory allocation failed\n");
  161. goto error;
  162. }
  163. memset(&props, 0, sizeof(props));
  164. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  165. props.type = BACKLIGHT_RAW;
  166. snprintf(bl_name, sizeof(bl_name),
  167. "amdgpu_bl%d", dev->primary->index);
  168. bd = backlight_device_register(bl_name, drm_connector->kdev,
  169. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  170. if (IS_ERR(bd)) {
  171. DRM_ERROR("Backlight registration failed\n");
  172. goto error;
  173. }
  174. pdata->encoder = amdgpu_encoder;
  175. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  176. dig = amdgpu_encoder->enc_priv;
  177. dig->bl_dev = bd;
  178. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  179. bd->props.power = FB_BLANK_UNBLANK;
  180. backlight_update_status(bd);
  181. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  182. return;
  183. error:
  184. kfree(pdata);
  185. return;
  186. }
  187. void
  188. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  189. {
  190. struct drm_device *dev = amdgpu_encoder->base.dev;
  191. struct amdgpu_device *adev = dev->dev_private;
  192. struct backlight_device *bd = NULL;
  193. struct amdgpu_encoder_atom_dig *dig;
  194. if (!amdgpu_encoder->enc_priv)
  195. return;
  196. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  197. return;
  198. dig = amdgpu_encoder->enc_priv;
  199. bd = dig->bl_dev;
  200. dig->bl_dev = NULL;
  201. if (bd) {
  202. struct amdgpu_legacy_backlight_privdata *pdata;
  203. pdata = bl_get_data(bd);
  204. backlight_device_unregister(bd);
  205. kfree(pdata);
  206. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  207. }
  208. }
  209. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  210. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  211. {
  212. }
  213. void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
  214. {
  215. }
  216. #endif
  217. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  218. {
  219. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  220. switch (amdgpu_encoder->encoder_id) {
  221. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  222. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  223. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  224. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  226. return true;
  227. default:
  228. return false;
  229. }
  230. }
  231. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  232. const struct drm_display_mode *mode,
  233. struct drm_display_mode *adjusted_mode)
  234. {
  235. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  236. /* set the active encoder to connector routing */
  237. amdgpu_encoder_set_active_device(encoder);
  238. drm_mode_set_crtcinfo(adjusted_mode, 0);
  239. /* hw bug */
  240. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  241. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  242. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  243. /* vertical FP must be at least 1 */
  244. if (mode->crtc_vsync_start == mode->crtc_vdisplay)
  245. adjusted_mode->crtc_vsync_start++;
  246. /* get the native mode for scaling */
  247. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  248. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  249. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  250. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  251. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  252. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  253. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  254. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  255. }
  256. return true;
  257. }
  258. static void
  259. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  260. {
  261. struct drm_device *dev = encoder->dev;
  262. struct amdgpu_device *adev = dev->dev_private;
  263. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  264. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  265. int index = 0;
  266. memset(&args, 0, sizeof(args));
  267. switch (amdgpu_encoder->encoder_id) {
  268. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  269. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  270. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  271. break;
  272. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  273. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  274. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  275. break;
  276. }
  277. args.ucAction = action;
  278. args.ucDacStandard = ATOM_DAC1_PS2;
  279. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  280. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  281. }
  282. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  283. {
  284. int bpc = 8;
  285. if (encoder->crtc) {
  286. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  287. bpc = amdgpu_crtc->bpc;
  288. }
  289. switch (bpc) {
  290. case 0:
  291. return PANEL_BPC_UNDEFINE;
  292. case 6:
  293. return PANEL_6BIT_PER_COLOR;
  294. case 8:
  295. default:
  296. return PANEL_8BIT_PER_COLOR;
  297. case 10:
  298. return PANEL_10BIT_PER_COLOR;
  299. case 12:
  300. return PANEL_12BIT_PER_COLOR;
  301. case 16:
  302. return PANEL_16BIT_PER_COLOR;
  303. }
  304. }
/* Per-revision argument layouts for the DVOEncoderControl atom table;
 * setup_dvo() picks the member matching the crev the VBIOS reports. */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;	/* frev 1, crev 1 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;			/* frev 1, crev 2 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;		/* frev 1, crev 3 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;		/* frev 1, crev 4 */
};
/*
 * amdgpu_atombios_encoder_setup_dvo - program the DVO encoder through the
 * DVOEncoderControl atom command table.
 * @encoder: drm encoder to program
 * @action: atom action code to execute
 *
 * The argument packing depends on the table revision the VBIOS reports,
 * so each crev fills a different member of the args union.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* bail out if the VBIOS does not expose this command table */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/**
 * amdgpu_atombios_encoder_get_encoder_mode - pick the ATOM_ENCODER_MODE_*
 * value for the sink currently attached to @encoder.
 *
 * Decided from the connector type, the DP sink type, whether the monitor
 * advertises HDMI in its EDID, and the amdgpu_audio module parameter /
 * per-connector audio setting.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		/* digital with audio -> HDMI, digital without -> DVI,
		 * analog -> CRT */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		/* pure digital connectors: HDMI when audio is enabled (or
		 * auto-detected via EDID), DVI otherwise */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		/* a DP connector may carry a passive DP->DVI/HDMI adapter;
		 * only a real DP/eDP sink gets DP mode */
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
  456. /*
  457. * DIG Encoder/Transmitter Setup
  458. *
  459. * DCE 6.0
  460. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  461. * Supports up to 6 digital outputs
  462. * - 6 DIG encoder blocks.
  463. * - DIG to PHY mapping is hardcoded
  464. * DIG1 drives UNIPHY0 link A, A+B
  465. * DIG2 drives UNIPHY0 link B
  466. * DIG3 drives UNIPHY1 link A, A+B
  467. * DIG4 drives UNIPHY1 link B
  468. * DIG5 drives UNIPHY2 link A, A+B
  469. * DIG6 drives UNIPHY2 link B
  470. *
  471. * Routing
  472. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  473. * Examples:
  474. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  475. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  476. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  477. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  478. */
/* Per-revision argument layouts for the DIGxEncoderControl atom table;
 * setup_dig_encoder() fills the member matching the crev the VBIOS
 * reports. */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;	/* crev 1 */
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;	/* crev 2 */
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;	/* crev 3 */
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;	/* crev 4 */
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;	/* crev 5 */
};
  486. void
  487. amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
  488. int action, int panel_mode)
  489. {
  490. struct drm_device *dev = encoder->dev;
  491. struct amdgpu_device *adev = dev->dev_private;
  492. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  493. struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
  494. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  495. union dig_encoder_control args;
  496. int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
  497. uint8_t frev, crev;
  498. int dp_clock = 0;
  499. int dp_lane_count = 0;
  500. int hpd_id = AMDGPU_HPD_NONE;
  501. if (connector) {
  502. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  503. struct amdgpu_connector_atom_dig *dig_connector =
  504. amdgpu_connector->con_priv;
  505. dp_clock = dig_connector->dp_clock;
  506. dp_lane_count = dig_connector->dp_lane_count;
  507. hpd_id = amdgpu_connector->hpd.hpd;
  508. }
  509. /* no dig encoder assigned */
  510. if (dig->dig_encoder == -1)
  511. return;
  512. memset(&args, 0, sizeof(args));
  513. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  514. return;
  515. switch (frev) {
  516. case 1:
  517. switch (crev) {
  518. case 1:
  519. args.v1.ucAction = action;
  520. args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  521. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  522. args.v3.ucPanelMode = panel_mode;
  523. else
  524. args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  525. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
  526. args.v1.ucLaneNum = dp_lane_count;
  527. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  528. args.v1.ucLaneNum = 8;
  529. else
  530. args.v1.ucLaneNum = 4;
  531. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
  532. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
  533. switch (amdgpu_encoder->encoder_id) {
  534. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  535. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
  536. break;
  537. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  538. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  539. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
  540. break;
  541. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  542. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
  543. break;
  544. }
  545. if (dig->linkb)
  546. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
  547. else
  548. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
  549. break;
  550. case 2:
  551. case 3:
  552. args.v3.ucAction = action;
  553. args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  554. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  555. args.v3.ucPanelMode = panel_mode;
  556. else
  557. args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  558. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
  559. args.v3.ucLaneNum = dp_lane_count;
  560. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  561. args.v3.ucLaneNum = 8;
  562. else
  563. args.v3.ucLaneNum = 4;
  564. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
  565. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
  566. args.v3.acConfig.ucDigSel = dig->dig_encoder;
  567. args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  568. break;
  569. case 4:
  570. args.v4.ucAction = action;
  571. args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  572. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  573. args.v4.ucPanelMode = panel_mode;
  574. else
  575. args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  576. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
  577. args.v4.ucLaneNum = dp_lane_count;
  578. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  579. args.v4.ucLaneNum = 8;
  580. else
  581. args.v4.ucLaneNum = 4;
  582. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
  583. if (dp_clock == 540000)
  584. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
  585. else if (dp_clock == 324000)
  586. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
  587. else if (dp_clock == 270000)
  588. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
  589. else
  590. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
  591. }
  592. args.v4.acConfig.ucDigSel = dig->dig_encoder;
  593. args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  594. if (hpd_id == AMDGPU_HPD_NONE)
  595. args.v4.ucHPD_ID = 0;
  596. else
  597. args.v4.ucHPD_ID = hpd_id + 1;
  598. break;
  599. case 5:
  600. switch (action) {
  601. case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
  602. args.v5.asDPPanelModeParam.ucAction = action;
  603. args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
  604. args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
  605. break;
  606. case ATOM_ENCODER_CMD_STREAM_SETUP:
  607. args.v5.asStreamParam.ucAction = action;
  608. args.v5.asStreamParam.ucDigId = dig->dig_encoder;
  609. args.v5.asStreamParam.ucDigMode =
  610. amdgpu_atombios_encoder_get_encoder_mode(encoder);
  611. if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
  612. args.v5.asStreamParam.ucLaneNum = dp_lane_count;
  613. else if (amdgpu_dig_monitor_is_duallink(encoder,
  614. amdgpu_encoder->pixel_clock))
  615. args.v5.asStreamParam.ucLaneNum = 8;
  616. else
  617. args.v5.asStreamParam.ucLaneNum = 4;
  618. args.v5.asStreamParam.ulPixelClock =
  619. cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
  620. args.v5.asStreamParam.ucBitPerColor =
  621. amdgpu_atombios_encoder_get_bpc(encoder);
  622. args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
  623. break;
  624. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
  625. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
  626. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
  627. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
  628. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
  629. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
  630. case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
  631. case ATOM_ENCODER_CMD_DP_VIDEO_ON:
  632. args.v5.asCmdParam.ucAction = action;
  633. args.v5.asCmdParam.ucDigId = dig->dig_encoder;
  634. break;
  635. default:
  636. DRM_ERROR("Unsupported action 0x%x\n", action);
  637. break;
  638. }
  639. break;
  640. default:
  641. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  642. break;
  643. }
  644. break;
  645. default:
  646. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  647. break;
  648. }
  649. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  650. }
/* Parameter layouts for the DIG TransmitterControl atom command table.
 * One member per table revision; the revision actually implemented by
 * the board's BIOS is discovered at runtime with
 * amdgpu_atom_parse_cmd_header() and selects which member is filled in.
 */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};
/* Program the DIG transmitter (PHY) through the BIOS
 * UNIPHY/LVTMA TransmitterControl command table (or DVOOutputControl for
 * the DVO encoder).  Handles parameter layouts for table revisions
 * 1.1 through 1.6.
 *
 * @encoder:  DRM encoder whose transmitter is being programmed
 * @action:   ATOM_TRANSMITTER_ACTION_* opcode (INIT, SETUP, ENABLE,
 *            DISABLE, SETUP_VSEMPH, ...)
 * @lane_num: lane selector, only used for ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
 * @lane_set: DP voltage-swing/pre-emphasis setting, only used for SETUP_VSEMPH
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* NOTE(review): never assigned after init, so the IGP lane-config
	 * branch in the v1 path below can never set any LANE_* bits —
	 * presumably intentional (no IGP support here); confirm. */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	/* pull DP link parameters and the HPD pin from the connector, if any */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);

		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* pick the command table that drives this transmitter; index stays 0
	 * for encoder ids not listed here and parse_cmd_header below decides
	 * whether that is usable */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* DP uses the link symbol clock; dual-link DVI splits
				 * the pixel clock across two links */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			/* IGP lane routing (dead with igp_lane_info == 0, see above) */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			/* map the UNIPHY instance to the table's transmitter index */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			/* v1.5 addresses the PHY directly (ucPhyId) instead of
			 * transmitter/link selectors */
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;

			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}

			/* table HPD ids are 1-based; 0 means "no HPD pin" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		case 6:
			args.v6.ucAction = action;
			if (is_dp)
				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
			else
				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v6.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v6.ucLaneNum = 8;
			else
				args.v6.ucLaneNum = 4;

			args.v6.ucConnObjId = connector_object_id;
			/* v1.6 overlays ucDPLaneSet and ucDigMode, so only one is set */
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;
			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  1039. bool
  1040. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  1041. int action)
  1042. {
  1043. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1044. struct drm_device *dev = amdgpu_connector->base.dev;
  1045. struct amdgpu_device *adev = dev->dev_private;
  1046. union dig_transmitter_control args;
  1047. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  1048. uint8_t frev, crev;
  1049. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  1050. goto done;
  1051. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  1052. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  1053. goto done;
  1054. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1055. goto done;
  1056. memset(&args, 0, sizeof(args));
  1057. args.v1.ucAction = action;
  1058. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1059. /* wait for the panel to power up */
  1060. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  1061. int i;
  1062. for (i = 0; i < 300; i++) {
  1063. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  1064. return true;
  1065. mdelay(1);
  1066. }
  1067. return false;
  1068. }
  1069. done:
  1070. return true;
  1071. }
/* Parameter layouts for the ExternalEncoderControl atom command table;
 * the BIOS-reported table revision selects which member is used.
 */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/* Program an external encoder (e.g. a DP bridge) attached to @encoder
 * through the ExternalEncoderControl atom command table.
 *
 * @encoder:     the internal encoder feeding the external one
 * @ext_encoder: the external encoder being configured
 * @action:      EXTERNAL_ENCODER_ACTION_V3_* (or legacy ATOM_ENABLE/DISABLE)
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* which instance of the external encoder object this is */
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* during INIT the encoder/connector routing is not set up yet */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* INIT takes the connector id instead of a pixel clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;

			/* select which external encoder instance the table addresses */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Run the full enable or disable sequence for a DIG encoder:
 * encoder setup, panel mode, external encoder, eDP panel power,
 * transmitter, DP link training and backlight — in the hardware-required
 * order.  The ordering of these calls is the contract; do not reorder.
 *
 * @encoder: the DIG encoder
 * @action:  ATOM_ENABLE or ATOM_DISABLE
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP: power the panel up before touching the transmitter */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* disable path: stop the DP stream first, then the transmitter,
		 * then (for eDP) drop panel power */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1246. void
  1247. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1248. {
  1249. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1250. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1251. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1252. amdgpu_encoder->active_device);
  1253. switch (amdgpu_encoder->encoder_id) {
  1254. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1255. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1256. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1257. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1258. switch (mode) {
  1259. case DRM_MODE_DPMS_ON:
  1260. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1261. break;
  1262. case DRM_MODE_DPMS_STANDBY:
  1263. case DRM_MODE_DPMS_SUSPEND:
  1264. case DRM_MODE_DPMS_OFF:
  1265. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1266. break;
  1267. }
  1268. break;
  1269. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1270. switch (mode) {
  1271. case DRM_MODE_DPMS_ON:
  1272. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1273. break;
  1274. case DRM_MODE_DPMS_STANDBY:
  1275. case DRM_MODE_DPMS_SUSPEND:
  1276. case DRM_MODE_DPMS_OFF:
  1277. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1278. break;
  1279. }
  1280. break;
  1281. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1282. switch (mode) {
  1283. case DRM_MODE_DPMS_ON:
  1284. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1285. break;
  1286. case DRM_MODE_DPMS_STANDBY:
  1287. case DRM_MODE_DPMS_SUSPEND:
  1288. case DRM_MODE_DPMS_OFF:
  1289. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1290. break;
  1291. }
  1292. break;
  1293. default:
  1294. return;
  1295. }
  1296. }
/* Parameter layouts for the SelectCRTC_Source atom command table
 * (routes an encoder to a CRTC); one member per table revision.
 */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1302. void
  1303. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1304. {
  1305. struct drm_device *dev = encoder->dev;
  1306. struct amdgpu_device *adev = dev->dev_private;
  1307. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1308. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1309. union crtc_source_param args;
  1310. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1311. uint8_t frev, crev;
  1312. struct amdgpu_encoder_atom_dig *dig;
  1313. memset(&args, 0, sizeof(args));
  1314. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1315. return;
  1316. switch (frev) {
  1317. case 1:
  1318. switch (crev) {
  1319. case 1:
  1320. default:
  1321. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1322. switch (amdgpu_encoder->encoder_id) {
  1323. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1324. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1325. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1326. break;
  1327. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1328. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1329. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1330. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1331. else
  1332. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1333. break;
  1334. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1335. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1336. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1337. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1338. break;
  1339. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1340. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1341. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1342. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1343. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1344. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1345. else
  1346. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1347. break;
  1348. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1349. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1350. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1351. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1352. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1353. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1354. else
  1355. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1356. break;
  1357. }
  1358. break;
  1359. case 2:
  1360. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1361. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1362. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1363. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1364. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1365. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1366. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1367. else
  1368. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1369. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1370. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1371. } else {
  1372. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1373. }
  1374. switch (amdgpu_encoder->encoder_id) {
  1375. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1376. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1377. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1378. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1379. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1380. dig = amdgpu_encoder->enc_priv;
  1381. switch (dig->dig_encoder) {
  1382. case 0:
  1383. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1384. break;
  1385. case 1:
  1386. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1387. break;
  1388. case 2:
  1389. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1390. break;
  1391. case 3:
  1392. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1393. break;
  1394. case 4:
  1395. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1396. break;
  1397. case 5:
  1398. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1399. break;
  1400. case 6:
  1401. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1402. break;
  1403. }
  1404. break;
  1405. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1406. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1407. break;
  1408. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1409. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1410. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1411. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1412. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1413. else
  1414. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1415. break;
  1416. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1417. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1418. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1419. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1420. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1421. else
  1422. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1423. break;
  1424. }
  1425. break;
  1426. case 3:
  1427. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1428. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1429. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1430. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1431. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1432. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1433. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1434. else
  1435. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1436. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1437. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1438. } else {
  1439. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1440. }
  1441. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1442. switch (amdgpu_encoder->encoder_id) {
  1443. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1444. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1445. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1446. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1447. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1448. dig = amdgpu_encoder->enc_priv;
  1449. switch (dig->dig_encoder) {
  1450. case 0:
  1451. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1452. break;
  1453. case 1:
  1454. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1455. break;
  1456. case 2:
  1457. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1458. break;
  1459. case 3:
  1460. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1461. break;
  1462. case 4:
  1463. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1464. break;
  1465. case 5:
  1466. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1467. break;
  1468. case 6:
  1469. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1470. break;
  1471. }
  1472. break;
  1473. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1474. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1475. break;
  1476. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1477. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1478. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1479. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1480. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1481. else
  1482. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1483. break;
  1484. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1485. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1486. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1487. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1488. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1489. else
  1490. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1491. break;
  1492. }
  1493. break;
  1494. }
  1495. break;
  1496. default:
  1497. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1498. return;
  1499. }
  1500. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1501. }
  1502. /* This only needs to be called once at startup */
  1503. void
  1504. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1505. {
  1506. struct drm_device *dev = adev->ddev;
  1507. struct drm_encoder *encoder;
  1508. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1509. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1510. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1511. switch (amdgpu_encoder->encoder_id) {
  1512. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1513. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1514. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1515. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1516. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1517. 0, 0);
  1518. break;
  1519. }
  1520. if (ext_encoder)
  1521. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1522. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1523. }
  1524. }
  1525. static bool
  1526. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1527. struct drm_connector *connector)
  1528. {
  1529. struct drm_device *dev = encoder->dev;
  1530. struct amdgpu_device *adev = dev->dev_private;
  1531. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1532. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1533. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1534. ATOM_DEVICE_CV_SUPPORT |
  1535. ATOM_DEVICE_CRT_SUPPORT)) {
  1536. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1537. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1538. uint8_t frev, crev;
  1539. memset(&args, 0, sizeof(args));
  1540. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1541. return false;
  1542. args.sDacload.ucMisc = 0;
  1543. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1544. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1545. args.sDacload.ucDacType = ATOM_DAC_A;
  1546. else
  1547. args.sDacload.ucDacType = ATOM_DAC_B;
  1548. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1549. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1550. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1551. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1552. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1553. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1554. if (crev >= 3)
  1555. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1556. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1557. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1558. if (crev >= 3)
  1559. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1560. }
  1561. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1562. return true;
  1563. } else
  1564. return false;
  1565. }
  1566. enum drm_connector_status
  1567. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1568. struct drm_connector *connector)
  1569. {
  1570. struct drm_device *dev = encoder->dev;
  1571. struct amdgpu_device *adev = dev->dev_private;
  1572. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1573. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1574. uint32_t bios_0_scratch;
  1575. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1576. DRM_DEBUG_KMS("detect returned false \n");
  1577. return connector_status_unknown;
  1578. }
  1579. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1580. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1581. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1582. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1583. return connector_status_connected;
  1584. }
  1585. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1586. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1587. return connector_status_connected;
  1588. }
  1589. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1590. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1591. return connector_status_connected;
  1592. }
  1593. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1594. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1595. return connector_status_connected; /* CTV */
  1596. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1597. return connector_status_connected; /* STV */
  1598. }
  1599. return connector_status_disconnected;
  1600. }
  1601. enum drm_connector_status
  1602. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1603. struct drm_connector *connector)
  1604. {
  1605. struct drm_device *dev = encoder->dev;
  1606. struct amdgpu_device *adev = dev->dev_private;
  1607. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1608. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1609. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1610. u32 bios_0_scratch;
  1611. if (!ext_encoder)
  1612. return connector_status_unknown;
  1613. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1614. return connector_status_unknown;
  1615. /* load detect on the dp bridge */
  1616. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1617. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1618. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1619. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1620. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1621. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1622. return connector_status_connected;
  1623. }
  1624. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1625. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1626. return connector_status_connected;
  1627. }
  1628. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1629. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1630. return connector_status_connected;
  1631. }
  1632. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1633. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1634. return connector_status_connected; /* CTV */
  1635. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1636. return connector_status_connected; /* STV */
  1637. }
  1638. return connector_status_disconnected;
  1639. }
  1640. void
  1641. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1642. {
  1643. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1644. if (ext_encoder)
  1645. /* ddc_setup on the dp bridge */
  1646. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1647. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1648. }
  1649. void
  1650. amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
  1651. struct drm_encoder *encoder,
  1652. bool connected)
  1653. {
  1654. struct drm_device *dev = connector->dev;
  1655. struct amdgpu_device *adev = dev->dev_private;
  1656. struct amdgpu_connector *amdgpu_connector =
  1657. to_amdgpu_connector(connector);
  1658. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1659. uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
  1660. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1661. bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
  1662. bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
  1663. if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
  1664. (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
  1665. if (connected) {
  1666. DRM_DEBUG_KMS("LCD1 connected\n");
  1667. bios_0_scratch |= ATOM_S0_LCD1;
  1668. bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
  1669. bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
  1670. } else {
  1671. DRM_DEBUG_KMS("LCD1 disconnected\n");
  1672. bios_0_scratch &= ~ATOM_S0_LCD1;
  1673. bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
  1674. bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
  1675. }
  1676. }
  1677. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
  1678. (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
  1679. if (connected) {
  1680. DRM_DEBUG_KMS("CRT1 connected\n");
  1681. bios_0_scratch |= ATOM_S0_CRT1_COLOR;
  1682. bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
  1683. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
  1684. } else {
  1685. DRM_DEBUG_KMS("CRT1 disconnected\n");
  1686. bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
  1687. bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
  1688. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
  1689. }
  1690. }
  1691. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
  1692. (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
  1693. if (connected) {
  1694. DRM_DEBUG_KMS("CRT2 connected\n");
  1695. bios_0_scratch |= ATOM_S0_CRT2_COLOR;
  1696. bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
  1697. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
  1698. } else {
  1699. DRM_DEBUG_KMS("CRT2 disconnected\n");
  1700. bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
  1701. bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
  1702. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
  1703. }
  1704. }
  1705. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
  1706. (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
  1707. if (connected) {
  1708. DRM_DEBUG_KMS("DFP1 connected\n");
  1709. bios_0_scratch |= ATOM_S0_DFP1;
  1710. bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
  1711. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
  1712. } else {
  1713. DRM_DEBUG_KMS("DFP1 disconnected\n");
  1714. bios_0_scratch &= ~ATOM_S0_DFP1;
  1715. bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
  1716. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
  1717. }
  1718. }
  1719. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
  1720. (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
  1721. if (connected) {
  1722. DRM_DEBUG_KMS("DFP2 connected\n");
  1723. bios_0_scratch |= ATOM_S0_DFP2;
  1724. bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
  1725. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
  1726. } else {
  1727. DRM_DEBUG_KMS("DFP2 disconnected\n");
  1728. bios_0_scratch &= ~ATOM_S0_DFP2;
  1729. bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
  1730. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
  1731. }
  1732. }
  1733. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
  1734. (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
  1735. if (connected) {
  1736. DRM_DEBUG_KMS("DFP3 connected\n");
  1737. bios_0_scratch |= ATOM_S0_DFP3;
  1738. bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
  1739. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
  1740. } else {
  1741. DRM_DEBUG_KMS("DFP3 disconnected\n");
  1742. bios_0_scratch &= ~ATOM_S0_DFP3;
  1743. bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
  1744. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
  1745. }
  1746. }
  1747. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
  1748. (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
  1749. if (connected) {
  1750. DRM_DEBUG_KMS("DFP4 connected\n");
  1751. bios_0_scratch |= ATOM_S0_DFP4;
  1752. bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
  1753. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
  1754. } else {
  1755. DRM_DEBUG_KMS("DFP4 disconnected\n");
  1756. bios_0_scratch &= ~ATOM_S0_DFP4;
  1757. bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
  1758. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
  1759. }
  1760. }
  1761. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
  1762. (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
  1763. if (connected) {
  1764. DRM_DEBUG_KMS("DFP5 connected\n");
  1765. bios_0_scratch |= ATOM_S0_DFP5;
  1766. bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
  1767. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
  1768. } else {
  1769. DRM_DEBUG_KMS("DFP5 disconnected\n");
  1770. bios_0_scratch &= ~ATOM_S0_DFP5;
  1771. bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
  1772. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
  1773. }
  1774. }
  1775. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
  1776. (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
  1777. if (connected) {
  1778. DRM_DEBUG_KMS("DFP6 connected\n");
  1779. bios_0_scratch |= ATOM_S0_DFP6;
  1780. bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
  1781. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
  1782. } else {
  1783. DRM_DEBUG_KMS("DFP6 disconnected\n");
  1784. bios_0_scratch &= ~ATOM_S0_DFP6;
  1785. bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
  1786. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
  1787. }
  1788. }
  1789. WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
  1790. WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
  1791. WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
  1792. }
/* Overlay of the AtomBIOS LVDS_Info data table; which member is valid
 * depends on the revision returned by the data-table header parse.
 */
union lvds_info {
struct _ATOM_LVDS_INFO info;
struct _ATOM_LVDS_INFO_V12 info_12;
};
  1797. struct amdgpu_encoder_atom_dig *
  1798. amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
  1799. {
  1800. struct drm_device *dev = encoder->base.dev;
  1801. struct amdgpu_device *adev = dev->dev_private;
  1802. struct amdgpu_mode_info *mode_info = &adev->mode_info;
  1803. int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
  1804. uint16_t data_offset, misc;
  1805. union lvds_info *lvds_info;
  1806. uint8_t frev, crev;
  1807. struct amdgpu_encoder_atom_dig *lvds = NULL;
  1808. int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1809. if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
  1810. &frev, &crev, &data_offset)) {
  1811. lvds_info =
  1812. (union lvds_info *)(mode_info->atom_context->bios + data_offset);
  1813. lvds =
  1814. kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1815. if (!lvds)
  1816. return NULL;
  1817. lvds->native_mode.clock =
  1818. le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
  1819. lvds->native_mode.hdisplay =
  1820. le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
  1821. lvds->native_mode.vdisplay =
  1822. le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
  1823. lvds->native_mode.htotal = lvds->native_mode.hdisplay +
  1824. le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
  1825. lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
  1826. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
  1827. lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
  1828. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
  1829. lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
  1830. le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
  1831. lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
  1832. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
  1833. lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
  1834. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
  1835. lvds->panel_pwr_delay =
  1836. le16_to_cpu(lvds_info->info.usOffDelayInMs);
  1837. lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
  1838. misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
  1839. if (misc & ATOM_VSYNC_POLARITY)
  1840. lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
  1841. if (misc & ATOM_HSYNC_POLARITY)
  1842. lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
  1843. if (misc & ATOM_COMPOSITESYNC)
  1844. lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
  1845. if (misc & ATOM_INTERLACE)
  1846. lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
  1847. if (misc & ATOM_DOUBLE_CLOCK_MODE)
  1848. lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
  1849. lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
  1850. lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
  1851. /* set crtc values */
  1852. drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
  1853. lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
  1854. encoder->native_mode = lvds->native_mode;
  1855. if (encoder_enum == 2)
  1856. lvds->linkb = true;
  1857. else
  1858. lvds->linkb = false;
  1859. /* parse the lcd record table */
  1860. if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
  1861. ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
  1862. ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
  1863. bool bad_record = false;
  1864. u8 *record;
  1865. if ((frev == 1) && (crev < 2))
  1866. /* absolute */
  1867. record = (u8 *)(mode_info->atom_context->bios +
  1868. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1869. else
  1870. /* relative */
  1871. record = (u8 *)(mode_info->atom_context->bios +
  1872. data_offset +
  1873. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1874. while (*record != ATOM_RECORD_END_TYPE) {
  1875. switch (*record) {
  1876. case LCD_MODE_PATCH_RECORD_MODE_TYPE:
  1877. record += sizeof(ATOM_PATCH_RECORD_MODE);
  1878. break;
  1879. case LCD_RTS_RECORD_TYPE:
  1880. record += sizeof(ATOM_LCD_RTS_RECORD);
  1881. break;
  1882. case LCD_CAP_RECORD_TYPE:
  1883. record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
  1884. break;
  1885. case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
  1886. fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
  1887. if (fake_edid_record->ucFakeEDIDLength) {
  1888. struct edid *edid;
  1889. int edid_size =
  1890. max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
  1891. edid = kmalloc(edid_size, GFP_KERNEL);
  1892. if (edid) {
  1893. memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
  1894. fake_edid_record->ucFakeEDIDLength);
  1895. if (drm_edid_is_valid(edid)) {
  1896. adev->mode_info.bios_hardcoded_edid = edid;
  1897. adev->mode_info.bios_hardcoded_edid_size = edid_size;
  1898. } else
  1899. kfree(edid);
  1900. }
  1901. }
  1902. record += fake_edid_record->ucFakeEDIDLength ?
  1903. fake_edid_record->ucFakeEDIDLength + 2 :
  1904. sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
  1905. break;
  1906. case LCD_PANEL_RESOLUTION_RECORD_TYPE:
  1907. panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
  1908. lvds->native_mode.width_mm = panel_res_record->usHSize;
  1909. lvds->native_mode.height_mm = panel_res_record->usVSize;
  1910. record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
  1911. break;
  1912. default:
  1913. DRM_ERROR("Bad LCD record %d\n", *record);
  1914. bad_record = true;
  1915. break;
  1916. }
  1917. if (bad_record)
  1918. break;
  1919. }
  1920. }
  1921. }
  1922. return lvds;
  1923. }
  1924. struct amdgpu_encoder_atom_dig *
  1925. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1926. {
  1927. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1928. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1929. if (!dig)
  1930. return NULL;
  1931. /* coherent mode by default */
  1932. dig->coherent_mode = true;
  1933. dig->dig_encoder = -1;
  1934. if (encoder_enum == 2)
  1935. dig->linkb = true;
  1936. else
  1937. dig->linkb = false;
  1938. return dig;
  1939. }