atombios_encoders.c 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. static u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. static void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
  66. void
  67. amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  68. u8 level)
  69. {
  70. struct drm_encoder *encoder = &amdgpu_encoder->base;
  71. struct drm_device *dev = amdgpu_encoder->base.dev;
  72. struct amdgpu_device *adev = dev->dev_private;
  73. struct amdgpu_encoder_atom_dig *dig;
  74. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  75. return;
  76. if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  77. amdgpu_encoder->enc_priv) {
  78. dig = amdgpu_encoder->enc_priv;
  79. dig->backlight_level = level;
  80. amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  81. switch (amdgpu_encoder->encoder_id) {
  82. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  83. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  84. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  85. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  86. if (dig->backlight_level == 0)
  87. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  88. ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
  89. else {
  90. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  91. ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
  92. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  93. ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
  94. }
  95. break;
  96. default:
  97. break;
  98. }
  99. }
  100. }
  101. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  102. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  103. {
  104. u8 level;
  105. /* Convert brightness to hardware level */
  106. if (bd->props.brightness < 0)
  107. level = 0;
  108. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  109. level = AMDGPU_MAX_BL_LEVEL;
  110. else
  111. level = bd->props.brightness;
  112. return level;
  113. }
  114. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  115. {
  116. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  117. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  118. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  119. amdgpu_atombios_encoder_backlight_level(bd));
  120. return 0;
  121. }
  122. static int
  123. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  124. {
  125. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  126. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  127. struct drm_device *dev = amdgpu_encoder->base.dev;
  128. struct amdgpu_device *adev = dev->dev_private;
  129. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  130. }
/* Hooks the sysfs backlight class device up to the ATOM scratch-register
 * helpers above.
 */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  135. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  136. struct drm_connector *drm_connector)
  137. {
  138. struct drm_device *dev = amdgpu_encoder->base.dev;
  139. struct amdgpu_device *adev = dev->dev_private;
  140. struct backlight_device *bd;
  141. struct backlight_properties props;
  142. struct amdgpu_backlight_privdata *pdata;
  143. struct amdgpu_encoder_atom_dig *dig;
  144. u8 backlight_level;
  145. char bl_name[16];
  146. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  147. * so don't register a backlight device
  148. */
  149. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  150. (adev->pdev->device == 0x6741))
  151. return;
  152. if (!amdgpu_encoder->enc_priv)
  153. return;
  154. if (!adev->is_atom_bios)
  155. return;
  156. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  157. return;
  158. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  159. if (!pdata) {
  160. DRM_ERROR("Memory allocation failed\n");
  161. goto error;
  162. }
  163. memset(&props, 0, sizeof(props));
  164. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  165. props.type = BACKLIGHT_RAW;
  166. snprintf(bl_name, sizeof(bl_name),
  167. "amdgpu_bl%d", dev->primary->index);
  168. bd = backlight_device_register(bl_name, drm_connector->kdev,
  169. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  170. if (IS_ERR(bd)) {
  171. DRM_ERROR("Backlight registration failed\n");
  172. goto error;
  173. }
  174. pdata->encoder = amdgpu_encoder;
  175. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  176. dig = amdgpu_encoder->enc_priv;
  177. dig->bl_dev = bd;
  178. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  179. bd->props.power = FB_BLANK_UNBLANK;
  180. backlight_update_status(bd);
  181. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  182. return;
  183. error:
  184. kfree(pdata);
  185. return;
  186. }
  187. void
  188. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  189. {
  190. struct drm_device *dev = amdgpu_encoder->base.dev;
  191. struct amdgpu_device *adev = dev->dev_private;
  192. struct backlight_device *bd = NULL;
  193. struct amdgpu_encoder_atom_dig *dig;
  194. if (!amdgpu_encoder->enc_priv)
  195. return;
  196. if (!adev->is_atom_bios)
  197. return;
  198. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  199. return;
  200. dig = amdgpu_encoder->enc_priv;
  201. bd = dig->bl_dev;
  202. dig->bl_dev = NULL;
  203. if (bd) {
  204. struct amdgpu_legacy_backlight_privdata *pdata;
  205. pdata = bl_get_data(bd);
  206. backlight_device_unregister(bd);
  207. kfree(pdata);
  208. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  209. }
  210. }
  211. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  212. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  213. {
  214. }
/* No-op stub when backlight class support is not built in. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
  218. #endif
  219. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  220. {
  221. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  222. switch (amdgpu_encoder->encoder_id) {
  223. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  224. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  226. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  227. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  228. return true;
  229. default:
  230. return false;
  231. }
  232. }
  233. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  234. const struct drm_display_mode *mode,
  235. struct drm_display_mode *adjusted_mode)
  236. {
  237. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  238. /* set the active encoder to connector routing */
  239. amdgpu_encoder_set_active_device(encoder);
  240. drm_mode_set_crtcinfo(adjusted_mode, 0);
  241. /* hw bug */
  242. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  243. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  244. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  245. /* get the native mode for scaling */
  246. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  247. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  248. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  249. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  250. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  251. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  252. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  253. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  254. }
  255. return true;
  256. }
  257. static void
  258. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  259. {
  260. struct drm_device *dev = encoder->dev;
  261. struct amdgpu_device *adev = dev->dev_private;
  262. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  263. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  264. int index = 0;
  265. memset(&args, 0, sizeof(args));
  266. switch (amdgpu_encoder->encoder_id) {
  267. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  268. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  269. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  270. break;
  271. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  272. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  273. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  274. break;
  275. }
  276. args.ucAction = action;
  277. args.ucDacStandard = ATOM_DAC1_PS2;
  278. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  279. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  280. }
  281. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  282. {
  283. int bpc = 8;
  284. if (encoder->crtc) {
  285. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  286. bpc = amdgpu_crtc->bpc;
  287. }
  288. switch (bpc) {
  289. case 0:
  290. return PANEL_BPC_UNDEFINE;
  291. case 6:
  292. return PANEL_6BIT_PER_COLOR;
  293. case 8:
  294. default:
  295. return PANEL_8BIT_PER_COLOR;
  296. case 10:
  297. return PANEL_10BIT_PER_COLOR;
  298. case 12:
  299. return PANEL_12BIT_PER_COLOR;
  300. case 16:
  301. return PANEL_16BIT_PER_COLOR;
  302. }
  303. }
/* Per-revision parameter layouts for the DVOEncoderControl command table;
 * the member used is selected from the frev/crev the BIOS reports
 * (see amdgpu_atombios_encoder_setup_dvo).
 */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;	/* crev 1 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;			/* crev 2 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;		/* crev 3 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;		/* crev 4 */
};
/* Program the DVO encoder through the DVOEncoderControl ATOM command table.
 * The parameter struct layout depends on the table revision reported by the
 * BIOS, so the union member is picked from frev/crev.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* bail if the BIOS does not expose this command table */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			/* dual link above the single-link pixel clock limit */
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			/* table expects the pixel clock in 10 kHz units */
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	/* note: executed even when frev/crev was unknown (args stays zeroed) */
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Derive the ATOM encoder mode (DP/HDMI/DVI/LVDS/CRT/TV/DVO) for @encoder
 * from the connector it drives, the sink's capabilities, and the
 * amdgpu_audio module parameter.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		/* digital + audio (forced on, or auto-detected from EDID) -> HDMI;
		 * digital without audio -> DVI; analog -> CRT
		 */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		/* DP/eDP sinks get DP; any other sink type on this connector
		 * falls back to the HDMI/DVI selection below
		 */
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
  455. /*
  456. * DIG Encoder/Transmitter Setup
  457. *
  458. * DCE 6.0
  459. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  460. * Supports up to 6 digital outputs
  461. * - 6 DIG encoder blocks.
  462. * - DIG to PHY mapping is hardcoded
  463. * DIG1 drives UNIPHY0 link A, A+B
  464. * DIG2 drives UNIPHY0 link B
  465. * DIG3 drives UNIPHY1 link A, A+B
  466. * DIG4 drives UNIPHY1 link B
  467. * DIG5 drives UNIPHY2 link A, A+B
  468. * DIG6 drives UNIPHY2 link B
  469. *
  470. * Routing
  471. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  472. * Examples:
  473. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  474. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  475. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  476. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  477. */
/* Per-revision parameter layouts for the DIGxEncoderControl command table;
 * the member used matches the frev/crev the BIOS reports
 * (see amdgpu_atombios_encoder_setup_dig_encoder).
 */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
};
/* Program a DIG encoder block via the DIGxEncoderControl ATOM table.
 * @action selects the table operation; @panel_mode is only consumed when
 * @action is ATOM_ENCODER_CMD_SETUP_PANEL_MODE.  DP link parameters and the
 * HPD pin are pulled from the connector currently routed to the encoder.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			/* table expects the pixel clock in 10 kHz units */
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				/* NOTE(review): writes the v3 member from the v1
				 * path; presumably the union layouts line up -
				 * verify against the ATOM struct definitions.
				 */
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			/* NOTE(review): the switch below assigns (=) ucConfig,
			 * which clobbers this link-rate bit for the listed
			 * encoder ids - confirm the ordering is intended.
			 */
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				/* NOTE(review): sets the rate bit via the v1 view
				 * of the union - assumes the config byte sits at
				 * the same offset in v1 and v3; confirm.
				 */
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			/* translate the DP link clock into the table's rate enum
			 * (again via the v1 view of the union; see note above)
			 */
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);

			/* table uses 0 for "no HPD"; hardware pins are 1-based */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Per-revision parameter layouts for the DIG transmitter control command
 * table; the member used matches the frev/crev the BIOS reports.
 */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
};
/**
 * amdgpu_atombios_encoder_setup_dig_transmitter - program a DIG transmitter
 * through the AtomBIOS UNIPHY/LVTMA/DVO transmitter control command table.
 * @encoder: DRM encoder whose transmitter is being programmed
 * @action: ATOM_TRANSMITTER_ACTION_* (INIT, ENABLE, DISABLE, SETUP_VSEMPH, ...)
 * @lane_num: lane selector; only consumed for SETUP_VSEMPH on table v1-v4
 * @lane_set: swing/pre-emphasis set; used for SETUP_VSEMPH and for v5's
 *            ucDPLaneSet
 *
 * Fills in the parameter union variant matching the table revision reported
 * by the BIOS and executes the table.  Returns silently if no DIG encoder
 * has been assigned or the command table header cannot be parsed.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* never written after this init, so the IGP lane-config branches in the
	 * v1 path below can never set a lane bit on this driver */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	/* PLL feeding the PHY, taken from the CRTC this encoder is bound to */
	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);

		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* pick the command table matching the transmitter type */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* DP uses the link symbol clock; dual-link TMDS
				 * runs each link at half the pixel clock */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			/* APU IGP lane mapping; dead in practice since
			 * igp_lane_info stays 0 above */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			/* transmitter index follows the UNIPHY instance */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			/* v5 addresses the PHY directly: each UNIPHY instance
			 * provides two PHYs (link A/B) */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;

			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}

			/* HPD ids are 1-based in the table; 0 means none */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  949. bool
  950. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  951. int action)
  952. {
  953. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  954. struct drm_device *dev = amdgpu_connector->base.dev;
  955. struct amdgpu_device *adev = dev->dev_private;
  956. union dig_transmitter_control args;
  957. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  958. uint8_t frev, crev;
  959. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  960. goto done;
  961. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  962. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  963. goto done;
  964. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  965. goto done;
  966. memset(&args, 0, sizeof(args));
  967. args.v1.ucAction = action;
  968. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  969. /* wait for the panel to power up */
  970. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  971. int i;
  972. for (i = 0; i < 300; i++) {
  973. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  974. return true;
  975. mdelay(1);
  976. }
  977. return false;
  978. }
  979. done:
  980. return true;
  981. }
/* Per-revision parameter layouts for the ExternalEncoderControl AtomBIOS
 * command table; the variant used is picked from the table's frev/crev.
 */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/**
 * amdgpu_atombios_encoder_setup_external_encoder - drive an external encoder
 * (DP bridge) through the ExternalEncoderControl AtomBIOS command table.
 * @encoder: the internal DRM encoder feeding the bridge
 * @ext_encoder: the external encoder object to control
 * @action: EXTERNAL_ENCODER_ACTION_V3_* / ATOM_ENABLE / ATOM_DISABLE
 *
 * Fills the parameter union for the revision reported by the BIOS and
 * executes the table; frev 1 takes no parameters at all.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* which instance of the external encoder object this is */
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* INIT passes the connector id instead of a pixel clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;

			/* address the specific external encoder instance */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/**
 * amdgpu_atombios_encoder_setup_dig - enable or disable a full DIG display
 * path (encoder, transmitter, optional external encoder and eDP panel).
 * @encoder: the DRM encoder to bring up or tear down
 * @action: ATOM_ENABLE or ATOM_DISABLE
 *
 * The call ordering below is the hardware bring-up/tear-down sequence:
 * on enable the encoder is set up first, then eDP panel power, then the
 * transmitter, then DP link training; disable runs the reverse order.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		/* with no connector assume an external DP bridge panel mode */
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* power up the eDP panel before touching the link */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* stop the DP video stream first */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		/* put the sink to sleep before killing the link */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1157. void
  1158. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1159. {
  1160. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1161. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1162. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1163. amdgpu_encoder->active_device);
  1164. switch (amdgpu_encoder->encoder_id) {
  1165. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1166. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1167. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1168. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1169. switch (mode) {
  1170. case DRM_MODE_DPMS_ON:
  1171. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1172. break;
  1173. case DRM_MODE_DPMS_STANDBY:
  1174. case DRM_MODE_DPMS_SUSPEND:
  1175. case DRM_MODE_DPMS_OFF:
  1176. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1177. break;
  1178. }
  1179. break;
  1180. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1181. switch (mode) {
  1182. case DRM_MODE_DPMS_ON:
  1183. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1184. break;
  1185. case DRM_MODE_DPMS_STANDBY:
  1186. case DRM_MODE_DPMS_SUSPEND:
  1187. case DRM_MODE_DPMS_OFF:
  1188. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1189. break;
  1190. }
  1191. break;
  1192. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1193. switch (mode) {
  1194. case DRM_MODE_DPMS_ON:
  1195. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1196. break;
  1197. case DRM_MODE_DPMS_STANDBY:
  1198. case DRM_MODE_DPMS_SUSPEND:
  1199. case DRM_MODE_DPMS_OFF:
  1200. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1201. break;
  1202. }
  1203. break;
  1204. default:
  1205. return;
  1206. }
  1207. }
/* Per-revision parameter layouts for the SelectCRTC_Source AtomBIOS
 * command table (routes an encoder to a CRTC); the variant used is
 * picked from the table's frev/crev.
 */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1213. void
  1214. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1215. {
  1216. struct drm_device *dev = encoder->dev;
  1217. struct amdgpu_device *adev = dev->dev_private;
  1218. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1219. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1220. union crtc_source_param args;
  1221. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1222. uint8_t frev, crev;
  1223. struct amdgpu_encoder_atom_dig *dig;
  1224. memset(&args, 0, sizeof(args));
  1225. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1226. return;
  1227. switch (frev) {
  1228. case 1:
  1229. switch (crev) {
  1230. case 1:
  1231. default:
  1232. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1233. switch (amdgpu_encoder->encoder_id) {
  1234. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1235. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1236. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1237. break;
  1238. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1239. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1240. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1241. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1242. else
  1243. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1244. break;
  1245. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1246. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1247. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1248. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1249. break;
  1250. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1251. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1252. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1253. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1254. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1255. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1256. else
  1257. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1258. break;
  1259. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1260. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1261. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1262. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1263. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1264. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1265. else
  1266. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1267. break;
  1268. }
  1269. break;
  1270. case 2:
  1271. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1272. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1273. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1274. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1275. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1276. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1277. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1278. else
  1279. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1280. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1281. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1282. } else {
  1283. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1284. }
  1285. switch (amdgpu_encoder->encoder_id) {
  1286. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1287. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1288. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1289. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1290. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1291. dig = amdgpu_encoder->enc_priv;
  1292. switch (dig->dig_encoder) {
  1293. case 0:
  1294. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1295. break;
  1296. case 1:
  1297. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1298. break;
  1299. case 2:
  1300. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1301. break;
  1302. case 3:
  1303. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1304. break;
  1305. case 4:
  1306. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1307. break;
  1308. case 5:
  1309. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1310. break;
  1311. case 6:
  1312. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1313. break;
  1314. }
  1315. break;
  1316. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1317. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1318. break;
  1319. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1320. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1321. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1322. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1323. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1324. else
  1325. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1326. break;
  1327. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1328. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1329. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1330. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1331. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1332. else
  1333. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1334. break;
  1335. }
  1336. break;
  1337. case 3:
  1338. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1339. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1340. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1341. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1342. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1343. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1344. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1345. else
  1346. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1347. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1348. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1349. } else {
  1350. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1351. }
  1352. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1353. switch (amdgpu_encoder->encoder_id) {
  1354. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1355. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1356. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1357. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1358. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1359. dig = amdgpu_encoder->enc_priv;
  1360. switch (dig->dig_encoder) {
  1361. case 0:
  1362. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1363. break;
  1364. case 1:
  1365. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1366. break;
  1367. case 2:
  1368. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1369. break;
  1370. case 3:
  1371. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1372. break;
  1373. case 4:
  1374. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1375. break;
  1376. case 5:
  1377. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1378. break;
  1379. case 6:
  1380. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1381. break;
  1382. }
  1383. break;
  1384. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1385. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1386. break;
  1387. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1388. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1389. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1390. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1391. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1392. else
  1393. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1394. break;
  1395. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1396. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1397. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1398. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1399. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1400. else
  1401. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1402. break;
  1403. }
  1404. break;
  1405. }
  1406. break;
  1407. default:
  1408. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1409. return;
  1410. }
  1411. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1412. }
  1413. /* This only needs to be called once at startup */
  1414. void
  1415. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1416. {
  1417. struct drm_device *dev = adev->ddev;
  1418. struct drm_encoder *encoder;
  1419. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1420. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1421. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1422. switch (amdgpu_encoder->encoder_id) {
  1423. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1424. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1425. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1426. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1427. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1428. 0, 0);
  1429. break;
  1430. }
  1431. if (ext_encoder)
  1432. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1433. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1434. }
  1435. }
  1436. static bool
  1437. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1438. struct drm_connector *connector)
  1439. {
  1440. struct drm_device *dev = encoder->dev;
  1441. struct amdgpu_device *adev = dev->dev_private;
  1442. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1443. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1444. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1445. ATOM_DEVICE_CV_SUPPORT |
  1446. ATOM_DEVICE_CRT_SUPPORT)) {
  1447. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1448. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1449. uint8_t frev, crev;
  1450. memset(&args, 0, sizeof(args));
  1451. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1452. return false;
  1453. args.sDacload.ucMisc = 0;
  1454. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1455. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1456. args.sDacload.ucDacType = ATOM_DAC_A;
  1457. else
  1458. args.sDacload.ucDacType = ATOM_DAC_B;
  1459. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1460. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1461. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1462. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1463. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1464. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1465. if (crev >= 3)
  1466. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1467. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1468. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1469. if (crev >= 3)
  1470. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1471. }
  1472. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1473. return true;
  1474. } else
  1475. return false;
  1476. }
  1477. enum drm_connector_status
  1478. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1479. struct drm_connector *connector)
  1480. {
  1481. struct drm_device *dev = encoder->dev;
  1482. struct amdgpu_device *adev = dev->dev_private;
  1483. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1484. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1485. uint32_t bios_0_scratch;
  1486. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1487. DRM_DEBUG_KMS("detect returned false \n");
  1488. return connector_status_unknown;
  1489. }
  1490. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1491. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1492. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1493. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1494. return connector_status_connected;
  1495. }
  1496. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1497. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1498. return connector_status_connected;
  1499. }
  1500. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1501. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1502. return connector_status_connected;
  1503. }
  1504. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1505. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1506. return connector_status_connected; /* CTV */
  1507. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1508. return connector_status_connected; /* STV */
  1509. }
  1510. return connector_status_disconnected;
  1511. }
  1512. enum drm_connector_status
  1513. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1514. struct drm_connector *connector)
  1515. {
  1516. struct drm_device *dev = encoder->dev;
  1517. struct amdgpu_device *adev = dev->dev_private;
  1518. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1519. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1520. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1521. u32 bios_0_scratch;
  1522. if (!ext_encoder)
  1523. return connector_status_unknown;
  1524. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1525. return connector_status_unknown;
  1526. /* load detect on the dp bridge */
  1527. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1528. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1529. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1530. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1531. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1532. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1533. return connector_status_connected;
  1534. }
  1535. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1536. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1537. return connector_status_connected;
  1538. }
  1539. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1540. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1541. return connector_status_connected;
  1542. }
  1543. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1544. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1545. return connector_status_connected; /* CTV */
  1546. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1547. return connector_status_connected; /* STV */
  1548. }
  1549. return connector_status_disconnected;
  1550. }
  1551. void
  1552. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1553. {
  1554. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1555. if (ext_encoder)
  1556. /* ddc_setup on the dp bridge */
  1557. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1558. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1559. }
/* Mirror a connector's connect/disconnect state into the BIOS scratch
 * registers (SCRATCH_0 = detected, SCRATCH_3 = active, SCRATCH_6 = VBIOS
 * access request) so the VBIOS and driver agree on which displays are
 * attached.  Only devices present on BOTH the encoder's and the
 * connector's device masks are updated; all three registers are
 * read-modify-written at the end.
 *
 * NOTE(review): the CRT set/clear paths are intentionally asymmetric —
 * setting uses the ATOM_S0_CRTn_COLOR bit while clearing uses the wider
 * ATOM_S0_CRTn_MASK; presumably the mask also covers the mono bit.
 * Confirm against the ATOM scratch-register definitions before "fixing".
 */
void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
					      struct drm_encoder *encoder,
					      bool connected)
{
	struct drm_device *dev = connector->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_connector *amdgpu_connector =
		to_amdgpu_connector(connector);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;

	/* Read current state of all three scratch registers. */
	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);

	/* One block per possible device; each sets or clears its bit in
	 * the detected (S0), active (S3) and access-request (S6) words.
	 */
	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("LCD1 connected\n");
			bios_0_scratch |= ATOM_S0_LCD1;
			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
		} else {
			DRM_DEBUG_KMS("LCD1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_LCD1;
			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT1 connected\n");
			/* set COLOR bit, but clear the whole CRT1 mask below */
			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
		} else {
			DRM_DEBUG_KMS("CRT1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	/* Write the updated state back. */
	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}
/* Overlay of the ATOM LVDS_Info data-table revisions; the parser picks
 * fields based on the frev/crev returned by the table header.
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
  1708. struct amdgpu_encoder_atom_dig *
  1709. amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
  1710. {
  1711. struct drm_device *dev = encoder->base.dev;
  1712. struct amdgpu_device *adev = dev->dev_private;
  1713. struct amdgpu_mode_info *mode_info = &adev->mode_info;
  1714. int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
  1715. uint16_t data_offset, misc;
  1716. union lvds_info *lvds_info;
  1717. uint8_t frev, crev;
  1718. struct amdgpu_encoder_atom_dig *lvds = NULL;
  1719. int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1720. if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
  1721. &frev, &crev, &data_offset)) {
  1722. lvds_info =
  1723. (union lvds_info *)(mode_info->atom_context->bios + data_offset);
  1724. lvds =
  1725. kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1726. if (!lvds)
  1727. return NULL;
  1728. lvds->native_mode.clock =
  1729. le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
  1730. lvds->native_mode.hdisplay =
  1731. le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
  1732. lvds->native_mode.vdisplay =
  1733. le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
  1734. lvds->native_mode.htotal = lvds->native_mode.hdisplay +
  1735. le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
  1736. lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
  1737. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
  1738. lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
  1739. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
  1740. lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
  1741. le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
  1742. lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
  1743. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
  1744. lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
  1745. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
  1746. lvds->panel_pwr_delay =
  1747. le16_to_cpu(lvds_info->info.usOffDelayInMs);
  1748. lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
  1749. misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
  1750. if (misc & ATOM_VSYNC_POLARITY)
  1751. lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
  1752. if (misc & ATOM_HSYNC_POLARITY)
  1753. lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
  1754. if (misc & ATOM_COMPOSITESYNC)
  1755. lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
  1756. if (misc & ATOM_INTERLACE)
  1757. lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
  1758. if (misc & ATOM_DOUBLE_CLOCK_MODE)
  1759. lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
  1760. lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
  1761. lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
  1762. /* set crtc values */
  1763. drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
  1764. lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
  1765. encoder->native_mode = lvds->native_mode;
  1766. if (encoder_enum == 2)
  1767. lvds->linkb = true;
  1768. else
  1769. lvds->linkb = false;
  1770. /* parse the lcd record table */
  1771. if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
  1772. ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
  1773. ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
  1774. bool bad_record = false;
  1775. u8 *record;
  1776. if ((frev == 1) && (crev < 2))
  1777. /* absolute */
  1778. record = (u8 *)(mode_info->atom_context->bios +
  1779. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1780. else
  1781. /* relative */
  1782. record = (u8 *)(mode_info->atom_context->bios +
  1783. data_offset +
  1784. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1785. while (*record != ATOM_RECORD_END_TYPE) {
  1786. switch (*record) {
  1787. case LCD_MODE_PATCH_RECORD_MODE_TYPE:
  1788. record += sizeof(ATOM_PATCH_RECORD_MODE);
  1789. break;
  1790. case LCD_RTS_RECORD_TYPE:
  1791. record += sizeof(ATOM_LCD_RTS_RECORD);
  1792. break;
  1793. case LCD_CAP_RECORD_TYPE:
  1794. record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
  1795. break;
  1796. case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
  1797. fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
  1798. if (fake_edid_record->ucFakeEDIDLength) {
  1799. struct edid *edid;
  1800. int edid_size =
  1801. max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
  1802. edid = kmalloc(edid_size, GFP_KERNEL);
  1803. if (edid) {
  1804. memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
  1805. fake_edid_record->ucFakeEDIDLength);
  1806. if (drm_edid_is_valid(edid)) {
  1807. adev->mode_info.bios_hardcoded_edid = edid;
  1808. adev->mode_info.bios_hardcoded_edid_size = edid_size;
  1809. } else
  1810. kfree(edid);
  1811. }
  1812. }
  1813. record += fake_edid_record->ucFakeEDIDLength ?
  1814. fake_edid_record->ucFakeEDIDLength + 2 :
  1815. sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
  1816. break;
  1817. case LCD_PANEL_RESOLUTION_RECORD_TYPE:
  1818. panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
  1819. lvds->native_mode.width_mm = panel_res_record->usHSize;
  1820. lvds->native_mode.height_mm = panel_res_record->usVSize;
  1821. record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
  1822. break;
  1823. default:
  1824. DRM_ERROR("Bad LCD record %d\n", *record);
  1825. bad_record = true;
  1826. break;
  1827. }
  1828. if (bad_record)
  1829. break;
  1830. }
  1831. }
  1832. }
  1833. return lvds;
  1834. }
  1835. struct amdgpu_encoder_atom_dig *
  1836. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1837. {
  1838. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1839. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1840. if (!dig)
  1841. return NULL;
  1842. /* coherent mode by default */
  1843. dig->coherent_mode = true;
  1844. dig->dig_encoder = -1;
  1845. if (encoder_enum == 2)
  1846. dig->linkb = true;
  1847. else
  1848. dig->linkb = false;
  1849. return dig;
  1850. }