atombios_encoders.c 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. static u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. static void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
  66. void
  67. amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  68. u8 level)
  69. {
  70. struct drm_encoder *encoder = &amdgpu_encoder->base;
  71. struct drm_device *dev = amdgpu_encoder->base.dev;
  72. struct amdgpu_device *adev = dev->dev_private;
  73. struct amdgpu_encoder_atom_dig *dig;
  74. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  75. return;
  76. if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  77. amdgpu_encoder->enc_priv) {
  78. dig = amdgpu_encoder->enc_priv;
  79. dig->backlight_level = level;
  80. amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  81. switch (amdgpu_encoder->encoder_id) {
  82. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  83. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  84. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  85. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  86. if (dig->backlight_level == 0)
  87. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  88. ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
  89. else {
  90. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  91. ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
  92. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  93. ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
  94. }
  95. break;
  96. default:
  97. break;
  98. }
  99. }
  100. }
  101. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  102. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  103. {
  104. u8 level;
  105. /* Convert brightness to hardware level */
  106. if (bd->props.brightness < 0)
  107. level = 0;
  108. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  109. level = AMDGPU_MAX_BL_LEVEL;
  110. else
  111. level = bd->props.brightness;
  112. return level;
  113. }
  114. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  115. {
  116. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  117. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  118. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  119. amdgpu_atombios_encoder_backlight_level(bd));
  120. return 0;
  121. }
  122. static int
  123. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  124. {
  125. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  126. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  127. struct drm_device *dev = amdgpu_encoder->base.dev;
  128. struct amdgpu_device *adev = dev->dev_private;
  129. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  130. }
/* Callbacks the backlight core uses to query and program the panel level. */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  135. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  136. struct drm_connector *drm_connector)
  137. {
  138. struct drm_device *dev = amdgpu_encoder->base.dev;
  139. struct amdgpu_device *adev = dev->dev_private;
  140. struct backlight_device *bd;
  141. struct backlight_properties props;
  142. struct amdgpu_backlight_privdata *pdata;
  143. struct amdgpu_encoder_atom_dig *dig;
  144. u8 backlight_level;
  145. char bl_name[16];
  146. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  147. * so don't register a backlight device
  148. */
  149. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  150. (adev->pdev->device == 0x6741))
  151. return;
  152. if (!amdgpu_encoder->enc_priv)
  153. return;
  154. if (!adev->is_atom_bios)
  155. return;
  156. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  157. return;
  158. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  159. if (!pdata) {
  160. DRM_ERROR("Memory allocation failed\n");
  161. goto error;
  162. }
  163. memset(&props, 0, sizeof(props));
  164. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  165. props.type = BACKLIGHT_RAW;
  166. snprintf(bl_name, sizeof(bl_name),
  167. "amdgpu_bl%d", dev->primary->index);
  168. bd = backlight_device_register(bl_name, drm_connector->kdev,
  169. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  170. if (IS_ERR(bd)) {
  171. DRM_ERROR("Backlight registration failed\n");
  172. goto error;
  173. }
  174. pdata->encoder = amdgpu_encoder;
  175. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  176. dig = amdgpu_encoder->enc_priv;
  177. dig->bl_dev = bd;
  178. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  179. bd->props.power = FB_BLANK_UNBLANK;
  180. backlight_update_status(bd);
  181. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  182. return;
  183. error:
  184. kfree(pdata);
  185. return;
  186. }
  187. void
  188. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  189. {
  190. struct drm_device *dev = amdgpu_encoder->base.dev;
  191. struct amdgpu_device *adev = dev->dev_private;
  192. struct backlight_device *bd = NULL;
  193. struct amdgpu_encoder_atom_dig *dig;
  194. if (!amdgpu_encoder->enc_priv)
  195. return;
  196. if (!adev->is_atom_bios)
  197. return;
  198. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  199. return;
  200. dig = amdgpu_encoder->enc_priv;
  201. bd = dig->bl_dev;
  202. dig->bl_dev = NULL;
  203. if (bd) {
  204. struct amdgpu_legacy_backlight_privdata *pdata;
  205. pdata = bl_get_data(bd);
  206. backlight_device_unregister(bd);
  207. kfree(pdata);
  208. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  209. }
  210. }
  211. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  212. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  213. {
  214. }
/* No-op stub: backlight class support is compiled out. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
  218. #endif
  219. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  220. {
  221. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  222. switch (amdgpu_encoder->encoder_id) {
  223. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  224. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  226. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  227. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  228. return true;
  229. default:
  230. return false;
  231. }
  232. }
  233. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  234. const struct drm_display_mode *mode,
  235. struct drm_display_mode *adjusted_mode)
  236. {
  237. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  238. /* set the active encoder to connector routing */
  239. amdgpu_encoder_set_active_device(encoder);
  240. drm_mode_set_crtcinfo(adjusted_mode, 0);
  241. /* hw bug */
  242. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  243. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  244. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  245. /* vertical FP must be at least 1 */
  246. if (mode->crtc_vsync_start == mode->crtc_vdisplay)
  247. adjusted_mode->crtc_vsync_start++;
  248. /* get the native mode for scaling */
  249. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  250. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  251. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  252. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  253. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  254. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  255. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  256. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  257. }
  258. return true;
  259. }
  260. static void
  261. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  262. {
  263. struct drm_device *dev = encoder->dev;
  264. struct amdgpu_device *adev = dev->dev_private;
  265. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  266. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  267. int index = 0;
  268. memset(&args, 0, sizeof(args));
  269. switch (amdgpu_encoder->encoder_id) {
  270. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  271. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  272. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  273. break;
  274. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  275. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  276. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  277. break;
  278. }
  279. args.ucAction = action;
  280. args.ucDacStandard = ATOM_DAC1_PS2;
  281. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  282. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  283. }
  284. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  285. {
  286. int bpc = 8;
  287. if (encoder->crtc) {
  288. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  289. bpc = amdgpu_crtc->bpc;
  290. }
  291. switch (bpc) {
  292. case 0:
  293. return PANEL_BPC_UNDEFINE;
  294. case 6:
  295. return PANEL_6BIT_PER_COLOR;
  296. case 8:
  297. default:
  298. return PANEL_8BIT_PER_COLOR;
  299. case 10:
  300. return PANEL_10BIT_PER_COLOR;
  301. case 12:
  302. return PANEL_12BIT_PER_COLOR;
  303. case 16:
  304. return PANEL_16BIT_PER_COLOR;
  305. }
  306. }
/* One allocation sized for whichever DVOEncoderControl table revision the
 * BIOS implements; the crev parsed at runtime selects the member used.
 */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};
/*
 * Program the DVO encoder via the DVOEncoderControl atom table.  The table
 * layout depends on the BIOS revision (frev/crev), so the command header is
 * parsed first and the matching union member is filled before execution.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* bail if the BIOS does not implement this command table */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
			/* request dual-link when the pixel clock exceeds single-link limits */
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			/* table takes the pixel clock divided by 10 */
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	/* args is still zeroed if an unknown revision was hit above */
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/*
 * Decide which ATOM_ENCODER_MODE_* the encoder should run in, based on the
 * attached connector type, sink capabilities (HDMI detection via EDID), and
 * the amdgpu_audio module parameter.  DP bridges and DVO encoders have
 * fixed modes regardless of the connector.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		/* digital vs analog depends on the probe result; HDMI only
		 * when audio is enabled (explicitly or auto + HDMI sink)
		 */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		/* a DP connector may carry a DP/eDP sink or (via adapter)
		 * an HDMI/DVI sink - check the detected sink type
		 */
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
  458. /*
  459. * DIG Encoder/Transmitter Setup
  460. *
  461. * DCE 6.0
  462. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  463. * Supports up to 6 digital outputs
  464. * - 6 DIG encoder blocks.
  465. * - DIG to PHY mapping is hardcoded
  466. * DIG1 drives UNIPHY0 link A, A+B
  467. * DIG2 drives UNIPHY0 link B
  468. * DIG3 drives UNIPHY1 link A, A+B
  469. * DIG4 drives UNIPHY1 link B
  470. * DIG5 drives UNIPHY2 link A, A+B
  471. * DIG6 drives UNIPHY2 link B
  472. *
  473. * Routing
  474. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  475. * Examples:
  476. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  477. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  478. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  479. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  480. */
/* One allocation sized for whichever DIGxEncoderControl table revision the
 * BIOS implements; the crev parsed at runtime selects the member used.
 */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
};
/*
 * Configure the DIG encoder block via the DIGxEncoderControl atom table:
 * action, pixel clock, encoder mode (or panel mode), lane count, DP link
 * rate and DIG block selection, with the exact fields depending on the
 * table revision (frev/crev) reported by the BIOS.
 *
 * NOTE(review): several writes below go through args.v1 even inside the
 * v3/v4 paths - presumably relying on the union members sharing layout at
 * those offsets; confirm against the atombios struct definitions before
 * touching them.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	/* pull DP link parameters and the HPD pin from the connector, if any */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* NOTE(review): panel mode is written via the v3 view
			 * even on crev 1 - confirm this aliasing is intended
			 */
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			/* NOTE(review): the plain assignments below overwrite
			 * ucConfig, discarding the link-rate bit OR-ed in just
			 * above - looks suspicious; verify against the vbios
			 * table spec before changing
			 */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;
			/* link rate written via the v1 view (see NOTE above) */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;
			/* select the DP link-rate config matching the sink clock
			 * (written via the v1 view, see NOTE above)
			 */
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* table uses 1-based HPD ids; 0 means none */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Per-table-revision views of the DIG transmitter control parameter block.
 * All members alias the same storage; which view is valid is decided at
 * runtime from the frev/crev returned by amdgpu_atom_parse_cmd_header().
 */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
};
/**
 * amdgpu_atombios_encoder_setup_dig_transmitter - program a DIG transmitter
 * through the AtomBIOS UNIPHY/LVTMA/DVO transmitter control command table.
 * @encoder: DRM encoder whose transmitter is being programmed
 * @action: ATOM_TRANSMITTER_ACTION_* opcode (INIT, ENABLE, DISABLE, SETUP_VSEMPH, ...)
 * @lane_num: lane select, used only for ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
 * @lane_set: voltage-swing/pre-emphasis set for SETUP_VSEMPH (also written to
 *            ucDPLaneSet unconditionally on the v5 table)
 *
 * Fills in the revision-specific parameter union based on the frev/crev the
 * BIOS reports for the selected command table, then executes the table.
 * Returns silently if no DIG encoder has been assigned to this encoder or the
 * table header cannot be parsed.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* NOTE(review): igp_lane_info is never assigned after this, so the
	 * IGP lane-config branch in the crev 1 case below is effectively
	 * dead code on this driver — confirm before relying on it.
	 */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	/* Pull DP link parameters and HPD pin from the attached connector,
	 * if any; defaults above apply otherwise.
	 */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* Pick the command table that drives this transmitter object. */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* DP uses the link clock; dual-link TMDS splits the
				 * pixel clock across two links. Clocks are in 10kHz
				 * units for the table.
				 */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
			/* APU IGP lane routing — see the igp_lane_info note above. */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
			/* DP is always coherent; digital flat panels follow the
			 * connector's coherent-mode setting.
			 */
			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;
			/* Map the encoder object id onto a PHY (transmitter) index. */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;
			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;
			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;
			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;
			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			/* v5 identifies the PHY directly (A-G) instead of a
			 * transmitter/link pair, and takes the symbol clock.
			 */
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;
			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}
			/* HPD ids are 1-based in the table; 0 means "no HPD pin". */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  952. bool
  953. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  954. int action)
  955. {
  956. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  957. struct drm_device *dev = amdgpu_connector->base.dev;
  958. struct amdgpu_device *adev = dev->dev_private;
  959. union dig_transmitter_control args;
  960. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  961. uint8_t frev, crev;
  962. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  963. goto done;
  964. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  965. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  966. goto done;
  967. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  968. goto done;
  969. memset(&args, 0, sizeof(args));
  970. args.v1.ucAction = action;
  971. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  972. /* wait for the panel to power up */
  973. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  974. int i;
  975. for (i = 0; i < 300; i++) {
  976. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  977. return true;
  978. mdelay(1);
  979. }
  980. return false;
  981. }
  982. done:
  983. return true;
  984. }
/* Per-revision views of the ExternalEncoderControl table parameter block. */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/**
 * amdgpu_atombios_encoder_setup_external_encoder - drive an external encoder
 * (e.g. a DP bridge chip) through the AtomBIOS ExternalEncoderControl table.
 * @encoder: internal encoder the external one is attached to
 * @ext_encoder: the external encoder object
 * @action: EXTERNAL_ENCODER_ACTION_V3_* / ATOM_ENABLE / ATOM_DISABLE opcode
 *
 * Fills the revision-specific parameter union (frev 2, crev 1-3; frev 1
 * takes no parameters) and executes the table. DP link rate and lane count
 * come from the connector currently associated with @encoder.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* Enum id distinguishes multiple instances of the same external chip. */
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			/* pixel clock in 10kHz units, as the table expects */
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				/* crev 3 additionally understands HBR2 (5.4 GHz) */
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;

			/* Select which instance of the external encoder to address. */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/**
 * amdgpu_atombios_encoder_setup_dig - full enable/disable sequence for a DIG
 * encoder/transmitter pair, including any attached external encoder and eDP
 * panel power.
 * @encoder: DRM encoder to enable or disable
 * @action: ATOM_ENABLE or ATOM_DISABLE
 *
 * The ordering of the AtomBIOS calls below is the required power sequence:
 * enable = encoder setup -> panel power on -> transmitter on -> link train
 * -> video on -> backlight; disable reverses it.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		/* With no connector, assume an external DP bridge panel mode. */
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before the transmitter comes up. */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* Disable path: video off, external encoder off, backlight off,
		 * sink to D3, transmitter off, and finally eDP panel power off.
		 */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1159. void
  1160. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1161. {
  1162. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1163. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1164. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1165. amdgpu_encoder->active_device);
  1166. switch (amdgpu_encoder->encoder_id) {
  1167. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1168. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1169. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1170. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1171. switch (mode) {
  1172. case DRM_MODE_DPMS_ON:
  1173. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1174. break;
  1175. case DRM_MODE_DPMS_STANDBY:
  1176. case DRM_MODE_DPMS_SUSPEND:
  1177. case DRM_MODE_DPMS_OFF:
  1178. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1179. break;
  1180. }
  1181. break;
  1182. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1183. switch (mode) {
  1184. case DRM_MODE_DPMS_ON:
  1185. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1186. break;
  1187. case DRM_MODE_DPMS_STANDBY:
  1188. case DRM_MODE_DPMS_SUSPEND:
  1189. case DRM_MODE_DPMS_OFF:
  1190. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1191. break;
  1192. }
  1193. break;
  1194. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1195. switch (mode) {
  1196. case DRM_MODE_DPMS_ON:
  1197. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1198. break;
  1199. case DRM_MODE_DPMS_STANDBY:
  1200. case DRM_MODE_DPMS_SUSPEND:
  1201. case DRM_MODE_DPMS_OFF:
  1202. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1203. break;
  1204. }
  1205. break;
  1206. default:
  1207. return;
  1208. }
  1209. }
/* Per-revision views of the SelectCRTC_Source table parameter block. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1215. void
  1216. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1217. {
  1218. struct drm_device *dev = encoder->dev;
  1219. struct amdgpu_device *adev = dev->dev_private;
  1220. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1221. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1222. union crtc_source_param args;
  1223. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1224. uint8_t frev, crev;
  1225. struct amdgpu_encoder_atom_dig *dig;
  1226. memset(&args, 0, sizeof(args));
  1227. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1228. return;
  1229. switch (frev) {
  1230. case 1:
  1231. switch (crev) {
  1232. case 1:
  1233. default:
  1234. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1235. switch (amdgpu_encoder->encoder_id) {
  1236. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1237. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1238. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1239. break;
  1240. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1241. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1242. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1243. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1244. else
  1245. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1246. break;
  1247. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1248. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1249. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1250. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1251. break;
  1252. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1253. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1254. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1255. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1256. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1257. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1258. else
  1259. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1260. break;
  1261. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1262. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1263. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1264. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1265. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1266. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1267. else
  1268. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1269. break;
  1270. }
  1271. break;
  1272. case 2:
  1273. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1274. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1275. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1276. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1277. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1278. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1279. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1280. else
  1281. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1282. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1283. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1284. } else {
  1285. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1286. }
  1287. switch (amdgpu_encoder->encoder_id) {
  1288. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1289. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1290. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1291. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1292. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1293. dig = amdgpu_encoder->enc_priv;
  1294. switch (dig->dig_encoder) {
  1295. case 0:
  1296. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1297. break;
  1298. case 1:
  1299. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1300. break;
  1301. case 2:
  1302. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1303. break;
  1304. case 3:
  1305. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1306. break;
  1307. case 4:
  1308. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1309. break;
  1310. case 5:
  1311. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1312. break;
  1313. case 6:
  1314. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1315. break;
  1316. }
  1317. break;
  1318. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1319. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1320. break;
  1321. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1322. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1323. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1324. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1325. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1326. else
  1327. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1328. break;
  1329. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1330. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1331. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1332. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1333. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1334. else
  1335. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1336. break;
  1337. }
  1338. break;
  1339. case 3:
  1340. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1341. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1342. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1343. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1344. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1345. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1346. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1347. else
  1348. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1349. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1350. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1351. } else {
  1352. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1353. }
  1354. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1355. switch (amdgpu_encoder->encoder_id) {
  1356. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1357. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1358. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1359. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1360. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1361. dig = amdgpu_encoder->enc_priv;
  1362. switch (dig->dig_encoder) {
  1363. case 0:
  1364. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1365. break;
  1366. case 1:
  1367. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1368. break;
  1369. case 2:
  1370. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1371. break;
  1372. case 3:
  1373. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1374. break;
  1375. case 4:
  1376. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1377. break;
  1378. case 5:
  1379. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1380. break;
  1381. case 6:
  1382. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1383. break;
  1384. }
  1385. break;
  1386. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1387. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1388. break;
  1389. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1390. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1391. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1392. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1393. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1394. else
  1395. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1396. break;
  1397. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1398. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1399. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1400. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1401. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1402. else
  1403. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1404. break;
  1405. }
  1406. break;
  1407. }
  1408. break;
  1409. default:
  1410. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1411. return;
  1412. }
  1413. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1414. }
  1415. /* This only needs to be called once at startup */
  1416. void
  1417. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1418. {
  1419. struct drm_device *dev = adev->ddev;
  1420. struct drm_encoder *encoder;
  1421. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1422. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1423. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1424. switch (amdgpu_encoder->encoder_id) {
  1425. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1426. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1427. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1428. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1429. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1430. 0, 0);
  1431. break;
  1432. }
  1433. if (ext_encoder)
  1434. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1435. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1436. }
  1437. }
  1438. static bool
  1439. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1440. struct drm_connector *connector)
  1441. {
  1442. struct drm_device *dev = encoder->dev;
  1443. struct amdgpu_device *adev = dev->dev_private;
  1444. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1445. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1446. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1447. ATOM_DEVICE_CV_SUPPORT |
  1448. ATOM_DEVICE_CRT_SUPPORT)) {
  1449. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1450. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1451. uint8_t frev, crev;
  1452. memset(&args, 0, sizeof(args));
  1453. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1454. return false;
  1455. args.sDacload.ucMisc = 0;
  1456. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1457. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1458. args.sDacload.ucDacType = ATOM_DAC_A;
  1459. else
  1460. args.sDacload.ucDacType = ATOM_DAC_B;
  1461. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1462. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1463. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1464. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1465. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1466. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1467. if (crev >= 3)
  1468. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1469. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1470. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1471. if (crev >= 3)
  1472. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1473. }
  1474. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1475. return true;
  1476. } else
  1477. return false;
  1478. }
  1479. enum drm_connector_status
  1480. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1481. struct drm_connector *connector)
  1482. {
  1483. struct drm_device *dev = encoder->dev;
  1484. struct amdgpu_device *adev = dev->dev_private;
  1485. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1486. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1487. uint32_t bios_0_scratch;
  1488. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1489. DRM_DEBUG_KMS("detect returned false \n");
  1490. return connector_status_unknown;
  1491. }
  1492. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1493. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1494. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1495. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1496. return connector_status_connected;
  1497. }
  1498. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1499. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1500. return connector_status_connected;
  1501. }
  1502. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1503. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1504. return connector_status_connected;
  1505. }
  1506. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1507. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1508. return connector_status_connected; /* CTV */
  1509. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1510. return connector_status_connected; /* STV */
  1511. }
  1512. return connector_status_disconnected;
  1513. }
  1514. enum drm_connector_status
  1515. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1516. struct drm_connector *connector)
  1517. {
  1518. struct drm_device *dev = encoder->dev;
  1519. struct amdgpu_device *adev = dev->dev_private;
  1520. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1521. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1522. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1523. u32 bios_0_scratch;
  1524. if (!ext_encoder)
  1525. return connector_status_unknown;
  1526. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1527. return connector_status_unknown;
  1528. /* load detect on the dp bridge */
  1529. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1530. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1531. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1532. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1533. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1534. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1535. return connector_status_connected;
  1536. }
  1537. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1538. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1539. return connector_status_connected;
  1540. }
  1541. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1542. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1543. return connector_status_connected;
  1544. }
  1545. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1546. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1547. return connector_status_connected; /* CTV */
  1548. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1549. return connector_status_connected; /* STV */
  1550. }
  1551. return connector_status_disconnected;
  1552. }
  1553. void
  1554. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1555. {
  1556. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1557. if (ext_encoder)
  1558. /* ddc_setup on the dp bridge */
  1559. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1560. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1561. }
  1562. void
  1563. amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
  1564. struct drm_encoder *encoder,
  1565. bool connected)
  1566. {
  1567. struct drm_device *dev = connector->dev;
  1568. struct amdgpu_device *adev = dev->dev_private;
  1569. struct amdgpu_connector *amdgpu_connector =
  1570. to_amdgpu_connector(connector);
  1571. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1572. uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
  1573. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1574. bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
  1575. bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
  1576. if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
  1577. (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
  1578. if (connected) {
  1579. DRM_DEBUG_KMS("LCD1 connected\n");
  1580. bios_0_scratch |= ATOM_S0_LCD1;
  1581. bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
  1582. bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
  1583. } else {
  1584. DRM_DEBUG_KMS("LCD1 disconnected\n");
  1585. bios_0_scratch &= ~ATOM_S0_LCD1;
  1586. bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
  1587. bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
  1588. }
  1589. }
  1590. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
  1591. (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
  1592. if (connected) {
  1593. DRM_DEBUG_KMS("CRT1 connected\n");
  1594. bios_0_scratch |= ATOM_S0_CRT1_COLOR;
  1595. bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
  1596. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
  1597. } else {
  1598. DRM_DEBUG_KMS("CRT1 disconnected\n");
  1599. bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
  1600. bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
  1601. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
  1602. }
  1603. }
  1604. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
  1605. (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
  1606. if (connected) {
  1607. DRM_DEBUG_KMS("CRT2 connected\n");
  1608. bios_0_scratch |= ATOM_S0_CRT2_COLOR;
  1609. bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
  1610. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
  1611. } else {
  1612. DRM_DEBUG_KMS("CRT2 disconnected\n");
  1613. bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
  1614. bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
  1615. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
  1616. }
  1617. }
  1618. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
  1619. (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
  1620. if (connected) {
  1621. DRM_DEBUG_KMS("DFP1 connected\n");
  1622. bios_0_scratch |= ATOM_S0_DFP1;
  1623. bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
  1624. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
  1625. } else {
  1626. DRM_DEBUG_KMS("DFP1 disconnected\n");
  1627. bios_0_scratch &= ~ATOM_S0_DFP1;
  1628. bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
  1629. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
  1630. }
  1631. }
  1632. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
  1633. (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
  1634. if (connected) {
  1635. DRM_DEBUG_KMS("DFP2 connected\n");
  1636. bios_0_scratch |= ATOM_S0_DFP2;
  1637. bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
  1638. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
  1639. } else {
  1640. DRM_DEBUG_KMS("DFP2 disconnected\n");
  1641. bios_0_scratch &= ~ATOM_S0_DFP2;
  1642. bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
  1643. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
  1644. }
  1645. }
  1646. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
  1647. (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
  1648. if (connected) {
  1649. DRM_DEBUG_KMS("DFP3 connected\n");
  1650. bios_0_scratch |= ATOM_S0_DFP3;
  1651. bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
  1652. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
  1653. } else {
  1654. DRM_DEBUG_KMS("DFP3 disconnected\n");
  1655. bios_0_scratch &= ~ATOM_S0_DFP3;
  1656. bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
  1657. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
  1658. }
  1659. }
  1660. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
  1661. (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
  1662. if (connected) {
  1663. DRM_DEBUG_KMS("DFP4 connected\n");
  1664. bios_0_scratch |= ATOM_S0_DFP4;
  1665. bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
  1666. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
  1667. } else {
  1668. DRM_DEBUG_KMS("DFP4 disconnected\n");
  1669. bios_0_scratch &= ~ATOM_S0_DFP4;
  1670. bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
  1671. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
  1672. }
  1673. }
  1674. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
  1675. (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
  1676. if (connected) {
  1677. DRM_DEBUG_KMS("DFP5 connected\n");
  1678. bios_0_scratch |= ATOM_S0_DFP5;
  1679. bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
  1680. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
  1681. } else {
  1682. DRM_DEBUG_KMS("DFP5 disconnected\n");
  1683. bios_0_scratch &= ~ATOM_S0_DFP5;
  1684. bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
  1685. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
  1686. }
  1687. }
  1688. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
  1689. (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
  1690. if (connected) {
  1691. DRM_DEBUG_KMS("DFP6 connected\n");
  1692. bios_0_scratch |= ATOM_S0_DFP6;
  1693. bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
  1694. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
  1695. } else {
  1696. DRM_DEBUG_KMS("DFP6 disconnected\n");
  1697. bios_0_scratch &= ~ATOM_S0_DFP6;
  1698. bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
  1699. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
  1700. }
  1701. }
  1702. WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
  1703. WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
  1704. WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
  1705. }
/* Overlay of the LVDS_Info data-table layouts so the parser can read
 * whichever revision the VBIOS provides through one pointer.
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
  1710. struct amdgpu_encoder_atom_dig *
  1711. amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
  1712. {
  1713. struct drm_device *dev = encoder->base.dev;
  1714. struct amdgpu_device *adev = dev->dev_private;
  1715. struct amdgpu_mode_info *mode_info = &adev->mode_info;
  1716. int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
  1717. uint16_t data_offset, misc;
  1718. union lvds_info *lvds_info;
  1719. uint8_t frev, crev;
  1720. struct amdgpu_encoder_atom_dig *lvds = NULL;
  1721. int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1722. if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
  1723. &frev, &crev, &data_offset)) {
  1724. lvds_info =
  1725. (union lvds_info *)(mode_info->atom_context->bios + data_offset);
  1726. lvds =
  1727. kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1728. if (!lvds)
  1729. return NULL;
  1730. lvds->native_mode.clock =
  1731. le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
  1732. lvds->native_mode.hdisplay =
  1733. le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
  1734. lvds->native_mode.vdisplay =
  1735. le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
  1736. lvds->native_mode.htotal = lvds->native_mode.hdisplay +
  1737. le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
  1738. lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
  1739. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
  1740. lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
  1741. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
  1742. lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
  1743. le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
  1744. lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
  1745. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
  1746. lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
  1747. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
  1748. lvds->panel_pwr_delay =
  1749. le16_to_cpu(lvds_info->info.usOffDelayInMs);
  1750. lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
  1751. misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
  1752. if (misc & ATOM_VSYNC_POLARITY)
  1753. lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
  1754. if (misc & ATOM_HSYNC_POLARITY)
  1755. lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
  1756. if (misc & ATOM_COMPOSITESYNC)
  1757. lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
  1758. if (misc & ATOM_INTERLACE)
  1759. lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
  1760. if (misc & ATOM_DOUBLE_CLOCK_MODE)
  1761. lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
  1762. lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
  1763. lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
  1764. /* set crtc values */
  1765. drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
  1766. lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
  1767. encoder->native_mode = lvds->native_mode;
  1768. if (encoder_enum == 2)
  1769. lvds->linkb = true;
  1770. else
  1771. lvds->linkb = false;
  1772. /* parse the lcd record table */
  1773. if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
  1774. ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
  1775. ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
  1776. bool bad_record = false;
  1777. u8 *record;
  1778. if ((frev == 1) && (crev < 2))
  1779. /* absolute */
  1780. record = (u8 *)(mode_info->atom_context->bios +
  1781. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1782. else
  1783. /* relative */
  1784. record = (u8 *)(mode_info->atom_context->bios +
  1785. data_offset +
  1786. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1787. while (*record != ATOM_RECORD_END_TYPE) {
  1788. switch (*record) {
  1789. case LCD_MODE_PATCH_RECORD_MODE_TYPE:
  1790. record += sizeof(ATOM_PATCH_RECORD_MODE);
  1791. break;
  1792. case LCD_RTS_RECORD_TYPE:
  1793. record += sizeof(ATOM_LCD_RTS_RECORD);
  1794. break;
  1795. case LCD_CAP_RECORD_TYPE:
  1796. record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
  1797. break;
  1798. case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
  1799. fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
  1800. if (fake_edid_record->ucFakeEDIDLength) {
  1801. struct edid *edid;
  1802. int edid_size =
  1803. max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
  1804. edid = kmalloc(edid_size, GFP_KERNEL);
  1805. if (edid) {
  1806. memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
  1807. fake_edid_record->ucFakeEDIDLength);
  1808. if (drm_edid_is_valid(edid)) {
  1809. adev->mode_info.bios_hardcoded_edid = edid;
  1810. adev->mode_info.bios_hardcoded_edid_size = edid_size;
  1811. } else
  1812. kfree(edid);
  1813. }
  1814. }
  1815. record += fake_edid_record->ucFakeEDIDLength ?
  1816. fake_edid_record->ucFakeEDIDLength + 2 :
  1817. sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
  1818. break;
  1819. case LCD_PANEL_RESOLUTION_RECORD_TYPE:
  1820. panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
  1821. lvds->native_mode.width_mm = panel_res_record->usHSize;
  1822. lvds->native_mode.height_mm = panel_res_record->usVSize;
  1823. record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
  1824. break;
  1825. default:
  1826. DRM_ERROR("Bad LCD record %d\n", *record);
  1827. bad_record = true;
  1828. break;
  1829. }
  1830. if (bad_record)
  1831. break;
  1832. }
  1833. }
  1834. }
  1835. return lvds;
  1836. }
  1837. struct amdgpu_encoder_atom_dig *
  1838. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1839. {
  1840. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1841. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1842. if (!dig)
  1843. return NULL;
  1844. /* coherent mode by default */
  1845. dig->coherent_mode = true;
  1846. dig->dig_encoder = -1;
  1847. if (encoder_enum == 2)
  1848. dig->linkb = true;
  1849. else
  1850. dig->linkb = false;
  1851. return dig;
  1852. }