atombios_encoders.c 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972197319741975197619771978197919801981198219831984198519861987198819891990199119921993199419951996199719981999200020012002200320042005200620072008200920102011201220132014201520162017201820192020202120222023202420252026202720282029203020312032203320342035203620372038203920402041204220432044204520462047204820492050205120522053205420552056205720582059206020612062206320642065206620672068206920702071207220732074207520762077207820792080208120822083208420852086208720882089209020912092209320942095209620972098209921002101210221032104210521062107210821092110211121122113211421152116211721182119212021212122212321242125212621272128212921302131213221332134213521362137213821392140214121422143214421452146214721482149215021512152215321542155215621572158215921602161
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. static u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. static void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
  66. void
  67. amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  68. u8 level)
  69. {
  70. struct drm_encoder *encoder = &amdgpu_encoder->base;
  71. struct drm_device *dev = amdgpu_encoder->base.dev;
  72. struct amdgpu_device *adev = dev->dev_private;
  73. struct amdgpu_encoder_atom_dig *dig;
  74. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  75. return;
  76. if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  77. amdgpu_encoder->enc_priv) {
  78. dig = amdgpu_encoder->enc_priv;
  79. dig->backlight_level = level;
  80. amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  81. switch (amdgpu_encoder->encoder_id) {
  82. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  83. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  84. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  85. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  86. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  87. if (dig->backlight_level == 0)
  88. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  89. ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
  90. else {
  91. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  92. ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
  93. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  94. ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
  95. }
  96. break;
  97. default:
  98. break;
  99. }
  100. }
  101. }
  102. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  103. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  104. {
  105. u8 level;
  106. /* Convert brightness to hardware level */
  107. if (bd->props.brightness < 0)
  108. level = 0;
  109. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  110. level = AMDGPU_MAX_BL_LEVEL;
  111. else
  112. level = bd->props.brightness;
  113. return level;
  114. }
  115. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  116. {
  117. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  118. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  119. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  120. amdgpu_atombios_encoder_backlight_level(bd));
  121. return 0;
  122. }
  123. static int
  124. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  125. {
  126. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  127. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  128. struct drm_device *dev = amdgpu_encoder->base.dev;
  129. struct amdgpu_device *adev = dev->dev_private;
  130. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  131. }
/* Callbacks for the backlight class device registered in
 * amdgpu_atombios_encoder_init_backlight().
 */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  136. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  137. struct drm_connector *drm_connector)
  138. {
  139. struct drm_device *dev = amdgpu_encoder->base.dev;
  140. struct amdgpu_device *adev = dev->dev_private;
  141. struct backlight_device *bd;
  142. struct backlight_properties props;
  143. struct amdgpu_backlight_privdata *pdata;
  144. struct amdgpu_encoder_atom_dig *dig;
  145. u8 backlight_level;
  146. char bl_name[16];
  147. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  148. * so don't register a backlight device
  149. */
  150. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  151. (adev->pdev->device == 0x6741))
  152. return;
  153. if (!amdgpu_encoder->enc_priv)
  154. return;
  155. if (!adev->is_atom_bios)
  156. return;
  157. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  158. return;
  159. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  160. if (!pdata) {
  161. DRM_ERROR("Memory allocation failed\n");
  162. goto error;
  163. }
  164. memset(&props, 0, sizeof(props));
  165. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  166. props.type = BACKLIGHT_RAW;
  167. snprintf(bl_name, sizeof(bl_name),
  168. "amdgpu_bl%d", dev->primary->index);
  169. bd = backlight_device_register(bl_name, drm_connector->kdev,
  170. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  171. if (IS_ERR(bd)) {
  172. DRM_ERROR("Backlight registration failed\n");
  173. goto error;
  174. }
  175. pdata->encoder = amdgpu_encoder;
  176. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  177. dig = amdgpu_encoder->enc_priv;
  178. dig->bl_dev = bd;
  179. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  180. bd->props.power = FB_BLANK_UNBLANK;
  181. backlight_update_status(bd);
  182. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  183. return;
  184. error:
  185. kfree(pdata);
  186. return;
  187. }
  188. void
  189. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  190. {
  191. struct drm_device *dev = amdgpu_encoder->base.dev;
  192. struct amdgpu_device *adev = dev->dev_private;
  193. struct backlight_device *bd = NULL;
  194. struct amdgpu_encoder_atom_dig *dig;
  195. if (!amdgpu_encoder->enc_priv)
  196. return;
  197. if (!adev->is_atom_bios)
  198. return;
  199. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  200. return;
  201. dig = amdgpu_encoder->enc_priv;
  202. bd = dig->bl_dev;
  203. dig->bl_dev = NULL;
  204. if (bd) {
  205. struct amdgpu_legacy_backlight_privdata *pdata;
  206. pdata = bl_get_data(bd);
  207. backlight_device_unregister(bd);
  208. kfree(pdata);
  209. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  210. }
  211. }
  212. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  213. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  214. {
  215. }
  216. void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
  217. {
  218. }
  219. #endif
  220. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  221. {
  222. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  223. switch (amdgpu_encoder->encoder_id) {
  224. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  226. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  227. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  228. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  229. return true;
  230. default:
  231. return false;
  232. }
  233. }
  234. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  235. const struct drm_display_mode *mode,
  236. struct drm_display_mode *adjusted_mode)
  237. {
  238. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  239. /* set the active encoder to connector routing */
  240. amdgpu_encoder_set_active_device(encoder);
  241. drm_mode_set_crtcinfo(adjusted_mode, 0);
  242. /* hw bug */
  243. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  244. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  245. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  246. /* vertical FP must be at least 1 */
  247. if (mode->crtc_vsync_start == mode->crtc_vdisplay)
  248. adjusted_mode->crtc_vsync_start++;
  249. /* get the native mode for scaling */
  250. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  251. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  252. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  253. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  254. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  255. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  256. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  257. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  258. }
  259. return true;
  260. }
  261. static void
  262. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  263. {
  264. struct drm_device *dev = encoder->dev;
  265. struct amdgpu_device *adev = dev->dev_private;
  266. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  267. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  268. int index = 0;
  269. memset(&args, 0, sizeof(args));
  270. switch (amdgpu_encoder->encoder_id) {
  271. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  272. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  273. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  274. break;
  275. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  276. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  277. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  278. break;
  279. }
  280. args.ucAction = action;
  281. args.ucDacStandard = ATOM_DAC1_PS2;
  282. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  283. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  284. }
  285. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  286. {
  287. int bpc = 8;
  288. if (encoder->crtc) {
  289. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  290. bpc = amdgpu_crtc->bpc;
  291. }
  292. switch (bpc) {
  293. case 0:
  294. return PANEL_BPC_UNDEFINE;
  295. case 6:
  296. return PANEL_6BIT_PER_COLOR;
  297. case 8:
  298. default:
  299. return PANEL_8BIT_PER_COLOR;
  300. case 10:
  301. return PANEL_10BIT_PER_COLOR;
  302. case 12:
  303. return PANEL_12BIT_PER_COLOR;
  304. case 16:
  305. return PANEL_16BIT_PER_COLOR;
  306. }
  307. }
/* Parameter block for the DVOEncoderControl command table; which member
 * is valid depends on the frev/crev the BIOS reports (see
 * amdgpu_atombios_encoder_setup_dvo()).
 */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};
/* Program a DVO encoder via the DVOEncoderControl command table.
 *
 * The parameter-block layout depends on the table revision the BIOS
 * reports, so the command header is parsed first and the matching
 * member of union dvo_encoder_control is filled in.
 *
 * NOTE(review): on an unrecognized frev/crev only an error is logged and
 * the zero-initialized args are still submitted to the table — confirm
 * this fall-through is intentional rather than returning early.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  369. int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
  370. {
  371. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  372. struct drm_connector *connector;
  373. struct amdgpu_connector *amdgpu_connector;
  374. struct amdgpu_connector_atom_dig *dig_connector;
  375. /* dp bridges are always DP */
  376. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
  377. return ATOM_ENCODER_MODE_DP;
  378. /* DVO is always DVO */
  379. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
  380. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
  381. return ATOM_ENCODER_MODE_DVO;
  382. connector = amdgpu_get_connector_for_encoder(encoder);
  383. /* if we don't have an active device yet, just use one of
  384. * the connectors tied to the encoder.
  385. */
  386. if (!connector)
  387. connector = amdgpu_get_connector_for_encoder_init(encoder);
  388. amdgpu_connector = to_amdgpu_connector(connector);
  389. switch (connector->connector_type) {
  390. case DRM_MODE_CONNECTOR_DVII:
  391. case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
  392. if (amdgpu_audio != 0) {
  393. if (amdgpu_connector->use_digital &&
  394. (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
  395. return ATOM_ENCODER_MODE_HDMI;
  396. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  397. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  398. return ATOM_ENCODER_MODE_HDMI;
  399. else if (amdgpu_connector->use_digital)
  400. return ATOM_ENCODER_MODE_DVI;
  401. else
  402. return ATOM_ENCODER_MODE_CRT;
  403. } else if (amdgpu_connector->use_digital) {
  404. return ATOM_ENCODER_MODE_DVI;
  405. } else {
  406. return ATOM_ENCODER_MODE_CRT;
  407. }
  408. break;
  409. case DRM_MODE_CONNECTOR_DVID:
  410. case DRM_MODE_CONNECTOR_HDMIA:
  411. default:
  412. if (amdgpu_audio != 0) {
  413. if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
  414. return ATOM_ENCODER_MODE_HDMI;
  415. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  416. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  417. return ATOM_ENCODER_MODE_HDMI;
  418. else
  419. return ATOM_ENCODER_MODE_DVI;
  420. } else {
  421. return ATOM_ENCODER_MODE_DVI;
  422. }
  423. break;
  424. case DRM_MODE_CONNECTOR_LVDS:
  425. return ATOM_ENCODER_MODE_LVDS;
  426. break;
  427. case DRM_MODE_CONNECTOR_DisplayPort:
  428. dig_connector = amdgpu_connector->con_priv;
  429. if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
  430. (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
  431. return ATOM_ENCODER_MODE_DP;
  432. } else if (amdgpu_audio != 0) {
  433. if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
  434. return ATOM_ENCODER_MODE_HDMI;
  435. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  436. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  437. return ATOM_ENCODER_MODE_HDMI;
  438. else
  439. return ATOM_ENCODER_MODE_DVI;
  440. } else {
  441. return ATOM_ENCODER_MODE_DVI;
  442. }
  443. break;
  444. case DRM_MODE_CONNECTOR_eDP:
  445. return ATOM_ENCODER_MODE_DP;
  446. case DRM_MODE_CONNECTOR_DVIA:
  447. case DRM_MODE_CONNECTOR_VGA:
  448. return ATOM_ENCODER_MODE_CRT;
  449. break;
  450. case DRM_MODE_CONNECTOR_Composite:
  451. case DRM_MODE_CONNECTOR_SVIDEO:
  452. case DRM_MODE_CONNECTOR_9PinDIN:
  453. /* fix me */
  454. return ATOM_ENCODER_MODE_TV;
  455. /*return ATOM_ENCODER_MODE_CV;*/
  456. break;
  457. }
  458. }
  459. /*
  460. * DIG Encoder/Transmitter Setup
  461. *
  462. * DCE 6.0
  463. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  464. * Supports up to 6 digital outputs
  465. * - 6 DIG encoder blocks.
  466. * - DIG to PHY mapping is hardcoded
  467. * DIG1 drives UNIPHY0 link A, A+B
  468. * DIG2 drives UNIPHY0 link B
  469. * DIG3 drives UNIPHY1 link A, A+B
  470. * DIG4 drives UNIPHY1 link B
  471. * DIG5 drives UNIPHY2 link A, A+B
  472. * DIG6 drives UNIPHY2 link B
  473. *
  474. * Routing
  475. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  476. * Examples:
  477. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  478. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  479. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  480. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  481. */
/* Parameter block for the DIGxEncoderControl command table; the member
 * used is selected by the frev/crev reported by the BIOS.
 */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};
  489. void
  490. amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
  491. int action, int panel_mode)
  492. {
  493. struct drm_device *dev = encoder->dev;
  494. struct amdgpu_device *adev = dev->dev_private;
  495. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  496. struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
  497. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  498. union dig_encoder_control args;
  499. int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
  500. uint8_t frev, crev;
  501. int dp_clock = 0;
  502. int dp_lane_count = 0;
  503. int hpd_id = AMDGPU_HPD_NONE;
  504. if (connector) {
  505. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  506. struct amdgpu_connector_atom_dig *dig_connector =
  507. amdgpu_connector->con_priv;
  508. dp_clock = dig_connector->dp_clock;
  509. dp_lane_count = dig_connector->dp_lane_count;
  510. hpd_id = amdgpu_connector->hpd.hpd;
  511. }
  512. /* no dig encoder assigned */
  513. if (dig->dig_encoder == -1)
  514. return;
  515. memset(&args, 0, sizeof(args));
  516. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  517. return;
  518. switch (frev) {
  519. case 1:
  520. switch (crev) {
  521. case 1:
  522. args.v1.ucAction = action;
  523. args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  524. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  525. args.v3.ucPanelMode = panel_mode;
  526. else
  527. args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  528. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
  529. args.v1.ucLaneNum = dp_lane_count;
  530. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  531. args.v1.ucLaneNum = 8;
  532. else
  533. args.v1.ucLaneNum = 4;
  534. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
  535. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
  536. switch (amdgpu_encoder->encoder_id) {
  537. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  538. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
  539. break;
  540. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  541. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  542. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
  543. break;
  544. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  545. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
  546. break;
  547. }
  548. if (dig->linkb)
  549. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
  550. else
  551. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
  552. break;
  553. case 2:
  554. case 3:
  555. args.v3.ucAction = action;
  556. args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  557. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  558. args.v3.ucPanelMode = panel_mode;
  559. else
  560. args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  561. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
  562. args.v3.ucLaneNum = dp_lane_count;
  563. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  564. args.v3.ucLaneNum = 8;
  565. else
  566. args.v3.ucLaneNum = 4;
  567. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
  568. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
  569. args.v3.acConfig.ucDigSel = dig->dig_encoder;
  570. args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  571. break;
  572. case 4:
  573. args.v4.ucAction = action;
  574. args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  575. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  576. args.v4.ucPanelMode = panel_mode;
  577. else
  578. args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  579. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
  580. args.v4.ucLaneNum = dp_lane_count;
  581. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  582. args.v4.ucLaneNum = 8;
  583. else
  584. args.v4.ucLaneNum = 4;
  585. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
  586. if (dp_clock == 540000)
  587. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
  588. else if (dp_clock == 324000)
  589. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
  590. else if (dp_clock == 270000)
  591. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
  592. else
  593. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
  594. }
  595. args.v4.acConfig.ucDigSel = dig->dig_encoder;
  596. args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  597. if (hpd_id == AMDGPU_HPD_NONE)
  598. args.v4.ucHPD_ID = 0;
  599. else
  600. args.v4.ucHPD_ID = hpd_id + 1;
  601. break;
  602. case 5:
  603. switch (action) {
  604. case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
  605. args.v5.asDPPanelModeParam.ucAction = action;
  606. args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
  607. args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
  608. break;
  609. case ATOM_ENCODER_CMD_STREAM_SETUP:
  610. args.v5.asStreamParam.ucAction = action;
  611. args.v5.asStreamParam.ucDigId = dig->dig_encoder;
  612. args.v5.asStreamParam.ucDigMode =
  613. amdgpu_atombios_encoder_get_encoder_mode(encoder);
  614. if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
  615. args.v5.asStreamParam.ucLaneNum = dp_lane_count;
  616. else if (amdgpu_dig_monitor_is_duallink(encoder,
  617. amdgpu_encoder->pixel_clock))
  618. args.v5.asStreamParam.ucLaneNum = 8;
  619. else
  620. args.v5.asStreamParam.ucLaneNum = 4;
  621. args.v5.asStreamParam.ulPixelClock =
  622. cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
  623. args.v5.asStreamParam.ucBitPerColor =
  624. amdgpu_atombios_encoder_get_bpc(encoder);
  625. args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
  626. break;
  627. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
  628. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
  629. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
  630. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
  631. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
  632. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
  633. case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
  634. case ATOM_ENCODER_CMD_DP_VIDEO_ON:
  635. args.v5.asCmdParam.ucAction = action;
  636. args.v5.asCmdParam.ucDigId = dig->dig_encoder;
  637. break;
  638. default:
  639. DRM_ERROR("Unsupported action 0x%x\n", action);
  640. break;
  641. }
  642. break;
  643. default:
  644. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  645. break;
  646. }
  647. break;
  648. default:
  649. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  650. break;
  651. }
  652. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  653. }
/* Argument layouts for the AtomBIOS DIG transmitter control command table.
 * The BIOS reports a table revision (frev/crev) at runtime; the matching
 * member below is filled in before executing the table.
 */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};
/**
 * amdgpu_atombios_encoder_setup_dig_transmitter - drive a DIG PHY through
 * the AtomBIOS transmitter control command table.
 * @encoder: DRM encoder whose transmitter is being programmed
 * @action: ATOM_TRANSMITTER_ACTION_* opcode (INIT, ENABLE, DISABLE,
 *          SETUP_VSEMPH, ...)
 * @lane_num: lane selector, only consumed for SETUP_VSEMPH actions
 * @lane_set: voltage swing / pre-emphasis setting for the selected lanes
 *
 * Looks up the command table index for the encoder object, queries the
 * table revision from the BIOS, fills the matching parameter layout
 * (v1..v6) and executes the table.  Returns silently if no DIG encoder
 * has been assigned or the command header cannot be parsed.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		/* pull DP link parameters and object ids off the connector */
		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);

		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* pick the command table that matches this encoder object */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* duallink splits the pixel clock across two links */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			/* NOTE(review): igp_lane_info is initialized to 0 and never
			 * set in this function, so these branches are currently
			 * dead — presumably carried over from radeon; confirm. */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			/* v5 addresses the PHY directly; link B maps to the odd PHY id */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;

			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}

			/* BIOS HPD ids are 1-based; 0 means no HPD pin */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;

			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		case 6:
			args.v6.ucAction = action;
			if (is_dp)
				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
			else
				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v6.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v6.ucLaneNum = 8;
			else
				args.v6.ucLaneNum = 4;

			args.v6.ucConnObjId = connector_object_id;

			/* ucDPLaneSet and ucDigMode share storage in v6 */
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;

			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  1042. bool
  1043. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  1044. int action)
  1045. {
  1046. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1047. struct drm_device *dev = amdgpu_connector->base.dev;
  1048. struct amdgpu_device *adev = dev->dev_private;
  1049. union dig_transmitter_control args;
  1050. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  1051. uint8_t frev, crev;
  1052. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  1053. goto done;
  1054. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  1055. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  1056. goto done;
  1057. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1058. goto done;
  1059. memset(&args, 0, sizeof(args));
  1060. args.v1.ucAction = action;
  1061. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1062. /* wait for the panel to power up */
  1063. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  1064. int i;
  1065. for (i = 0; i < 300; i++) {
  1066. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  1067. return true;
  1068. mdelay(1);
  1069. }
  1070. return false;
  1071. }
  1072. done:
  1073. return true;
  1074. }
/* Argument layouts for the AtomBIOS external encoder control command table;
 * the member used is selected by the table revision reported by the BIOS.
 */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/**
 * amdgpu_atombios_encoder_setup_external_encoder - program an external
 * encoder (e.g. a DP bridge) via the AtomBIOS external encoder control table.
 * @encoder: internal encoder the external one is attached to
 * @ext_encoder: the external encoder object
 * @action: EXTERNAL_ENCODER_ACTION_V3_* opcode (or legacy ATOM_ENABLE/DISABLE)
 *
 * Gathers DP parameters from the connector, fills the revision-specific
 * argument layout and executes the command table.  Returns silently if the
 * command header cannot be parsed.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* during init the normal connector lookup is not available yet */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* on init the connector id is passed instead of a pixel clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;

			/* identify which external encoder instance to address */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}

			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/**
 * amdgpu_atombios_encoder_setup_dig - run the full DIG enable or disable
 * sequence for an encoder.
 * @encoder: DRM encoder to bring up or tear down
 * @action: ATOM_ENABLE to bring the link up, anything else tears it down
 *
 * The statement order below is the hardware bring-up/tear-down sequence
 * (encoder setup, panel power, transmitter, link training, backlight) and
 * must not be rearranged.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		/* with no connector, fall back to external DP panel mode */
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before training the link */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* disable path: stop the DP stream before touching the transmitter */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1249. void
  1250. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1251. {
  1252. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1253. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1254. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1255. amdgpu_encoder->active_device);
  1256. switch (amdgpu_encoder->encoder_id) {
  1257. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1258. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1259. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1260. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1261. switch (mode) {
  1262. case DRM_MODE_DPMS_ON:
  1263. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1264. break;
  1265. case DRM_MODE_DPMS_STANDBY:
  1266. case DRM_MODE_DPMS_SUSPEND:
  1267. case DRM_MODE_DPMS_OFF:
  1268. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1269. break;
  1270. }
  1271. break;
  1272. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1273. switch (mode) {
  1274. case DRM_MODE_DPMS_ON:
  1275. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1276. break;
  1277. case DRM_MODE_DPMS_STANDBY:
  1278. case DRM_MODE_DPMS_SUSPEND:
  1279. case DRM_MODE_DPMS_OFF:
  1280. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1281. break;
  1282. }
  1283. break;
  1284. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1285. switch (mode) {
  1286. case DRM_MODE_DPMS_ON:
  1287. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1288. break;
  1289. case DRM_MODE_DPMS_STANDBY:
  1290. case DRM_MODE_DPMS_SUSPEND:
  1291. case DRM_MODE_DPMS_OFF:
  1292. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1293. break;
  1294. }
  1295. break;
  1296. default:
  1297. return;
  1298. }
  1299. }
/* Argument layouts for the AtomBIOS SelectCRTC_Source command table;
 * the member used is selected by the table revision reported by the BIOS.
 */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1305. void
  1306. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1307. {
  1308. struct drm_device *dev = encoder->dev;
  1309. struct amdgpu_device *adev = dev->dev_private;
  1310. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1311. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1312. union crtc_source_param args;
  1313. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1314. uint8_t frev, crev;
  1315. struct amdgpu_encoder_atom_dig *dig;
  1316. memset(&args, 0, sizeof(args));
  1317. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1318. return;
  1319. switch (frev) {
  1320. case 1:
  1321. switch (crev) {
  1322. case 1:
  1323. default:
  1324. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1325. switch (amdgpu_encoder->encoder_id) {
  1326. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1327. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1328. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1329. break;
  1330. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1331. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1332. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1333. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1334. else
  1335. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1336. break;
  1337. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1338. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1339. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1340. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1341. break;
  1342. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1343. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1344. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1345. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1346. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1347. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1348. else
  1349. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1350. break;
  1351. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1352. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1353. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1354. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1355. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1356. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1357. else
  1358. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1359. break;
  1360. }
  1361. break;
  1362. case 2:
  1363. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1364. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1365. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1366. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1367. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1368. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1369. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1370. else
  1371. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1372. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1373. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1374. } else {
  1375. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1376. }
  1377. switch (amdgpu_encoder->encoder_id) {
  1378. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1379. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1380. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1381. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1382. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1383. dig = amdgpu_encoder->enc_priv;
  1384. switch (dig->dig_encoder) {
  1385. case 0:
  1386. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1387. break;
  1388. case 1:
  1389. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1390. break;
  1391. case 2:
  1392. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1393. break;
  1394. case 3:
  1395. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1396. break;
  1397. case 4:
  1398. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1399. break;
  1400. case 5:
  1401. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1402. break;
  1403. case 6:
  1404. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1405. break;
  1406. }
  1407. break;
  1408. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1409. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1410. break;
  1411. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1412. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1413. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1414. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1415. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1416. else
  1417. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1418. break;
  1419. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1420. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1421. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1422. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1423. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1424. else
  1425. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1426. break;
  1427. }
  1428. break;
  1429. case 3:
  1430. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1431. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1432. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1433. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1434. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1435. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1436. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1437. else
  1438. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1439. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1440. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1441. } else {
  1442. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1443. }
  1444. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1445. switch (amdgpu_encoder->encoder_id) {
  1446. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1447. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1448. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1449. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1450. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1451. dig = amdgpu_encoder->enc_priv;
  1452. switch (dig->dig_encoder) {
  1453. case 0:
  1454. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1455. break;
  1456. case 1:
  1457. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1458. break;
  1459. case 2:
  1460. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1461. break;
  1462. case 3:
  1463. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1464. break;
  1465. case 4:
  1466. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1467. break;
  1468. case 5:
  1469. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1470. break;
  1471. case 6:
  1472. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1473. break;
  1474. }
  1475. break;
  1476. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1477. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1478. break;
  1479. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1480. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1481. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1482. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1483. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1484. else
  1485. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1486. break;
  1487. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1488. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1489. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1490. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1491. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1492. else
  1493. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1494. break;
  1495. }
  1496. break;
  1497. }
  1498. break;
  1499. default:
  1500. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1501. return;
  1502. }
  1503. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1504. }
  1505. /* This only needs to be called once at startup */
  1506. void
  1507. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1508. {
  1509. struct drm_device *dev = adev->ddev;
  1510. struct drm_encoder *encoder;
  1511. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1512. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1513. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1514. switch (amdgpu_encoder->encoder_id) {
  1515. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1516. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1517. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1518. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1519. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1520. 0, 0);
  1521. break;
  1522. }
  1523. if (ext_encoder)
  1524. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1525. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1526. }
  1527. }
  1528. static bool
  1529. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1530. struct drm_connector *connector)
  1531. {
  1532. struct drm_device *dev = encoder->dev;
  1533. struct amdgpu_device *adev = dev->dev_private;
  1534. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1535. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1536. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1537. ATOM_DEVICE_CV_SUPPORT |
  1538. ATOM_DEVICE_CRT_SUPPORT)) {
  1539. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1540. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1541. uint8_t frev, crev;
  1542. memset(&args, 0, sizeof(args));
  1543. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1544. return false;
  1545. args.sDacload.ucMisc = 0;
  1546. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1547. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1548. args.sDacload.ucDacType = ATOM_DAC_A;
  1549. else
  1550. args.sDacload.ucDacType = ATOM_DAC_B;
  1551. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1552. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1553. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1554. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1555. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1556. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1557. if (crev >= 3)
  1558. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1559. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1560. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1561. if (crev >= 3)
  1562. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1563. }
  1564. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1565. return true;
  1566. } else
  1567. return false;
  1568. }
  1569. enum drm_connector_status
  1570. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1571. struct drm_connector *connector)
  1572. {
  1573. struct drm_device *dev = encoder->dev;
  1574. struct amdgpu_device *adev = dev->dev_private;
  1575. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1576. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1577. uint32_t bios_0_scratch;
  1578. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1579. DRM_DEBUG_KMS("detect returned false \n");
  1580. return connector_status_unknown;
  1581. }
  1582. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1583. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1584. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1585. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1586. return connector_status_connected;
  1587. }
  1588. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1589. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1590. return connector_status_connected;
  1591. }
  1592. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1593. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1594. return connector_status_connected;
  1595. }
  1596. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1597. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1598. return connector_status_connected; /* CTV */
  1599. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1600. return connector_status_connected; /* STV */
  1601. }
  1602. return connector_status_disconnected;
  1603. }
  1604. enum drm_connector_status
  1605. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1606. struct drm_connector *connector)
  1607. {
  1608. struct drm_device *dev = encoder->dev;
  1609. struct amdgpu_device *adev = dev->dev_private;
  1610. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1611. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1612. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1613. u32 bios_0_scratch;
  1614. if (!ext_encoder)
  1615. return connector_status_unknown;
  1616. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1617. return connector_status_unknown;
  1618. /* load detect on the dp bridge */
  1619. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1620. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1621. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1622. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1623. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1624. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1625. return connector_status_connected;
  1626. }
  1627. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1628. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1629. return connector_status_connected;
  1630. }
  1631. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1632. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1633. return connector_status_connected;
  1634. }
  1635. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1636. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1637. return connector_status_connected; /* CTV */
  1638. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1639. return connector_status_connected; /* STV */
  1640. }
  1641. return connector_status_disconnected;
  1642. }
  1643. void
  1644. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1645. {
  1646. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1647. if (ext_encoder)
  1648. /* ddc_setup on the dp bridge */
  1649. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1650. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1651. }
  1652. void
  1653. amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
  1654. struct drm_encoder *encoder,
  1655. bool connected)
  1656. {
  1657. struct drm_device *dev = connector->dev;
  1658. struct amdgpu_device *adev = dev->dev_private;
  1659. struct amdgpu_connector *amdgpu_connector =
  1660. to_amdgpu_connector(connector);
  1661. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1662. uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
  1663. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1664. bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
  1665. bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
  1666. if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
  1667. (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
  1668. if (connected) {
  1669. DRM_DEBUG_KMS("LCD1 connected\n");
  1670. bios_0_scratch |= ATOM_S0_LCD1;
  1671. bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
  1672. bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
  1673. } else {
  1674. DRM_DEBUG_KMS("LCD1 disconnected\n");
  1675. bios_0_scratch &= ~ATOM_S0_LCD1;
  1676. bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
  1677. bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
  1678. }
  1679. }
  1680. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
  1681. (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
  1682. if (connected) {
  1683. DRM_DEBUG_KMS("CRT1 connected\n");
  1684. bios_0_scratch |= ATOM_S0_CRT1_COLOR;
  1685. bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
  1686. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
  1687. } else {
  1688. DRM_DEBUG_KMS("CRT1 disconnected\n");
  1689. bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
  1690. bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
  1691. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
  1692. }
  1693. }
  1694. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
  1695. (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
  1696. if (connected) {
  1697. DRM_DEBUG_KMS("CRT2 connected\n");
  1698. bios_0_scratch |= ATOM_S0_CRT2_COLOR;
  1699. bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
  1700. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
  1701. } else {
  1702. DRM_DEBUG_KMS("CRT2 disconnected\n");
  1703. bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
  1704. bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
  1705. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
  1706. }
  1707. }
  1708. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
  1709. (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
  1710. if (connected) {
  1711. DRM_DEBUG_KMS("DFP1 connected\n");
  1712. bios_0_scratch |= ATOM_S0_DFP1;
  1713. bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
  1714. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
  1715. } else {
  1716. DRM_DEBUG_KMS("DFP1 disconnected\n");
  1717. bios_0_scratch &= ~ATOM_S0_DFP1;
  1718. bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
  1719. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
  1720. }
  1721. }
  1722. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
  1723. (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
  1724. if (connected) {
  1725. DRM_DEBUG_KMS("DFP2 connected\n");
  1726. bios_0_scratch |= ATOM_S0_DFP2;
  1727. bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
  1728. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
  1729. } else {
  1730. DRM_DEBUG_KMS("DFP2 disconnected\n");
  1731. bios_0_scratch &= ~ATOM_S0_DFP2;
  1732. bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
  1733. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
  1734. }
  1735. }
  1736. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
  1737. (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
  1738. if (connected) {
  1739. DRM_DEBUG_KMS("DFP3 connected\n");
  1740. bios_0_scratch |= ATOM_S0_DFP3;
  1741. bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
  1742. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
  1743. } else {
  1744. DRM_DEBUG_KMS("DFP3 disconnected\n");
  1745. bios_0_scratch &= ~ATOM_S0_DFP3;
  1746. bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
  1747. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
  1748. }
  1749. }
  1750. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
  1751. (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
  1752. if (connected) {
  1753. DRM_DEBUG_KMS("DFP4 connected\n");
  1754. bios_0_scratch |= ATOM_S0_DFP4;
  1755. bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
  1756. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
  1757. } else {
  1758. DRM_DEBUG_KMS("DFP4 disconnected\n");
  1759. bios_0_scratch &= ~ATOM_S0_DFP4;
  1760. bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
  1761. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
  1762. }
  1763. }
  1764. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
  1765. (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
  1766. if (connected) {
  1767. DRM_DEBUG_KMS("DFP5 connected\n");
  1768. bios_0_scratch |= ATOM_S0_DFP5;
  1769. bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
  1770. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
  1771. } else {
  1772. DRM_DEBUG_KMS("DFP5 disconnected\n");
  1773. bios_0_scratch &= ~ATOM_S0_DFP5;
  1774. bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
  1775. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
  1776. }
  1777. }
  1778. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
  1779. (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
  1780. if (connected) {
  1781. DRM_DEBUG_KMS("DFP6 connected\n");
  1782. bios_0_scratch |= ATOM_S0_DFP6;
  1783. bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
  1784. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
  1785. } else {
  1786. DRM_DEBUG_KMS("DFP6 disconnected\n");
  1787. bios_0_scratch &= ~ATOM_S0_DFP6;
  1788. bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
  1789. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
  1790. }
  1791. }
  1792. WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
  1793. WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
  1794. WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
  1795. }
/* Overlay of the LVDS_Info data-table revisions so the parser can access
 * either layout through one pointer; the revision actually present is
 * reported by amdgpu_atom_parse_data_header() (frev/crev).
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
  1800. struct amdgpu_encoder_atom_dig *
  1801. amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
  1802. {
  1803. struct drm_device *dev = encoder->base.dev;
  1804. struct amdgpu_device *adev = dev->dev_private;
  1805. struct amdgpu_mode_info *mode_info = &adev->mode_info;
  1806. int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
  1807. uint16_t data_offset, misc;
  1808. union lvds_info *lvds_info;
  1809. uint8_t frev, crev;
  1810. struct amdgpu_encoder_atom_dig *lvds = NULL;
  1811. int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1812. if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
  1813. &frev, &crev, &data_offset)) {
  1814. lvds_info =
  1815. (union lvds_info *)(mode_info->atom_context->bios + data_offset);
  1816. lvds =
  1817. kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1818. if (!lvds)
  1819. return NULL;
  1820. lvds->native_mode.clock =
  1821. le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
  1822. lvds->native_mode.hdisplay =
  1823. le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
  1824. lvds->native_mode.vdisplay =
  1825. le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
  1826. lvds->native_mode.htotal = lvds->native_mode.hdisplay +
  1827. le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
  1828. lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
  1829. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
  1830. lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
  1831. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
  1832. lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
  1833. le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
  1834. lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
  1835. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
  1836. lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
  1837. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
  1838. lvds->panel_pwr_delay =
  1839. le16_to_cpu(lvds_info->info.usOffDelayInMs);
  1840. lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
  1841. misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
  1842. if (misc & ATOM_VSYNC_POLARITY)
  1843. lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
  1844. if (misc & ATOM_HSYNC_POLARITY)
  1845. lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
  1846. if (misc & ATOM_COMPOSITESYNC)
  1847. lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
  1848. if (misc & ATOM_INTERLACE)
  1849. lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
  1850. if (misc & ATOM_DOUBLE_CLOCK_MODE)
  1851. lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
  1852. lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
  1853. lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
  1854. /* set crtc values */
  1855. drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
  1856. lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
  1857. encoder->native_mode = lvds->native_mode;
  1858. if (encoder_enum == 2)
  1859. lvds->linkb = true;
  1860. else
  1861. lvds->linkb = false;
  1862. /* parse the lcd record table */
  1863. if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
  1864. ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
  1865. ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
  1866. bool bad_record = false;
  1867. u8 *record;
  1868. if ((frev == 1) && (crev < 2))
  1869. /* absolute */
  1870. record = (u8 *)(mode_info->atom_context->bios +
  1871. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1872. else
  1873. /* relative */
  1874. record = (u8 *)(mode_info->atom_context->bios +
  1875. data_offset +
  1876. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1877. while (*record != ATOM_RECORD_END_TYPE) {
  1878. switch (*record) {
  1879. case LCD_MODE_PATCH_RECORD_MODE_TYPE:
  1880. record += sizeof(ATOM_PATCH_RECORD_MODE);
  1881. break;
  1882. case LCD_RTS_RECORD_TYPE:
  1883. record += sizeof(ATOM_LCD_RTS_RECORD);
  1884. break;
  1885. case LCD_CAP_RECORD_TYPE:
  1886. record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
  1887. break;
  1888. case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
  1889. fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
  1890. if (fake_edid_record->ucFakeEDIDLength) {
  1891. struct edid *edid;
  1892. int edid_size =
  1893. max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
  1894. edid = kmalloc(edid_size, GFP_KERNEL);
  1895. if (edid) {
  1896. memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
  1897. fake_edid_record->ucFakeEDIDLength);
  1898. if (drm_edid_is_valid(edid)) {
  1899. adev->mode_info.bios_hardcoded_edid = edid;
  1900. adev->mode_info.bios_hardcoded_edid_size = edid_size;
  1901. } else
  1902. kfree(edid);
  1903. }
  1904. }
  1905. record += fake_edid_record->ucFakeEDIDLength ?
  1906. fake_edid_record->ucFakeEDIDLength + 2 :
  1907. sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
  1908. break;
  1909. case LCD_PANEL_RESOLUTION_RECORD_TYPE:
  1910. panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
  1911. lvds->native_mode.width_mm = panel_res_record->usHSize;
  1912. lvds->native_mode.height_mm = panel_res_record->usVSize;
  1913. record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
  1914. break;
  1915. default:
  1916. DRM_ERROR("Bad LCD record %d\n", *record);
  1917. bad_record = true;
  1918. break;
  1919. }
  1920. if (bad_record)
  1921. break;
  1922. }
  1923. }
  1924. }
  1925. return lvds;
  1926. }
  1927. struct amdgpu_encoder_atom_dig *
  1928. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1929. {
  1930. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1931. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1932. if (!dig)
  1933. return NULL;
  1934. /* coherent mode by default */
  1935. dig->coherent_mode = true;
  1936. dig->dig_encoder = -1;
  1937. if (encoder_enum == 2)
  1938. dig->linkb = true;
  1939. else
  1940. dig->linkb = false;
  1941. return dig;
  1942. }