atombios_encoders.c 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972197319741975197619771978197919801981198219831984198519861987198819891990199119921993199419951996199719981999200020012002200320042005200620072008200920102011201220132014201520162017201820192020202120222023202420252026202720282029203020312032203320342035203620372038203920402041204220432044204520462047204820492050205120522053205420552056205720582059206020612062206320642065
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. static u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. static void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
  66. void
  67. amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  68. u8 level)
  69. {
  70. struct drm_encoder *encoder = &amdgpu_encoder->base;
  71. struct drm_device *dev = amdgpu_encoder->base.dev;
  72. struct amdgpu_device *adev = dev->dev_private;
  73. struct amdgpu_encoder_atom_dig *dig;
  74. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  75. return;
  76. if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  77. amdgpu_encoder->enc_priv) {
  78. dig = amdgpu_encoder->enc_priv;
  79. dig->backlight_level = level;
  80. amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  81. switch (amdgpu_encoder->encoder_id) {
  82. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  83. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  84. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  85. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  86. if (dig->backlight_level == 0)
  87. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  88. ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
  89. else {
  90. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  91. ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
  92. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  93. ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
  94. }
  95. break;
  96. default:
  97. break;
  98. }
  99. }
  100. }
  101. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  102. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  103. {
  104. u8 level;
  105. /* Convert brightness to hardware level */
  106. if (bd->props.brightness < 0)
  107. level = 0;
  108. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  109. level = AMDGPU_MAX_BL_LEVEL;
  110. else
  111. level = bd->props.brightness;
  112. return level;
  113. }
  114. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  115. {
  116. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  117. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  118. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  119. amdgpu_atombios_encoder_backlight_level(bd));
  120. return 0;
  121. }
  122. static int
  123. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  124. {
  125. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  126. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  127. struct drm_device *dev = amdgpu_encoder->base.dev;
  128. struct amdgpu_device *adev = dev->dev_private;
  129. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  130. }
/* Hooks registered with the kernel backlight class device. */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  135. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  136. struct drm_connector *drm_connector)
  137. {
  138. struct drm_device *dev = amdgpu_encoder->base.dev;
  139. struct amdgpu_device *adev = dev->dev_private;
  140. struct backlight_device *bd;
  141. struct backlight_properties props;
  142. struct amdgpu_backlight_privdata *pdata;
  143. struct amdgpu_encoder_atom_dig *dig;
  144. u8 backlight_level;
  145. char bl_name[16];
  146. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  147. * so don't register a backlight device
  148. */
  149. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  150. (adev->pdev->device == 0x6741))
  151. return;
  152. if (!amdgpu_encoder->enc_priv)
  153. return;
  154. if (!adev->is_atom_bios)
  155. return;
  156. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  157. return;
  158. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  159. if (!pdata) {
  160. DRM_ERROR("Memory allocation failed\n");
  161. goto error;
  162. }
  163. memset(&props, 0, sizeof(props));
  164. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  165. props.type = BACKLIGHT_RAW;
  166. snprintf(bl_name, sizeof(bl_name),
  167. "amdgpu_bl%d", dev->primary->index);
  168. bd = backlight_device_register(bl_name, drm_connector->kdev,
  169. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  170. if (IS_ERR(bd)) {
  171. DRM_ERROR("Backlight registration failed\n");
  172. goto error;
  173. }
  174. pdata->encoder = amdgpu_encoder;
  175. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  176. dig = amdgpu_encoder->enc_priv;
  177. dig->bl_dev = bd;
  178. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  179. bd->props.power = FB_BLANK_UNBLANK;
  180. backlight_update_status(bd);
  181. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  182. return;
  183. error:
  184. kfree(pdata);
  185. return;
  186. }
  187. void
  188. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  189. {
  190. struct drm_device *dev = amdgpu_encoder->base.dev;
  191. struct amdgpu_device *adev = dev->dev_private;
  192. struct backlight_device *bd = NULL;
  193. struct amdgpu_encoder_atom_dig *dig;
  194. if (!amdgpu_encoder->enc_priv)
  195. return;
  196. if (!adev->is_atom_bios)
  197. return;
  198. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  199. return;
  200. dig = amdgpu_encoder->enc_priv;
  201. bd = dig->bl_dev;
  202. dig->bl_dev = NULL;
  203. if (bd) {
  204. struct amdgpu_legacy_backlight_privdata *pdata;
  205. pdata = bl_get_data(bd);
  206. backlight_device_unregister(bd);
  207. kfree(pdata);
  208. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  209. }
  210. }
  211. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  212. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  213. {
  214. }
/* No-op stub when CONFIG_BACKLIGHT_CLASS_DEVICE is disabled. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
  218. #endif
  219. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  220. {
  221. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  222. switch (amdgpu_encoder->encoder_id) {
  223. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  224. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  226. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  227. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  228. return true;
  229. default:
  230. return false;
  231. }
  232. }
  233. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  234. const struct drm_display_mode *mode,
  235. struct drm_display_mode *adjusted_mode)
  236. {
  237. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  238. /* set the active encoder to connector routing */
  239. amdgpu_encoder_set_active_device(encoder);
  240. drm_mode_set_crtcinfo(adjusted_mode, 0);
  241. /* hw bug */
  242. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  243. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  244. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  245. /* get the native mode for scaling */
  246. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  247. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  248. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  249. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  250. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  251. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  252. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  253. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  254. }
  255. return true;
  256. }
  257. static void
  258. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  259. {
  260. struct drm_device *dev = encoder->dev;
  261. struct amdgpu_device *adev = dev->dev_private;
  262. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  263. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  264. int index = 0;
  265. memset(&args, 0, sizeof(args));
  266. switch (amdgpu_encoder->encoder_id) {
  267. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  268. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  269. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  270. break;
  271. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  272. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  273. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  274. break;
  275. }
  276. args.ucAction = action;
  277. args.ucDacStandard = ATOM_DAC1_PS2;
  278. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  279. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  280. }
  281. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  282. {
  283. int bpc = 8;
  284. if (encoder->crtc) {
  285. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  286. bpc = amdgpu_crtc->bpc;
  287. }
  288. switch (bpc) {
  289. case 0:
  290. return PANEL_BPC_UNDEFINE;
  291. case 6:
  292. return PANEL_6BIT_PER_COLOR;
  293. case 8:
  294. default:
  295. return PANEL_8BIT_PER_COLOR;
  296. case 10:
  297. return PANEL_10BIT_PER_COLOR;
  298. case 12:
  299. return PANEL_12BIT_PER_COLOR;
  300. case 16:
  301. return PANEL_16BIT_PER_COLOR;
  302. }
  303. }
/* Parameter blocks for the different DVOEncoderControl table revisions;
 * the BIOS revision parsed at runtime selects which member is filled in.
 */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};
/* Program the DVO encoder via the DVOEncoderControl atom table.
 * The parameter layout depends on the table revision reported by the
 * BIOS, so the command header is parsed first and the matching union
 * member is filled in before the table is executed.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* bail if the BIOS does not provide this command table */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			/* table takes the clock in 10 kHz units */
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Pick the atom encoder mode (DP/HDMI/DVI/CRT/LVDS/TV) for this encoder,
 * based on the attached connector type, digital vs. analog detection, and
 * the audio configuration (module parameter + per-connector setting).
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	/* NOTE(review): connector may still be NULL here if the encoder has
	 * no connector at all; the dereference below would then crash —
	 * verify callers guarantee at least one connector.
	 */
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			/* explicit audio enable, or auto + HDMI sink -> HDMI */
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		/* a real DP/eDP sink gets DP mode; otherwise (passive
		 * DP->DVI/HDMI adapter) fall back to HDMI/DVI selection
		 */
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
  455. /*
  456. * DIG Encoder/Transmitter Setup
  457. *
  458. * DCE 6.0
  459. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  460. * Supports up to 6 digital outputs
  461. * - 6 DIG encoder blocks.
  462. * - DIG to PHY mapping is hardcoded
  463. * DIG1 drives UNIPHY0 link A, A+B
  464. * DIG2 drives UNIPHY0 link B
  465. * DIG3 drives UNIPHY1 link A, A+B
  466. * DIG4 drives UNIPHY1 link B
  467. * DIG5 drives UNIPHY2 link A, A+B
  468. * DIG6 drives UNIPHY2 link B
  469. *
  470. * Routing
  471. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  472. * Examples:
  473. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  474. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  475. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  476. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  477. */
/* Parameter blocks for the different DIGxEncoderControl table revisions;
 * note the members overlap in memory, which the setup code exploits when
 * addressing shared fields (e.g. ucConfig) through an older revision.
 */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
};
/* Program a DIG encoder block via the DIGxEncoderControl atom table:
 * action, pixel clock, lane count, DP link rate, panel mode and DIG
 * block selection, with the parameter layout chosen by the BIOS table
 * revision.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	/* pull DP link parameters and the hpd pin from the connector, if any */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* NOTE(review): writes ucPanelMode through the v3
			 * layout inside the v1 path — presumably relies on
			 * the union members overlapping; confirm against the
			 * atombios parameter definitions.
			 */
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			/* NOTE(review): this OR'ed link-rate bit is clobbered
			 * by the plain '=' assignments in the switch below —
			 * verify whether the DP link rate is intended to
			 * survive for crev 1.
			 */
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			/* ucConfig is addressed through v1 here; presumably
			 * the field sits at the same offset in both layouts.
			 */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			/* select the DP link-rate config bit from the sink's
			 * link clock (kHz); ucConfig again addressed via v1
			 */
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* hpd ids are 1-based in the table; 0 means none */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Parameter blocks for the different DIG transmitter control table
 * revisions used by amdgpu_atombios_encoder_setup_dig_transmitter().
 */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
};
/**
 * amdgpu_atombios_encoder_setup_dig_transmitter - program a DIG transmitter (PHY)
 * @encoder: DRM encoder whose transmitter is being programmed
 * @action: ATOM_TRANSMITTER_ACTION_* opcode (INIT, ENABLE, DISABLE, SETUP_VSEMPH, ...)
 * @lane_num: lane selector, only consumed for ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
 * @lane_set: voltage-swing/pre-emphasis setting (SETUP_VSEMPH; also v5 ucDPLaneSet)
 *
 * Fills the revision-specific argument union for the UNIPHY/LVTMA/DVO
 * transmitter control table from the current connector/encoder state
 * (DP link clock and lane count, link A/B, PLL selection, coherent mode,
 * HPD pin) and executes the table.  Bails out silently if no DIG encoder
 * has been assigned or the command table header cannot be parsed.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* NOTE(review): igp_lane_info is never assigned in this function, so the
	 * IGP lane-selection branches in the crev 1 path below never fire —
	 * presumably dead code inherited from radeon; confirm before relying on it.
	 */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;
	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;
		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}
	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}
	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;
	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;
	memset(&args, 0, sizeof(args));
	/* pick the command table matching the transmitter block */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;
	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* table takes the symbol/pixel clock in 10 kHz units;
				 * dual-link splits the pixel clock across two links */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				/* APU IGP lane routing; igp_lane_info is always 0 here
				 * (see note at its declaration) */
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;
			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;
			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;
			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;
			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			/* v5 addresses the PHY by ID rather than transmitter/link bits */
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;
			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;
			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}
			/* HPD IDs are 1-based in the table; 0 means "none" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  949. bool
  950. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  951. int action)
  952. {
  953. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  954. struct drm_device *dev = amdgpu_connector->base.dev;
  955. struct amdgpu_device *adev = dev->dev_private;
  956. union dig_transmitter_control args;
  957. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  958. uint8_t frev, crev;
  959. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  960. goto done;
  961. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  962. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  963. goto done;
  964. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  965. goto done;
  966. memset(&args, 0, sizeof(args));
  967. args.v1.ucAction = action;
  968. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  969. /* wait for the panel to power up */
  970. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  971. int i;
  972. for (i = 0; i < 300; i++) {
  973. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  974. return true;
  975. mdelay(1);
  976. }
  977. return false;
  978. }
  979. done:
  980. return true;
  981. }
/* Per-revision argument layouts for the ExternalEncoderControl AtomBIOS table
 * (drives external bridge chips such as DP-to-LVDS/TMDS encoders).
 */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/**
 * amdgpu_atombios_encoder_setup_external_encoder - program an external bridge encoder
 * @encoder: the internal DRM encoder driving the bridge
 * @ext_encoder: the external encoder (bridge chip) to program
 * @action: EXTERNAL_ENCODER_ACTION_V3_* (or legacy ATOM_ENABLE/ATOM_DISABLE) opcode
 *
 * Fills the revision-specific ExternalEncoderControl arguments from the
 * connector's DP state (link rate, lane count) and executes the table.
 * frev 1 tables take no parameters at all.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* which instance of the external encoder object this is (1/2/3) */
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}
	memset(&args, 0, sizeof(args));
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;
	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			/* pixel clock in 10 kHz units */
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* select which instance of the bridge to address */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/**
 * amdgpu_atombios_encoder_setup_dig - enable or disable the full DIG path
 * @encoder: DRM encoder to bring up or tear down
 * @action: ATOM_ENABLE or ATOM_DISABLE
 *
 * Sequences the DIG encoder, transmitter, optional external bridge, eDP
 * panel power, DP link training and backlight in hardware-required order.
 * The ordering of the calls below is deliberate; do not reorder.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}
	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panel must be powered before the transmitter is enabled */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* disable path: stop video, sleep the DP sink, then kill the
		 * transmitter and finally the eDP panel rail */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1156. void
  1157. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1158. {
  1159. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1160. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1161. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1162. amdgpu_encoder->active_device);
  1163. switch (amdgpu_encoder->encoder_id) {
  1164. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1165. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1166. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1167. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1168. switch (mode) {
  1169. case DRM_MODE_DPMS_ON:
  1170. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1171. break;
  1172. case DRM_MODE_DPMS_STANDBY:
  1173. case DRM_MODE_DPMS_SUSPEND:
  1174. case DRM_MODE_DPMS_OFF:
  1175. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1176. break;
  1177. }
  1178. break;
  1179. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1180. switch (mode) {
  1181. case DRM_MODE_DPMS_ON:
  1182. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1183. break;
  1184. case DRM_MODE_DPMS_STANDBY:
  1185. case DRM_MODE_DPMS_SUSPEND:
  1186. case DRM_MODE_DPMS_OFF:
  1187. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1188. break;
  1189. }
  1190. break;
  1191. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1192. switch (mode) {
  1193. case DRM_MODE_DPMS_ON:
  1194. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1195. break;
  1196. case DRM_MODE_DPMS_STANDBY:
  1197. case DRM_MODE_DPMS_SUSPEND:
  1198. case DRM_MODE_DPMS_OFF:
  1199. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1200. break;
  1201. }
  1202. break;
  1203. default:
  1204. return;
  1205. }
  1206. }
/* Per-revision argument layouts for the SelectCRTC_Source AtomBIOS table. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1212. void
  1213. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1214. {
  1215. struct drm_device *dev = encoder->dev;
  1216. struct amdgpu_device *adev = dev->dev_private;
  1217. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1218. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1219. union crtc_source_param args;
  1220. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1221. uint8_t frev, crev;
  1222. struct amdgpu_encoder_atom_dig *dig;
  1223. memset(&args, 0, sizeof(args));
  1224. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1225. return;
  1226. switch (frev) {
  1227. case 1:
  1228. switch (crev) {
  1229. case 1:
  1230. default:
  1231. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1232. switch (amdgpu_encoder->encoder_id) {
  1233. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1234. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1235. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1236. break;
  1237. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1238. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1239. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1240. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1241. else
  1242. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1243. break;
  1244. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1245. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1246. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1247. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1248. break;
  1249. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1250. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1251. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1252. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1253. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1254. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1255. else
  1256. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1257. break;
  1258. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1259. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1260. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1261. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1262. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1263. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1264. else
  1265. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1266. break;
  1267. }
  1268. break;
  1269. case 2:
  1270. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1271. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1272. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1273. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1274. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1275. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1276. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1277. else
  1278. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1279. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1280. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1281. } else {
  1282. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1283. }
  1284. switch (amdgpu_encoder->encoder_id) {
  1285. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1286. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1287. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1288. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1289. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1290. dig = amdgpu_encoder->enc_priv;
  1291. switch (dig->dig_encoder) {
  1292. case 0:
  1293. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1294. break;
  1295. case 1:
  1296. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1297. break;
  1298. case 2:
  1299. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1300. break;
  1301. case 3:
  1302. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1303. break;
  1304. case 4:
  1305. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1306. break;
  1307. case 5:
  1308. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1309. break;
  1310. case 6:
  1311. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1312. break;
  1313. }
  1314. break;
  1315. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1316. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1317. break;
  1318. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1319. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1320. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1321. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1322. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1323. else
  1324. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1325. break;
  1326. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1327. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1328. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1329. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1330. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1331. else
  1332. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1333. break;
  1334. }
  1335. break;
  1336. case 3:
  1337. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1338. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1339. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1340. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1341. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1342. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1343. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1344. else
  1345. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1346. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1347. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1348. } else {
  1349. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1350. }
  1351. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1352. switch (amdgpu_encoder->encoder_id) {
  1353. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1354. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1355. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1356. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1357. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1358. dig = amdgpu_encoder->enc_priv;
  1359. switch (dig->dig_encoder) {
  1360. case 0:
  1361. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1362. break;
  1363. case 1:
  1364. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1365. break;
  1366. case 2:
  1367. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1368. break;
  1369. case 3:
  1370. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1371. break;
  1372. case 4:
  1373. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1374. break;
  1375. case 5:
  1376. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1377. break;
  1378. case 6:
  1379. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1380. break;
  1381. }
  1382. break;
  1383. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1384. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1385. break;
  1386. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1387. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1388. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1389. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1390. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1391. else
  1392. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1393. break;
  1394. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1395. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1396. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1397. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1398. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1399. else
  1400. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1401. break;
  1402. }
  1403. break;
  1404. }
  1405. break;
  1406. default:
  1407. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1408. return;
  1409. }
  1410. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1411. }
  1412. /* This only needs to be called once at startup */
  1413. void
  1414. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1415. {
  1416. struct drm_device *dev = adev->ddev;
  1417. struct drm_encoder *encoder;
  1418. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1419. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1420. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1421. switch (amdgpu_encoder->encoder_id) {
  1422. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1423. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1424. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1425. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1426. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1427. 0, 0);
  1428. break;
  1429. }
  1430. if (ext_encoder)
  1431. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1432. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1433. }
  1434. }
  1435. static bool
  1436. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1437. struct drm_connector *connector)
  1438. {
  1439. struct drm_device *dev = encoder->dev;
  1440. struct amdgpu_device *adev = dev->dev_private;
  1441. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1442. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1443. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1444. ATOM_DEVICE_CV_SUPPORT |
  1445. ATOM_DEVICE_CRT_SUPPORT)) {
  1446. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1447. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1448. uint8_t frev, crev;
  1449. memset(&args, 0, sizeof(args));
  1450. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1451. return false;
  1452. args.sDacload.ucMisc = 0;
  1453. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1454. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1455. args.sDacload.ucDacType = ATOM_DAC_A;
  1456. else
  1457. args.sDacload.ucDacType = ATOM_DAC_B;
  1458. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1459. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1460. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1461. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1462. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1463. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1464. if (crev >= 3)
  1465. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1466. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1467. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1468. if (crev >= 3)
  1469. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1470. }
  1471. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1472. return true;
  1473. } else
  1474. return false;
  1475. }
  1476. enum drm_connector_status
  1477. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1478. struct drm_connector *connector)
  1479. {
  1480. struct drm_device *dev = encoder->dev;
  1481. struct amdgpu_device *adev = dev->dev_private;
  1482. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1483. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1484. uint32_t bios_0_scratch;
  1485. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1486. DRM_DEBUG_KMS("detect returned false \n");
  1487. return connector_status_unknown;
  1488. }
  1489. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1490. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1491. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1492. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1493. return connector_status_connected;
  1494. }
  1495. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1496. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1497. return connector_status_connected;
  1498. }
  1499. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1500. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1501. return connector_status_connected;
  1502. }
  1503. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1504. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1505. return connector_status_connected; /* CTV */
  1506. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1507. return connector_status_connected; /* STV */
  1508. }
  1509. return connector_status_disconnected;
  1510. }
  1511. enum drm_connector_status
  1512. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1513. struct drm_connector *connector)
  1514. {
  1515. struct drm_device *dev = encoder->dev;
  1516. struct amdgpu_device *adev = dev->dev_private;
  1517. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1518. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1519. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1520. u32 bios_0_scratch;
  1521. if (!ext_encoder)
  1522. return connector_status_unknown;
  1523. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1524. return connector_status_unknown;
  1525. /* load detect on the dp bridge */
  1526. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1527. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1528. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1529. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1530. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1531. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1532. return connector_status_connected;
  1533. }
  1534. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1535. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1536. return connector_status_connected;
  1537. }
  1538. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1539. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1540. return connector_status_connected;
  1541. }
  1542. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1543. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1544. return connector_status_connected; /* CTV */
  1545. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1546. return connector_status_connected; /* STV */
  1547. }
  1548. return connector_status_disconnected;
  1549. }
  1550. void
  1551. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1552. {
  1553. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1554. if (ext_encoder)
  1555. /* ddc_setup on the dp bridge */
  1556. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1557. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1558. }
/*
 * Mirror the connect/disconnect state of a connector/encoder pair into the
 * BIOS scratch registers, per device class:
 *   scratch 0 - "device present" bits
 *   scratch 3 - "device active" bits
 *   scratch 6 - "acc req" (access request) bits
 * Only device classes supported by BOTH the encoder and the connector are
 * touched; all three registers are read, modified, and written back once.
 * NOTE(review): CRT1/CRT2 intentionally set only the COLOR bit on connect
 * but clear the full device MASK on disconnect.
 */
void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
					      struct drm_encoder *encoder,
					      bool connected)
{
	struct drm_device *dev = connector->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_connector *amdgpu_connector =
		to_amdgpu_connector(connector);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;

	/* read-modify-write all three scratch registers */
	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);

	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("LCD1 connected\n");
			bios_0_scratch |= ATOM_S0_LCD1;
			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
		} else {
			DRM_DEBUG_KMS("LCD1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_LCD1;
			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT1 connected\n");
			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
		} else {
			DRM_DEBUG_KMS("CRT1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	/* commit the updated state back to the scratch registers */
	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}
/* Overlay of the ATOM LVDS_Info data table revisions (v1.x and v1.2). */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
  1707. struct amdgpu_encoder_atom_dig *
  1708. amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
  1709. {
  1710. struct drm_device *dev = encoder->base.dev;
  1711. struct amdgpu_device *adev = dev->dev_private;
  1712. struct amdgpu_mode_info *mode_info = &adev->mode_info;
  1713. int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
  1714. uint16_t data_offset, misc;
  1715. union lvds_info *lvds_info;
  1716. uint8_t frev, crev;
  1717. struct amdgpu_encoder_atom_dig *lvds = NULL;
  1718. int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1719. if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
  1720. &frev, &crev, &data_offset)) {
  1721. lvds_info =
  1722. (union lvds_info *)(mode_info->atom_context->bios + data_offset);
  1723. lvds =
  1724. kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1725. if (!lvds)
  1726. return NULL;
  1727. lvds->native_mode.clock =
  1728. le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
  1729. lvds->native_mode.hdisplay =
  1730. le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
  1731. lvds->native_mode.vdisplay =
  1732. le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
  1733. lvds->native_mode.htotal = lvds->native_mode.hdisplay +
  1734. le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
  1735. lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
  1736. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
  1737. lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
  1738. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
  1739. lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
  1740. le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
  1741. lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
  1742. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
  1743. lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
  1744. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
  1745. lvds->panel_pwr_delay =
  1746. le16_to_cpu(lvds_info->info.usOffDelayInMs);
  1747. lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
  1748. misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
  1749. if (misc & ATOM_VSYNC_POLARITY)
  1750. lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
  1751. if (misc & ATOM_HSYNC_POLARITY)
  1752. lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
  1753. if (misc & ATOM_COMPOSITESYNC)
  1754. lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
  1755. if (misc & ATOM_INTERLACE)
  1756. lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
  1757. if (misc & ATOM_DOUBLE_CLOCK_MODE)
  1758. lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
  1759. lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
  1760. lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
  1761. /* set crtc values */
  1762. drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
  1763. lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
  1764. encoder->native_mode = lvds->native_mode;
  1765. if (encoder_enum == 2)
  1766. lvds->linkb = true;
  1767. else
  1768. lvds->linkb = false;
  1769. /* parse the lcd record table */
  1770. if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
  1771. ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
  1772. ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
  1773. bool bad_record = false;
  1774. u8 *record;
  1775. if ((frev == 1) && (crev < 2))
  1776. /* absolute */
  1777. record = (u8 *)(mode_info->atom_context->bios +
  1778. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1779. else
  1780. /* relative */
  1781. record = (u8 *)(mode_info->atom_context->bios +
  1782. data_offset +
  1783. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1784. while (*record != ATOM_RECORD_END_TYPE) {
  1785. switch (*record) {
  1786. case LCD_MODE_PATCH_RECORD_MODE_TYPE:
  1787. record += sizeof(ATOM_PATCH_RECORD_MODE);
  1788. break;
  1789. case LCD_RTS_RECORD_TYPE:
  1790. record += sizeof(ATOM_LCD_RTS_RECORD);
  1791. break;
  1792. case LCD_CAP_RECORD_TYPE:
  1793. record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
  1794. break;
  1795. case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
  1796. fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
  1797. if (fake_edid_record->ucFakeEDIDLength) {
  1798. struct edid *edid;
  1799. int edid_size =
  1800. max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
  1801. edid = kmalloc(edid_size, GFP_KERNEL);
  1802. if (edid) {
  1803. memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
  1804. fake_edid_record->ucFakeEDIDLength);
  1805. if (drm_edid_is_valid(edid)) {
  1806. adev->mode_info.bios_hardcoded_edid = edid;
  1807. adev->mode_info.bios_hardcoded_edid_size = edid_size;
  1808. } else
  1809. kfree(edid);
  1810. }
  1811. }
  1812. record += fake_edid_record->ucFakeEDIDLength ?
  1813. fake_edid_record->ucFakeEDIDLength + 2 :
  1814. sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
  1815. break;
  1816. case LCD_PANEL_RESOLUTION_RECORD_TYPE:
  1817. panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
  1818. lvds->native_mode.width_mm = panel_res_record->usHSize;
  1819. lvds->native_mode.height_mm = panel_res_record->usVSize;
  1820. record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
  1821. break;
  1822. default:
  1823. DRM_ERROR("Bad LCD record %d\n", *record);
  1824. bad_record = true;
  1825. break;
  1826. }
  1827. if (bad_record)
  1828. break;
  1829. }
  1830. }
  1831. }
  1832. return lvds;
  1833. }
  1834. struct amdgpu_encoder_atom_dig *
  1835. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1836. {
  1837. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1838. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1839. if (!dig)
  1840. return NULL;
  1841. /* coherent mode by default */
  1842. dig->coherent_mode = true;
  1843. dig->dig_encoder = -1;
  1844. if (encoder_enum == 2)
  1845. dig->linkb = true;
  1846. else
  1847. dig->linkb = false;
  1848. return dig;
  1849. }