atombios_encoders.c 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. static u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. static void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
  66. void
  67. amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  68. u8 level)
  69. {
  70. struct drm_encoder *encoder = &amdgpu_encoder->base;
  71. struct drm_device *dev = amdgpu_encoder->base.dev;
  72. struct amdgpu_device *adev = dev->dev_private;
  73. struct amdgpu_encoder_atom_dig *dig;
  74. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  75. return;
  76. if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  77. amdgpu_encoder->enc_priv) {
  78. dig = amdgpu_encoder->enc_priv;
  79. dig->backlight_level = level;
  80. amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  81. switch (amdgpu_encoder->encoder_id) {
  82. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  83. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  84. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  85. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  86. if (dig->backlight_level == 0)
  87. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  88. ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
  89. else {
  90. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  91. ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
  92. amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
  93. ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
  94. }
  95. break;
  96. default:
  97. break;
  98. }
  99. }
  100. }
  101. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  102. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  103. {
  104. u8 level;
  105. /* Convert brightness to hardware level */
  106. if (bd->props.brightness < 0)
  107. level = 0;
  108. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  109. level = AMDGPU_MAX_BL_LEVEL;
  110. else
  111. level = bd->props.brightness;
  112. return level;
  113. }
  114. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  115. {
  116. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  117. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  118. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  119. amdgpu_atombios_encoder_backlight_level(bd));
  120. return 0;
  121. }
  122. static int
  123. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  124. {
  125. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  126. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  127. struct drm_device *dev = amdgpu_encoder->base.dev;
  128. struct amdgpu_device *adev = dev->dev_private;
  129. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  130. }
/* Hooks registered with the kernel backlight class for atom DIG panels. */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  135. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  136. struct drm_connector *drm_connector)
  137. {
  138. struct drm_device *dev = amdgpu_encoder->base.dev;
  139. struct amdgpu_device *adev = dev->dev_private;
  140. struct backlight_device *bd;
  141. struct backlight_properties props;
  142. struct amdgpu_backlight_privdata *pdata;
  143. struct amdgpu_encoder_atom_dig *dig;
  144. u8 backlight_level;
  145. char bl_name[16];
  146. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  147. * so don't register a backlight device
  148. */
  149. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  150. (adev->pdev->device == 0x6741))
  151. return;
  152. if (!amdgpu_encoder->enc_priv)
  153. return;
  154. if (!adev->is_atom_bios)
  155. return;
  156. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  157. return;
  158. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  159. if (!pdata) {
  160. DRM_ERROR("Memory allocation failed\n");
  161. goto error;
  162. }
  163. memset(&props, 0, sizeof(props));
  164. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  165. props.type = BACKLIGHT_RAW;
  166. snprintf(bl_name, sizeof(bl_name),
  167. "amdgpu_bl%d", dev->primary->index);
  168. bd = backlight_device_register(bl_name, drm_connector->kdev,
  169. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  170. if (IS_ERR(bd)) {
  171. DRM_ERROR("Backlight registration failed\n");
  172. goto error;
  173. }
  174. pdata->encoder = amdgpu_encoder;
  175. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  176. dig = amdgpu_encoder->enc_priv;
  177. dig->bl_dev = bd;
  178. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  179. bd->props.power = FB_BLANK_UNBLANK;
  180. backlight_update_status(bd);
  181. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  182. return;
  183. error:
  184. kfree(pdata);
  185. return;
  186. }
  187. void
  188. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  189. {
  190. struct drm_device *dev = amdgpu_encoder->base.dev;
  191. struct amdgpu_device *adev = dev->dev_private;
  192. struct backlight_device *bd = NULL;
  193. struct amdgpu_encoder_atom_dig *dig;
  194. if (!amdgpu_encoder->enc_priv)
  195. return;
  196. if (!adev->is_atom_bios)
  197. return;
  198. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  199. return;
  200. dig = amdgpu_encoder->enc_priv;
  201. bd = dig->bl_dev;
  202. dig->bl_dev = NULL;
  203. if (bd) {
  204. struct amdgpu_legacy_backlight_privdata *pdata;
  205. pdata = bl_get_data(bd);
  206. backlight_device_unregister(bd);
  207. kfree(pdata);
  208. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  209. }
  210. }
  211. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  212. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  213. {
  214. }
/* Stub used when backlight class support is not built in. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
  218. #endif
  219. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  220. {
  221. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  222. switch (amdgpu_encoder->encoder_id) {
  223. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  224. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  226. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  227. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  228. return true;
  229. default:
  230. return false;
  231. }
  232. }
  233. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  234. const struct drm_display_mode *mode,
  235. struct drm_display_mode *adjusted_mode)
  236. {
  237. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  238. /* set the active encoder to connector routing */
  239. amdgpu_encoder_set_active_device(encoder);
  240. drm_mode_set_crtcinfo(adjusted_mode, 0);
  241. /* hw bug */
  242. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  243. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  244. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  245. /* vertical FP must be at least 1 */
  246. if (mode->crtc_vsync_start == mode->crtc_vdisplay)
  247. adjusted_mode->crtc_vsync_start++;
  248. /* get the native mode for scaling */
  249. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  250. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  251. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  252. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  253. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  254. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  255. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  256. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  257. }
  258. return true;
  259. }
  260. static void
  261. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  262. {
  263. struct drm_device *dev = encoder->dev;
  264. struct amdgpu_device *adev = dev->dev_private;
  265. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  266. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  267. int index = 0;
  268. memset(&args, 0, sizeof(args));
  269. switch (amdgpu_encoder->encoder_id) {
  270. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  271. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  272. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  273. break;
  274. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  275. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  276. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  277. break;
  278. }
  279. args.ucAction = action;
  280. args.ucDacStandard = ATOM_DAC1_PS2;
  281. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  282. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  283. }
  284. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  285. {
  286. int bpc = 8;
  287. if (encoder->crtc) {
  288. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  289. bpc = amdgpu_crtc->bpc;
  290. }
  291. switch (bpc) {
  292. case 0:
  293. return PANEL_BPC_UNDEFINE;
  294. case 6:
  295. return PANEL_6BIT_PER_COLOR;
  296. case 8:
  297. default:
  298. return PANEL_8BIT_PER_COLOR;
  299. case 10:
  300. return PANEL_10BIT_PER_COLOR;
  301. case 12:
  302. return PANEL_12BIT_PER_COLOR;
  303. case 16:
  304. return PANEL_16BIT_PER_COLOR;
  305. }
  306. }
/* Parameter-space overlay for the DVOEncoderControl atom table; which
 * member is valid depends on the (frev, crev) the BIOS reports.
 */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};
/* Program the DVO encoder via the DVOEncoderControl atom table.  The
 * argument layout differs per table revision, so the args union is
 * filled according to the (frev, crev) pair reported by the BIOS.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* Bail out if the BIOS does not expose this command table. */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  368. int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
  369. {
  370. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  371. struct drm_connector *connector;
  372. struct amdgpu_connector *amdgpu_connector;
  373. struct amdgpu_connector_atom_dig *dig_connector;
  374. /* dp bridges are always DP */
  375. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
  376. return ATOM_ENCODER_MODE_DP;
  377. /* DVO is always DVO */
  378. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
  379. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
  380. return ATOM_ENCODER_MODE_DVO;
  381. connector = amdgpu_get_connector_for_encoder(encoder);
  382. /* if we don't have an active device yet, just use one of
  383. * the connectors tied to the encoder.
  384. */
  385. if (!connector)
  386. connector = amdgpu_get_connector_for_encoder_init(encoder);
  387. amdgpu_connector = to_amdgpu_connector(connector);
  388. switch (connector->connector_type) {
  389. case DRM_MODE_CONNECTOR_DVII:
  390. case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
  391. if (amdgpu_audio != 0) {
  392. if (amdgpu_connector->use_digital &&
  393. (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
  394. return ATOM_ENCODER_MODE_HDMI;
  395. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  396. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  397. return ATOM_ENCODER_MODE_HDMI;
  398. else if (amdgpu_connector->use_digital)
  399. return ATOM_ENCODER_MODE_DVI;
  400. else
  401. return ATOM_ENCODER_MODE_CRT;
  402. } else if (amdgpu_connector->use_digital) {
  403. return ATOM_ENCODER_MODE_DVI;
  404. } else {
  405. return ATOM_ENCODER_MODE_CRT;
  406. }
  407. break;
  408. case DRM_MODE_CONNECTOR_DVID:
  409. case DRM_MODE_CONNECTOR_HDMIA:
  410. default:
  411. if (amdgpu_audio != 0) {
  412. if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
  413. return ATOM_ENCODER_MODE_HDMI;
  414. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  415. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  416. return ATOM_ENCODER_MODE_HDMI;
  417. else
  418. return ATOM_ENCODER_MODE_DVI;
  419. } else {
  420. return ATOM_ENCODER_MODE_DVI;
  421. }
  422. break;
  423. case DRM_MODE_CONNECTOR_LVDS:
  424. return ATOM_ENCODER_MODE_LVDS;
  425. break;
  426. case DRM_MODE_CONNECTOR_DisplayPort:
  427. dig_connector = amdgpu_connector->con_priv;
  428. if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
  429. (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
  430. return ATOM_ENCODER_MODE_DP;
  431. } else if (amdgpu_audio != 0) {
  432. if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
  433. return ATOM_ENCODER_MODE_HDMI;
  434. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  435. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  436. return ATOM_ENCODER_MODE_HDMI;
  437. else
  438. return ATOM_ENCODER_MODE_DVI;
  439. } else {
  440. return ATOM_ENCODER_MODE_DVI;
  441. }
  442. break;
  443. case DRM_MODE_CONNECTOR_eDP:
  444. return ATOM_ENCODER_MODE_DP;
  445. case DRM_MODE_CONNECTOR_DVIA:
  446. case DRM_MODE_CONNECTOR_VGA:
  447. return ATOM_ENCODER_MODE_CRT;
  448. break;
  449. case DRM_MODE_CONNECTOR_Composite:
  450. case DRM_MODE_CONNECTOR_SVIDEO:
  451. case DRM_MODE_CONNECTOR_9PinDIN:
  452. /* fix me */
  453. return ATOM_ENCODER_MODE_TV;
  454. /*return ATOM_ENCODER_MODE_CV;*/
  455. break;
  456. }
  457. }
  458. /*
  459. * DIG Encoder/Transmitter Setup
  460. *
  461. * DCE 6.0
  462. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  463. * Supports up to 6 digital outputs
  464. * - 6 DIG encoder blocks.
  465. * - DIG to PHY mapping is hardcoded
  466. * DIG1 drives UNIPHY0 link A, A+B
  467. * DIG2 drives UNIPHY0 link B
  468. * DIG3 drives UNIPHY1 link A, A+B
  469. * DIG4 drives UNIPHY1 link B
  470. * DIG5 drives UNIPHY2 link A, A+B
  471. * DIG6 drives UNIPHY2 link B
  472. *
  473. * Routing
  474. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  475. * Examples:
  476. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  477. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  478. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  479. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  480. */
/* Parameter-space overlay for the DIGxEncoderControl atom table; the
 * valid member is selected by the (frev, crev) the BIOS reports.
 */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};
  488. void
  489. amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
  490. int action, int panel_mode)
  491. {
  492. struct drm_device *dev = encoder->dev;
  493. struct amdgpu_device *adev = dev->dev_private;
  494. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  495. struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
  496. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  497. union dig_encoder_control args;
  498. int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
  499. uint8_t frev, crev;
  500. int dp_clock = 0;
  501. int dp_lane_count = 0;
  502. int hpd_id = AMDGPU_HPD_NONE;
  503. if (connector) {
  504. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  505. struct amdgpu_connector_atom_dig *dig_connector =
  506. amdgpu_connector->con_priv;
  507. dp_clock = dig_connector->dp_clock;
  508. dp_lane_count = dig_connector->dp_lane_count;
  509. hpd_id = amdgpu_connector->hpd.hpd;
  510. }
  511. /* no dig encoder assigned */
  512. if (dig->dig_encoder == -1)
  513. return;
  514. memset(&args, 0, sizeof(args));
  515. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  516. return;
  517. switch (frev) {
  518. case 1:
  519. switch (crev) {
  520. case 1:
  521. args.v1.ucAction = action;
  522. args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  523. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  524. args.v3.ucPanelMode = panel_mode;
  525. else
  526. args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  527. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
  528. args.v1.ucLaneNum = dp_lane_count;
  529. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  530. args.v1.ucLaneNum = 8;
  531. else
  532. args.v1.ucLaneNum = 4;
  533. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
  534. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
  535. switch (amdgpu_encoder->encoder_id) {
  536. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  537. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
  538. break;
  539. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  540. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  541. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
  542. break;
  543. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  544. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
  545. break;
  546. }
  547. if (dig->linkb)
  548. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
  549. else
  550. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
  551. break;
  552. case 2:
  553. case 3:
  554. args.v3.ucAction = action;
  555. args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  556. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  557. args.v3.ucPanelMode = panel_mode;
  558. else
  559. args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  560. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
  561. args.v3.ucLaneNum = dp_lane_count;
  562. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  563. args.v3.ucLaneNum = 8;
  564. else
  565. args.v3.ucLaneNum = 4;
  566. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
  567. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
  568. args.v3.acConfig.ucDigSel = dig->dig_encoder;
  569. args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  570. break;
  571. case 4:
  572. args.v4.ucAction = action;
  573. args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  574. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  575. args.v4.ucPanelMode = panel_mode;
  576. else
  577. args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  578. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
  579. args.v4.ucLaneNum = dp_lane_count;
  580. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  581. args.v4.ucLaneNum = 8;
  582. else
  583. args.v4.ucLaneNum = 4;
  584. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
  585. if (dp_clock == 540000)
  586. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
  587. else if (dp_clock == 324000)
  588. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
  589. else if (dp_clock == 270000)
  590. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
  591. else
  592. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
  593. }
  594. args.v4.acConfig.ucDigSel = dig->dig_encoder;
  595. args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  596. if (hpd_id == AMDGPU_HPD_NONE)
  597. args.v4.ucHPD_ID = 0;
  598. else
  599. args.v4.ucHPD_ID = hpd_id + 1;
  600. break;
  601. case 5:
  602. switch (action) {
  603. case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
  604. args.v5.asDPPanelModeParam.ucAction = action;
  605. args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
  606. args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
  607. break;
  608. case ATOM_ENCODER_CMD_STREAM_SETUP:
  609. args.v5.asStreamParam.ucAction = action;
  610. args.v5.asStreamParam.ucDigId = dig->dig_encoder;
  611. args.v5.asStreamParam.ucDigMode =
  612. amdgpu_atombios_encoder_get_encoder_mode(encoder);
  613. if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
  614. args.v5.asStreamParam.ucLaneNum = dp_lane_count;
  615. else if (amdgpu_dig_monitor_is_duallink(encoder,
  616. amdgpu_encoder->pixel_clock))
  617. args.v5.asStreamParam.ucLaneNum = 8;
  618. else
  619. args.v5.asStreamParam.ucLaneNum = 4;
  620. args.v5.asStreamParam.ulPixelClock =
  621. cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
  622. args.v5.asStreamParam.ucBitPerColor =
  623. amdgpu_atombios_encoder_get_bpc(encoder);
  624. args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
  625. break;
  626. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
  627. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
  628. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
  629. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
  630. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
  631. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
  632. case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
  633. case ATOM_ENCODER_CMD_DP_VIDEO_ON:
  634. args.v5.asCmdParam.ucAction = action;
  635. args.v5.asCmdParam.ucDigId = dig->dig_encoder;
  636. break;
  637. default:
  638. DRM_ERROR("Unsupported action 0x%x\n", action);
  639. break;
  640. }
  641. break;
  642. default:
  643. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  644. break;
  645. }
  646. break;
  647. default:
  648. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  649. break;
  650. }
  651. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  652. }
/* Overlay of every parameter-block layout used by the DIG transmitter
 * control command table.  The BIOS-reported frev/crev pair selects
 * which member is actually populated before the table is executed. */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};
/*
 * Program a DIG transmitter (PHY) through the AtomBIOS
 * UNIPHY/LVTMA/DVO TransmitterControl command table.
 *
 * @encoder:  DRM encoder whose transmitter is being driven
 * @action:   ATOM_TRANSMITTER_ACTION_* opcode (init, enable, disable,
 *            setup, voltage-swing/pre-emphasis programming, ...)
 * @lane_num: lane select, consumed only for ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
 * @lane_set: swing/pre-emphasis setting, consumed for SETUP_VSEMPH (and
 *            unconditionally copied into ucDPLaneSet on crev 5)
 *
 * The argument-block layout depends on the table revision the BIOS
 * reports, so each (frev, crev) combination is encoded separately.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* NOTE(review): igp_lane_info is never assigned in this function,
	 * so the APU lane-selection branches below never set any lane
	 * bits — presumably intentional on amdgpu-supported parts; the
	 * structure is kept for parity with the crev 1 table layout. */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		/* pull DP link parameters and ids from the connector state */
		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);

		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* pick the command table that matches this transmitter type */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				/* init passes the connector object id instead of a clock */
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* clocks are passed to the table in 10 kHz units;
				 * dual link splits the pixel clock across two links */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			/* route the transmitter to the assigned DIG encoder */
			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				/* dead with igp_lane_info == 0 — see note at declaration */
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			/* map the encoder object onto a physical transmitter */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			/* v5 takes the symbol clock rather than a pixel clock */
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			/* v5 addresses the PHY directly: each UNIPHY pair maps to
			 * two PHY ids, selected by the link (A/B) in use */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;

			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}

			/* HPD ids are 1-based in the table; 0 means "none" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		case 6:
			args.v6.ucAction = action;
			if (is_dp)
				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
			else
				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v6.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v6.ucLaneNum = 8;
			else
				args.v6.ucLaneNum = 4;

			args.v6.ucConnObjId = connector_object_id;
			/* v6 multiplexes lane-set and dig-mode through one field pair */
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;
			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  1041. bool
  1042. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  1043. int action)
  1044. {
  1045. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1046. struct drm_device *dev = amdgpu_connector->base.dev;
  1047. struct amdgpu_device *adev = dev->dev_private;
  1048. union dig_transmitter_control args;
  1049. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  1050. uint8_t frev, crev;
  1051. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  1052. goto done;
  1053. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  1054. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  1055. goto done;
  1056. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1057. goto done;
  1058. memset(&args, 0, sizeof(args));
  1059. args.v1.ucAction = action;
  1060. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1061. /* wait for the panel to power up */
  1062. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  1063. int i;
  1064. for (i = 0; i < 300; i++) {
  1065. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  1066. return true;
  1067. mdelay(1);
  1068. }
  1069. return false;
  1070. }
  1071. done:
  1072. return true;
  1073. }
/* Overlay of the ExternalEncoderControl parameter blocks; the
 * BIOS-reported frev/crev selects whether v1 or v3 is filled in. */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/*
 * Program an external encoder (e.g. a DP bridge) attached to @encoder
 * through the AtomBIOS ExternalEncoderControl command table.
 *
 * @encoder:     the internal encoder feeding the external one
 * @ext_encoder: the external encoder object being configured
 * @action:      EXTERNAL_ENCODER_ACTION_V3_* / ATOM_ENABLE / ATOM_DISABLE
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* enum id distinguishes multiple instances of the same external encoder */
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* init takes the connector id, other actions take the clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				/* encode the DP link rate (2.70/5.40 GHz; default otherwise) */
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;

			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/*
 * Bring a DIG encoder (and its transmitter, external encoder, eDP
 * panel and backlight) fully up or down.
 *
 * @encoder: the DIG encoder to enable/disable
 * @action:  ATOM_ENABLE or ATOM_DISABLE
 *
 * The call order below is the power-sequencing contract: on enable the
 * encoder is set up first, then panel power, then the transmitter, and
 * only then DP link training; disable tears down in the reverse order.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);

		/* eDP panel must be powered before the transmitter comes up */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* stop the DP video stream before anything else goes down */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		/* turn the panel backlight off before the transmitter */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		/* eDP panel power is cut last */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1248. void
  1249. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1250. {
  1251. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1252. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1253. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1254. amdgpu_encoder->active_device);
  1255. switch (amdgpu_encoder->encoder_id) {
  1256. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1257. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1258. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1259. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1260. switch (mode) {
  1261. case DRM_MODE_DPMS_ON:
  1262. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1263. break;
  1264. case DRM_MODE_DPMS_STANDBY:
  1265. case DRM_MODE_DPMS_SUSPEND:
  1266. case DRM_MODE_DPMS_OFF:
  1267. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1268. break;
  1269. }
  1270. break;
  1271. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1272. switch (mode) {
  1273. case DRM_MODE_DPMS_ON:
  1274. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1275. break;
  1276. case DRM_MODE_DPMS_STANDBY:
  1277. case DRM_MODE_DPMS_SUSPEND:
  1278. case DRM_MODE_DPMS_OFF:
  1279. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1280. break;
  1281. }
  1282. break;
  1283. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1284. switch (mode) {
  1285. case DRM_MODE_DPMS_ON:
  1286. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1287. break;
  1288. case DRM_MODE_DPMS_STANDBY:
  1289. case DRM_MODE_DPMS_SUSPEND:
  1290. case DRM_MODE_DPMS_OFF:
  1291. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1292. break;
  1293. }
  1294. break;
  1295. default:
  1296. return;
  1297. }
  1298. }
/* Overlay of the SelectCRTC_Source parameter blocks; the BIOS-reported
 * frev/crev selects which member layout is filled in. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1304. void
  1305. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1306. {
  1307. struct drm_device *dev = encoder->dev;
  1308. struct amdgpu_device *adev = dev->dev_private;
  1309. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1310. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1311. union crtc_source_param args;
  1312. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1313. uint8_t frev, crev;
  1314. struct amdgpu_encoder_atom_dig *dig;
  1315. memset(&args, 0, sizeof(args));
  1316. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1317. return;
  1318. switch (frev) {
  1319. case 1:
  1320. switch (crev) {
  1321. case 1:
  1322. default:
  1323. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1324. switch (amdgpu_encoder->encoder_id) {
  1325. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1326. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1327. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1328. break;
  1329. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1330. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1331. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1332. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1333. else
  1334. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1335. break;
  1336. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1337. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1338. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1339. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1340. break;
  1341. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1342. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1343. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1344. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1345. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1346. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1347. else
  1348. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1349. break;
  1350. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1351. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1352. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1353. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1354. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1355. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1356. else
  1357. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1358. break;
  1359. }
  1360. break;
  1361. case 2:
  1362. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1363. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1364. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1365. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1366. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1367. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1368. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1369. else
  1370. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1371. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1372. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1373. } else {
  1374. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1375. }
  1376. switch (amdgpu_encoder->encoder_id) {
  1377. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1378. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1379. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1380. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1381. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1382. dig = amdgpu_encoder->enc_priv;
  1383. switch (dig->dig_encoder) {
  1384. case 0:
  1385. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1386. break;
  1387. case 1:
  1388. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1389. break;
  1390. case 2:
  1391. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1392. break;
  1393. case 3:
  1394. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1395. break;
  1396. case 4:
  1397. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1398. break;
  1399. case 5:
  1400. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1401. break;
  1402. case 6:
  1403. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1404. break;
  1405. }
  1406. break;
  1407. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1408. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1409. break;
  1410. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1411. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1412. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1413. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1414. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1415. else
  1416. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1417. break;
  1418. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1419. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1420. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1421. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1422. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1423. else
  1424. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1425. break;
  1426. }
  1427. break;
  1428. case 3:
  1429. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1430. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1431. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1432. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1433. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1434. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1435. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1436. else
  1437. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1438. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1439. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1440. } else {
  1441. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1442. }
  1443. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1444. switch (amdgpu_encoder->encoder_id) {
  1445. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1446. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1447. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1448. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1449. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1450. dig = amdgpu_encoder->enc_priv;
  1451. switch (dig->dig_encoder) {
  1452. case 0:
  1453. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1454. break;
  1455. case 1:
  1456. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1457. break;
  1458. case 2:
  1459. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1460. break;
  1461. case 3:
  1462. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1463. break;
  1464. case 4:
  1465. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1466. break;
  1467. case 5:
  1468. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1469. break;
  1470. case 6:
  1471. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1472. break;
  1473. }
  1474. break;
  1475. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1476. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1477. break;
  1478. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1479. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1480. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1481. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1482. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1483. else
  1484. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1485. break;
  1486. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1487. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1488. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1489. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1490. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1491. else
  1492. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1493. break;
  1494. }
  1495. break;
  1496. }
  1497. break;
  1498. default:
  1499. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1500. return;
  1501. }
  1502. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1503. }
  1504. /* This only needs to be called once at startup */
  1505. void
  1506. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1507. {
  1508. struct drm_device *dev = adev->ddev;
  1509. struct drm_encoder *encoder;
  1510. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1511. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1512. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1513. switch (amdgpu_encoder->encoder_id) {
  1514. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1515. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1516. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1517. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1518. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1519. 0, 0);
  1520. break;
  1521. }
  1522. if (ext_encoder)
  1523. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1524. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1525. }
  1526. }
  1527. static bool
  1528. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1529. struct drm_connector *connector)
  1530. {
  1531. struct drm_device *dev = encoder->dev;
  1532. struct amdgpu_device *adev = dev->dev_private;
  1533. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1534. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1535. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1536. ATOM_DEVICE_CV_SUPPORT |
  1537. ATOM_DEVICE_CRT_SUPPORT)) {
  1538. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1539. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1540. uint8_t frev, crev;
  1541. memset(&args, 0, sizeof(args));
  1542. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1543. return false;
  1544. args.sDacload.ucMisc = 0;
  1545. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1546. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1547. args.sDacload.ucDacType = ATOM_DAC_A;
  1548. else
  1549. args.sDacload.ucDacType = ATOM_DAC_B;
  1550. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1551. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1552. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1553. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1554. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1555. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1556. if (crev >= 3)
  1557. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1558. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1559. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1560. if (crev >= 3)
  1561. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1562. }
  1563. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1564. return true;
  1565. } else
  1566. return false;
  1567. }
  1568. enum drm_connector_status
  1569. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1570. struct drm_connector *connector)
  1571. {
  1572. struct drm_device *dev = encoder->dev;
  1573. struct amdgpu_device *adev = dev->dev_private;
  1574. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1575. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1576. uint32_t bios_0_scratch;
  1577. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1578. DRM_DEBUG_KMS("detect returned false \n");
  1579. return connector_status_unknown;
  1580. }
  1581. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1582. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1583. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1584. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1585. return connector_status_connected;
  1586. }
  1587. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1588. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1589. return connector_status_connected;
  1590. }
  1591. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1592. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1593. return connector_status_connected;
  1594. }
  1595. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1596. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1597. return connector_status_connected; /* CTV */
  1598. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1599. return connector_status_connected; /* STV */
  1600. }
  1601. return connector_status_disconnected;
  1602. }
  1603. enum drm_connector_status
  1604. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1605. struct drm_connector *connector)
  1606. {
  1607. struct drm_device *dev = encoder->dev;
  1608. struct amdgpu_device *adev = dev->dev_private;
  1609. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1610. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1611. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1612. u32 bios_0_scratch;
  1613. if (!ext_encoder)
  1614. return connector_status_unknown;
  1615. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1616. return connector_status_unknown;
  1617. /* load detect on the dp bridge */
  1618. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1619. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1620. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1621. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1622. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1623. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1624. return connector_status_connected;
  1625. }
  1626. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1627. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1628. return connector_status_connected;
  1629. }
  1630. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1631. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1632. return connector_status_connected;
  1633. }
  1634. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1635. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1636. return connector_status_connected; /* CTV */
  1637. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1638. return connector_status_connected; /* STV */
  1639. }
  1640. return connector_status_disconnected;
  1641. }
  1642. void
  1643. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1644. {
  1645. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1646. if (ext_encoder)
  1647. /* ddc_setup on the dp bridge */
  1648. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1649. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1650. }
  1651. void
  1652. amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
  1653. struct drm_encoder *encoder,
  1654. bool connected)
  1655. {
  1656. struct drm_device *dev = connector->dev;
  1657. struct amdgpu_device *adev = dev->dev_private;
  1658. struct amdgpu_connector *amdgpu_connector =
  1659. to_amdgpu_connector(connector);
  1660. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1661. uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
  1662. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1663. bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
  1664. bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
  1665. if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
  1666. (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
  1667. if (connected) {
  1668. DRM_DEBUG_KMS("LCD1 connected\n");
  1669. bios_0_scratch |= ATOM_S0_LCD1;
  1670. bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
  1671. bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
  1672. } else {
  1673. DRM_DEBUG_KMS("LCD1 disconnected\n");
  1674. bios_0_scratch &= ~ATOM_S0_LCD1;
  1675. bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
  1676. bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
  1677. }
  1678. }
  1679. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
  1680. (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
  1681. if (connected) {
  1682. DRM_DEBUG_KMS("CRT1 connected\n");
  1683. bios_0_scratch |= ATOM_S0_CRT1_COLOR;
  1684. bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
  1685. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
  1686. } else {
  1687. DRM_DEBUG_KMS("CRT1 disconnected\n");
  1688. bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
  1689. bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
  1690. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
  1691. }
  1692. }
  1693. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
  1694. (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
  1695. if (connected) {
  1696. DRM_DEBUG_KMS("CRT2 connected\n");
  1697. bios_0_scratch |= ATOM_S0_CRT2_COLOR;
  1698. bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
  1699. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
  1700. } else {
  1701. DRM_DEBUG_KMS("CRT2 disconnected\n");
  1702. bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
  1703. bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
  1704. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
  1705. }
  1706. }
  1707. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
  1708. (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
  1709. if (connected) {
  1710. DRM_DEBUG_KMS("DFP1 connected\n");
  1711. bios_0_scratch |= ATOM_S0_DFP1;
  1712. bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
  1713. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
  1714. } else {
  1715. DRM_DEBUG_KMS("DFP1 disconnected\n");
  1716. bios_0_scratch &= ~ATOM_S0_DFP1;
  1717. bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
  1718. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
  1719. }
  1720. }
  1721. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
  1722. (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
  1723. if (connected) {
  1724. DRM_DEBUG_KMS("DFP2 connected\n");
  1725. bios_0_scratch |= ATOM_S0_DFP2;
  1726. bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
  1727. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
  1728. } else {
  1729. DRM_DEBUG_KMS("DFP2 disconnected\n");
  1730. bios_0_scratch &= ~ATOM_S0_DFP2;
  1731. bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
  1732. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
  1733. }
  1734. }
  1735. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
  1736. (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
  1737. if (connected) {
  1738. DRM_DEBUG_KMS("DFP3 connected\n");
  1739. bios_0_scratch |= ATOM_S0_DFP3;
  1740. bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
  1741. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
  1742. } else {
  1743. DRM_DEBUG_KMS("DFP3 disconnected\n");
  1744. bios_0_scratch &= ~ATOM_S0_DFP3;
  1745. bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
  1746. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
  1747. }
  1748. }
  1749. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
  1750. (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
  1751. if (connected) {
  1752. DRM_DEBUG_KMS("DFP4 connected\n");
  1753. bios_0_scratch |= ATOM_S0_DFP4;
  1754. bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
  1755. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
  1756. } else {
  1757. DRM_DEBUG_KMS("DFP4 disconnected\n");
  1758. bios_0_scratch &= ~ATOM_S0_DFP4;
  1759. bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
  1760. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
  1761. }
  1762. }
  1763. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
  1764. (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
  1765. if (connected) {
  1766. DRM_DEBUG_KMS("DFP5 connected\n");
  1767. bios_0_scratch |= ATOM_S0_DFP5;
  1768. bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
  1769. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
  1770. } else {
  1771. DRM_DEBUG_KMS("DFP5 disconnected\n");
  1772. bios_0_scratch &= ~ATOM_S0_DFP5;
  1773. bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
  1774. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
  1775. }
  1776. }
  1777. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
  1778. (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
  1779. if (connected) {
  1780. DRM_DEBUG_KMS("DFP6 connected\n");
  1781. bios_0_scratch |= ATOM_S0_DFP6;
  1782. bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
  1783. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
  1784. } else {
  1785. DRM_DEBUG_KMS("DFP6 disconnected\n");
  1786. bios_0_scratch &= ~ATOM_S0_DFP6;
  1787. bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
  1788. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
  1789. }
  1790. }
  1791. WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
  1792. WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
  1793. WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
  1794. }
/* Overlay of the LVDS_Info BIOS data-table revisions; the parser in
 * amdgpu_atombios_encoder_get_lcd_info() maps the table found at
 * data_offset onto whichever layout matches the reported revision. */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
/* Build the native panel mode for an LCD encoder from the LVDS_Info BIOS
 * data table.
 *
 * Returns a newly kzalloc'ed amdgpu_encoder_atom_dig (caller owns it and
 * must free it), or NULL if the table header cannot be parsed or the
 * allocation fails.  Side effects: copies the native mode into
 * encoder->native_mode and may install a BIOS-provided fake EDID into
 * adev->mode_info.bios_hardcoded_edid.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
			kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		/* The table stores active sizes plus blanking/sync offsets;
		 * totals and sync positions are reconstructed by addition.
		 * usPixClk is in 10 kHz units, hence * 10 to get kHz. */
		lvds->native_mode.clock =
			le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		lvds->panel_pwr_delay =
			le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		/* translate the mode-misc flags into DRM mode flags */
		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		/* enum id 2 selects the second (B) link of the transmitter */
		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			/* older tables store the patch-table offset as an
			 * absolute BIOS offset; newer ones relative to the
			 * LVDS_Info table itself */
			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));

			/* walk the variable-length records until the end
			 * marker; unknown record types abort the walk since
			 * their size (and thus the next record) is unknown */
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						/* NOTE(review): when ucFakeEDIDLength < EDID_LENGTH
						 * only ucFakeEDIDLength bytes are copied below, so the
						 * tail of this kmalloc'ed buffer stays uninitialized —
						 * confirm drm_edid_is_valid() rejects such buffers or
						 * switch to kzalloc. */
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);

						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					/* advance past the record: 2 header bytes (type + length)
					 * plus the EDID payload when non-empty, otherwise the bare
					 * struct size — NOTE(review): verify against the
					 * ATOM_FAKE_EDID_PATCH_RECORD layout in atombios.h */
					record += fake_edid_record->ucFakeEDIDLength ?
						fake_edid_record->ucFakeEDIDLength + 2 :
						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}
  1926. struct amdgpu_encoder_atom_dig *
  1927. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1928. {
  1929. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1930. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1931. if (!dig)
  1932. return NULL;
  1933. /* coherent mode by default */
  1934. dig->coherent_mode = true;
  1935. dig->dig_encoder = -1;
  1936. if (encoder_enum == 2)
  1937. dig->linkb = true;
  1938. else
  1939. dig->linkb = false;
  1940. return dig;
  1941. }