cdv_intel_dp.c

/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *	Keith Packard <keithp@keithp.com>
 *
 */

#include <linux/i2c.h>
#include <linux/slab.h>
#include <linux/module.h>
#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include "psb_drv.h"
#include "psb_intel_drv.h"
#include "psb_intel_reg.h"
#include "gma_display.h"
#include <drm/drm_dp_helper.h>

#define _wait_for(COND, MS, W) ({ \
	unsigned long timeout__ = jiffies + msecs_to_jiffies(MS);	\
	int ret__ = 0;							\
	while (! (COND)) {						\
		if (time_after(jiffies, timeout__)) {			\
			ret__ = -ETIMEDOUT;				\
			break;						\
		}							\
		if (W && !in_dbg_master()) msleep(W);			\
	}								\
	ret__;								\
})

#define wait_for(COND, MS) _wait_for(COND, MS, 1)

#define DP_LINK_STATUS_SIZE	6
#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9
#define CDV_FAST_LINK_TRAIN	1

struct cdv_intel_dp {
	uint32_t output_reg;
	uint32_t DP;
	uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	int force_audio;
	uint32_t color_range;
	uint8_t link_bw;
	uint8_t lane_count;
	uint8_t dpcd[4];
	struct gma_encoder *encoder;
	struct i2c_adapter adapter;
	struct i2c_algo_dp_aux_data algo;
	uint8_t train_set[4];
	uint8_t link_status[DP_LINK_STATUS_SIZE];
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;	/* for eDP */
	bool panel_on;
};

struct ddi_regoff {
	uint32_t PreEmph1;
	uint32_t PreEmph2;
	uint32_t VSwing1;
	uint32_t VSwing2;
	uint32_t VSwing3;
	uint32_t VSwing4;
	uint32_t VSwing5;
};

static struct ddi_regoff ddi_DP_train_table[] = {
	{.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
	.VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
	.VSwing5 = 0x8158,},
	{.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
	.VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
	.VSwing5 = 0x8258,},
};
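
/*
 * Paired sideband register values indexed by the requested signal level:
 * cdv_intel_dp_set_vswing_premph() writes even entries to the VSwing2
 * register of the DDI in use and odd entries to its PreEmph2 register.
 */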
static uint32_t dp_vswing_premph_table[] = {
	0x55338954, 0x4000,
	0x554d8954, 0x2000,
	0x55668954, 0,
	0x559ac0d4, 0x6000,
};

/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @encoder: GMA encoder struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct gma_encoder *encoder)
{
	return encoder->type == INTEL_OUTPUT_EDP;
}

static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
static void cdv_intel_dp_link_down(struct gma_encoder *encoder);

static int
cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int max_lane_count = 4;

	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
		switch (max_lane_count) {
		case 1: case 2: case 4:
			break;
		default:
			max_lane_count = 4;
		}
	}
	return max_lane_count;
}

static int
cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

	switch (max_link_bw) {
	case DP_LINK_BW_1_62:
	case DP_LINK_BW_2_7:
		break;
	default:
		max_link_bw = DP_LINK_BW_1_62;
		break;
	}
	return max_link_bw;
}

static int
cdv_intel_dp_link_clock(uint8_t link_bw)
{
	if (link_bw == DP_LINK_BW_2_7)
		return 270000;
	else
		return 162000;
}
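
/*
 * Link bandwidth required by a mode: pixel clock (in kHz) times bits per
 * pixel, converted to bytes and rounded up.
 */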
static int
cdv_intel_dp_link_required(int pixel_clock, int bpp)
{
	return (pixel_clock * bpp + 7) / 8;
}
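
/*
 * Usable data rate of the link: link clock times lane count, derated by
 * 19/20, presumably to leave some headroom for link overhead.
 */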
static int
cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 19) / 20;
}

static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	u32 pp;

	if (intel_dp->panel_on) {
		DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
		return;
	}
	DRM_DEBUG_KMS("\n");

	pp = REG_READ(PP_CONTROL);

	pp |= EDP_FORCE_VDD;
	REG_WRITE(PP_CONTROL, pp);
	REG_READ(PP_CONTROL);
	msleep(intel_dp->panel_power_up_delay);
}

static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
{
	struct drm_device *dev = intel_encoder->base.dev;
	u32 pp;

	DRM_DEBUG_KMS("\n");
	pp = REG_READ(PP_CONTROL);

	pp &= ~EDP_FORCE_VDD;
	REG_WRITE(PP_CONTROL, pp);
	REG_READ(PP_CONTROL);
}

/* Returns true if the panel was already on when called */
static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;

	if (intel_dp->panel_on)
		return true;

	DRM_DEBUG_KMS("\n");
	pp = REG_READ(PP_CONTROL);
	pp &= ~PANEL_UNLOCK_MASK;

	pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
	REG_WRITE(PP_CONTROL, pp);
	REG_READ(PP_CONTROL);

	if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
		DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
		intel_dp->panel_on = false;
	} else
		intel_dp->panel_on = true;
	msleep(intel_dp->panel_power_up_delay);

	return false;
}

static void cdv_intel_edp_panel_off(struct gma_encoder *intel_encoder)
{
	struct drm_device *dev = intel_encoder->base.dev;
	u32 pp, idle_off_mask = PP_ON;
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;

	DRM_DEBUG_KMS("\n");

	pp = REG_READ(PP_CONTROL);

	if ((pp & POWER_TARGET_ON) == 0)
		return;

	intel_dp->panel_on = false;
	pp &= ~PANEL_UNLOCK_MASK;
	/* ILK workaround: disable reset around power sequence */

	pp &= ~POWER_TARGET_ON;
	pp &= ~EDP_FORCE_VDD;
	pp &= ~EDP_BLC_ENABLE;
	REG_WRITE(PP_CONTROL, pp);
	REG_READ(PP_CONTROL);
	DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));

	if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
		DRM_DEBUG_KMS("Error in turning off Panel\n");
	}

	msleep(intel_dp->panel_power_cycle_delay);
	DRM_DEBUG_KMS("Over\n");
}

static void cdv_intel_edp_backlight_on(struct gma_encoder *intel_encoder)
{
	struct drm_device *dev = intel_encoder->base.dev;
	u32 pp;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link. So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	msleep(300);
	pp = REG_READ(PP_CONTROL);

	pp |= EDP_BLC_ENABLE;
	REG_WRITE(PP_CONTROL, pp);
	gma_backlight_enable(dev);
}

static void cdv_intel_edp_backlight_off(struct gma_encoder *intel_encoder)
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	u32 pp;

	DRM_DEBUG_KMS("\n");
	gma_backlight_disable(dev);
	msleep(10);
	pp = REG_READ(PP_CONTROL);

	pp &= ~EDP_BLC_ENABLE;
	REG_WRITE(PP_CONTROL, pp);
	msleep(intel_dp->backlight_off_delay);
}

static int
cdv_intel_dp_mode_valid(struct drm_connector *connector,
			struct drm_display_mode *mode)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
	int max_lanes = cdv_intel_dp_max_lane_count(encoder);
	struct drm_psb_private *dev_priv = connector->dev->dev_private;

	if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;
		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	/* only refuse the mode on non eDP since we have seen some weird eDP panels
	   which are outside spec tolerances but somehow work by magic */
	if (!is_edp(encoder) &&
	    (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
	     > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
		return MODE_CLOCK_HIGH;

	if (is_edp(encoder)) {
		if (cdv_intel_dp_link_required(mode->clock, 24)
		    > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
			return MODE_CLOCK_HIGH;
	}
	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	return MODE_OK;
}
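
/*
 * Pack up to four bytes of an AUX message into a 32-bit data register,
 * most significant byte first; unpack_aux() performs the reverse.
 */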
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;

	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}
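
/*
 * Run one raw AUX channel transaction: load the request into the port's
 * AUX data registers, start the transfer, poll for completion (retrying
 * the whole exchange a few times) and copy back any reply.  Returns the
 * number of bytes received or a negative errno.
 */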
static int
cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
		    uint8_t *send, int send_bytes,
		    uint8_t *recv, int recv_size)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = encoder->base.dev;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	/* The clock divider is based on hrawclk, and the AUX channel wants
	 * to run at 2MHz, so divide the hrawclk value by 2 and use that.
	 * On the CDV platform hrawclk is 200MHz.
	 */
	aux_clock_divider = 200 / 2;
	precharge = 4;
	if (is_edp(encoder))
		precharge = 10;

	if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
		DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
			  REG_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			REG_WRITE(ch_data + i,
				  pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		REG_WRITE(ch_ctl,
			  DP_AUX_CH_CTL_SEND_BUSY |
			  DP_AUX_CH_CTL_TIME_OUT_400us |
			  (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			  (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			  (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			  DP_AUX_CH_CTL_DONE |
			  DP_AUX_CH_CTL_TIME_OUT_ERROR |
			  DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = REG_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		REG_WRITE(ch_ctl,
			  status |
			  DP_AUX_CH_CTL_DONE |
			  DP_AUX_CH_CTL_TIME_OUT_ERROR |
			  DP_AUX_CH_CTL_RECEIVE_ERROR);
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(REG_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}

/* Write data to the aux channel in native mode */
static int
cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
			      uint16_t address, uint8_t *send, int send_bytes)
{
	int ret;
	uint8_t msg[20];
	int msg_bytes;
	uint8_t ack;

	if (send_bytes > 16)
		return -1;
	msg[0] = DP_AUX_NATIVE_WRITE << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = send_bytes - 1;
	memcpy(&msg[4], send, send_bytes);
	msg_bytes = send_bytes + 4;
	for (;;) {
		ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
		if (ret < 0)
			return ret;
		ack >>= 4;
		if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
	return send_bytes;
}

/* Write a single byte to the aux channel in native mode */
static int
cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
				uint16_t address, uint8_t byte)
{
	return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
}

/* read bytes from a native aux channel */
static int
cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
			     uint16_t address, uint8_t *recv, int recv_bytes)
{
	uint8_t msg[4];
	int msg_bytes;
	uint8_t reply[20];
	int reply_bytes;
	uint8_t ack;
	int ret;

	msg[0] = DP_AUX_NATIVE_READ << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = recv_bytes - 1;

	msg_bytes = 4;
	reply_bytes = recv_bytes + 1;

	for (;;) {
		ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
					  reply, reply_bytes);
		if (ret == 0)
			return -EPROTO;
		if (ret < 0)
			return ret;
		ack = reply[0] >> 4;
		if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
			memcpy(recv, reply + 1, ret - 1);
			return ret - 1;
		}
		else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
}
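
/*
 * i2c_algo_dp_aux callback: wrap a single-byte I2C read or write in an
 * I2C-over-AUX transaction and decode both the native and the I2C reply
 * fields, retrying while the sink replies with DEFER.
 */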
static int
cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			uint8_t write_byte, uint8_t *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct cdv_intel_dp *intel_dp = container_of(adapter,
						     struct cdv_intel_dp,
						     adapter);
	struct gma_encoder *encoder = intel_dp->encoder;
	uint16_t address = algo_data->address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = DP_AUX_I2C_READ << 4;
	else
		msg[0] = DP_AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[0] |= DP_AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg[3] = 0;
		msg[4] = write_byte;
		msg_bytes = 5;
		reply_bytes = 1;
		break;
	case MODE_I2C_READ:
		msg[3] = 0;
		msg_bytes = 4;
		reply_bytes = 2;
		break;
	default:
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = cdv_intel_dp_aux_ch(encoder,
					  msg, msg_bytes,
					  reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
		case DP_AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case DP_AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case DP_AUX_NATIVE_REPLY_DEFER:
			udelay(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return -EREMOTEIO;
		}

		switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
		case DP_AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return reply_bytes - 1;
		case DP_AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case DP_AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return -EREMOTEIO;
}

static int
cdv_intel_dp_i2c_init(struct gma_connector *connector,
		      struct gma_encoder *encoder, const char *name)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);

	intel_dp->algo.running = false;
	intel_dp->algo.address = 0;
	intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;

	memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter));
	intel_dp->adapter.owner = THIS_MODULE;
	intel_dp->adapter.class = I2C_CLASS_DDC;
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
	intel_dp->adapter.algo_data = &intel_dp->algo;
	intel_dp->adapter.dev.parent = connector->base.kdev;

	if (is_edp(encoder))
		cdv_intel_edp_panel_vdd_on(encoder);
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
	if (is_edp(encoder))
		cdv_intel_edp_panel_vdd_off(encoder);

	return ret;
}
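
/*
 * Copy the panel's fixed timings into the adjusted mode so an eDP panel
 * is always driven at its native timings regardless of the mode requested.
 */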
static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
				       struct drm_display_mode *adjusted_mode)
{
	adjusted_mode->hdisplay = fixed_mode->hdisplay;
	adjusted_mode->hsync_start = fixed_mode->hsync_start;
	adjusted_mode->hsync_end = fixed_mode->hsync_end;
	adjusted_mode->htotal = fixed_mode->htotal;

	adjusted_mode->vdisplay = fixed_mode->vdisplay;
	adjusted_mode->vsync_start = fixed_mode->vsync_start;
	adjusted_mode->vsync_end = fixed_mode->vsync_end;
	adjusted_mode->vtotal = fixed_mode->vtotal;

	adjusted_mode->clock = fixed_mode->clock;

	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
}
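
/*
 * Pick the smallest lane count and link rate that can carry the mode.
 * For eDP, fall back to the maximum supported configuration instead of
 * rejecting the mode when nothing fits.
 */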
static bool
cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
			struct drm_display_mode *adjusted_mode)
{
	struct drm_psb_private *dev_priv = encoder->dev->dev_private;
	struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	int lane_count, clock;
	int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
	int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
	int refclock = mode->clock;
	int bpp = 24;

	if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
		cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
		refclock = intel_dp->panel_fixed_mode->clock;
		bpp = dev_priv->edp.bpp;
	}

	for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
		for (clock = max_clock; clock >= 0; clock--) {
			int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);

			if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
				intel_dp->link_bw = bws[clock];
				intel_dp->lane_count = lane_count;
				adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
				DRM_DEBUG_KMS("Display port link bw %02x lane "
					      "count %d clock %d\n",
					      intel_dp->link_bw, intel_dp->lane_count,
					      adjusted_mode->clock);
				return true;
			}
		}
	}
	if (is_edp(intel_encoder)) {
		/* okay we failed just pick the highest */
		intel_dp->lane_count = max_lane_count;
		intel_dp->link_bw = bws[max_clock];
		adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
		DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
			      "count %d clock %d\n",
			      intel_dp->link_bw, intel_dp->lane_count,
			      adjusted_mode->clock);

		return true;
	}
	return false;
}

struct cdv_intel_dp_m_n {
	uint32_t tu;
	uint32_t gmch_m;
	uint32_t gmch_n;
	uint32_t link_m;
	uint32_t link_n;
};
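
/*
 * Rescale the num/den ratio to the fixed denominator 0x800000, using
 * 64-bit math so the intermediate product cannot overflow.
 */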
static void
cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	/*
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}*/
	uint64_t value, m;
	m = *num;
	value = m * (0x800000);
	m = do_div(value, *den);
	*num = value;
	*den = 0x800000;
}
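
/*
 * GMCH M/N is the ratio of the payload data rate (pixel clock times bytes
 * per pixel) to the total link bandwidth (link clock times lane count);
 * link M/N is simply pixel clock over link clock.  The TU size is fixed
 * at 64.
 */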
static void
cdv_intel_dp_compute_m_n(int bpp,
			 int nlanes,
			 int pixel_clock,
			 int link_clock,
			 struct cdv_intel_dp_m_n *m_n)
{
	m_n->tu = 64;
	m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
	m_n->gmch_n = link_clock * nlanes;
	cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}

void
cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = dev->dev_private;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	int lane_count = 4, bpp = 24;
	struct cdv_intel_dp_m_n m_n;
	int pipe = gma_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct gma_encoder *intel_encoder;
		struct cdv_intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_encoder = to_gma_encoder(encoder);
		intel_dp = intel_encoder->dev_priv;
		if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
			lane_count = intel_dp->lane_count;
			break;
		} else if (is_edp(intel_encoder)) {
			lane_count = intel_dp->lane_count;
			bpp = dev_priv->edp.bpp;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	cdv_intel_dp_compute_m_n(bpp, lane_count,
				 mode->clock, adjusted_mode->clock, &m_n);

	{
		REG_WRITE(PIPE_GMCH_DATA_M(pipe),
			  ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			  m_n.gmch_m);
		REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}

static void
cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		      struct drm_display_mode *adjusted_mode)
{
	struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
	struct drm_crtc *crtc = encoder->crtc;
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	struct drm_device *dev = encoder->dev;

	intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= intel_dp->color_range;

	if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
		intel_dp->DP |= DP_SYNC_HS_HIGH;
	if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
		intel_dp->DP |= DP_SYNC_VS_HIGH;

	intel_dp->DP |= DP_LINK_TRAIN_OFF;

	switch (intel_dp->lane_count) {
	case 1:
		intel_dp->DP |= DP_PORT_WIDTH_1;
		break;
	case 2:
		intel_dp->DP |= DP_PORT_WIDTH_2;
		break;
	case 4:
		intel_dp->DP |= DP_PORT_WIDTH_4;
		break;
	}
	if (intel_dp->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
	intel_dp->link_configuration[0] = intel_dp->link_bw;
	intel_dp->link_configuration[1] = intel_dp->lane_count;

	/*
	 * Check for DPCD version > 1.1 and enhanced framing support
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
		intel_dp->DP |= DP_ENHANCED_FRAMING;
	}

	/* CPT DP's pipe select is decided in TRANS_DP_CTL */
	if (gma_crtc->pipe == 1)
		intel_dp->DP |= DP_PIPEB_SELECT;

	REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
	DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
	if (is_edp(intel_encoder)) {
		uint32_t pfit_control;
		cdv_intel_edp_panel_on(intel_encoder);

		if (mode->hdisplay != adjusted_mode->hdisplay ||
		    mode->vdisplay != adjusted_mode->vdisplay)
			pfit_control = PFIT_ENABLE;
		else
			pfit_control = 0;

		pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;

		REG_WRITE(PFIT_CONTROL, pfit_control);
	}
}

/* If the sink supports it, try to set the power state appropriately */
static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
						      DP_SET_POWER_D3);
		if (ret != 1)
			DRM_DEBUG_DRIVER("failed to write sink power state\n");
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = cdv_intel_dp_aux_native_write_1(encoder,
							      DP_SET_POWER,
							      DP_SET_POWER_D0);
			if (ret == 1)
				break;
			udelay(1000);
		}
	}
}

static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
{
	struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
	int edp = is_edp(intel_encoder);

	if (edp) {
		cdv_intel_edp_backlight_off(intel_encoder);
		cdv_intel_edp_panel_off(intel_encoder);
		cdv_intel_edp_panel_vdd_on(intel_encoder);
	}
	/* Wake up the sink first */
	cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
	cdv_intel_dp_link_down(intel_encoder);
	if (edp)
		cdv_intel_edp_panel_vdd_off(intel_encoder);
}

static void cdv_intel_dp_commit(struct drm_encoder *encoder)
{
	struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
	int edp = is_edp(intel_encoder);

	if (edp)
		cdv_intel_edp_panel_on(intel_encoder);
	cdv_intel_dp_start_link_train(intel_encoder);
	cdv_intel_dp_complete_link_train(intel_encoder);
	if (edp)
		cdv_intel_edp_backlight_on(intel_encoder);
}

static void
cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	struct drm_device *dev = encoder->dev;
	uint32_t dp_reg = REG_READ(intel_dp->output_reg);
	int edp = is_edp(intel_encoder);

	if (mode != DRM_MODE_DPMS_ON) {
		if (edp) {
			cdv_intel_edp_backlight_off(intel_encoder);
			cdv_intel_edp_panel_vdd_on(intel_encoder);
		}
		cdv_intel_dp_sink_dpms(intel_encoder, mode);
		cdv_intel_dp_link_down(intel_encoder);
		if (edp) {
			cdv_intel_edp_panel_vdd_off(intel_encoder);
			cdv_intel_edp_panel_off(intel_encoder);
		}
	} else {
		if (edp)
			cdv_intel_edp_panel_on(intel_encoder);
		cdv_intel_dp_sink_dpms(intel_encoder, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			cdv_intel_dp_start_link_train(intel_encoder);
			cdv_intel_dp_complete_link_train(intel_encoder);
		}
		if (edp)
			cdv_intel_edp_backlight_on(intel_encoder);
	}
}

/*
 * Native read with retry for link status and receiver capability reads for
 * cases where the sink may still be asleep.
 */
static bool
cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
				   uint8_t *recv, int recv_bytes)
{
	int ret, i;

	/*
	 * Sinks are *supposed* to come up within 1ms from an off state,
	 * but we're also supposed to retry 3 times per the spec.
	 */
	for (i = 0; i < 3; i++) {
		ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
						   recv_bytes);
		if (ret == recv_bytes)
			return true;
		udelay(1000);
	}

	return false;
}

/*
 * Fetch AUX CH registers 0x202 - 0x207 which contain
 * link status information
 */
static bool
cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	return cdv_intel_dp_aux_native_read_retry(encoder,
						  DP_LANE0_1_STATUS,
						  intel_dp->link_status,
						  DP_LINK_STATUS_SIZE);
}

static uint8_t
cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
			 int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static uint8_t
cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
				     int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	uint8_t l = cdv_intel_dp_link_status(link_status, i);

	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static uint8_t
cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
					  int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	uint8_t l = cdv_intel_dp_link_status(link_status, i);

	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}

#if 0
static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char *link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};
#endif

#define CDV_DP_VOLTAGE_MAX	DP_TRAIN_VOLTAGE_SWING_1200

/*
static uint8_t
cdv_intel_dp_pre_emphasis_max(uint8_t voltage_swing)
{
	switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
		return DP_TRAIN_PRE_EMPHASIS_6;
	case DP_TRAIN_VOLTAGE_SWING_600:
		return DP_TRAIN_PRE_EMPHASIS_6;
	case DP_TRAIN_VOLTAGE_SWING_800:
		return DP_TRAIN_PRE_EMPHASIS_3_5;
	case DP_TRAIN_VOLTAGE_SWING_1200:
	default:
		return DP_TRAIN_PRE_EMPHASIS_0;
	}
}
*/
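
/*
 * Take the highest voltage swing and pre-emphasis requested by any lane
 * in the last link status read and program that level on all lanes,
 * flagging when the maximum supported level has been reached.
 */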
static void
cdv_intel_get_adjust_train(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint8_t v = 0;
	uint8_t p = 0;
	int lane;

	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
		uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= CDV_DP_VOLTAGE_MAX)
		v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;

	if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] = v | p;
}

static uint8_t
cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
			  int lane)
{
	int i = DP_LANE0_1_STATUS + (lane >> 1);
	int s = (lane & 1) * 4;
	uint8_t l = cdv_intel_dp_link_status(link_status, i);

	return (l >> s) & 0xf;
}

/* Check whether clock recovery is done on all channels */
static bool
cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
{
	int lane;
	uint8_t lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = cdv_intel_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
			 DP_LANE_CHANNEL_EQ_DONE|\
			 DP_LANE_SYMBOL_LOCKED)

static bool
cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint8_t lane_align;
	uint8_t lane_status;
	int lane;

	lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
					      DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

static bool
cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
			    uint32_t dp_reg_value,
			    uint8_t dp_train_pat)
{
	struct drm_device *dev = encoder->base.dev;
	int ret;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;

	REG_WRITE(intel_dp->output_reg, dp_reg_value);
	REG_READ(intel_dp->output_reg);

	ret = cdv_intel_dp_aux_native_write_1(encoder,
					      DP_TRAINING_PATTERN_SET,
					      dp_train_pat);

	if (ret != 1) {
		DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
			      dp_train_pat);
		return false;
	}

	return true;
}

static bool
cdv_intel_dplink_set_level(struct gma_encoder *encoder,
			   uint8_t dp_train_pat)
{
	int ret;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;

	ret = cdv_intel_dp_aux_native_write(encoder,
					    DP_TRAINING_LANE0_SET,
					    intel_dp->train_set,
					    intel_dp->lane_count);

	if (ret != intel_dp->lane_count) {
		DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
			      intel_dp->train_set[0], intel_dp->lane_count);
		return false;
	}
	return true;
}
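
/*
 * Program the requested voltage swing and pre-emphasis level through the
 * sideband registers of the DDI driving this port.  Note that with
 * CDV_FAST_LINK_TRAIN defined (as it is above) this returns before
 * touching the hardware, leaving the power-on defaults in place.
 */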
static void
cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	struct ddi_regoff *ddi_reg;
	int vswing, premph, index;

	if (intel_dp->output_reg == DP_B)
		ddi_reg = &ddi_DP_train_table[0];
	else
		ddi_reg = &ddi_DP_train_table[1];

	vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
	premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
			DP_TRAIN_PRE_EMPHASIS_SHIFT;

	if (vswing + premph > 3)
		return;
#ifdef CDV_FAST_LINK_TRAIN
	return;
#endif
	DRM_DEBUG_KMS("Test2\n");
	//return ;
	cdv_sb_reset(dev);
	/* ;Swing voltage programming
	   ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
	cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);

	/* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
	cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);

	/* ;gfx_dpio_set_reg(0x8148, 0x55338954)
	 * The VSwing_PreEmph table is also considered based on the vswing/premph
	 */
	index = (vswing + premph) * 2;
	if (premph == 1 && vswing == 1) {
		cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
	} else
		cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);

	/* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
	if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_1200)
		cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
	else
		cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);

	/* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
	/* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */

	/* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
	cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);

	/* ;Pre emphasis programming
	 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
	 */
	cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);

	/* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
	index = 2 * premph + 1;
	cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
	return;
}

/* Enable corresponding port and start training pattern 1 */
static void
cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	DP |= DP_PORT_EN;
	DP &= ~DP_LINK_TRAIN_MASK;

	reg = DP;
	reg |= DP_LINK_TRAIN_PAT_1;
	/* Enable output, wait for it to become active */
	REG_WRITE(intel_dp->output_reg, reg);
	REG_READ(intel_dp->output_reg);
	gma_wait_for_vblank(dev);

	DRM_DEBUG_KMS("Link config\n");
	/* Write the link configuration data */
	cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
				      intel_dp->link_configuration,
				      2);

	memset(intel_dp->train_set, 0, 4);
	voltage = 0;
	tries = 0;
	clock_recovery = false;

	DRM_DEBUG_KMS("Start train\n");
	reg = DP | DP_LINK_TRAIN_PAT_1;

	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
			      intel_dp->train_set[0],
			      intel_dp->link_configuration[0],
			      intel_dp->link_configuration[1]);

		if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
			DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
		}
		cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
		/* Set training pattern 1 */

		cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);

		udelay(200);
		if (!cdv_intel_dp_get_link_status(encoder))
			break;

		DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
			      intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
			      intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);

		if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("PT1 train is done\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count)
			break;

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++tries;
			if (tries == 5)
				break;
		} else
			tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		cdv_intel_get_adjust_train(encoder);
	}

	if (!clock_recovery) {
		DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
	}

	intel_dp->DP = DP;
}
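
/*
 * Run training pattern 2 until channel equalization, symbol lock and
 * interlane alignment are all reported.  If clock recovery is lost along
 * the way, fall back to restarting pattern 1 training, giving up after a
 * few such restarts.
 */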
static void
cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;

	DRM_DEBUG_KMS("\n");
	reg = DP | DP_LINK_TRAIN_PAT_2;

	for (;;) {
		DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
			      intel_dp->train_set[0],
			      intel_dp->link_configuration[0],
			      intel_dp->link_configuration[1]);
		/* channel eq pattern */

		if (!cdv_intel_dp_set_link_train(encoder, reg,
						 DP_TRAINING_PATTERN_2)) {
			DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
		}
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			cdv_intel_dp_link_down(encoder);
			break;
		}

		cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);

		cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);

		udelay(1000);
		if (!cdv_intel_dp_get_link_status(encoder))
			break;

		DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
			      intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
			      intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);

		/* Make sure clock is still ok */
		if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			cdv_intel_dp_start_link_train(encoder);
			cr_tries++;
			continue;
		}

		if (cdv_intel_channel_eq_ok(encoder)) {
			DRM_DEBUG_KMS("PT2 train is done\n");
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			cdv_intel_dp_link_down(encoder);
			cdv_intel_dp_start_link_train(encoder);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		cdv_intel_get_adjust_train(encoder);
		++tries;
	}

	reg = DP | DP_LINK_TRAIN_OFF;

	REG_WRITE(intel_dp->output_reg, reg);
	REG_READ(intel_dp->output_reg);
	cdv_intel_dp_aux_native_write_1(encoder,
					DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}
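
/*
 * Take the link down: switch the port to the idle training pattern, wait
 * roughly a frame, then disable the port.
 */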
static void
cdv_intel_dp_link_down(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint32_t DP = intel_dp->DP;

	if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	{
		DP &= ~DP_LINK_TRAIN_MASK;
		REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	REG_READ(intel_dp->output_reg);

	msleep(17);

	REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	REG_READ(intel_dp->output_reg);
}
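
/*
 * Probe the sink by reading the start of the DPCD over AUX; a successful
 * read with a non-zero DPCD revision means a sink is attached.
 */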
static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;

	status = connector_status_disconnected;
	if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
					 sizeof(intel_dp->dpcd)) == sizeof(intel_dp->dpcd))
	{
		if (intel_dp->dpcd[DP_DPCD_REV] != 0)
			status = connector_status_connected;
	}
	if (status == connector_status_connected)
		DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
			      intel_dp->dpcd[0], intel_dp->dpcd[1],
			      intel_dp->dpcd[2], intel_dp->dpcd[3]);
	return status;
}

/**
 * Detect whether a DP sink is attached by reading its DPCD over the AUX
 * channel, and refresh the cached audio capability from the EDID.
 *
 * \return connector_status_connected if a sink is present.
 * \return connector_status_disconnected otherwise.
 */
  1308. static enum drm_connector_status
  1309. cdv_intel_dp_detect(struct drm_connector *connector, bool force)
  1310. {
  1311. struct gma_encoder *encoder = gma_attached_encoder(connector);
  1312. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1313. enum drm_connector_status status;
  1314. struct edid *edid = NULL;
  1315. int edp = is_edp(encoder);
  1316. intel_dp->has_audio = false;
  1317. if (edp)
  1318. cdv_intel_edp_panel_vdd_on(encoder);
  1319. status = cdv_dp_detect(encoder);
  1320. if (status != connector_status_connected) {
  1321. if (edp)
  1322. cdv_intel_edp_panel_vdd_off(encoder);
  1323. return status;
  1324. }
  1325. if (intel_dp->force_audio) {
  1326. intel_dp->has_audio = intel_dp->force_audio > 0;
  1327. } else {
  1328. edid = drm_get_edid(connector, &intel_dp->adapter);
  1329. if (edid) {
  1330. intel_dp->has_audio = drm_detect_monitor_audio(edid);
  1331. kfree(edid);
  1332. }
  1333. }
  1334. if (edp)
  1335. cdv_intel_edp_panel_vdd_off(encoder);
  1336. return connector_status_connected;
  1337. }
static int cdv_intel_dp_get_modes(struct drm_connector *connector)
{
	struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	struct edid *edid = NULL;
	int ret = 0;
	int edp = is_edp(intel_encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		drm_mode_connector_update_edid_property(connector, edid);
		ret = drm_add_edid_modes(connector, edid);
		kfree(edid);
	}

	if (is_edp(intel_encoder)) {
		struct drm_device *dev = connector->dev;
		struct drm_psb_private *dev_priv = dev->dev_private;

		cdv_intel_edp_panel_vdd_off(intel_encoder);
		if (ret) {
			if (edp && !intel_dp->panel_fixed_mode) {
				struct drm_display_mode *newmode;

				list_for_each_entry(newmode, &connector->probed_modes,
						    head) {
					if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
						intel_dp->panel_fixed_mode =
							drm_mode_duplicate(dev, newmode);
						break;
					}
				}
			}
			return ret;
		}
		if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode != NULL) {
			struct drm_display_mode *mode;

			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	return ret;
}
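
/*
 * Re-read the EDID to see whether the attached sink supports audio; used
 * when the force-audio property is switched back to "auto".
 */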
static bool
cdv_intel_dp_detect_audio(struct drm_connector *connector)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	struct edid *edid;
	bool has_audio = false;
	int edp = is_edp(encoder);

	if (edp)
		cdv_intel_edp_panel_vdd_on(encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);
		kfree(edid);
	}
	if (edp)
		cdv_intel_edp_panel_vdd_off(encoder);

	return has_audio;
}
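
/*
 * Connector property handler for the force-audio and broadcast-RGB
 * properties; a full mode set is issued when a change takes effect so the
 * new setting reaches the hardware.
 */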
static int
cdv_intel_dp_set_property(struct drm_connector *connector,
			  struct drm_property *property,
			  uint64_t val)
{
	struct drm_psb_private *dev_priv = connector->dev->dev_private;
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int ret;

	ret = drm_object_property_set_value(&connector->base, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = cdv_intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (encoder->base.crtc) {
		struct drm_crtc *crtc = encoder->base.crtc;

		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->primary->fb);
	}

	return 0;
}
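
/*
 * Connector teardown: free the cached eDP fixed mode, remove the DP AUX
 * i2c adapter and release the connector.
 */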
static void
cdv_intel_dp_destroy(struct drm_connector *connector)
{
	struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;

	if (is_edp(gma_encoder)) {
		/* cdv_intel_panel_destroy_backlight(connector->dev); */
		if (intel_dp->panel_fixed_mode) {
			kfree(intel_dp->panel_fixed_mode);
			intel_dp->panel_fixed_mode = NULL;
		}
	}
	i2c_del_adapter(&intel_dp->adapter);
	drm_sysfs_connector_remove(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void cdv_intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}
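
/* Hook tables wiring the functions above into the DRM encoder/connector core. */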
static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
	.dpms = cdv_intel_dp_dpms,
	.mode_fixup = cdv_intel_dp_mode_fixup,
	.prepare = cdv_intel_dp_prepare,
	.mode_set = cdv_intel_dp_mode_set,
	.commit = cdv_intel_dp_commit,
};

static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = cdv_intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = cdv_intel_dp_set_property,
	.destroy = cdv_intel_dp_destroy,
};

static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
	.get_modes = cdv_intel_dp_get_modes,
	.mode_valid = cdv_intel_dp_mode_valid,
	.best_encoder = gma_best_encoder,
};

static const struct drm_encoder_funcs cdv_intel_dp_enc_funcs = {
	.destroy = cdv_intel_dp_encoder_destroy,
};

static void cdv_intel_dp_add_properties(struct drm_connector *connector)
{
	cdv_intel_attach_force_audio_property(connector);
	cdv_intel_attach_broadcast_rgb_property(connector);
}

/* Check the VBT to see whether the eDP panel is wired to the DP-C port */
static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
{
	struct drm_psb_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPC &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/* Cedarview display clock gating
 *
 * This needs to be disabled to get correct behaviour while enabling
 * DP/eDP. TODO - investigate whether it can be re-enabled afterwards.
 */
static void cdv_disable_intel_clock_gating(struct drm_device *dev)
{
	u32 reg_value;

	reg_value = REG_READ(DSPCLK_GATE_D);

	reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
		      DPUNIT_PIPEA_GATE_DISABLE |
		      DPCUNIT_CLOCK_GATE_DISABLE |
		      DPLSUNIT_CLOCK_GATE_DISABLE |
		      DPOUNIT_CLOCK_GATE_DISABLE |
		      DPIOUNIT_CLOCK_GATE_DISABLE);

	REG_WRITE(DSPCLK_GATE_D, reg_value);

	udelay(500);
}
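
/*
 * Create and register the DP/eDP encoder and connector for one output
 * register (DP_B or DP_C), including the AUX i2c bus and, for eDP, the
 * panel power sequencing setup.
 */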
void
cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
{
	struct gma_encoder *gma_encoder;
	struct gma_connector *gma_connector;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct cdv_intel_dp *intel_dp;
	const char *name = NULL;
	int type = DRM_MODE_CONNECTOR_DisplayPort;

	gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
	if (!gma_encoder)
		return;
	gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
	if (!gma_connector)
		goto err_connector;
	intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
	if (!intel_dp)
		goto err_priv;

	if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
		type = DRM_MODE_CONNECTOR_eDP;

	connector = &gma_connector->base;
	encoder = &gma_encoder->base;

	drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
	drm_encoder_init(dev, encoder, &cdv_intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS);

	gma_connector_attach_encoder(gma_connector, gma_encoder);

	if (type == DRM_MODE_CONNECTOR_DisplayPort)
		gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	else
		gma_encoder->type = INTEL_OUTPUT_EDP;

	gma_encoder->dev_priv = intel_dp;
	intel_dp->encoder = gma_encoder;
	intel_dp->output_reg = output_reg;

	drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
	drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	drm_sysfs_connector_add(connector);

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_B:
		name = "DPDDC-B";
		gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
		break;
	case DP_C:
		name = "DPDDC-C";
		gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
		break;
	}
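
	/*
	 * Disable display clock gating before touching the DP hardware (see
	 * the comment above cdv_disable_intel_clock_gating()).
	 */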
	cdv_disable_intel_clock_gating(dev);

	cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
	/* FIXME: check the i2c init for failure */
	cdv_intel_dp_add_properties(connector);

	if (is_edp(gma_encoder)) {
		int ret;
		struct edp_power_seq cur;
		u32 pp_on, pp_off, pp_div;
		u32 pwm_ctrl;
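
		/*
		 * Unlock the panel power sequencer registers and associate the
		 * backlight PWM with pipe B, then read the power sequencing
		 * delays back out of the hardware.
		 */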
		pp_on = REG_READ(PP_CONTROL);
		pp_on &= ~PANEL_UNLOCK_MASK;
		pp_on |= PANEL_UNLOCK_REGS;
		REG_WRITE(PP_CONTROL, pp_on);

		pwm_ctrl = REG_READ(BLC_PWM_CTL2);
		pwm_ctrl |= PWM_PIPE_B;
		REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);

		pp_on = REG_READ(PP_ON_DELAYS);
		pp_off = REG_READ(PP_OFF_DELAYS);
		pp_div = REG_READ(PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;
		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;
		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;
		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;
		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT);

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
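
		/*
		 * Convert the raw sequencer fields to milliseconds: the delay
		 * fields are stored in units of 100 us, and the power-cycle
		 * field in units of 100 ms with a +1 bias.
		 */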
		intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
		intel_dp->backlight_on_delay = cur.t8 / 10;
		intel_dp->backlight_off_delay = cur.t9 / 10;
		intel_dp->panel_power_down_delay = cur.t10 / 10;
		intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay,
			      intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);
		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay,
			      intel_dp->backlight_off_delay);
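
		/*
		 * With VDD forced on, read the DPCD to make sure a panel is
		 * really wired to this port; tear everything down again if the
		 * read fails.
		 */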
		cdv_intel_edp_panel_vdd_on(gma_encoder);
		ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
						   intel_dp->dpcd,
						   sizeof(intel_dp->dpcd));
		cdv_intel_edp_panel_vdd_off(gma_encoder);
		if (ret == 0) {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			cdv_intel_dp_encoder_destroy(encoder);
			cdv_intel_dp_destroy(connector);
			/* cdv_intel_dp_destroy() already freed gma_connector,
			 * so release only what is still ours */
			kfree(intel_dp);
			goto err_connector;
		} else {
			DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
				      intel_dp->dpcd[0], intel_dp->dpcd[1],
				      intel_dp->dpcd[2], intel_dp->dpcd[3]);
		}
		/*
		 * The CDV reference driver moves panel backlight setup into
		 * the displays that have a backlight: this is a good idea and
		 * one we should probably adopt, but all the drivers need to be
		 * migrated before we can do that.
		 */
		/* cdv_intel_panel_setup_backlight(dev); */
	}

	return;

err_priv:
	kfree(gma_connector);
err_connector:
	kfree(gma_encoder);
}