cdv_intel_dp.c

  1. /*
  2. * Copyright © 2012 Intel Corporation
  3. *
  4. * Permission is hereby granted, free of charge, to any person obtaining a
  5. * copy of this software and associated documentation files (the "Software"),
  6. * to deal in the Software without restriction, including without limitation
  7. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  8. * and/or sell copies of the Software, and to permit persons to whom the
  9. * Software is furnished to do so, subject to the following conditions:
  10. *
  11. * The above copyright notice and this permission notice (including the next
  12. * paragraph) shall be included in all copies or substantial portions of the
  13. * Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  20. * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
  21. * IN THE SOFTWARE.
  22. *
  23. * Authors:
  24. * Keith Packard <keithp@keithp.com>
  25. *
  26. */
  27. #include <linux/i2c.h>
  28. #include <linux/slab.h>
  29. #include <linux/module.h>
  30. #include <drm/drmP.h>
  31. #include <drm/drm_crtc.h>
  32. #include <drm/drm_crtc_helper.h>
  33. #include "psb_drv.h"
  34. #include "psb_intel_drv.h"
  35. #include "psb_intel_reg.h"
  36. #include "gma_display.h"
  37. #include <drm/drm_dp_helper.h>
  38. /**
  39. * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
  40. * aux algorithm
  41. * @running: set by the algo to indicate whether an i2c transfer is ongoing
  42. * or whether the i2c bus is quiescent
  43. * @address: i2c target address for the currently ongoing transfer
  44. * @aux_ch: driver callback to transfer a single byte of the i2c payload
  45. */
  46. struct i2c_algo_dp_aux_data {
  47. bool running;
  48. u16 address;
  49. int (*aux_ch) (struct i2c_adapter *adapter,
  50. int mode, uint8_t write_byte,
  51. uint8_t *read_byte);
  52. };
  53. /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
  54. static int
  55. i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
  56. uint8_t write_byte, uint8_t *read_byte)
  57. {
  58. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  59. int ret;
  60. ret = (*algo_data->aux_ch)(adapter, mode,
  61. write_byte, read_byte);
  62. return ret;
  63. }
  64. /*
  65. * I2C over AUX CH
  66. */
  67. /*
  68. * Send the address. If the I2C link is running, this 'restarts'
  69. * the connection with the new address; this is used for doing
  70. * a write followed by a read (as needed for DDC)
  71. */
  72. static int
  73. i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
  74. {
  75. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  76. int mode = MODE_I2C_START;
  77. int ret;
  78. if (reading)
  79. mode |= MODE_I2C_READ;
  80. else
  81. mode |= MODE_I2C_WRITE;
  82. algo_data->address = address;
  83. algo_data->running = true;
  84. ret = i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
  85. return ret;
  86. }
  87. /*
  88. * Stop the I2C transaction. This closes out the link, sending
  89. * a bare address packet with the MOT bit turned off
  90. */
  91. static void
  92. i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
  93. {
  94. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  95. int mode = MODE_I2C_STOP;
  96. if (reading)
  97. mode |= MODE_I2C_READ;
  98. else
  99. mode |= MODE_I2C_WRITE;
  100. if (algo_data->running) {
  101. (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
  102. algo_data->running = false;
  103. }
  104. }
  105. /*
  106. * Write a single byte to the current I2C address; the
  107. * I2C link must be running or this returns -EIO
  108. */
  109. static int
  110. i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
  111. {
  112. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  113. int ret;
  114. if (!algo_data->running)
  115. return -EIO;
  116. ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
  117. return ret;
  118. }
  119. /*
  120. * Read a single byte from the current I2C address; the
  121. * I2C link must be running or this returns -EIO
  122. */
  123. static int
  124. i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
  125. {
  126. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  127. int ret;
  128. if (!algo_data->running)
  129. return -EIO;
  130. ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
  131. return ret;
  132. }
  133. static int
  134. i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
  135. struct i2c_msg *msgs,
  136. int num)
  137. {
  138. int ret = 0;
  139. bool reading = false;
  140. int m;
  141. int b;
  142. for (m = 0; m < num; m++) {
  143. u16 len = msgs[m].len;
  144. u8 *buf = msgs[m].buf;
  145. reading = (msgs[m].flags & I2C_M_RD) != 0;
  146. ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
  147. if (ret < 0)
  148. break;
  149. if (reading) {
  150. for (b = 0; b < len; b++) {
  151. ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
  152. if (ret < 0)
  153. break;
  154. }
  155. } else {
  156. for (b = 0; b < len; b++) {
  157. ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
  158. if (ret < 0)
  159. break;
  160. }
  161. }
  162. if (ret < 0)
  163. break;
  164. }
  165. if (ret >= 0)
  166. ret = num;
  167. i2c_algo_dp_aux_stop(adapter, reading);
  168. DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
  169. return ret;
  170. }
  171. static u32
  172. i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
  173. {
  174. return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
  175. I2C_FUNC_SMBUS_READ_BLOCK_DATA |
  176. I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
  177. I2C_FUNC_10BIT_ADDR;
  178. }
  179. static const struct i2c_algorithm i2c_dp_aux_algo = {
  180. .master_xfer = i2c_algo_dp_aux_xfer,
  181. .functionality = i2c_algo_dp_aux_functionality,
  182. };
  183. static void
  184. i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
  185. {
  186. (void) i2c_algo_dp_aux_address(adapter, 0, false);
  187. (void) i2c_algo_dp_aux_stop(adapter, false);
  188. }
  189. static int
  190. i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
  191. {
  192. adapter->algo = &i2c_dp_aux_algo;
  193. adapter->retries = 3;
  194. i2c_dp_aux_reset_bus(adapter);
  195. return 0;
  196. }
  197. /*
  198. * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
  199. * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
  200. */
  201. static int __deprecated
  202. i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
  203. {
  204. int error;
  205. error = i2c_dp_aux_prepare_bus(adapter);
  206. if (error)
  207. return error;
  208. error = i2c_add_adapter(adapter);
  209. return error;
  210. }
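/*
 * Poll until COND becomes true or MS milliseconds have elapsed, sleeping
 * W milliseconds between polls (unless a kernel debugger is attached);
 * evaluates to 0 on success or -ETIMEDOUT on timeout.
 */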
  211. #define _wait_for(COND, MS, W) ({ \
  212. unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
  213. int ret__ = 0; \
  214. while (! (COND)) { \
  215. if (time_after(jiffies, timeout__)) { \
  216. ret__ = -ETIMEDOUT; \
  217. break; \
  218. } \
  219. if (W && !in_dbg_master()) msleep(W); \
  220. } \
  221. ret__; \
  222. })
  223. #define wait_for(COND, MS) _wait_for(COND, MS, 1)
  224. #define DP_LINK_STATUS_SIZE 6
  225. #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
  226. #define DP_LINK_CONFIGURATION_SIZE 9
  227. #define CDV_FAST_LINK_TRAIN 1
  228. struct cdv_intel_dp {
  229. uint32_t output_reg;
  230. uint32_t DP;
  231. uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
  232. bool has_audio;
  233. int force_audio;
  234. uint32_t color_range;
  235. uint8_t link_bw;
  236. uint8_t lane_count;
  237. uint8_t dpcd[4];
  238. struct gma_encoder *encoder;
  239. struct i2c_adapter adapter;
  240. struct i2c_algo_dp_aux_data algo;
  241. uint8_t train_set[4];
  242. uint8_t link_status[DP_LINK_STATUS_SIZE];
  243. int panel_power_up_delay;
  244. int panel_power_down_delay;
  245. int panel_power_cycle_delay;
  246. int backlight_on_delay;
  247. int backlight_off_delay;
  248. struct drm_display_mode *panel_fixed_mode; /* for eDP */
  249. bool panel_on;
  250. };
  251. struct ddi_regoff {
  252. uint32_t PreEmph1;
  253. uint32_t PreEmph2;
  254. uint32_t VSwing1;
  255. uint32_t VSwing2;
  256. uint32_t VSwing3;
  257. uint32_t VSwing4;
  258. uint32_t VSwing5;
  259. };
  260. static struct ddi_regoff ddi_DP_train_table[] = {
  261. {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
  262. .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
  263. .VSwing5 = 0x8158,},
  264. {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
  265. .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
  266. .VSwing5 = 0x8258,},
  267. };
  268. static uint32_t dp_vswing_premph_table[] = {
  269. 0x55338954, 0x4000,
  270. 0x554d8954, 0x2000,
  271. 0x55668954, 0,
  272. 0x559ac0d4, 0x6000,
  273. };
  274. /**
  275. * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
  276. * @encoder: GMA encoder for the DP port
  277. *
  278. * If a CPU or PCH DP output is attached to an eDP panel, this function
  279. * will return true, and false otherwise.
  280. */
  281. static bool is_edp(struct gma_encoder *encoder)
  282. {
  283. return encoder->type == INTEL_OUTPUT_EDP;
  284. }
  285. static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
  286. static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
  287. static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
  288. static int
  289. cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
  290. {
  291. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  292. int max_lane_count = 4;
  293. if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
  294. max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
  295. switch (max_lane_count) {
  296. case 1: case 2: case 4:
  297. break;
  298. default:
  299. max_lane_count = 4;
  300. }
  301. }
  302. return max_lane_count;
  303. }
  304. static int
  305. cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
  306. {
  307. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  308. int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
  309. switch (max_link_bw) {
  310. case DP_LINK_BW_1_62:
  311. case DP_LINK_BW_2_7:
  312. break;
  313. default:
  314. max_link_bw = DP_LINK_BW_1_62;
  315. break;
  316. }
  317. return max_link_bw;
  318. }
  319. static int
  320. cdv_intel_dp_link_clock(uint8_t link_bw)
  321. {
  322. if (link_bw == DP_LINK_BW_2_7)
  323. return 270000;
  324. else
  325. return 162000;
  326. }
  327. static int
  328. cdv_intel_dp_link_required(int pixel_clock, int bpp)
  329. {
  330. return (pixel_clock * bpp + 7) / 8;
  331. }
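/*
 * Maximum payload the link can carry: link clock times lane count,
 * scaled by 19/20 to leave roughly 5% of margin.
 */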
  332. static int
  333. cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
  334. {
  335. return (max_link_clock * max_lanes * 19) / 20;
  336. }
  337. static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
  338. {
  339. struct drm_device *dev = intel_encoder->base.dev;
  340. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  341. u32 pp;
  342. if (intel_dp->panel_on) {
  343. DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
  344. return;
  345. }
  346. DRM_DEBUG_KMS("\n");
  347. pp = REG_READ(PP_CONTROL);
  348. pp |= EDP_FORCE_VDD;
  349. REG_WRITE(PP_CONTROL, pp);
  350. REG_READ(PP_CONTROL);
  351. msleep(intel_dp->panel_power_up_delay);
  352. }
  353. static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
  354. {
  355. struct drm_device *dev = intel_encoder->base.dev;
  356. u32 pp;
  357. DRM_DEBUG_KMS("\n");
  358. pp = REG_READ(PP_CONTROL);
  359. pp &= ~EDP_FORCE_VDD;
  360. REG_WRITE(PP_CONTROL, pp);
  361. REG_READ(PP_CONTROL);
  362. }
  363. /* Returns true if the panel was already on when called */
  364. static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
  365. {
  366. struct drm_device *dev = intel_encoder->base.dev;
  367. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  368. u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
  369. if (intel_dp->panel_on)
  370. return true;
  371. DRM_DEBUG_KMS("\n");
  372. pp = REG_READ(PP_CONTROL);
  373. pp &= ~PANEL_UNLOCK_MASK;
  374. pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
  375. REG_WRITE(PP_CONTROL, pp);
  376. REG_READ(PP_CONTROL);
  377. if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
  378. DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
  379. intel_dp->panel_on = false;
  380. } else
  381. intel_dp->panel_on = true;
  382. msleep(intel_dp->panel_power_up_delay);
  383. return false;
  384. }
  385. static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder)
  386. {
  387. struct drm_device *dev = intel_encoder->base.dev;
  388. u32 pp, idle_off_mask = PP_ON ;
  389. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  390. DRM_DEBUG_KMS("\n");
  391. pp = REG_READ(PP_CONTROL);
  392. if ((pp & POWER_TARGET_ON) == 0)
  393. return;
  394. intel_dp->panel_on = false;
  395. pp &= ~PANEL_UNLOCK_MASK;
  396. /* ILK workaround: disable reset around power sequence */
  397. pp &= ~POWER_TARGET_ON;
  398. pp &= ~EDP_FORCE_VDD;
  399. pp &= ~EDP_BLC_ENABLE;
  400. REG_WRITE(PP_CONTROL, pp);
  401. REG_READ(PP_CONTROL);
  402. DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
  403. if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
  404. DRM_DEBUG_KMS("Error in turning off Panel\n");
  405. }
  406. msleep(intel_dp->panel_power_cycle_delay);
  407. DRM_DEBUG_KMS("Over\n");
  408. }
  409. static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder)
  410. {
  411. struct drm_device *dev = intel_encoder->base.dev;
  412. u32 pp;
  413. DRM_DEBUG_KMS("\n");
  414. /*
  415. * If we enable the backlight right away following a panel power
  416. * on, we may see slight flicker as the panel syncs with the eDP
  417. * link. So delay a bit to make sure the image is solid before
  418. * allowing it to appear.
  419. */
  420. msleep(300);
  421. pp = REG_READ(PP_CONTROL);
  422. pp |= EDP_BLC_ENABLE;
  423. REG_WRITE(PP_CONTROL, pp);
  424. gma_backlight_enable(dev);
  425. }
  426. static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder)
  427. {
  428. struct drm_device *dev = intel_encoder->base.dev;
  429. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  430. u32 pp;
  431. DRM_DEBUG_KMS("\n");
  432. gma_backlight_disable(dev);
  433. msleep(10);
  434. pp = REG_READ(PP_CONTROL);
  435. pp &= ~EDP_BLC_ENABLE;
  436. REG_WRITE(PP_CONTROL, pp);
  437. msleep(intel_dp->backlight_off_delay);
  438. }
  439. static int
  440. cdv_intel_dp_mode_valid(struct drm_connector *connector,
  441. struct drm_display_mode *mode)
  442. {
  443. struct gma_encoder *encoder = gma_attached_encoder(connector);
  444. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  445. int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
  446. int max_lanes = cdv_intel_dp_max_lane_count(encoder);
  447. struct drm_psb_private *dev_priv = connector->dev->dev_private;
  448. if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
  449. if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
  450. return MODE_PANEL;
  451. if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
  452. return MODE_PANEL;
  453. }
  454. /* only refuse the mode on non-eDP since we have seen some weird eDP panels
  455. which are outside spec tolerances but somehow work by magic */
  456. if (!is_edp(encoder) &&
  457. (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
  458. > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
  459. return MODE_CLOCK_HIGH;
  460. if (is_edp(encoder)) {
  461. if (cdv_intel_dp_link_required(mode->clock, 24)
  462. > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
  463. return MODE_CLOCK_HIGH;
  464. }
  465. if (mode->clock < 10000)
  466. return MODE_CLOCK_LOW;
  467. return MODE_OK;
  468. }
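/*
 * Pack up to four payload bytes, MSB first, into a single 32-bit word
 * for the AUX channel data registers.
 */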
  469. static uint32_t
  470. pack_aux(uint8_t *src, int src_bytes)
  471. {
  472. int i;
  473. uint32_t v = 0;
  474. if (src_bytes > 4)
  475. src_bytes = 4;
  476. for (i = 0; i < src_bytes; i++)
  477. v |= ((uint32_t) src[i]) << ((3-i) * 8);
  478. return v;
  479. }
  480. static void
  481. unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
  482. {
  483. int i;
  484. if (dst_bytes > 4)
  485. dst_bytes = 4;
  486. for (i = 0; i < dst_bytes; i++)
  487. dst[i] = src >> ((3-i) * 8);
  488. }
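/*
 * Run one raw AUX channel transaction: load the request into the data
 * registers, kick off the transfer, poll for completion (retrying as the
 * DP spec requires), check for timeout/receive errors, and unpack any
 * reply bytes into recv. Returns bytes received or a negative error code.
 */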
  489. static int
  490. cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
  491. uint8_t *send, int send_bytes,
  492. uint8_t *recv, int recv_size)
  493. {
  494. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  495. uint32_t output_reg = intel_dp->output_reg;
  496. struct drm_device *dev = encoder->base.dev;
  497. uint32_t ch_ctl = output_reg + 0x10;
  498. uint32_t ch_data = ch_ctl + 4;
  499. int i;
  500. int recv_bytes;
  501. uint32_t status;
  502. uint32_t aux_clock_divider;
  503. int try, precharge;
  504. /* The clock divider is based on the hrawclk
  505. * and should run the AUX channel at 2MHz, so take the
  506. * hrawclk value and divide by 2 and use that.
  507. * The CDV platform uses 200MHz as the hrawclk.
  508. *
  509. */
  510. aux_clock_divider = 200 / 2;
  511. precharge = 4;
  512. if (is_edp(encoder))
  513. precharge = 10;
  514. if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
  515. DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
  516. REG_READ(ch_ctl));
  517. return -EBUSY;
  518. }
  519. /* Must try at least 3 times according to DP spec */
  520. for (try = 0; try < 5; try++) {
  521. /* Load the send data into the aux channel data registers */
  522. for (i = 0; i < send_bytes; i += 4)
  523. REG_WRITE(ch_data + i,
  524. pack_aux(send + i, send_bytes - i));
  525. /* Send the command and wait for it to complete */
  526. REG_WRITE(ch_ctl,
  527. DP_AUX_CH_CTL_SEND_BUSY |
  528. DP_AUX_CH_CTL_TIME_OUT_400us |
  529. (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
  530. (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
  531. (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
  532. DP_AUX_CH_CTL_DONE |
  533. DP_AUX_CH_CTL_TIME_OUT_ERROR |
  534. DP_AUX_CH_CTL_RECEIVE_ERROR);
  535. for (;;) {
  536. status = REG_READ(ch_ctl);
  537. if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
  538. break;
  539. udelay(100);
  540. }
  541. /* Clear done status and any errors */
  542. REG_WRITE(ch_ctl,
  543. status |
  544. DP_AUX_CH_CTL_DONE |
  545. DP_AUX_CH_CTL_TIME_OUT_ERROR |
  546. DP_AUX_CH_CTL_RECEIVE_ERROR);
  547. if (status & DP_AUX_CH_CTL_DONE)
  548. break;
  549. }
  550. if ((status & DP_AUX_CH_CTL_DONE) == 0) {
  551. DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
  552. return -EBUSY;
  553. }
  554. /* Check for timeout or receive error.
  555. * Timeouts occur when the sink is not connected
  556. */
  557. if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
  558. DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
  559. return -EIO;
  560. }
  561. /* Timeouts occur when the device isn't connected, so they're
  562. * "normal" -- don't fill the kernel log with these */
  563. if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
  564. DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
  565. return -ETIMEDOUT;
  566. }
  567. /* Unload any bytes sent back from the other side */
  568. recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
  569. DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
  570. if (recv_bytes > recv_size)
  571. recv_bytes = recv_size;
  572. for (i = 0; i < recv_bytes; i += 4)
  573. unpack_aux(REG_READ(ch_data + i),
  574. recv + i, recv_bytes - i);
  575. return recv_bytes;
  576. }
  577. /* Write data to the aux channel in native mode */
  578. static int
  579. cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
  580. uint16_t address, uint8_t *send, int send_bytes)
  581. {
  582. int ret;
  583. uint8_t msg[20];
  584. int msg_bytes;
  585. uint8_t ack;
  586. if (send_bytes > 16)
  587. return -1;
  588. msg[0] = DP_AUX_NATIVE_WRITE << 4;
  589. msg[1] = address >> 8;
  590. msg[2] = address & 0xff;
  591. msg[3] = send_bytes - 1;
  592. memcpy(&msg[4], send, send_bytes);
  593. msg_bytes = send_bytes + 4;
  594. for (;;) {
  595. ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
  596. if (ret < 0)
  597. return ret;
  598. ack >>= 4;
  599. if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
  600. break;
  601. else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
  602. udelay(100);
  603. else
  604. return -EIO;
  605. }
  606. return send_bytes;
  607. }
  608. /* Write a single byte to the aux channel in native mode */
  609. static int
  610. cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
  611. uint16_t address, uint8_t byte)
  612. {
  613. return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
  614. }
  615. /* read bytes from a native aux channel */
  616. static int
  617. cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
  618. uint16_t address, uint8_t *recv, int recv_bytes)
  619. {
  620. uint8_t msg[4];
  621. int msg_bytes;
  622. uint8_t reply[20];
  623. int reply_bytes;
  624. uint8_t ack;
  625. int ret;
  626. msg[0] = DP_AUX_NATIVE_READ << 4;
  627. msg[1] = address >> 8;
  628. msg[2] = address & 0xff;
  629. msg[3] = recv_bytes - 1;
  630. msg_bytes = 4;
  631. reply_bytes = recv_bytes + 1;
  632. for (;;) {
  633. ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
  634. reply, reply_bytes);
  635. if (ret == 0)
  636. return -EPROTO;
  637. if (ret < 0)
  638. return ret;
  639. ack = reply[0] >> 4;
  640. if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
  641. memcpy(recv, reply + 1, ret - 1);
  642. return ret - 1;
  643. }
  644. else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
  645. udelay(100);
  646. else
  647. return -EIO;
  648. }
  649. }
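/*
 * Single-byte I2C-over-AUX transfer callback used by the old i2c AUX
 * algorithm above: builds the AUX command from the i2c mode and address,
 * retries on DEFER replies, and returns the byte read when applicable.
 */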
  650. static int
  651. cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
  652. uint8_t write_byte, uint8_t *read_byte)
  653. {
  654. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  655. struct cdv_intel_dp *intel_dp = container_of(adapter,
  656. struct cdv_intel_dp,
  657. adapter);
  658. struct gma_encoder *encoder = intel_dp->encoder;
  659. uint16_t address = algo_data->address;
  660. uint8_t msg[5];
  661. uint8_t reply[2];
  662. unsigned retry;
  663. int msg_bytes;
  664. int reply_bytes;
  665. int ret;
  666. /* Set up the command byte */
  667. if (mode & MODE_I2C_READ)
  668. msg[0] = DP_AUX_I2C_READ << 4;
  669. else
  670. msg[0] = DP_AUX_I2C_WRITE << 4;
  671. if (!(mode & MODE_I2C_STOP))
  672. msg[0] |= DP_AUX_I2C_MOT << 4;
  673. msg[1] = address >> 8;
  674. msg[2] = address;
  675. switch (mode) {
  676. case MODE_I2C_WRITE:
  677. msg[3] = 0;
  678. msg[4] = write_byte;
  679. msg_bytes = 5;
  680. reply_bytes = 1;
  681. break;
  682. case MODE_I2C_READ:
  683. msg[3] = 0;
  684. msg_bytes = 4;
  685. reply_bytes = 2;
  686. break;
  687. default:
  688. msg_bytes = 3;
  689. reply_bytes = 1;
  690. break;
  691. }
  692. for (retry = 0; retry < 5; retry++) {
  693. ret = cdv_intel_dp_aux_ch(encoder,
  694. msg, msg_bytes,
  695. reply, reply_bytes);
  696. if (ret < 0) {
  697. DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
  698. return ret;
  699. }
  700. switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
  701. case DP_AUX_NATIVE_REPLY_ACK:
  702. /* I2C-over-AUX Reply field is only valid
  703. * when paired with AUX ACK.
  704. */
  705. break;
  706. case DP_AUX_NATIVE_REPLY_NACK:
  707. DRM_DEBUG_KMS("aux_ch native nack\n");
  708. return -EREMOTEIO;
  709. case DP_AUX_NATIVE_REPLY_DEFER:
  710. udelay(100);
  711. continue;
  712. default:
  713. DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
  714. reply[0]);
  715. return -EREMOTEIO;
  716. }
  717. switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
  718. case DP_AUX_I2C_REPLY_ACK:
  719. if (mode == MODE_I2C_READ) {
  720. *read_byte = reply[1];
  721. }
  722. return reply_bytes - 1;
  723. case DP_AUX_I2C_REPLY_NACK:
  724. DRM_DEBUG_KMS("aux_i2c nack\n");
  725. return -EREMOTEIO;
  726. case DP_AUX_I2C_REPLY_DEFER:
  727. DRM_DEBUG_KMS("aux_i2c defer\n");
  728. udelay(100);
  729. break;
  730. default:
  731. DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
  732. return -EREMOTEIO;
  733. }
  734. }
  735. DRM_ERROR("too many retries, giving up\n");
  736. return -EREMOTEIO;
  737. }
  738. static int
  739. cdv_intel_dp_i2c_init(struct gma_connector *connector,
  740. struct gma_encoder *encoder, const char *name)
  741. {
  742. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  743. int ret;
  744. DRM_DEBUG_KMS("i2c_init %s\n", name);
  745. intel_dp->algo.running = false;
  746. intel_dp->algo.address = 0;
  747. intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
  748. memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
  749. intel_dp->adapter.owner = THIS_MODULE;
  750. intel_dp->adapter.class = I2C_CLASS_DDC;
  751. strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
  752. intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
  753. intel_dp->adapter.algo_data = &intel_dp->algo;
  754. intel_dp->adapter.dev.parent = connector->base.kdev;
  755. if (is_edp(encoder))
  756. cdv_intel_edp_panel_vdd_on(encoder);
  757. ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
  758. if (is_edp(encoder))
  759. cdv_intel_edp_panel_vdd_off(encoder);
  760. return ret;
  761. }
  762. static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
  763. struct drm_display_mode *adjusted_mode)
  764. {
  765. adjusted_mode->hdisplay = fixed_mode->hdisplay;
  766. adjusted_mode->hsync_start = fixed_mode->hsync_start;
  767. adjusted_mode->hsync_end = fixed_mode->hsync_end;
  768. adjusted_mode->htotal = fixed_mode->htotal;
  769. adjusted_mode->vdisplay = fixed_mode->vdisplay;
  770. adjusted_mode->vsync_start = fixed_mode->vsync_start;
  771. adjusted_mode->vsync_end = fixed_mode->vsync_end;
  772. adjusted_mode->vtotal = fixed_mode->vtotal;
  773. adjusted_mode->clock = fixed_mode->clock;
  774. drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
  775. }
  776. static bool
  777. cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
  778. struct drm_display_mode *adjusted_mode)
  779. {
  780. struct drm_psb_private *dev_priv = encoder->dev->dev_private;
  781. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  782. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  783. int lane_count, clock;
  784. int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
  785. int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
  786. static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
  787. int refclock = mode->clock;
  788. int bpp = 24;
  789. if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
  790. cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
  791. refclock = intel_dp->panel_fixed_mode->clock;
  792. bpp = dev_priv->edp.bpp;
  793. }
  794. for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
  795. for (clock = max_clock; clock >= 0; clock--) {
  796. int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
  797. if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
  798. intel_dp->link_bw = bws[clock];
  799. intel_dp->lane_count = lane_count;
  800. adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
  801. DRM_DEBUG_KMS("Display port link bw %02x lane "
  802. "count %d clock %d\n",
  803. intel_dp->link_bw, intel_dp->lane_count,
  804. adjusted_mode->clock);
  805. return true;
  806. }
  807. }
  808. }
  809. if (is_edp(intel_encoder)) {
  810. /* okay we failed just pick the highest */
  811. intel_dp->lane_count = max_lane_count;
  812. intel_dp->link_bw = bws[max_clock];
  813. adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
  814. DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
  815. "count %d clock %d\n",
  816. intel_dp->link_bw, intel_dp->lane_count,
  817. adjusted_mode->clock);
  818. return true;
  819. }
  820. return false;
  821. }
  822. struct cdv_intel_dp_m_n {
  823. uint32_t tu;
  824. uint32_t gmch_m;
  825. uint32_t gmch_n;
  826. uint32_t link_m;
  827. uint32_t link_n;
  828. };
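/*
 * Rescale the num/den ratio to a fixed denominator of 0x800000, preserving
 * the ratio while keeping both values within the range the M/N registers
 * can hold.
 */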
  829. static void
  830. cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
  831. {
  832. /*
  833. while (*num > 0xffffff || *den > 0xffffff) {
  834. *num >>= 1;
  835. *den >>= 1;
  836. }*/
  837. uint64_t value, m;
  838. m = *num;
  839. value = m * (0x800000);
  840. m = do_div(value, *den);
  841. *num = value;
  842. *den = 0x800000;
  843. }
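/*
 * Derive the GMCH data M/N and link M/N values from the bpp, lane count,
 * pixel clock and link clock; the transfer unit (TU) size is fixed at 64.
 */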
  844. static void
  845. cdv_intel_dp_compute_m_n(int bpp,
  846. int nlanes,
  847. int pixel_clock,
  848. int link_clock,
  849. struct cdv_intel_dp_m_n *m_n)
  850. {
  851. m_n->tu = 64;
  852. m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
  853. m_n->gmch_n = link_clock * nlanes;
  854. cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
  855. m_n->link_m = pixel_clock;
  856. m_n->link_n = link_clock;
  857. cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
  858. }
  859. void
  860. cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
  861. struct drm_display_mode *adjusted_mode)
  862. {
  863. struct drm_device *dev = crtc->dev;
  864. struct drm_psb_private *dev_priv = dev->dev_private;
  865. struct drm_mode_config *mode_config = &dev->mode_config;
  866. struct drm_encoder *encoder;
  867. struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
  868. int lane_count = 4, bpp = 24;
  869. struct cdv_intel_dp_m_n m_n;
  870. int pipe = gma_crtc->pipe;
  871. /*
  872. * Find the lane count in the intel_encoder private
  873. */
  874. list_for_each_entry(encoder, &mode_config->encoder_list, head) {
  875. struct gma_encoder *intel_encoder;
  876. struct cdv_intel_dp *intel_dp;
  877. if (encoder->crtc != crtc)
  878. continue;
  879. intel_encoder = to_gma_encoder(encoder);
  880. intel_dp = intel_encoder->dev_priv;
  881. if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
  882. lane_count = intel_dp->lane_count;
  883. break;
  884. } else if (is_edp(intel_encoder)) {
  885. lane_count = intel_dp->lane_count;
  886. bpp = dev_priv->edp.bpp;
  887. break;
  888. }
  889. }
  890. /*
  891. * Compute the GMCH and Link ratios. The bpp here is
  892. * the number of bits per pixel post-LUT, which we always
  893. * set up for 24 (8 bits each of R/G/B).
  894. */
  895. cdv_intel_dp_compute_m_n(bpp, lane_count,
  896. mode->clock, adjusted_mode->clock, &m_n);
  897. {
  898. REG_WRITE(PIPE_GMCH_DATA_M(pipe),
  899. ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
  900. m_n.gmch_m);
  901. REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
  902. REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
  903. REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
  904. }
  905. }
  906. static void
  907. cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
  908. struct drm_display_mode *adjusted_mode)
  909. {
  910. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  911. struct drm_crtc *crtc = encoder->crtc;
  912. struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
  913. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  914. struct drm_device *dev = encoder->dev;
  915. intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
  916. intel_dp->DP |= intel_dp->color_range;
  917. if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
  918. intel_dp->DP |= DP_SYNC_HS_HIGH;
  919. if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
  920. intel_dp->DP |= DP_SYNC_VS_HIGH;
  921. intel_dp->DP |= DP_LINK_TRAIN_OFF;
  922. switch (intel_dp->lane_count) {
  923. case 1:
  924. intel_dp->DP |= DP_PORT_WIDTH_1;
  925. break;
  926. case 2:
  927. intel_dp->DP |= DP_PORT_WIDTH_2;
  928. break;
  929. case 4:
  930. intel_dp->DP |= DP_PORT_WIDTH_4;
  931. break;
  932. }
  933. if (intel_dp->has_audio)
  934. intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
  935. memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
  936. intel_dp->link_configuration[0] = intel_dp->link_bw;
  937. intel_dp->link_configuration[1] = intel_dp->lane_count;
  938. /*
  939. * Check for DPCD version >= 1.1 and enhanced framing support
  940. */
  941. if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
  942. (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
  943. intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
  944. intel_dp->DP |= DP_ENHANCED_FRAMING;
  945. }
  946. /* CPT DP's pipe select is decided in TRANS_DP_CTL */
  947. if (gma_crtc->pipe == 1)
  948. intel_dp->DP |= DP_PIPEB_SELECT;
  949. REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
  950. DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
  951. if (is_edp(intel_encoder)) {
  952. uint32_t pfit_control;
  953. cdv_intel_edp_panel_on(intel_encoder);
  954. if (mode->hdisplay != adjusted_mode->hdisplay ||
  955. mode->vdisplay != adjusted_mode->vdisplay)
  956. pfit_control = PFIT_ENABLE;
  957. else
  958. pfit_control = 0;
  959. pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
  960. REG_WRITE(PFIT_CONTROL, pfit_control);
  961. }
  962. }
  963. /* If the sink supports it, try to set the power state appropriately */
  964. static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
  965. {
  966. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  967. int ret, i;
  968. /* Should have a valid DPCD by this point */
  969. if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
  970. return;
  971. if (mode != DRM_MODE_DPMS_ON) {
  972. ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
  973. DP_SET_POWER_D3);
  974. if (ret != 1)
  975. DRM_DEBUG_DRIVER("failed to write sink power state\n");
  976. } else {
  977. /*
  978. * When turning on, we need to retry for 1ms to give the sink
  979. * time to wake up.
  980. */
  981. for (i = 0; i < 3; i++) {
  982. ret = cdv_intel_dp_aux_native_write_1(encoder,
  983. DP_SET_POWER,
  984. DP_SET_POWER_D0);
  985. if (ret == 1)
  986. break;
  987. udelay(1000);
  988. }
  989. }
  990. }
  991. static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
  992. {
  993. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  994. int edp = is_edp(intel_encoder);
  995. if (edp) {
  996. cdv_intel_edp_backlight_off(intel_encoder);
  997. cdv_intel_edp_panel_off(intel_encoder);
  998. cdv_intel_edp_panel_vdd_on(intel_encoder);
  999. }
  1000. /* Wake up the sink first */
  1001. cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
  1002. cdv_intel_dp_link_down(intel_encoder);
  1003. if (edp)
  1004. cdv_intel_edp_panel_vdd_off(intel_encoder);
  1005. }
  1006. static void cdv_intel_dp_commit(struct drm_encoder *encoder)
  1007. {
  1008. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  1009. int edp = is_edp(intel_encoder);
  1010. if (edp)
  1011. cdv_intel_edp_panel_on(intel_encoder);
  1012. cdv_intel_dp_start_link_train(intel_encoder);
  1013. cdv_intel_dp_complete_link_train(intel_encoder);
  1014. if (edp)
  1015. cdv_intel_edp_backlight_on(intel_encoder);
  1016. }
  1017. static void
  1018. cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
  1019. {
  1020. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  1021. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  1022. struct drm_device *dev = encoder->dev;
  1023. uint32_t dp_reg = REG_READ(intel_dp->output_reg);
  1024. int edp = is_edp(intel_encoder);
  1025. if (mode != DRM_MODE_DPMS_ON) {
  1026. if (edp) {
  1027. cdv_intel_edp_backlight_off(intel_encoder);
  1028. cdv_intel_edp_panel_vdd_on(intel_encoder);
  1029. }
  1030. cdv_intel_dp_sink_dpms(intel_encoder, mode);
  1031. cdv_intel_dp_link_down(intel_encoder);
  1032. if (edp) {
  1033. cdv_intel_edp_panel_vdd_off(intel_encoder);
  1034. cdv_intel_edp_panel_off(intel_encoder);
  1035. }
  1036. } else {
  1037. if (edp)
  1038. cdv_intel_edp_panel_on(intel_encoder);
  1039. cdv_intel_dp_sink_dpms(intel_encoder, mode);
  1040. if (!(dp_reg & DP_PORT_EN)) {
  1041. cdv_intel_dp_start_link_train(intel_encoder);
  1042. cdv_intel_dp_complete_link_train(intel_encoder);
  1043. }
  1044. if (edp)
  1045. cdv_intel_edp_backlight_on(intel_encoder);
  1046. }
  1047. }
  1048. /*
  1049. * Native read with retry for link status and receiver capability reads for
  1050. * cases where the sink may still be asleep.
  1051. */
  1052. static bool
  1053. cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
  1054. uint8_t *recv, int recv_bytes)
  1055. {
  1056. int ret, i;
  1057. /*
  1058. * Sinks are *supposed* to come up within 1ms from an off state,
  1059. * but we're also supposed to retry 3 times per the spec.
  1060. */
  1061. for (i = 0; i < 3; i++) {
  1062. ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
  1063. recv_bytes);
  1064. if (ret == recv_bytes)
  1065. return true;
  1066. udelay(1000);
  1067. }
  1068. return false;
  1069. }
  1070. /*
  1071. * Fetch AUX CH registers 0x202 - 0x207 which contain
  1072. * link status information
  1073. */
  1074. static bool
  1075. cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
  1076. {
  1077. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1078. return cdv_intel_dp_aux_native_read_retry(encoder,
  1079. DP_LANE0_1_STATUS,
  1080. intel_dp->link_status,
  1081. DP_LINK_STATUS_SIZE);
  1082. }
  1083. static uint8_t
  1084. cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1085. int r)
  1086. {
  1087. return link_status[r - DP_LANE0_1_STATUS];
  1088. }
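/*
 * Extract the requested voltage-swing level for one lane from the
 * ADJUST_REQUEST bytes and shift it into TRAINING_LANEx_SET position.
 */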
  1089. static uint8_t
  1090. cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1091. int lane)
  1092. {
  1093. int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
  1094. int s = ((lane & 1) ?
  1095. DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
  1096. DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
  1097. uint8_t l = cdv_intel_dp_link_status(link_status, i);
  1098. return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
  1099. }
  1100. static uint8_t
  1101. cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1102. int lane)
  1103. {
  1104. int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
  1105. int s = ((lane & 1) ?
  1106. DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
  1107. DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
  1108. uint8_t l = cdv_intel_dp_link_status(link_status, i);
  1109. return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
  1110. }
  1111. #if 0
  1112. static char *voltage_names[] = {
  1113. "0.4V", "0.6V", "0.8V", "1.2V"
  1114. };
  1115. static char *pre_emph_names[] = {
  1116. "0dB", "3.5dB", "6dB", "9.5dB"
  1117. };
  1118. static char *link_train_names[] = {
  1119. "pattern 1", "pattern 2", "idle", "off"
  1120. };
  1121. #endif
  1122. #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
  1123. /*
  1124. static uint8_t
  1125. cdv_intel_dp_pre_emphasis_max(uint8_t voltage_swing)
  1126. {
  1127. switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
  1128. case DP_TRAIN_VOLTAGE_SWING_400:
  1129. return DP_TRAIN_PRE_EMPHASIS_6;
  1130. case DP_TRAIN_VOLTAGE_SWING_600:
  1131. return DP_TRAIN_PRE_EMPHASIS_6;
  1132. case DP_TRAIN_VOLTAGE_SWING_800:
  1133. return DP_TRAIN_PRE_EMPHASIS_3_5;
  1134. case DP_TRAIN_VOLTAGE_SWING_1200:
  1135. default:
  1136. return DP_TRAIN_PRE_EMPHASIS_0;
  1137. }
  1138. }
  1139. */
  1140. static void
  1141. cdv_intel_get_adjust_train(struct gma_encoder *encoder)
  1142. {
  1143. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1144. uint8_t v = 0;
  1145. uint8_t p = 0;
  1146. int lane;
  1147. for (lane = 0; lane < intel_dp->lane_count; lane++) {
  1148. uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
  1149. uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
  1150. if (this_v > v)
  1151. v = this_v;
  1152. if (this_p > p)
  1153. p = this_p;
  1154. }
  1155. if (v >= CDV_DP_VOLTAGE_MAX)
  1156. v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
  1157. if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
  1158. p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
  1159. for (lane = 0; lane < 4; lane++)
  1160. intel_dp->train_set[lane] = v | p;
  1161. }
  1162. static uint8_t
  1163. cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1164. int lane)
  1165. {
  1166. int i = DP_LANE0_1_STATUS + (lane >> 1);
  1167. int s = (lane & 1) * 4;
  1168. uint8_t l = cdv_intel_dp_link_status(link_status, i);
  1169. return (l >> s) & 0xf;
  1170. }
  1171. /* Check for clock recovery is done on all channels */
  1172. static bool
  1173. cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
  1174. {
  1175. int lane;
  1176. uint8_t lane_status;
  1177. for (lane = 0; lane < lane_count; lane++) {
  1178. lane_status = cdv_intel_get_lane_status(link_status, lane);
  1179. if ((lane_status & DP_LANE_CR_DONE) == 0)
  1180. return false;
  1181. }
  1182. return true;
  1183. }
  1184. /* Check to see if channel eq is done on all channels */
  1185. #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
  1186. DP_LANE_CHANNEL_EQ_DONE|\
  1187. DP_LANE_SYMBOL_LOCKED)
  1188. static bool
  1189. cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
  1190. {
  1191. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1192. uint8_t lane_align;
  1193. uint8_t lane_status;
  1194. int lane;
  1195. lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
  1196. DP_LANE_ALIGN_STATUS_UPDATED);
  1197. if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
  1198. return false;
  1199. for (lane = 0; lane < intel_dp->lane_count; lane++) {
  1200. lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
  1201. if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
  1202. return false;
  1203. }
  1204. return true;
  1205. }
  1206. static bool
  1207. cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
  1208. uint32_t dp_reg_value,
  1209. uint8_t dp_train_pat)
  1210. {
  1211. struct drm_device *dev = encoder->base.dev;
  1212. int ret;
  1213. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1214. REG_WRITE(intel_dp->output_reg, dp_reg_value);
  1215. REG_READ(intel_dp->output_reg);
  1216. ret = cdv_intel_dp_aux_native_write_1(encoder,
  1217. DP_TRAINING_PATTERN_SET,
  1218. dp_train_pat);
  1219. if (ret != 1) {
  1220. DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
  1221. dp_train_pat);
  1222. return false;
  1223. }
  1224. return true;
  1225. }
  1226. static bool
  1227. cdv_intel_dplink_set_level(struct gma_encoder *encoder,
  1228. uint8_t dp_train_pat)
  1229. {
  1230. int ret;
  1231. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1232. ret = cdv_intel_dp_aux_native_write(encoder,
  1233. DP_TRAINING_LANE0_SET,
  1234. intel_dp->train_set,
  1235. intel_dp->lane_count);
  1236. if (ret != intel_dp->lane_count) {
  1237. DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
  1238. intel_dp->train_set[0], intel_dp->lane_count);
  1239. return false;
  1240. }
  1241. return true;
  1242. }
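/*
 * Program the PHY voltage swing and pre-emphasis for the requested signal
 * level via sideband register writes. With CDV_FAST_LINK_TRAIN defined
 * (as it is above) this returns early and the default settings are kept.
 */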
  1243. static void
  1244. cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
  1245. {
  1246. struct drm_device *dev = encoder->base.dev;
  1247. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1248. struct ddi_regoff *ddi_reg;
  1249. int vswing, premph, index;
  1250. if (intel_dp->output_reg == DP_B)
  1251. ddi_reg = &ddi_DP_train_table[0];
  1252. else
  1253. ddi_reg = &ddi_DP_train_table[1];
  1254. vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
  1255. premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
  1256. DP_TRAIN_PRE_EMPHASIS_SHIFT;
  1257. if (vswing + premph > 3)
  1258. return;
  1259. #ifdef CDV_FAST_LINK_TRAIN
  1260. return;
  1261. #endif
  1262. DRM_DEBUG_KMS("Test2\n");
  1263. //return ;
  1264. cdv_sb_reset(dev);
  1265. /* ;Swing voltage programming
  1266. ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
  1267. cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
  1268. /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
  1269. cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
  1270. /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
  1271. * The VSwing_PreEmph table is also considered based on the vswing/premp
  1272. */
  1273. index = (vswing + premph) * 2;
  1274. if (premph == 1 && vswing == 1) {
  1275. cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
  1276. } else
  1277. cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
  1278. /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
  1279. if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
  1280. cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
  1281. else
  1282. cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
  1283. /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
  1284. /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
  1285. /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
  1286. cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
  1287. /* ;Pre emphasis programming
  1288. * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
  1289. */
  1290. cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
  1291. /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
  1292. index = 2 * premph + 1;
  1293. cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
  1294. return;
  1295. }
  1296. /* Enable corresponding port and start training pattern 1 */
  1297. static void
  1298. cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
  1299. {
  1300. struct drm_device *dev = encoder->base.dev;
  1301. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1302. int i;
  1303. uint8_t voltage;
  1304. bool clock_recovery = false;
  1305. int tries;
  1306. u32 reg;
  1307. uint32_t DP = intel_dp->DP;
  1308. DP |= DP_PORT_EN;
  1309. DP &= ~DP_LINK_TRAIN_MASK;
  1310. reg = DP;
  1311. reg |= DP_LINK_TRAIN_PAT_1;
  1312. /* Enable output, wait for it to become active */
  1313. REG_WRITE(intel_dp->output_reg, reg);
  1314. REG_READ(intel_dp->output_reg);
  1315. gma_wait_for_vblank(dev);
  1316. DRM_DEBUG_KMS("Link config\n");
  1317. /* Write the link configuration data */
  1318. cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
  1319. intel_dp->link_configuration,
  1320. 2);
  1321. memset(intel_dp->train_set, 0, 4);
  1322. voltage = 0;
  1323. tries = 0;
  1324. clock_recovery = false;
  1325. DRM_DEBUG_KMS("Start train\n");
  1326. reg = DP | DP_LINK_TRAIN_PAT_1;
  1327. for (;;) {
  1328. /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
  1329. DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
  1330. intel_dp->train_set[0],
  1331. intel_dp->link_configuration[0],
  1332. intel_dp->link_configuration[1]);
  1333. if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
  1334. DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
  1335. }
  1336. cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
  1337. /* Set training pattern 1 */
  1338. cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
  1339. udelay(200);
  1340. if (!cdv_intel_dp_get_link_status(encoder))
  1341. break;
  1342. DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
  1343. intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
  1344. intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
  1345. if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
  1346. DRM_DEBUG_KMS("PT1 train is done\n");
  1347. clock_recovery = true;
  1348. break;
  1349. }
  1350. /* Check to see if we've tried the max voltage */
  1351. for (i = 0; i < intel_dp->lane_count; i++)
  1352. if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
  1353. break;
  1354. if (i == intel_dp->lane_count)
  1355. break;
  1356. /* Check to see if we've tried the same voltage 5 times */
  1357. if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
  1358. ++tries;
  1359. if (tries == 5)
  1360. break;
  1361. } else
  1362. tries = 0;
  1363. voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
  1364. /* Compute new intel_dp->train_set as requested by target */
  1365. cdv_intel_get_adjust_train(encoder);
  1366. }
  1367. if (!clock_recovery) {
  1368. DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
  1369. }
  1370. intel_dp->DP = DP;
  1371. }
static void
cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;

	DRM_DEBUG_KMS("\n");
	reg = DP | DP_LINK_TRAIN_PAT_2;

	for (;;) {
		DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
			      intel_dp->train_set[0],
			      intel_dp->link_configuration[0],
			      intel_dp->link_configuration[1]);

		/* channel eq pattern */
		if (!cdv_intel_dp_set_link_train(encoder, reg,
						 DP_TRAINING_PATTERN_2)) {
			DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
		}
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			cdv_intel_dp_link_down(encoder);
			break;
		}

		cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);

		cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);

		udelay(1000);
		if (!cdv_intel_dp_get_link_status(encoder))
			break;

		DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
			      intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
			      intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);

		/* Make sure clock is still ok */
		if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			cdv_intel_dp_start_link_train(encoder);
			cr_tries++;
			continue;
		}

		if (cdv_intel_channel_eq_ok(encoder)) {
			DRM_DEBUG_KMS("PT2 train is done\n");
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			cdv_intel_dp_link_down(encoder);
			cdv_intel_dp_start_link_train(encoder);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		cdv_intel_get_adjust_train(encoder);
		++tries;
	}

	reg = DP | DP_LINK_TRAIN_OFF;

	REG_WRITE(intel_dp->output_reg, reg);
	REG_READ(intel_dp->output_reg);
	cdv_intel_dp_aux_native_write_1(encoder,
			DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}
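
/*
 * Idle the link and then turn the DP port off. No-op if the port is
 * already disabled.
 */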
static void
cdv_intel_dp_link_down(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint32_t DP = intel_dp->DP;

	if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	DP &= ~DP_LINK_TRAIN_MASK;
	REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	REG_READ(intel_dp->output_reg);

	msleep(17);

	REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	REG_READ(intel_dp->output_reg);
}
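
/*
 * Probe for a sink by reading the start of the DPCD over the AUX channel.
 * The sink is reported as connected when the read succeeds and the DPCD
 * revision field is non-zero.
 */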
static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;

	status = connector_status_disconnected;
	if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
					 sizeof(intel_dp->dpcd)) == sizeof(intel_dp->dpcd)) {
		if (intel_dp->dpcd[DP_DPCD_REV] != 0)
			status = connector_status_connected;
	}
	if (status == connector_status_connected)
		DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
			      intel_dp->dpcd[0], intel_dp->dpcd[1],
			      intel_dp->dpcd[2], intel_dp->dpcd[3]);
	return status;
}

/**
 * Detect whether a DP sink is present by probing its DPCD over the
 * AUX channel.
 *
 * \return connector_status_connected if a sink responds.
 * \return connector_status_disconnected otherwise.
 */
static enum drm_connector_status
cdv_intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;
	struct edid *edid = NULL;
	int edp = is_edp(encoder);

	intel_dp->has_audio = false;

	if (edp)
		cdv_intel_edp_panel_vdd_on(encoder);
	status = cdv_dp_detect(encoder);
	if (status != connector_status_connected) {
		if (edp)
			cdv_intel_edp_panel_vdd_off(encoder);
		return status;
	}

	if (intel_dp->force_audio) {
		intel_dp->has_audio = intel_dp->force_audio > 0;
	} else {
		edid = drm_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			kfree(edid);
		}
	}
	if (edp)
		cdv_intel_edp_panel_vdd_off(encoder);

	return connector_status_connected;
}
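
/*
 * Build the connector mode list from the sink's EDID. For eDP panels the
 * preferred EDID mode (or the VBT LVDS mode as a fallback) is cached as
 * the fixed panel mode.
 */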
static int cdv_intel_dp_get_modes(struct drm_connector *connector)
{
	struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	struct edid *edid = NULL;
	int ret = 0;
	int edp = is_edp(intel_encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		drm_mode_connector_update_edid_property(connector, edid);
		ret = drm_add_edid_modes(connector, edid);
		kfree(edid);
	}

	if (is_edp(intel_encoder)) {
		struct drm_device *dev = connector->dev;
		struct drm_psb_private *dev_priv = dev->dev_private;

		cdv_intel_edp_panel_vdd_off(intel_encoder);
		if (ret) {
			if (edp && !intel_dp->panel_fixed_mode) {
				struct drm_display_mode *newmode;

				list_for_each_entry(newmode, &connector->probed_modes,
						    head) {
					if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
						intel_dp->panel_fixed_mode =
							drm_mode_duplicate(dev, newmode);
						break;
					}
				}
			}
			return ret;
		}
		if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode != NULL) {
			struct drm_display_mode *mode;

			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	return ret;
}
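
/* Re-read the EDID to check whether the attached sink supports audio. */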
static bool
cdv_intel_dp_detect_audio(struct drm_connector *connector)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	struct edid *edid;
	bool has_audio = false;
	int edp = is_edp(encoder);

	if (edp)
		cdv_intel_edp_panel_vdd_on(encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);
		kfree(edid);
	}
	if (edp)
		cdv_intel_edp_panel_vdd_off(encoder);

	return has_audio;
}
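
/*
 * Connector property handler for the force-audio and broadcast RGB
 * properties. A successful change triggers a mode set on the attached CRTC.
 */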
static int
cdv_intel_dp_set_property(struct drm_connector *connector,
			  struct drm_property *property,
			  uint64_t val)
{
	struct drm_psb_private *dev_priv = connector->dev->dev_private;
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int ret;

	ret = drm_object_property_set_value(&connector->base, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = cdv_intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (encoder->base.crtc) {
		struct drm_crtc *crtc = encoder->base.crtc;

		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->primary->fb);
	}

	return 0;
}
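
/* Free the connector and, for eDP, the cached fixed panel mode. */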
static void
cdv_intel_dp_destroy(struct drm_connector *connector)
{
	struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;

	if (is_edp(gma_encoder)) {
		/* cdv_intel_panel_destroy_backlight(connector->dev); */
		if (intel_dp->panel_fixed_mode) {
			kfree(intel_dp->panel_fixed_mode);
			intel_dp->panel_fixed_mode = NULL;
		}
	}
	i2c_del_adapter(&intel_dp->adapter);
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void cdv_intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}

static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
	.dpms = cdv_intel_dp_dpms,
	.mode_fixup = cdv_intel_dp_mode_fixup,
	.prepare = cdv_intel_dp_prepare,
	.mode_set = cdv_intel_dp_mode_set,
	.commit = cdv_intel_dp_commit,
};

static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = cdv_intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = cdv_intel_dp_set_property,
	.destroy = cdv_intel_dp_destroy,
};

static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
	.get_modes = cdv_intel_dp_get_modes,
	.mode_valid = cdv_intel_dp_mode_valid,
	.best_encoder = gma_best_encoder,
};

static const struct drm_encoder_funcs cdv_intel_dp_enc_funcs = {
	.destroy = cdv_intel_dp_encoder_destroy,
};

static void cdv_intel_dp_add_properties(struct drm_connector *connector)
{
	cdv_intel_attach_force_audio_property(connector);
	cdv_intel_attach_broadcast_rgb_property(connector);
}

/* check the VBT to see whether the eDP is on DP-D port */
static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
{
	struct drm_psb_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPC &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/* Cedarview display clock gating
 *
 * We need to disable this to get correct behaviour while enabling
 * DP/eDP. TODO - investigate if we can turn it back to normality
 * after enabling.
 */
static void cdv_disable_intel_clock_gating(struct drm_device *dev)
{
	u32 reg_value;

	reg_value = REG_READ(DSPCLK_GATE_D);

	reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
		      DPUNIT_PIPEA_GATE_DISABLE |
		      DPCUNIT_CLOCK_GATE_DISABLE |
		      DPLSUNIT_CLOCK_GATE_DISABLE |
		      DPOUNIT_CLOCK_GATE_DISABLE |
		      DPIOUNIT_CLOCK_GATE_DISABLE);

	REG_WRITE(DSPCLK_GATE_D, reg_value);

	udelay(500);
}
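
/*
 * Register a DP/eDP output on the given port: allocate the encoder,
 * connector and private state, hook up the DRM callbacks and the DDC bus,
 * and, for eDP, read the panel power sequencing delays and probe the DPCD.
 */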
void
cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
{
	struct gma_encoder *gma_encoder;
	struct gma_connector *gma_connector;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct cdv_intel_dp *intel_dp;
	const char *name = NULL;
	int type = DRM_MODE_CONNECTOR_DisplayPort;

	gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
	if (!gma_encoder)
		return;
	gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
	if (!gma_connector)
		goto err_connector;
	intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
	if (!intel_dp)
		goto err_priv;

	if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
		type = DRM_MODE_CONNECTOR_eDP;

	connector = &gma_connector->base;
	encoder = &gma_encoder->base;

	drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
	drm_encoder_init(dev, encoder, &cdv_intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS);

	gma_connector_attach_encoder(gma_connector, gma_encoder);

	if (type == DRM_MODE_CONNECTOR_DisplayPort)
		gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	else
		gma_encoder->type = INTEL_OUTPUT_EDP;

	gma_encoder->dev_priv = intel_dp;
	intel_dp->encoder = gma_encoder;
	intel_dp->output_reg = output_reg;

	drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
	drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	drm_connector_register(connector);

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_B:
		name = "DPDDC-B";
		gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
		break;
	case DP_C:
		name = "DPDDC-C";
		gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
		break;
	}

	cdv_disable_intel_clock_gating(dev);

	cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
	/* FIXME: fail check */
	cdv_intel_dp_add_properties(connector);

	if (is_edp(gma_encoder)) {
		int ret;
		struct edp_power_seq cur;
		u32 pp_on, pp_off, pp_div;
		u32 pwm_ctrl;

		pp_on = REG_READ(PP_CONTROL);
		pp_on &= ~PANEL_UNLOCK_MASK;
		pp_on |= PANEL_UNLOCK_REGS;

		REG_WRITE(PP_CONTROL, pp_on);

		pwm_ctrl = REG_READ(BLC_PWM_CTL2);
		pwm_ctrl |= PWM_PIPE_B;
		REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);

		pp_on = REG_READ(PP_ON_DELAYS);
		pp_off = REG_READ(PP_OFF_DELAYS);
		pp_div = REG_READ(PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT);

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
		intel_dp->backlight_on_delay = cur.t8 / 10;
		intel_dp->backlight_off_delay = cur.t9 / 10;
		intel_dp->panel_power_down_delay = cur.t10 / 10;
		intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		cdv_intel_edp_panel_vdd_on(gma_encoder);
		ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
						   intel_dp->dpcd,
						   sizeof(intel_dp->dpcd));
		cdv_intel_edp_panel_vdd_off(gma_encoder);
		if (ret == 0) {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			cdv_intel_dp_encoder_destroy(encoder);
			cdv_intel_dp_destroy(connector);
			goto err_priv;
		} else {
			DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
				      intel_dp->dpcd[0], intel_dp->dpcd[1],
				      intel_dp->dpcd[2], intel_dp->dpcd[3]);
		}

		/* The CDV reference driver moves panel backlight setup into the
		 * displays that have a backlight: this is a good idea and one we
		 * should probably adopt, but we need to migrate all the drivers
		 * before we can do that.
		 */
		/* cdv_intel_panel_setup_backlight(dev); */
	}

	return;

err_priv:
	kfree(gma_connector);
err_connector:
	kfree(gma_encoder);
}