intel_dpio_phy.c

/*
 * Copyright © 2014-2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "intel_drv.h"

/**
 * DOC: DPIO
 *
 * VLV, CHV and BXT have slightly peculiar display PHYs for driving DP/HDMI
 * ports. DPIO is the name given to such a display PHY. These PHYs
 * don't follow the standard programming model using direct MMIO
 * registers, and instead their registers must be accessed through IOSF
 * sideband. VLV has one such PHY for driving ports B and C, and CHV
 * adds another PHY for driving port D. Each PHY responds to a specific
 * IOSF-SB port.
 *
 * Each display PHY is made up of one or two channels. Each channel
 * houses a common lane part which contains the PLL and other common
 * logic. CH0 common lane also contains the IOSF-SB logic for the
 * Common Register Interface (CRI) ie. the DPIO registers. CRI clock
 * must be running when any DPIO registers are accessed.
 *
 * In addition to having their own registers, the PHYs are also
 * controlled through some dedicated signals from the display
 * controller. These include PLL reference clock enable, PLL enable,
 * and CRI clock selection, for example.
 *
 * Each channel also has two splines (also called data lanes), and
 * each spline is made up of one Physical Access Coding Sub-Layer
 * (PCS) block and two TX lanes. So each channel has two PCS blocks
 * and four TX lanes. The TX lanes are used as DP lanes or TMDS
 * data/clock pairs depending on the output type.
 *
 * Additionally the PHY also contains an AUX lane with AUX blocks
 * for each channel. This is used for DP AUX communication, but
 * this fact isn't really relevant for the driver since AUX is
 * controlled from the display controller side. No DPIO registers
 * need to be accessed during AUX communication.
 *
 * Generally on VLV/CHV the common lane corresponds to the pipe and
 * the spline (PCS/TX) corresponds to the port.
 *
 * For dual channel PHY (VLV/CHV):
 *
 * pipe A == CMN/PLL/REF CH0
 *
 * pipe B == CMN/PLL/REF CH1
 *
 * port B == PCS/TX CH0
 *
 * port C == PCS/TX CH1
 *
 * This is especially important when we cross the streams
 * ie. drive port B with pipe B, or port C with pipe A.
 *
 * For single channel PHY (CHV):
 *
 * pipe C == CMN/PLL/REF CH0
 *
 * port D == PCS/TX CH0
 *
 * On BXT the entire PHY channel corresponds to the port. That means
 * the PLL is also now associated with the port rather than the pipe,
 * and so the clock needs to be routed to the appropriate transcoder.
 * Port A PLL is directly connected to transcoder EDP and port B/C
 * PLLs can be routed to any transcoder A/B/C.
 *
 * Note: DDI0 is digital port B, DDI1 is digital port C, and DDI2 is
 * digital port D (CHV) or port A (BXT). ::
 *
 *
 *     Dual channel PHY (VLV/CHV/BXT)
 *     ---------------------------------
 *     |      CH0      |      CH1      |
 *     |  CMN/PLL/REF  |  CMN/PLL/REF  |
 *     |---------------|---------------| Display PHY
 *     | PCS01 | PCS23 | PCS01 | PCS23 |
 *     |-------|-------|-------|-------|
 *     |TX0|TX1|TX2|TX3|TX0|TX1|TX2|TX3|
 *     ---------------------------------
 *     |     DDI0      |     DDI1      | DP/HDMI ports
 *     ---------------------------------
 *
 *     Single channel PHY (CHV/BXT)
 *     -----------------
 *     |      CH0      |
 *     |  CMN/PLL/REF  |
 *     |---------------| Display PHY
 *     | PCS01 | PCS23 |
 *     |-------|-------|
 *     |TX0|TX1|TX2|TX3|
 *     -----------------
 *     |     DDI2      | DP/HDMI port
 *     -----------------
 */

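/*
 * Check whether a BXT DDI PHY is fully powered up: display power for the
 * PHY must be on, the PHY must report power-good (with the reserved bit
 * clear), GRC calibration must be done on PHY1, common reset must be
 * deasserted, and no port attached to the PHY may have its common lane
 * powered down.
 */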
bool bxt_ddi_phy_is_enabled(struct drm_i915_private *dev_priv,
                            enum dpio_phy phy)
{
        enum port port;

        if (!(I915_READ(BXT_P_CR_GT_DISP_PWRON) & GT_DISPLAY_POWER_ON(phy)))
                return false;

        if ((I915_READ(BXT_PORT_CL1CM_DW0(phy)) &
             (PHY_POWER_GOOD | PHY_RESERVED)) != PHY_POWER_GOOD) {
                DRM_DEBUG_DRIVER("DDI PHY %d powered, but power hasn't settled\n",
                                 phy);
                return false;
        }

        if (phy == DPIO_PHY1 &&
            !(I915_READ(BXT_PORT_REF_DW3(DPIO_PHY1)) & GRC_DONE)) {
                DRM_DEBUG_DRIVER("DDI PHY 1 powered, but GRC isn't done\n");
                return false;
        }

        if (!(I915_READ(BXT_PHY_CTL_FAMILY(phy)) & COMMON_RESET_DIS)) {
                DRM_DEBUG_DRIVER("DDI PHY %d powered, but still in reset\n",
                                 phy);
                return false;
        }

        for_each_port_masked(port,
                             phy == DPIO_PHY0 ? BIT(PORT_B) | BIT(PORT_C) :
                                                BIT(PORT_A)) {
                u32 tmp = I915_READ(BXT_PHY_CTL(port));

                if (tmp & BXT_PHY_CMNLANE_POWERDOWN_ACK) {
                        DRM_DEBUG_DRIVER("DDI PHY %d powered, but common lane "
                                         "for port %c powered down "
                                         "(PHY_CTL %08x)\n",
                                         phy, port_name(port), tmp);
                        return false;
                }
        }

        return true;
}

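/*
 * Read back the GRC (resistor calibration) code that the hardware computed
 * for the given PHY. bxt_ddi_phy_init() uses this to copy PHY1's value to
 * PHY0, which has no RCOMP resistor of its own, and to cache it for state
 * verification.
 */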
static u32 bxt_get_grc(struct drm_i915_private *dev_priv, enum dpio_phy phy)
{
        u32 val = I915_READ(BXT_PORT_REF_DW6(phy));

        return (val & GRC_CODE_MASK) >> GRC_CODE_SHIFT;
}

static void bxt_phy_wait_grc_done(struct drm_i915_private *dev_priv,
                                  enum dpio_phy phy)
{
        if (intel_wait_for_register(dev_priv,
                                    BXT_PORT_REF_DW3(phy),
                                    GRC_DONE, GRC_DONE,
                                    10))
                DRM_ERROR("timeout waiting for PHY%d GRC\n", phy);
}

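/*
 * Power up and program a BXT DDI PHY: turn on display power, wait for
 * power-good, program the Rcomp code offsets and power gating, copy the
 * GRC code from PHY1 to PHY0, and deassert the common reset. A PHY that is
 * already enabled with a valid state is left untouched.
 */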
void bxt_ddi_phy_init(struct drm_i915_private *dev_priv, enum dpio_phy phy)
{
        u32 val;

        if (bxt_ddi_phy_is_enabled(dev_priv, phy)) {
                /* Still read out the GRC value for state verification */
                if (phy == DPIO_PHY0)
                        dev_priv->bxt_phy_grc = bxt_get_grc(dev_priv, phy);

                if (bxt_ddi_phy_verify_state(dev_priv, phy)) {
                        DRM_DEBUG_DRIVER("DDI PHY %d already enabled, "
                                         "won't reprogram it\n", phy);
                        return;
                }

                DRM_DEBUG_DRIVER("DDI PHY %d enabled with invalid state, "
                                 "force reprogramming it\n", phy);
        }

        val = I915_READ(BXT_P_CR_GT_DISP_PWRON);
        val |= GT_DISPLAY_POWER_ON(phy);
        I915_WRITE(BXT_P_CR_GT_DISP_PWRON, val);

        /*
         * The PHY registers start out inaccessible and respond to reads with
         * all 1s. Eventually they become accessible as they power up, then
         * the reserved bit will give the default 0. Poll on the reserved bit
         * becoming 0 to find when the PHY is accessible.
         * HW team confirmed that the time to reach phypowergood status is
         * anywhere between 50 us and 100us.
         */
        if (wait_for_us(((I915_READ(BXT_PORT_CL1CM_DW0(phy)) &
                          (PHY_RESERVED | PHY_POWER_GOOD)) == PHY_POWER_GOOD), 100)) {
                DRM_ERROR("timeout during PHY%d power on\n", phy);
        }

        /* Program PLL Rcomp code offset */
        val = I915_READ(BXT_PORT_CL1CM_DW9(phy));
        val &= ~IREF0RC_OFFSET_MASK;
        val |= 0xE4 << IREF0RC_OFFSET_SHIFT;
        I915_WRITE(BXT_PORT_CL1CM_DW9(phy), val);

        val = I915_READ(BXT_PORT_CL1CM_DW10(phy));
        val &= ~IREF1RC_OFFSET_MASK;
        val |= 0xE4 << IREF1RC_OFFSET_SHIFT;
        I915_WRITE(BXT_PORT_CL1CM_DW10(phy), val);

        /* Program power gating */
        val = I915_READ(BXT_PORT_CL1CM_DW28(phy));
        val |= OCL1_POWER_DOWN_EN | DW28_OLDO_DYN_PWR_DOWN_EN |
                SUS_CLK_CONFIG;
        I915_WRITE(BXT_PORT_CL1CM_DW28(phy), val);

        if (phy == DPIO_PHY0) {
                val = I915_READ(BXT_PORT_CL2CM_DW6_BC);
                val |= DW6_OLDO_DYN_PWR_DOWN_EN;
                I915_WRITE(BXT_PORT_CL2CM_DW6_BC, val);
        }

        val = I915_READ(BXT_PORT_CL1CM_DW30(phy));
        val &= ~OCL2_LDOFUSE_PWR_DIS;
        /*
         * On PHY1 disable power on the second channel, since no port is
         * connected there. On PHY0 both channels have a port, so leave it
         * enabled.
         * TODO: port C is only connected on BXT-P, so on BXT0/1 we should
         * power down the second channel on PHY0 as well.
         *
         * FIXME: Clarify programming of the following, the register is
         * read-only with bit 6 fixed at 0 at least in stepping A.
         */
        if (phy == DPIO_PHY1)
                val |= OCL2_LDOFUSE_PWR_DIS;
        I915_WRITE(BXT_PORT_CL1CM_DW30(phy), val);

        if (phy == DPIO_PHY0) {
                uint32_t grc_code;
                /*
                 * PHY0 isn't connected to an RCOMP resistor so copy over
                 * the corresponding calibrated value from PHY1, and disable
                 * the automatic calibration on PHY0.
                 */
                val = dev_priv->bxt_phy_grc = bxt_get_grc(dev_priv, DPIO_PHY1);
                grc_code = val << GRC_CODE_FAST_SHIFT |
                           val << GRC_CODE_SLOW_SHIFT |
                           val;
                I915_WRITE(BXT_PORT_REF_DW6(DPIO_PHY0), grc_code);

                val = I915_READ(BXT_PORT_REF_DW8(DPIO_PHY0));
                val |= GRC_DIS | GRC_RDY_OVRD;
                I915_WRITE(BXT_PORT_REF_DW8(DPIO_PHY0), val);
        }

        val = I915_READ(BXT_PHY_CTL_FAMILY(phy));
        val |= COMMON_RESET_DIS;
        I915_WRITE(BXT_PHY_CTL_FAMILY(phy), val);

        if (phy == DPIO_PHY1)
                bxt_phy_wait_grc_done(dev_priv, DPIO_PHY1);
}

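/*
 * Undo bxt_ddi_phy_init(): put the PHY back into common reset and cut its
 * display power.
 */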
void bxt_ddi_phy_uninit(struct drm_i915_private *dev_priv, enum dpio_phy phy)
{
        uint32_t val;

        val = I915_READ(BXT_PHY_CTL_FAMILY(phy));
        val &= ~COMMON_RESET_DIS;
        I915_WRITE(BXT_PHY_CTL_FAMILY(phy), val);

        val = I915_READ(BXT_P_CR_GT_DISP_PWRON);
        val &= ~GT_DISPLAY_POWER_ON(phy);
        I915_WRITE(BXT_P_CR_GT_DISP_PWRON, val);
}

static bool __printf(6, 7)
__phy_reg_verify_state(struct drm_i915_private *dev_priv, enum dpio_phy phy,
                       i915_reg_t reg, u32 mask, u32 expected,
                       const char *reg_fmt, ...)
{
        struct va_format vaf;
        va_list args;
        u32 val;

        val = I915_READ(reg);
        if ((val & mask) == expected)
                return true;

        va_start(args, reg_fmt);
        vaf.fmt = reg_fmt;
        vaf.va = &args;

        DRM_DEBUG_DRIVER("DDI PHY %d reg %pV [%08x] state mismatch: "
                         "current %08x, expected %08x (mask %08x)\n",
                         phy, &vaf, reg.reg, val, (val & ~mask) | expected,
                         mask);

        va_end(args);

        return false;
}

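/*
 * Compare the current PHY register state against the values programmed by
 * bxt_ddi_phy_init(), logging each mismatch. Returns false if the PHY is
 * disabled or any verified register deviates from the expected value.
 */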
bool bxt_ddi_phy_verify_state(struct drm_i915_private *dev_priv,
                              enum dpio_phy phy)
{
        uint32_t mask;
        bool ok;

#define _CHK(reg, mask, exp, fmt, ...)                                  \
        __phy_reg_verify_state(dev_priv, phy, reg, mask, exp, fmt,      \
                               ## __VA_ARGS__)

        if (!bxt_ddi_phy_is_enabled(dev_priv, phy))
                return false;

        ok = true;

        /* PLL Rcomp code offset */
        ok &= _CHK(BXT_PORT_CL1CM_DW9(phy),
                   IREF0RC_OFFSET_MASK, 0xe4 << IREF0RC_OFFSET_SHIFT,
                   "BXT_PORT_CL1CM_DW9(%d)", phy);
        ok &= _CHK(BXT_PORT_CL1CM_DW10(phy),
                   IREF1RC_OFFSET_MASK, 0xe4 << IREF1RC_OFFSET_SHIFT,
                   "BXT_PORT_CL1CM_DW10(%d)", phy);

        /* Power gating */
        mask = OCL1_POWER_DOWN_EN | DW28_OLDO_DYN_PWR_DOWN_EN | SUS_CLK_CONFIG;
        ok &= _CHK(BXT_PORT_CL1CM_DW28(phy), mask, mask,
                   "BXT_PORT_CL1CM_DW28(%d)", phy);

        if (phy == DPIO_PHY0)
                ok &= _CHK(BXT_PORT_CL2CM_DW6_BC,
                           DW6_OLDO_DYN_PWR_DOWN_EN, DW6_OLDO_DYN_PWR_DOWN_EN,
                           "BXT_PORT_CL2CM_DW6_BC");

        /*
         * TODO: Verify BXT_PORT_CL1CM_DW30 bit OCL2_LDOFUSE_PWR_DIS,
         * at least on stepping A this bit is read-only and fixed at 0.
         */

        if (phy == DPIO_PHY0) {
                u32 grc_code = dev_priv->bxt_phy_grc;

                grc_code = grc_code << GRC_CODE_FAST_SHIFT |
                           grc_code << GRC_CODE_SLOW_SHIFT |
                           grc_code;
                mask = GRC_CODE_FAST_MASK | GRC_CODE_SLOW_MASK |
                       GRC_CODE_NOM_MASK;
                ok &= _CHK(BXT_PORT_REF_DW6(DPIO_PHY0), mask, grc_code,
                           "BXT_PORT_REF_DW6(%d)", DPIO_PHY0);

                mask = GRC_DIS | GRC_RDY_OVRD;
                ok &= _CHK(BXT_PORT_REF_DW8(DPIO_PHY0), mask, mask,
                           "BXT_PORT_REF_DW8(%d)", DPIO_PHY0);
        }

        return ok;
#undef _CHK
}

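/*
 * Compute the mask of TX lanes that get the latency optimization bit for
 * the given lane count: none for one lane, lanes 0 and 2 for two lanes,
 * and lanes 0, 2 and 3 for four lanes.
 */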
uint8_t
bxt_ddi_phy_calc_lane_lat_optim_mask(struct intel_encoder *encoder,
                                     uint8_t lane_count)
{
        switch (lane_count) {
        case 1:
                return 0;
        case 2:
                return BIT(2) | BIT(0);
        case 4:
                return BIT(3) | BIT(2) | BIT(0);
        default:
                MISSING_CASE(lane_count);

                return 0;
        }
}

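/*
 * Write the per-lane LATENCY_OPTIM bits in BXT_PORT_TX_DW14 for the
 * encoder's port; the getter below reads the mask back from the hardware.
 */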
void bxt_ddi_phy_set_lane_optim_mask(struct intel_encoder *encoder,
                                     uint8_t lane_lat_optim_mask)
{
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct drm_i915_private *dev_priv = to_i915(dport->base.base.dev);
        enum port port = dport->port;
        int lane;

        for (lane = 0; lane < 4; lane++) {
                u32 val = I915_READ(BXT_PORT_TX_DW14_LN(port, lane));

                /*
                 * Note that on CHV this flag is called UPAR, but has
                 * the same function.
                 */
                val &= ~LATENCY_OPTIM;
                if (lane_lat_optim_mask & BIT(lane))
                        val |= LATENCY_OPTIM;

                I915_WRITE(BXT_PORT_TX_DW14_LN(port, lane), val);
        }
}

uint8_t
bxt_ddi_phy_get_lane_lat_optim_mask(struct intel_encoder *encoder)
{
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct drm_i915_private *dev_priv = to_i915(dport->base.base.dev);
        enum port port = dport->port;
        int lane;
        uint8_t mask;

        mask = 0;
        for (lane = 0; lane < 4; lane++) {
                u32 val = I915_READ(BXT_PORT_TX_DW14_LN(port, lane));

                if (val & LATENCY_OPTIM)
                        mask |= BIT(lane);
        }

        return mask;
}

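/*
 * Program voltage swing and de-emphasis for a CHV port: clear the swing
 * calculation init bits, zero the TX margins, write the de-emphasis and
 * margin values for every active lane, optionally enable the unique
 * transition scale, and finally restart the swing calculation.
 */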
void chv_set_phy_signal_level(struct intel_encoder *encoder,
                              u32 deemph_reg_value, u32 margin_reg_value,
                              bool uniq_trans_scale)
{
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct intel_crtc *intel_crtc = to_intel_crtc(dport->base.base.crtc);
        enum dpio_channel ch = vlv_dport_to_channel(dport);
        enum pipe pipe = intel_crtc->pipe;
        u32 val;
        int i;

        mutex_lock(&dev_priv->sb_lock);

        /* Clear calc init */
        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW10(ch));
        val &= ~(DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3);
        val &= ~(DPIO_PCS_TX1DEEMP_MASK | DPIO_PCS_TX2DEEMP_MASK);
        val |= DPIO_PCS_TX1DEEMP_9P5 | DPIO_PCS_TX2DEEMP_9P5;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW10(ch), val);

        if (intel_crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW10(ch));
                val &= ~(DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3);
                val &= ~(DPIO_PCS_TX1DEEMP_MASK | DPIO_PCS_TX2DEEMP_MASK);
                val |= DPIO_PCS_TX1DEEMP_9P5 | DPIO_PCS_TX2DEEMP_9P5;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW10(ch), val);
        }

        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW9(ch));
        val &= ~(DPIO_PCS_TX1MARGIN_MASK | DPIO_PCS_TX2MARGIN_MASK);
        val |= DPIO_PCS_TX1MARGIN_000 | DPIO_PCS_TX2MARGIN_000;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW9(ch), val);

        if (intel_crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW9(ch));
                val &= ~(DPIO_PCS_TX1MARGIN_MASK | DPIO_PCS_TX2MARGIN_MASK);
                val |= DPIO_PCS_TX1MARGIN_000 | DPIO_PCS_TX2MARGIN_000;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW9(ch), val);
        }

        /* Program swing deemph */
        for (i = 0; i < intel_crtc->config->lane_count; i++) {
                val = vlv_dpio_read(dev_priv, pipe, CHV_TX_DW4(ch, i));
                val &= ~DPIO_SWING_DEEMPH9P5_MASK;
                val |= deemph_reg_value << DPIO_SWING_DEEMPH9P5_SHIFT;
                vlv_dpio_write(dev_priv, pipe, CHV_TX_DW4(ch, i), val);
        }

        /* Program swing margin */
        for (i = 0; i < intel_crtc->config->lane_count; i++) {
                val = vlv_dpio_read(dev_priv, pipe, CHV_TX_DW2(ch, i));

                val &= ~DPIO_SWING_MARGIN000_MASK;
                val |= margin_reg_value << DPIO_SWING_MARGIN000_SHIFT;

                /*
                 * Supposedly this value shouldn't matter when unique transition
                 * scale is disabled, but in fact it does matter. Let's just
                 * always program the same value and hope it's OK.
                 */
                val &= ~(0xff << DPIO_UNIQ_TRANS_SCALE_SHIFT);
                val |= 0x9a << DPIO_UNIQ_TRANS_SCALE_SHIFT;

                vlv_dpio_write(dev_priv, pipe, CHV_TX_DW2(ch, i), val);
        }

        /*
         * The document said it needs to set bit 27 for ch0 and bit 26
         * for ch1. Might be a typo in the doc.
         * For now, for this unique transition scale selection, set bit
         * 27 for ch0 and ch1.
         */
        for (i = 0; i < intel_crtc->config->lane_count; i++) {
                val = vlv_dpio_read(dev_priv, pipe, CHV_TX_DW3(ch, i));
                if (uniq_trans_scale)
                        val |= DPIO_TX_UNIQ_TRANS_SCALE_EN;
                else
                        val &= ~DPIO_TX_UNIQ_TRANS_SCALE_EN;
                vlv_dpio_write(dev_priv, pipe, CHV_TX_DW3(ch, i), val);
        }

        /* Start swing calculation */
        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW10(ch));
        val |= DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW10(ch), val);

        if (intel_crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW10(ch));
                val |= DPIO_PCS_SWING_CALC_TX0_TX2 | DPIO_PCS_SWING_CALC_TX1_TX3;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW10(ch), val);
        }

        mutex_unlock(&dev_priv->sb_lock);
}

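/*
 * Assert or deassert the data lane soft reset for the encoder's channel,
 * covering both PCS groups when more than two lanes are in use.
 */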
void chv_data_lane_soft_reset(struct intel_encoder *encoder,
                              bool reset)
{
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
        enum dpio_channel ch = vlv_dport_to_channel(enc_to_dig_port(&encoder->base));
        struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
        enum pipe pipe = crtc->pipe;
        uint32_t val;

        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW0(ch));
        if (reset)
                val &= ~(DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET);
        else
                val |= DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW0(ch), val);

        if (crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW0(ch));
                if (reset)
                        val &= ~(DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET);
                else
                        val |= DPIO_PCS_TX_LANE2_RESET | DPIO_PCS_TX_LANE1_RESET;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW0(ch), val);
        }

        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW1(ch));
        val |= CHV_PCS_REQ_SOFTRESET_EN;
        if (reset)
                val &= ~DPIO_PCS_CLK_SOFT_RESET;
        else
                val |= DPIO_PCS_CLK_SOFT_RESET;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW1(ch), val);

        if (crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW1(ch));
                val |= CHV_PCS_REQ_SOFTRESET_EN;
                if (reset)
                        val &= ~DPIO_PCS_CLK_SOFT_RESET;
                else
                        val |= DPIO_PCS_CLK_SOFT_RESET;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW1(ch), val);
        }
}

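/*
 * Prepare the CHV PHY before enabling the PLL: power up the common lanes
 * (including the CL2 override needed when driving port B from pipe B),
 * assert the data lane reset, and program the left/right clock
 * distribution and clock channel usage for this pipe/port combination.
 */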
void chv_phy_pre_pll_enable(struct intel_encoder *encoder)
{
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct drm_device *dev = encoder->base.dev;
        struct drm_i915_private *dev_priv = to_i915(dev);
        struct intel_crtc *intel_crtc =
                to_intel_crtc(encoder->base.crtc);
        enum dpio_channel ch = vlv_dport_to_channel(dport);
        enum pipe pipe = intel_crtc->pipe;
        unsigned int lane_mask =
                intel_dp_unused_lane_mask(intel_crtc->config->lane_count);
        u32 val;

        /*
         * Must trick the second common lane into life.
         * Otherwise we can't even access the PLL.
         */
        if (ch == DPIO_CH0 && pipe == PIPE_B)
                dport->release_cl2_override =
                        !chv_phy_powergate_ch(dev_priv, DPIO_PHY0, DPIO_CH1, true);

        chv_phy_powergate_lanes(encoder, true, lane_mask);

        mutex_lock(&dev_priv->sb_lock);

        /* Assert data lane reset */
        chv_data_lane_soft_reset(encoder, true);

        /* program left/right clock distribution */
        if (pipe != PIPE_B) {
                val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW5_CH0);
                val &= ~(CHV_BUFLEFTENA1_MASK | CHV_BUFRIGHTENA1_MASK);
                if (ch == DPIO_CH0)
                        val |= CHV_BUFLEFTENA1_FORCE;
                if (ch == DPIO_CH1)
                        val |= CHV_BUFRIGHTENA1_FORCE;
                vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW5_CH0, val);
        } else {
                val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW1_CH1);
                val &= ~(CHV_BUFLEFTENA2_MASK | CHV_BUFRIGHTENA2_MASK);
                if (ch == DPIO_CH0)
                        val |= CHV_BUFLEFTENA2_FORCE;
                if (ch == DPIO_CH1)
                        val |= CHV_BUFRIGHTENA2_FORCE;
                vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW1_CH1, val);
        }

        /* program clock channel usage */
        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW8(ch));
        val |= CHV_PCS_USEDCLKCHANNEL_OVRRIDE;
        if (pipe != PIPE_B)
                val &= ~CHV_PCS_USEDCLKCHANNEL;
        else
                val |= CHV_PCS_USEDCLKCHANNEL;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW8(ch), val);

        if (intel_crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW8(ch));
                val |= CHV_PCS_USEDCLKCHANNEL_OVRRIDE;
                if (pipe != PIPE_B)
                        val &= ~CHV_PCS_USEDCLKCHANNEL;
                else
                        val |= CHV_PCS_USEDCLKCHANNEL;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW8(ch), val);
        }

        /*
         * This is a bit weird since generally CL
         * matches the pipe, but here we need to
         * pick the CL based on the port.
         */
        val = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW19(ch));
        if (pipe != PIPE_B)
                val &= ~CHV_CMN_USEDCLKCHANNEL;
        else
                val |= CHV_CMN_USEDCLKCHANNEL;
        vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW19(ch), val);

        mutex_unlock(&dev_priv->sb_lock);
}

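/*
 * Final CHV PHY setup before enabling the encoder: let the hardware manage
 * the TX FIFO reset source, set the per-lane UPAR bits, program the data
 * lane stagger based on the port clock, and deassert the data lane reset.
 */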
void chv_phy_pre_encoder_enable(struct intel_encoder *encoder)
{
        struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
        struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
        struct drm_device *dev = encoder->base.dev;
        struct drm_i915_private *dev_priv = to_i915(dev);
        struct intel_crtc *intel_crtc =
                to_intel_crtc(encoder->base.crtc);
        enum dpio_channel ch = vlv_dport_to_channel(dport);
        int pipe = intel_crtc->pipe;
        int data, i, stagger;
        u32 val;

        mutex_lock(&dev_priv->sb_lock);

        /* allow hardware to manage TX FIFO reset source */
        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW11(ch));
        val &= ~DPIO_LANEDESKEW_STRAP_OVRD;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW11(ch), val);

        if (intel_crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW11(ch));
                val &= ~DPIO_LANEDESKEW_STRAP_OVRD;
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW11(ch), val);
        }

        /* Program Tx lane latency optimal setting */
        for (i = 0; i < intel_crtc->config->lane_count; i++) {
                /* Set the upar bit */
                if (intel_crtc->config->lane_count == 1)
                        data = 0x0;
                else
                        data = (i == 1) ? 0x0 : 0x1;
                vlv_dpio_write(dev_priv, pipe, CHV_TX_DW14(ch, i),
                               data << DPIO_UPAR_SHIFT);
        }

        /* Data lane stagger programming */
        if (intel_crtc->config->port_clock > 270000)
                stagger = 0x18;
        else if (intel_crtc->config->port_clock > 135000)
                stagger = 0xd;
        else if (intel_crtc->config->port_clock > 67500)
                stagger = 0x7;
        else if (intel_crtc->config->port_clock > 33750)
                stagger = 0x4;
        else
                stagger = 0x2;

        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW11(ch));
        val |= DPIO_TX2_STAGGER_MASK(0x1f);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW11(ch), val);

        if (intel_crtc->config->lane_count > 2) {
                val = vlv_dpio_read(dev_priv, pipe, VLV_PCS23_DW11(ch));
                val |= DPIO_TX2_STAGGER_MASK(0x1f);
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW11(ch), val);
        }

        vlv_dpio_write(dev_priv, pipe, VLV_PCS01_DW12(ch),
                       DPIO_LANESTAGGER_STRAP(stagger) |
                       DPIO_LANESTAGGER_STRAP_OVRD |
                       DPIO_TX1_STAGGER_MASK(0x1f) |
                       DPIO_TX1_STAGGER_MULT(6) |
                       DPIO_TX2_STAGGER_MULT(0));

        if (intel_crtc->config->lane_count > 2) {
                vlv_dpio_write(dev_priv, pipe, VLV_PCS23_DW12(ch),
                               DPIO_LANESTAGGER_STRAP(stagger) |
                               DPIO_LANESTAGGER_STRAP_OVRD |
                               DPIO_TX1_STAGGER_MASK(0x1f) |
                               DPIO_TX1_STAGGER_MULT(7) |
                               DPIO_TX2_STAGGER_MULT(5));
        }

        /* Deassert data lane reset */
        chv_data_lane_soft_reset(encoder, false);

        mutex_unlock(&dev_priv->sb_lock);
}

void chv_phy_release_cl2_override(struct intel_encoder *encoder)
{
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

        if (dport->release_cl2_override) {
                chv_phy_powergate_ch(dev_priv, DPIO_PHY0, DPIO_CH1, false);
                dport->release_cl2_override = false;
        }
}

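/*
 * After the PLL is disabled, turn off the left/right clock distribution
 * and drop the lane power-down overrides again.
 */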
void chv_phy_post_pll_disable(struct intel_encoder *encoder)
{
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
        enum pipe pipe = to_intel_crtc(encoder->base.crtc)->pipe;
        u32 val;

        mutex_lock(&dev_priv->sb_lock);

        /* disable left/right clock distribution */
        if (pipe != PIPE_B) {
                val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW5_CH0);
                val &= ~(CHV_BUFLEFTENA1_MASK | CHV_BUFRIGHTENA1_MASK);
                vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW5_CH0, val);
        } else {
                val = vlv_dpio_read(dev_priv, pipe, _CHV_CMN_DW1_CH1);
                val &= ~(CHV_BUFLEFTENA2_MASK | CHV_BUFRIGHTENA2_MASK);
                vlv_dpio_write(dev_priv, pipe, _CHV_CMN_DW1_CH1, val);
        }

        mutex_unlock(&dev_priv->sb_lock);

        /*
         * Leave the power down bit cleared for at least one
         * lane so that chv_powergate_phy_ch() will power
         * on something when the channel is otherwise unused.
         * When the port is off and the override is removed
         * the lanes power down anyway, so otherwise it doesn't
         * really matter what the state of power down bits is
         * after this.
         */
        chv_phy_powergate_lanes(encoder, false, 0x0);
}

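/*
 * Program voltage swing, pre-emphasis and the unique transition scale for
 * a VLV port, then re-enable OCALINIT on the TX lanes.
 */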
void vlv_set_phy_signal_level(struct intel_encoder *encoder,
                              u32 demph_reg_value, u32 preemph_reg_value,
                              u32 uniqtranscale_reg_value, u32 tx3_demph)
{
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
        struct intel_crtc *intel_crtc = to_intel_crtc(encoder->base.crtc);
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        enum dpio_channel port = vlv_dport_to_channel(dport);
        int pipe = intel_crtc->pipe;

        mutex_lock(&dev_priv->sb_lock);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW5(port), 0x00000000);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW4(port), demph_reg_value);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW2(port),
                       uniqtranscale_reg_value);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW3(port), 0x0C782040);

        if (tx3_demph)
                vlv_dpio_write(dev_priv, pipe, VLV_TX3_DW4(port), tx3_demph);

        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW11(port), 0x00030000);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW9(port), preemph_reg_value);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW5(port), DPIO_TX_OCALINIT_EN);
        mutex_unlock(&dev_priv->sb_lock);
}

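/*
 * Before enabling the PLL, put the TX lane and PCS clock resets back to
 * their defaults and apply the inter-pair skew fixup for this channel.
 */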
void vlv_phy_pre_pll_enable(struct intel_encoder *encoder)
{
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct drm_device *dev = encoder->base.dev;
        struct drm_i915_private *dev_priv = to_i915(dev);
        struct intel_crtc *intel_crtc =
                to_intel_crtc(encoder->base.crtc);
        enum dpio_channel port = vlv_dport_to_channel(dport);
        int pipe = intel_crtc->pipe;

        /* Program Tx lane resets to default */
        mutex_lock(&dev_priv->sb_lock);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW0(port),
                       DPIO_PCS_TX_LANE2_RESET |
                       DPIO_PCS_TX_LANE1_RESET);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW1(port),
                       DPIO_PCS_CLK_CRI_RXEB_EIOS_EN |
                       DPIO_PCS_CLK_CRI_RXDIGFILTSG_EN |
                       (1<<DPIO_PCS_CLK_DATAWIDTH_SHIFT) |
                       DPIO_PCS_CLK_SOFT_RESET);

        /* Fix up inter-pair skew failure */
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW12(port), 0x00750f00);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW11(port), 0x00001500);
        vlv_dpio_write(dev_priv, pipe, VLV_TX_DW14(port), 0x40400000);
        mutex_unlock(&dev_priv->sb_lock);
}

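/*
 * Just before enabling the encoder, enable the clock channels for the port
 * and program the lane clock.
 */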
void vlv_phy_pre_encoder_enable(struct intel_encoder *encoder)
{
        struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
        struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
        struct drm_device *dev = encoder->base.dev;
        struct drm_i915_private *dev_priv = to_i915(dev);
        struct intel_crtc *intel_crtc = to_intel_crtc(encoder->base.crtc);
        enum dpio_channel port = vlv_dport_to_channel(dport);
        int pipe = intel_crtc->pipe;
        u32 val;

        mutex_lock(&dev_priv->sb_lock);

        /* Enable clock channels for this port */
        val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW8(port));
        val = 0;
        if (pipe)
                val |= (1<<21);
        else
                val &= ~(1<<21);
        val |= 0x001000c4;
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW8(port), val);

        /* Program lane clock */
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW14(port), 0x00760018);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW23(port), 0x00400888);
        mutex_unlock(&dev_priv->sb_lock);
}

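/*
 * Put the TX lanes of this channel back into reset when the port is shut
 * down.
 */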
void vlv_phy_reset_lanes(struct intel_encoder *encoder)
{
        struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
        struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
        struct intel_crtc *intel_crtc =
                to_intel_crtc(encoder->base.crtc);
        enum dpio_channel port = vlv_dport_to_channel(dport);
        int pipe = intel_crtc->pipe;

        mutex_lock(&dev_priv->sb_lock);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW0(port), 0x00000000);
        vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW1(port), 0x00e00060);
        mutex_unlock(&dev_priv->sb_lock);
}