/*
 * Copyright (C) 2012 Russell King
 * Rewritten from the dovefb driver, and Armada510 manuals.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include "armada_crtc.h"
#include "armada_drm.h"
#include "armada_fb.h"
#include "armada_gem.h"
#include "armada_hw.h"
#include "armada_trace.h"

struct armada_frame_work {
	struct armada_plane_work work;
	struct drm_pending_vblank_event *event;
	struct armada_regs regs[4];
	struct drm_framebuffer *old_fb;
};

enum csc_mode {
	CSC_AUTO = 0,
	CSC_YUV_CCIR601 = 1,
	CSC_YUV_CCIR709 = 2,
	CSC_RGB_COMPUTER = 1,
	CSC_RGB_STUDIO = 2,
};

static const uint32_t armada_primary_formats[] = {
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YUYV,
	DRM_FORMAT_VYUY,
	DRM_FORMAT_YVYU,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_ABGR1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
};
/*
 * A note about interlacing.  Let's consider HDMI 1920x1080i.
 * The timing parameters we have from X are:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640  1080 1084 1094 1125
 * Which get translated to:
 *  Hact HsyA HsyI Htot  Vact VsyA VsyI Vtot
 *  1920 2448 2492 2640   540  542  547  562
 *
 * This is how it is defined by CEA-861-D - line and pixel numbers are
 * referenced to the rising edge of VSYNC and HSYNC.  Total clocks per
 * line: 2640.  In the odd frame, the first active line is line 21; in
 * the even frame, the first active line is line 584.
 *
 * LN:    560     561     562     563             567     568     569
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: _________________________|~~~~~~//~~~~~~~~~~~~~~~|__________
 *  22 blanking lines.  VSYNC at 1320 (referenced to the HSYNC rising edge).
 *
 * LN:    1123    1124    1125       1               5       6       7
 * DE:    ~~~|____________________________//__________________________
 * HSYNC: ____|~|_____|~|_____|~|_____|~|_//__|~|_____|~|_____|~|_____
 * VSYNC: ____________________|~~~~~~~~~~~//~~~~~~~~~~|_______________
 *  23 blanking lines
 *
 * The Armada LCD Controller line and pixel numbers are, like X timings,
 * referenced to the top left of the active frame.
 *
 * So, translating these to our LCD controller:
 *  Odd frame, 563 total lines, VSYNC at line 543-548, pixel 1128.
 *  Even frame, 562 total lines, VSYNC at line 542-547, pixel 2448.
 * Note: Vsync front porch remains constant!
 *
 * if (odd_frame) {
 *   vtotal = mode->crtc_vtotal + 1;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay + 1;
 *   vhorizpos = mode->crtc_hsync_start - mode->crtc_htotal / 2
 * } else {
 *   vtotal = mode->crtc_vtotal;
 *   vbackporch = mode->crtc_vsync_start - mode->crtc_vdisplay;
 *   vhorizpos = mode->crtc_hsync_start;
 * }
 * vfrontporch = mode->crtc_vtotal - mode->crtc_vsync_end;
 *
 * So, we need to reprogram these registers on each vsync event:
 *  LCD_SPU_V_PORCH, LCD_SPU_ADV_REG, LCD_SPUT_V_H_TOTAL
 *
 * Note: we do not use the frame done interrupts, because these appear
 * to happen too early, and lead to jitter on the display (presumably
 * they occur at the end of the last active line, before the vsync back
 * porch, which we're reprogramming).
 */
void
armada_drm_crtc_update_regs(struct armada_crtc *dcrtc, struct armada_regs *regs)
{
	while (regs->offset != ~0) {
		void __iomem *reg = dcrtc->base + regs->offset;
		uint32_t val;

		val = regs->mask;
		if (val != 0)
			val &= readl_relaxed(reg);
		writel_relaxed(val | regs->val, reg);

		++regs;
	}
}

#define dpms_blanked(dpms)	((dpms) != DRM_MODE_DPMS_ON)

static void armada_drm_crtc_update(struct armada_crtc *dcrtc)
{
	uint32_t dumb_ctrl;

	dumb_ctrl = dcrtc->cfg_dumb_ctrl;

	if (!dpms_blanked(dcrtc->dpms))
		dumb_ctrl |= CFG_DUMB_ENA;

	/*
	 * When the dumb interface isn't in DUMB24_RGB888_0 mode, it might
	 * be using SPI or GPIO.  If we set this to DUMB_BLANK, we will
	 * force LCD_D[23:0] to output blank color, overriding the GPIO or
	 * SPI usage.  So leave it as-is unless in DUMB24_RGB888_0 mode.
	 */
	if (dpms_blanked(dcrtc->dpms) &&
	    (dumb_ctrl & DUMB_MASK) == DUMB24_RGB888_0) {
		dumb_ctrl &= ~DUMB_MASK;
		dumb_ctrl |= DUMB_BLANK;
	}

	/*
	 * The documentation doesn't indicate what the normal state of
	 * the sync signals is.  Sebastian Hesselbart kindly probed
	 * these signals on his board to determine their state.
	 *
	 * The non-inverted state of the sync signals is active high.
	 * Setting these bits makes the appropriate signal active low.
	 */
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NCSYNC)
		dumb_ctrl |= CFG_INV_CSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NHSYNC)
		dumb_ctrl |= CFG_INV_HSYNC;
	if (dcrtc->crtc.mode.flags & DRM_MODE_FLAG_NVSYNC)
		dumb_ctrl |= CFG_INV_VSYNC;

	if (dcrtc->dumb_ctrl != dumb_ctrl) {
		dcrtc->dumb_ctrl = dumb_ctrl;
		writel_relaxed(dumb_ctrl, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}
}

void armada_drm_plane_calc_addrs(u32 *addrs, struct drm_framebuffer *fb,
	int x, int y)
{
	u32 addr = drm_fb_obj(fb)->dev_addr;
	int num_planes = fb->format->num_planes;
	int i;

	if (num_planes > 3)
		num_planes = 3;

	for (i = 0; i < num_planes; i++)
		addrs[i] = addr + fb->offsets[i] + y * fb->pitches[i] +
			     x * fb->format->cpp[i];
	for (; i < 3; i++)
		addrs[i] = 0;
}
static unsigned armada_drm_crtc_calc_fb(struct drm_framebuffer *fb,
	int x, int y, struct armada_regs *regs, bool interlaced)
{
	unsigned pitch = fb->pitches[0];
	u32 addrs[3], addr_odd, addr_even;
	unsigned i = 0;

	DRM_DEBUG_DRIVER("pitch %u x %d y %d bpp %d\n",
		pitch, x, y, fb->format->cpp[0] * 8);

	armada_drm_plane_calc_addrs(addrs, fb, x, y);

	addr_odd = addr_even = addrs[0];

	if (interlaced) {
		addr_even += pitch;
		pitch *= 2;
	}

	/* write offset, base, and pitch */
	armada_reg_queue_set(regs, i, addr_odd, LCD_CFG_GRA_START_ADDR0);
	armada_reg_queue_set(regs, i, addr_even, LCD_CFG_GRA_START_ADDR1);
	armada_reg_queue_mod(regs, i, pitch, 0xffff, LCD_CFG_GRA_PITCH);

	return i;
}

static void armada_drm_plane_work_run(struct armada_crtc *dcrtc,
	struct drm_plane *plane)
{
	struct armada_plane *dplane = drm_to_armada_plane(plane);
	struct armada_plane_work *work = xchg(&dplane->work, NULL);

	/* Handle any pending frame work. */
	if (work) {
		work->fn(dcrtc, dplane, work);
		drm_crtc_vblank_put(&dcrtc->crtc);
	}

	wake_up(&dplane->frame_wait);
}

int armada_drm_plane_work_queue(struct armada_crtc *dcrtc,
	struct armada_plane *plane, struct armada_plane_work *work)
{
	int ret;

	ret = drm_crtc_vblank_get(&dcrtc->crtc);
	if (ret) {
		DRM_ERROR("failed to acquire vblank counter\n");
		return ret;
	}

	ret = cmpxchg(&plane->work, NULL, work) ? -EBUSY : 0;
	if (ret)
		drm_crtc_vblank_put(&dcrtc->crtc);

	return ret;
}

int armada_drm_plane_work_wait(struct armada_plane *plane, long timeout)
{
	return wait_event_timeout(plane->frame_wait, !plane->work, timeout);
}

struct armada_plane_work *armada_drm_plane_work_cancel(
	struct armada_crtc *dcrtc, struct armada_plane *plane)
{
	struct armada_plane_work *work = xchg(&plane->work, NULL);

	if (work)
		drm_crtc_vblank_put(&dcrtc->crtc);

	return work;
}
static int armada_drm_crtc_queue_frame_work(struct armada_crtc *dcrtc,
	struct armada_frame_work *work)
{
	struct armada_plane *plane = drm_to_armada_plane(dcrtc->crtc.primary);

	return armada_drm_plane_work_queue(dcrtc, plane, &work->work);
}

static void armada_drm_crtc_complete_frame_work(struct armada_crtc *dcrtc,
	struct armada_plane *plane, struct armada_plane_work *work)
{
	struct armada_frame_work *fwork =
		container_of(work, struct armada_frame_work, work);
	struct drm_device *dev = dcrtc->crtc.dev;
	unsigned long flags;

	spin_lock_irqsave(&dcrtc->irq_lock, flags);
	armada_drm_crtc_update_regs(dcrtc, fwork->regs);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);

	if (fwork->event) {
		spin_lock_irqsave(&dev->event_lock, flags);
		drm_crtc_send_vblank_event(&dcrtc->crtc, fwork->event);
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	/* Finally, queue the process-half of the cleanup. */
	__armada_drm_queue_unref_work(dcrtc->crtc.dev, fwork->old_fb);

	kfree(fwork);
}

static void armada_drm_crtc_finish_fb(struct armada_crtc *dcrtc,
	struct drm_framebuffer *fb, bool force)
{
	struct armada_frame_work *work;

	if (!fb)
		return;

	if (force) {
		/* Display is disabled, so just drop the old fb */
		drm_framebuffer_unreference(fb);
		return;
	}

	work = kmalloc(sizeof(*work), GFP_KERNEL);
	if (work) {
		int i = 0;

		work->work.fn = armada_drm_crtc_complete_frame_work;
		work->event = NULL;
		work->old_fb = fb;
		armada_reg_queue_end(work->regs, i);

		if (armada_drm_crtc_queue_frame_work(dcrtc, work) == 0)
			return;

		kfree(work);
	}

	/*
	 * Oops - just drop the reference immediately and hope for
	 * the best.  The worst that will happen is the buffer gets
	 * reused before it has finished being displayed.
	 */
	drm_framebuffer_unreference(fb);
}

static void armada_drm_vblank_off(struct armada_crtc *dcrtc)
{
	/*
	 * Tell the DRM core that vblank IRQs aren't going to happen for
	 * a while.  This cleans up any pending vblank events for us.
	 */
	drm_crtc_vblank_off(&dcrtc->crtc);
	armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

void armada_drm_crtc_gamma_set(struct drm_crtc *crtc, u16 r, u16 g, u16 b,
	int idx)
{
}

void armada_drm_crtc_gamma_get(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
	int idx)
{
}
/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_dpms(struct drm_crtc *crtc, int dpms)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dpms_blanked(dcrtc->dpms) != dpms_blanked(dpms)) {
		if (dpms_blanked(dpms))
			armada_drm_vblank_off(dcrtc);
		else if (!IS_ERR(dcrtc->clk))
			WARN_ON(clk_prepare_enable(dcrtc->clk));
		dcrtc->dpms = dpms;
		armada_drm_crtc_update(dcrtc);
		if (!dpms_blanked(dpms))
			drm_crtc_vblank_on(&dcrtc->crtc);
		else if (!IS_ERR(dcrtc->clk))
			clk_disable_unprepare(dcrtc->clk);
	} else if (dcrtc->dpms != dpms) {
		dcrtc->dpms = dpms;
	}
}

/*
 * Prepare for a mode set.  Turn off overlay to ensure that we don't end
 * up with the overlay size being bigger than the active screen size.
 * We rely upon X refreshing this state after the mode set has completed.
 *
 * The mode_config.mutex will be held for this call
 */
static void armada_drm_crtc_prepare(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct drm_plane *plane;

	/*
	 * If we have an overlay plane associated with this CRTC, disable
	 * it before the modeset to avoid its coordinates being outside
	 * the new mode parameters.
	 */
	plane = dcrtc->plane;
	if (plane)
		drm_plane_force_disable(plane);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_commit(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	if (dcrtc->dpms != DRM_MODE_DPMS_ON) {
		dcrtc->dpms = DRM_MODE_DPMS_ON;
		armada_drm_crtc_update(dcrtc);
	}
}

/* The mode_config.mutex will be held for this call */
static bool armada_drm_crtc_mode_fixup(struct drm_crtc *crtc,
	const struct drm_display_mode *mode, struct drm_display_mode *adj)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* We can't do interlaced modes if we don't have the SPU_ADV_REG */
	if (!dcrtc->variant->has_spu_adv_reg &&
	    adj->flags & DRM_MODE_FLAG_INTERLACE)
		return false;

	/* Check whether the display mode is possible */
	ret = dcrtc->variant->compute_clock(dcrtc, adj, NULL);
	if (ret)
		return false;

	return true;
}
static void armada_drm_crtc_irq(struct armada_crtc *dcrtc, u32 stat)
{
	void __iomem *base = dcrtc->base;
	struct drm_plane *ovl_plane;

	if (stat & DMA_FF_UNDERFLOW)
		DRM_ERROR("video underflow on crtc %u\n", dcrtc->num);
	if (stat & GRA_FF_UNDERFLOW)
		DRM_ERROR("graphics underflow on crtc %u\n", dcrtc->num);

	if (stat & VSYNC_IRQ)
		drm_crtc_handle_vblank(&dcrtc->crtc);

	spin_lock(&dcrtc->irq_lock);
	ovl_plane = dcrtc->plane;
	if (ovl_plane)
		armada_drm_plane_work_run(dcrtc, ovl_plane);

	if (stat & GRA_FRAME_IRQ && dcrtc->interlaced) {
		int i = stat & GRA_FRAME_IRQ0 ? 0 : 1;
		uint32_t val;

		writel_relaxed(dcrtc->v[i].spu_v_porch, base + LCD_SPU_V_PORCH);
		writel_relaxed(dcrtc->v[i].spu_v_h_total,
			       base + LCD_SPUT_V_H_TOTAL);

		val = readl_relaxed(base + LCD_SPU_ADV_REG);
		val &= ~(ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF | ADV_VSYNCOFFEN);
		val |= dcrtc->v[i].spu_adv_reg;
		writel_relaxed(val, base + LCD_SPU_ADV_REG);
	}

	if (stat & DUMB_FRAMEDONE && dcrtc->cursor_update) {
		writel_relaxed(dcrtc->cursor_hw_pos,
			       base + LCD_SPU_HWC_OVSA_HPXL_VLN);
		writel_relaxed(dcrtc->cursor_hw_sz,
			       base + LCD_SPU_HWC_HPXL_VLN);
		armada_updatel(CFG_HWC_ENA,
			       CFG_HWC_ENA | CFG_HWC_1BITMOD | CFG_HWC_1BITENA,
			       base + LCD_SPU_DMA_CTRL0);
		dcrtc->cursor_update = false;
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	}

	spin_unlock(&dcrtc->irq_lock);

	if (stat & GRA_FRAME_IRQ)
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);
}

static irqreturn_t armada_drm_irq(int irq, void *arg)
{
	struct armada_crtc *dcrtc = arg;
	u32 v, stat = readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR);

	/*
	 * This is ridiculous - rather than writing bits to clear, we
	 * have to set the actual status register value.  This is racy.
	 */
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	trace_armada_drm_irq(&dcrtc->crtc, stat);

	/* Mask out those interrupts we haven't enabled */
	v = stat & dcrtc->irq_ena;

	if (v & (VSYNC_IRQ|GRA_FRAME_IRQ|DUMB_FRAMEDONE)) {
		armada_drm_crtc_irq(dcrtc, stat);
		return IRQ_HANDLED;
	}
	return IRQ_NONE;
}

/* These are locked by dev->vbl_lock */
void armada_drm_crtc_disable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if (dcrtc->irq_ena & mask) {
		dcrtc->irq_ena &= ~mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	}
}

void armada_drm_crtc_enable_irq(struct armada_crtc *dcrtc, u32 mask)
{
	if ((dcrtc->irq_ena & mask) != mask) {
		dcrtc->irq_ena |= mask;
		writel(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
		if (readl_relaxed(dcrtc->base + LCD_SPU_IRQ_ISR) & mask)
			writel(0, dcrtc->base + LCD_SPU_IRQ_ISR);
	}
}
static uint32_t armada_drm_crtc_calculate_csc(struct armada_crtc *dcrtc)
{
	struct drm_display_mode *adj = &dcrtc->crtc.mode;
	uint32_t val = 0;

	if (dcrtc->csc_yuv_mode == CSC_YUV_CCIR709)
		val |= CFG_CSC_YUV_CCIR709;
	if (dcrtc->csc_rgb_mode == CSC_RGB_STUDIO)
		val |= CFG_CSC_RGB_STUDIO;

	/*
	 * In auto mode, set the colorimetry, based upon the HDMI spec.
	 * 1280x720p, 1920x1080p and 1920x1080i use ITU709, others use
	 * ITU601.  It may be more appropriate to set this depending on
	 * the source - but what if the graphic frame is YUV and the
	 * video frame is RGB?
	 */
	if ((adj->hdisplay == 1280 && adj->vdisplay == 720 &&
	     !(adj->flags & DRM_MODE_FLAG_INTERLACE)) ||
	    (adj->hdisplay == 1920 && adj->vdisplay == 1080)) {
		if (dcrtc->csc_yuv_mode == CSC_AUTO)
			val |= CFG_CSC_YUV_CCIR709;
	}

	/*
	 * We assume we're connected to a TV-like device, so the YUV->RGB
	 * conversion should produce a limited range.  We should set this
	 * depending on the connectors attached to this CRTC, and what
	 * kind of device they report being connected.
	 */
	if (dcrtc->csc_rgb_mode == CSC_AUTO)
		val |= CFG_CSC_RGB_STUDIO;

	return val;
}

static void armada_drm_primary_set(struct drm_crtc *crtc,
	struct drm_plane *plane, int x, int y)
{
	struct armada_plane_state *state = &drm_to_armada_plane(plane)->state;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[8];
	bool interlaced = dcrtc->interlaced;
	unsigned i;
	u32 ctrl0;

	i = armada_drm_crtc_calc_fb(plane->fb, x, y, regs, interlaced);

	armada_reg_queue_set(regs, i, state->dst_yx, LCD_SPU_GRA_OVSA_HPXL_VLN);
	armada_reg_queue_set(regs, i, state->src_hw, LCD_SPU_GRA_HPXL_VLN);
	armada_reg_queue_set(regs, i, state->dst_hw, LCD_SPU_GZM_HPXL_VLN);

	ctrl0 = state->ctrl0;
	if (interlaced)
		ctrl0 |= CFG_GRA_FTOGGLE;

	armada_reg_queue_mod(regs, i, ctrl0, CFG_GRAFORMAT |
			     CFG_GRA_MOD(CFG_SWAPRB | CFG_SWAPUV |
					 CFG_SWAPYU | CFG_YUV2RGB) |
			     CFG_PALETTE_ENA | CFG_GRA_FTOGGLE,
			     LCD_SPU_DMA_CTRL0);

	armada_reg_queue_end(regs, i);
	armada_drm_crtc_update_regs(dcrtc, regs);
}
/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set(struct drm_crtc *crtc,
	struct drm_display_mode *mode, struct drm_display_mode *adj,
	int x, int y, struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[17];
	uint32_t lm, rm, tm, bm, val, sclk;
	unsigned long flags;
	unsigned i;
	bool interlaced;

	drm_framebuffer_reference(crtc->primary->fb);

	interlaced = !!(adj->flags & DRM_MODE_FLAG_INTERLACE);

	val = CFG_GRA_ENA | CFG_GRA_HSMOOTH;
	val |= CFG_GRA_FMT(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt);
	val |= CFG_GRA_MOD(drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->mod);

	if (drm_fb_to_armada_fb(dcrtc->crtc.primary->fb)->fmt > CFG_420)
		val |= CFG_PALETTE_ENA;

	drm_to_armada_plane(crtc->primary)->state.ctrl0 = val;
	drm_to_armada_plane(crtc->primary)->state.src_hw =
	drm_to_armada_plane(crtc->primary)->state.dst_hw =
		adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;
	drm_to_armada_plane(crtc->primary)->state.dst_yx = 0;

	i = 0;
	rm = adj->crtc_hsync_start - adj->crtc_hdisplay;
	lm = adj->crtc_htotal - adj->crtc_hsync_end;
	bm = adj->crtc_vsync_start - adj->crtc_vdisplay;
	tm = adj->crtc_vtotal - adj->crtc_vsync_end;

	DRM_DEBUG_DRIVER("H: %d %d %d %d lm %d rm %d\n",
		adj->crtc_hdisplay,
		adj->crtc_hsync_start,
		adj->crtc_hsync_end,
		adj->crtc_htotal, lm, rm);
	DRM_DEBUG_DRIVER("V: %d %d %d %d tm %d bm %d\n",
		adj->crtc_vdisplay,
		adj->crtc_vsync_start,
		adj->crtc_vsync_end,
		adj->crtc_vtotal, tm, bm);

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	drm_crtc_vblank_off(crtc);

	val = dcrtc->dumb_ctrl & ~CFG_DUMB_ENA;
	if (val != dcrtc->dumb_ctrl) {
		dcrtc->dumb_ctrl = val;
		writel_relaxed(val, dcrtc->base + LCD_SPU_DUMB_CTRL);
	}

	/*
	 * If we are blanked, we would have disabled the clock.  Re-enable
	 * it so that compute_clock() does the right thing.
	 */
	if (!IS_ERR(dcrtc->clk) && dpms_blanked(dcrtc->dpms))
		WARN_ON(clk_prepare_enable(dcrtc->clk));

	/* Now compute the divider for real */
	dcrtc->variant->compute_clock(dcrtc, adj, &sclk);

	/* Ensure graphic fifo is enabled */
	armada_reg_queue_mod(regs, i, 0, CFG_PDWN64x66, LCD_SPU_SRAM_PARA1);
	armada_reg_queue_set(regs, i, sclk, LCD_CFG_SCLK_DIV);

	if (interlaced ^ dcrtc->interlaced) {
		if (adj->flags & DRM_MODE_FLAG_INTERLACE)
			drm_crtc_vblank_get(&dcrtc->crtc);
		else
			drm_crtc_vblank_put(&dcrtc->crtc);
		dcrtc->interlaced = interlaced;
	}

	spin_lock_irqsave(&dcrtc->irq_lock, flags);

	/* Even interlaced/progressive frame */
	dcrtc->v[1].spu_v_h_total = adj->crtc_vtotal << 16 |
				    adj->crtc_htotal;
	dcrtc->v[1].spu_v_porch = tm << 16 | bm;
	val = adj->crtc_hsync_start;
	dcrtc->v[1].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
		dcrtc->variant->spu_adv_reg;

	if (interlaced) {
		/* Odd interlaced frame */
		dcrtc->v[0].spu_v_h_total = dcrtc->v[1].spu_v_h_total +
						(1 << 16);
		dcrtc->v[0].spu_v_porch = dcrtc->v[1].spu_v_porch + 1;
		val = adj->crtc_hsync_start - adj->crtc_htotal / 2;
		dcrtc->v[0].spu_adv_reg = val << 20 | val | ADV_VSYNCOFFEN |
			dcrtc->variant->spu_adv_reg;
	} else {
		dcrtc->v[0] = dcrtc->v[1];
	}

	val = adj->crtc_vdisplay << 16 | adj->crtc_hdisplay;

	armada_reg_queue_set(regs, i, val, LCD_SPU_V_H_ACTIVE);
	armada_reg_queue_set(regs, i, (lm << 16) | rm, LCD_SPU_H_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_porch, LCD_SPU_V_PORCH);
	armada_reg_queue_set(regs, i, dcrtc->v[0].spu_v_h_total,
			     LCD_SPUT_V_H_TOTAL);

	if (dcrtc->variant->has_spu_adv_reg) {
		armada_reg_queue_mod(regs, i, dcrtc->v[0].spu_adv_reg,
				     ADV_VSYNC_L_OFF | ADV_VSYNC_H_OFF |
				     ADV_VSYNCOFFEN, LCD_SPU_ADV_REG);
	}

	val = adj->flags & DRM_MODE_FLAG_NVSYNC ? CFG_VSYNC_INV : 0;
	armada_reg_queue_mod(regs, i, val, CFG_VSYNC_INV, LCD_SPU_DMA_CTRL1);

	val = dcrtc->spu_iopad_ctrl | armada_drm_crtc_calculate_csc(dcrtc);
	armada_reg_queue_set(regs, i, val, LCD_SPU_IOPAD_CONTROL);
	armada_reg_queue_end(regs, i);

	armada_drm_crtc_update_regs(dcrtc, regs);
	armada_drm_primary_set(crtc, crtc->primary, x, y);
	spin_unlock_irqrestore(&dcrtc->irq_lock, flags);

	armada_drm_crtc_update(dcrtc);

	drm_crtc_vblank_on(crtc);
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}
/* The mode_config.mutex will be held for this call */
static int armada_drm_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
	struct drm_framebuffer *old_fb)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_regs regs[4];
	unsigned i;

	i = armada_drm_crtc_calc_fb(crtc->primary->fb, crtc->x, crtc->y, regs,
				    dcrtc->interlaced);
	armada_reg_queue_end(regs, i);

	/* Wait for pending flips to complete */
	armada_drm_plane_work_wait(drm_to_armada_plane(dcrtc->crtc.primary),
				   MAX_SCHEDULE_TIMEOUT);

	/* Take a reference to the new fb as we're using it */
	drm_framebuffer_reference(crtc->primary->fb);

	/* Update the base in the CRTC */
	armada_drm_crtc_update_regs(dcrtc, regs);

	/* Drop our previously held reference */
	armada_drm_crtc_finish_fb(dcrtc, old_fb, dpms_blanked(dcrtc->dpms));

	return 0;
}

void armada_drm_crtc_plane_disable(struct armada_crtc *dcrtc,
	struct drm_plane *plane)
{
	u32 sram_para1, dma_ctrl0_mask;

	/*
	 * Drop our reference on any framebuffer attached to this plane.
	 * We don't need to NULL this out as drm_plane_force_disable(),
	 * and __setplane_internal() will do so for an overlay plane, and
	 * __drm_helper_disable_unused_functions() will do so for the
	 * primary plane.
	 */
	if (plane->fb)
		drm_framebuffer_unreference(plane->fb);

	/* Power down the Y/U/V FIFOs */
	sram_para1 = CFG_PDWN16x66 | CFG_PDWN32x66;

	/* Power down most RAMs and FIFOs if this is the primary plane */
	if (plane->type == DRM_PLANE_TYPE_PRIMARY) {
		sram_para1 |= CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
			      CFG_PDWN32x32 | CFG_PDWN64x66;
		dma_ctrl0_mask = CFG_GRA_ENA;
	} else {
		dma_ctrl0_mask = CFG_DMA_ENA;
	}

	spin_lock_irq(&dcrtc->irq_lock);
	armada_updatel(0, dma_ctrl0_mask, dcrtc->base + LCD_SPU_DMA_CTRL0);
	spin_unlock_irq(&dcrtc->irq_lock);

	armada_updatel(sram_para1, 0, dcrtc->base + LCD_SPU_SRAM_PARA1);
}

/* The mode_config.mutex will be held for this call */
static void armada_drm_crtc_disable(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);

	armada_drm_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	armada_drm_crtc_plane_disable(dcrtc, crtc->primary);
}

static const struct drm_crtc_helper_funcs armada_crtc_helper_funcs = {
	.dpms = armada_drm_crtc_dpms,
	.prepare = armada_drm_crtc_prepare,
	.commit = armada_drm_crtc_commit,
	.mode_fixup = armada_drm_crtc_mode_fixup,
	.mode_set = armada_drm_crtc_mode_set,
	.mode_set_base = armada_drm_crtc_mode_set_base,
	.disable = armada_drm_crtc_disable,
};
static void armada_load_cursor_argb(void __iomem *base, uint32_t *pix,
	unsigned stride, unsigned width, unsigned height)
{
	uint32_t addr;
	unsigned y;

	addr = SRAM_HWC32_RAM1;
	for (y = 0; y < height; y++) {
		uint32_t *p = &pix[y * stride];
		unsigned x;

		for (x = 0; x < width; x++, p++) {
			uint32_t val = *p;

			val = (val & 0xff00ff00) |
			      (val & 0x000000ff) << 16 |
			      (val & 0x00ff0000) >> 16;

			writel_relaxed(val,
				       base + LCD_SPU_SRAM_WRDAT);
			writel_relaxed(addr | SRAM_WRITE,
				       base + LCD_SPU_SRAM_CTRL);
			readl_relaxed(base + LCD_SPU_HWC_OVSA_HPXL_VLN);
			addr += 1;
			if ((addr & 0x00ff) == 0)
				addr += 0xf00;
			if ((addr & 0x30ff) == 0)
				addr = SRAM_HWC32_RAM2;
		}
	}
}

static void armada_drm_crtc_cursor_tran(void __iomem *base)
{
	unsigned addr;

	for (addr = 0; addr < 256; addr++) {
		/* write the default value */
		writel_relaxed(0x55555555, base + LCD_SPU_SRAM_WRDAT);
		writel_relaxed(addr | SRAM_WRITE | SRAM_HWC32_TRAN,
			       base + LCD_SPU_SRAM_CTRL);
	}
}
static int armada_drm_crtc_cursor_update(struct armada_crtc *dcrtc, bool reload)
{
	uint32_t xoff, xscr, w = dcrtc->cursor_w, s;
	uint32_t yoff, yscr, h = dcrtc->cursor_h;
	uint32_t para1;

	/*
	 * Calculate the visible width and height of the cursor,
	 * screen position, and the position in the cursor bitmap.
	 */
	if (dcrtc->cursor_x < 0) {
		xoff = -dcrtc->cursor_x;
		xscr = 0;
		w -= min(xoff, w);
	} else if (dcrtc->cursor_x + w > dcrtc->crtc.mode.hdisplay) {
		xoff = 0;
		xscr = dcrtc->cursor_x;
		w = max_t(int, dcrtc->crtc.mode.hdisplay - dcrtc->cursor_x, 0);
	} else {
		xoff = 0;
		xscr = dcrtc->cursor_x;
	}
	if (dcrtc->cursor_y < 0) {
		yoff = -dcrtc->cursor_y;
		yscr = 0;
		h -= min(yoff, h);
	} else if (dcrtc->cursor_y + h > dcrtc->crtc.mode.vdisplay) {
		yoff = 0;
		yscr = dcrtc->cursor_y;
		h = max_t(int, dcrtc->crtc.mode.vdisplay - dcrtc->cursor_y, 0);
	} else {
		yoff = 0;
		yscr = dcrtc->cursor_y;
	}

	/* On interlaced modes, the vertical cursor size must be halved */
	s = dcrtc->cursor_w;
	if (dcrtc->interlaced) {
		s *= 2;
		yscr /= 2;
		h /= 2;
	}

	if (!dcrtc->cursor_obj || !h || !w) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		return 0;
	}

	para1 = readl_relaxed(dcrtc->base + LCD_SPU_SRAM_PARA1);
	armada_updatel(CFG_CSB_256x32, CFG_CSB_256x32 | CFG_PDWN256x32,
		       dcrtc->base + LCD_SPU_SRAM_PARA1);

	/*
	 * Initialize the transparency if the SRAM was powered down.
	 * We must also reload the cursor data.
	 */
	if (!(para1 & CFG_CSB_256x32)) {
		armada_drm_crtc_cursor_tran(dcrtc->base);
		reload = true;
	}

	if (dcrtc->cursor_hw_sz != (h << 16 | w)) {
		spin_lock_irq(&dcrtc->irq_lock);
		armada_drm_crtc_disable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
		dcrtc->cursor_update = false;
		armada_updatel(0, CFG_HWC_ENA, dcrtc->base + LCD_SPU_DMA_CTRL0);
		spin_unlock_irq(&dcrtc->irq_lock);
		reload = true;
	}
	if (reload) {
		struct armada_gem_object *obj = dcrtc->cursor_obj;
		uint32_t *pix;

		/* Set the top-left corner of the cursor image */
		pix = obj->addr;
		pix += yoff * s + xoff;
		armada_load_cursor_argb(dcrtc->base, pix, s, w, h);
	}

	/* Reload the cursor position, size and enable in the IRQ handler */
	spin_lock_irq(&dcrtc->irq_lock);
	dcrtc->cursor_hw_pos = yscr << 16 | xscr;
	dcrtc->cursor_hw_sz = h << 16 | w;
	dcrtc->cursor_update = true;
	armada_drm_crtc_enable_irq(dcrtc, DUMB_FRAMEDONE_ENA);
	spin_unlock_irq(&dcrtc->irq_lock);

	return 0;
}

static void cursor_update(void *data)
{
	armada_drm_crtc_cursor_update(data, true);
}
static int armada_drm_crtc_cursor_set(struct drm_crtc *crtc,
	struct drm_file *file, uint32_t handle, uint32_t w, uint32_t h)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_gem_object *obj = NULL;
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -ENXIO;

	if (handle && w > 0 && h > 0) {
		/* maximum size is 64x32 or 32x64 */
		if (w > 64 || h > 64 || (w > 32 && h > 32))
			return -ENOMEM;

		obj = armada_gem_object_lookup(file, handle);
		if (!obj)
			return -ENOENT;

		/* Must be a kernel-mapped object */
		if (!obj->addr) {
			drm_gem_object_unreference_unlocked(&obj->obj);
			return -EINVAL;
		}

		if (obj->obj.size < w * h * 4) {
			DRM_ERROR("buffer is too small\n");
			drm_gem_object_unreference_unlocked(&obj->obj);
			return -ENOMEM;
		}
	}

	if (dcrtc->cursor_obj) {
		dcrtc->cursor_obj->update = NULL;
		dcrtc->cursor_obj->update_data = NULL;
		drm_gem_object_unreference_unlocked(&dcrtc->cursor_obj->obj);
	}
	dcrtc->cursor_obj = obj;
	dcrtc->cursor_w = w;
	dcrtc->cursor_h = h;
	ret = armada_drm_crtc_cursor_update(dcrtc, true);
	if (obj) {
		obj->update_data = dcrtc;
		obj->update = cursor_update;
	}

	return ret;
}

static int armada_drm_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	int ret;

	/* If no cursor support, replicate drm's return value */
	if (!dcrtc->variant->has_spu_adv_reg)
		return -EFAULT;

	dcrtc->cursor_x = x;
	dcrtc->cursor_y = y;
	ret = armada_drm_crtc_cursor_update(dcrtc, false);

	return ret;
}

static void armada_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_private *priv = crtc->dev->dev_private;

	if (dcrtc->cursor_obj)
		drm_gem_object_unreference_unlocked(&dcrtc->cursor_obj->obj);

	priv->dcrtc[dcrtc->num] = NULL;
	drm_crtc_cleanup(&dcrtc->crtc);

	if (!IS_ERR(dcrtc->clk))
		clk_disable_unprepare(dcrtc->clk);

	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ENA);

	of_node_put(dcrtc->crtc.port);

	kfree(dcrtc);
}
/*
 * The mode_config lock is held here, to prevent races between this
 * and a mode_set.
 */
static int armada_drm_crtc_page_flip(struct drm_crtc *crtc,
	struct drm_framebuffer *fb, struct drm_pending_vblank_event *event,
	uint32_t page_flip_flags)
{
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	struct armada_frame_work *work;
	unsigned i;
	int ret;

	/* We don't support changing the pixel format */
	if (fb->format != crtc->primary->fb->format)
		return -EINVAL;

	work = kmalloc(sizeof(*work), GFP_KERNEL);
	if (!work)
		return -ENOMEM;

	work->work.fn = armada_drm_crtc_complete_frame_work;
	work->event = event;
	work->old_fb = dcrtc->crtc.primary->fb;

	i = armada_drm_crtc_calc_fb(fb, crtc->x, crtc->y, work->regs,
				    dcrtc->interlaced);
	armada_reg_queue_end(work->regs, i);

	/*
	 * Ensure that we hold a reference on the new framebuffer.
	 * This has to match the behaviour in mode_set.
	 */
	drm_framebuffer_reference(fb);

	ret = armada_drm_crtc_queue_frame_work(dcrtc, work);
	if (ret) {
		/* Undo our reference above */
		drm_framebuffer_unreference(fb);
		kfree(work);
		return ret;
	}

	/*
	 * Don't take a reference on the new framebuffer;
	 * drm_mode_page_flip_ioctl() has already grabbed a reference and
	 * will _not_ drop that reference on successful return from this
	 * function.  Simply mark this new framebuffer as the current one.
	 */
	dcrtc->crtc.primary->fb = fb;

	/*
	 * Finally, if the display is blanked, we won't receive an
	 * interrupt, so complete it now.
	 */
	if (dpms_blanked(dcrtc->dpms))
		armada_drm_plane_work_run(dcrtc, dcrtc->crtc.primary);

	return 0;
}

static int
armada_drm_crtc_set_property(struct drm_crtc *crtc,
	struct drm_property *property, uint64_t val)
{
	struct armada_private *priv = crtc->dev->dev_private;
	struct armada_crtc *dcrtc = drm_to_armada_crtc(crtc);
	bool update_csc = false;

	if (property == priv->csc_yuv_prop) {
		dcrtc->csc_yuv_mode = val;
		update_csc = true;
	} else if (property == priv->csc_rgb_prop) {
		dcrtc->csc_rgb_mode = val;
		update_csc = true;
	}

	if (update_csc) {
		uint32_t val;

		val = dcrtc->spu_iopad_ctrl |
		      armada_drm_crtc_calculate_csc(dcrtc);
		writel_relaxed(val, dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	}

	return 0;
}

static const struct drm_crtc_funcs armada_crtc_funcs = {
	.cursor_set = armada_drm_crtc_cursor_set,
	.cursor_move = armada_drm_crtc_cursor_move,
	.destroy = armada_drm_crtc_destroy,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = armada_drm_crtc_page_flip,
	.set_property = armada_drm_crtc_set_property,
};
static const struct drm_plane_funcs armada_primary_plane_funcs = {
	.update_plane = drm_primary_helper_update,
	.disable_plane = drm_primary_helper_disable,
	.destroy = drm_primary_helper_destroy,
};

int armada_drm_plane_init(struct armada_plane *plane)
{
	init_waitqueue_head(&plane->frame_wait);

	return 0;
}

static struct drm_prop_enum_list armada_drm_csc_yuv_enum_list[] = {
	{ CSC_AUTO,        "Auto" },
	{ CSC_YUV_CCIR601, "CCIR601" },
	{ CSC_YUV_CCIR709, "CCIR709" },
};

static struct drm_prop_enum_list armada_drm_csc_rgb_enum_list[] = {
	{ CSC_AUTO,         "Auto" },
	{ CSC_RGB_COMPUTER, "Computer system" },
	{ CSC_RGB_STUDIO,   "Studio" },
};

static int armada_drm_crtc_create_properties(struct drm_device *dev)
{
	struct armada_private *priv = dev->dev_private;

	if (priv->csc_yuv_prop)
		return 0;

	priv->csc_yuv_prop = drm_property_create_enum(dev, 0,
		"CSC_YUV", armada_drm_csc_yuv_enum_list,
		ARRAY_SIZE(armada_drm_csc_yuv_enum_list));
	priv->csc_rgb_prop = drm_property_create_enum(dev, 0,
		"CSC_RGB", armada_drm_csc_rgb_enum_list,
		ARRAY_SIZE(armada_drm_csc_rgb_enum_list));

	if (!priv->csc_yuv_prop || !priv->csc_rgb_prop)
		return -ENOMEM;

	return 0;
}
static int armada_drm_crtc_create(struct drm_device *drm, struct device *dev,
	struct resource *res, int irq, const struct armada_variant *variant,
	struct device_node *port)
{
	struct armada_private *priv = drm->dev_private;
	struct armada_crtc *dcrtc;
	struct armada_plane *primary;
	void __iomem *base;
	int ret;

	ret = armada_drm_crtc_create_properties(drm);
	if (ret)
		return ret;

	base = devm_ioremap_resource(dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dcrtc = kzalloc(sizeof(*dcrtc), GFP_KERNEL);
	if (!dcrtc) {
		DRM_ERROR("failed to allocate Armada crtc\n");
		return -ENOMEM;
	}

	if (dev != drm->dev)
		dev_set_drvdata(dev, dcrtc);

	dcrtc->variant = variant;
	dcrtc->base = base;
	dcrtc->num = drm->mode_config.num_crtc;
	dcrtc->clk = ERR_PTR(-EINVAL);
	dcrtc->csc_yuv_mode = CSC_AUTO;
	dcrtc->csc_rgb_mode = CSC_AUTO;
	dcrtc->cfg_dumb_ctrl = DUMB24_RGB888_0;
	dcrtc->spu_iopad_ctrl = CFG_VSCALE_LN_EN | CFG_IOPAD_DUMB24;
	spin_lock_init(&dcrtc->irq_lock);
	dcrtc->irq_ena = CLEAN_SPU_IRQ_ISR;

	/* Initialize some registers which we don't otherwise set */
	writel_relaxed(0x00000001, dcrtc->base + LCD_CFG_SCLK_DIV);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_BLANKCOLOR);
	writel_relaxed(dcrtc->spu_iopad_ctrl,
		       dcrtc->base + LCD_SPU_IOPAD_CONTROL);
	writel_relaxed(0x00000000, dcrtc->base + LCD_SPU_SRAM_PARA0);
	writel_relaxed(CFG_PDWN256x32 | CFG_PDWN256x24 | CFG_PDWN256x8 |
		       CFG_PDWN32x32 | CFG_PDWN16x66 | CFG_PDWN32x66 |
		       CFG_PDWN64x66, dcrtc->base + LCD_SPU_SRAM_PARA1);
	writel_relaxed(0x2032ff81, dcrtc->base + LCD_SPU_DMA_CTRL1);
	writel_relaxed(dcrtc->irq_ena, dcrtc->base + LCD_SPU_IRQ_ENA);
	writel_relaxed(0, dcrtc->base + LCD_SPU_IRQ_ISR);

	ret = devm_request_irq(dev, irq, armada_drm_irq, 0, "armada_drm_crtc",
			       dcrtc);
	if (ret < 0) {
		kfree(dcrtc);
		return ret;
	}

	if (dcrtc->variant->init) {
		ret = dcrtc->variant->init(dcrtc, dev);
		if (ret) {
			kfree(dcrtc);
			return ret;
		}
	}

	/* Ensure AXI pipeline is enabled */
	armada_updatel(CFG_ARBFAST_ENA, 0, dcrtc->base + LCD_SPU_DMA_CTRL0);

	priv->dcrtc[dcrtc->num] = dcrtc;
	dcrtc->crtc.port = port;

	primary = kzalloc(sizeof(*primary), GFP_KERNEL);
	if (!primary)
		return -ENOMEM;

	ret = armada_drm_plane_init(primary);
	if (ret) {
		kfree(primary);
		return ret;
	}

	ret = drm_universal_plane_init(drm, &primary->base, 0,
				       &armada_primary_plane_funcs,
				       armada_primary_formats,
				       ARRAY_SIZE(armada_primary_formats),
				       DRM_PLANE_TYPE_PRIMARY, NULL);
	if (ret) {
		kfree(primary);
		return ret;
	}

	ret = drm_crtc_init_with_planes(drm, &dcrtc->crtc, &primary->base, NULL,
					&armada_crtc_funcs, NULL);
	if (ret)
		goto err_crtc_init;

	drm_crtc_helper_add(&dcrtc->crtc, &armada_crtc_helper_funcs);

	drm_object_attach_property(&dcrtc->crtc.base, priv->csc_yuv_prop,
				   dcrtc->csc_yuv_mode);
	drm_object_attach_property(&dcrtc->crtc.base, priv->csc_rgb_prop,
				   dcrtc->csc_rgb_mode);

	return armada_overlay_plane_create(drm, 1 << dcrtc->num);

err_crtc_init:
	primary->base.funcs->destroy(&primary->base);
	return ret;
}
static int
armada_lcd_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = data;
	struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	int irq = platform_get_irq(pdev, 0);
	const struct armada_variant *variant;
	struct device_node *port = NULL;

	if (irq < 0)
		return irq;

	if (!dev->of_node) {
		const struct platform_device_id *id;

		id = platform_get_device_id(pdev);
		if (!id)
			return -ENXIO;

		variant = (const struct armada_variant *)id->driver_data;
	} else {
		const struct of_device_id *match;
		struct device_node *np, *parent = dev->of_node;

		match = of_match_device(dev->driver->of_match_table, dev);
		if (!match)
			return -ENXIO;

		np = of_get_child_by_name(parent, "ports");
		if (np)
			parent = np;
		port = of_get_child_by_name(parent, "port");
		of_node_put(np);
		if (!port) {
			dev_err(dev, "no port node found in %s\n",
				parent->full_name);
			return -ENXIO;
		}

		variant = match->data;
	}

	return armada_drm_crtc_create(drm, dev, res, irq, variant, port);
}

static void
armada_lcd_unbind(struct device *dev, struct device *master, void *data)
{
	struct armada_crtc *dcrtc = dev_get_drvdata(dev);

	armada_drm_crtc_destroy(&dcrtc->crtc);
}

static const struct component_ops armada_lcd_ops = {
	.bind = armada_lcd_bind,
	.unbind = armada_lcd_unbind,
};

static int armada_lcd_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &armada_lcd_ops);
}

static int armada_lcd_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &armada_lcd_ops);
	return 0;
}

static struct of_device_id armada_lcd_of_match[] = {
	{
		.compatible = "marvell,dove-lcd",
		.data = &armada510_ops,
	},
	{}
};
MODULE_DEVICE_TABLE(of, armada_lcd_of_match);

static const struct platform_device_id armada_lcd_platform_ids[] = {
	{
		.name = "armada-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	}, {
		.name = "armada-510-lcd",
		.driver_data = (unsigned long)&armada510_ops,
	},
	{ },
};
MODULE_DEVICE_TABLE(platform, armada_lcd_platform_ids);

struct platform_driver armada_lcd_platform_driver = {
	.probe = armada_lcd_probe,
	.remove = armada_lcd_remove,
	.driver = {
		.name = "armada-lcd",
		.owner = THIS_MODULE,
		.of_match_table = armada_lcd_of_match,
	},
	.id_table = armada_lcd_platform_ids,
};