exynos_mixer.c

/*
 * Copyright (C) 2011 Samsung Electronics Co.Ltd
 * Authors:
 * Seung-Woo Kim <sw0312.kim@samsung.com>
 * Inki Dae <inki.dae@samsung.com>
 * Joonyoung Shim <jy0922.shim@samsung.com>
 *
 * Based on drivers/media/video/s5p-tv/mixer_reg.c
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 */

#include <drm/drmP.h>
#include "regs-mixer.h"
#include "regs-vp.h"

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/wait.h>
#include <linux/i2c.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/clk.h>
#include <linux/regulator/consumer.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/component.h>

#include <drm/exynos_drm.h>

#include "exynos_drm_drv.h"
#include "exynos_drm_crtc.h"
#include "exynos_drm_fb.h"
#include "exynos_drm_plane.h"
#include "exynos_drm_iommu.h"

#define MIXER_WIN_NR		3
#define VP_DEFAULT_WIN		2

/*
 * Mixer color space conversion coefficient triplet.
 * Used for CSC from RGB to YCbCr.
 * Each coefficient is a 10-bit fixed point number with
 * sign and no integer part, i.e.
 * [0:8] = fractional part (representing a value y = x / 2^9)
 * [9] = sign
 * Negative values are encoded with two's complement.
 */
#define MXR_CSC_C(x) ((int)((x) * 512.0) & 0x3ff)
#define MXR_CSC_CT(a0, a1, a2) \
	((MXR_CSC_C(a0) << 20) | (MXR_CSC_C(a1) << 10) | (MXR_CSC_C(a2) << 0))
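
/*
 * Worked example of the encoding above (illustrative note only, not used
 * by the driver): the coefficient 0.614 becomes (int)(0.614 * 512) = 314
 * = 0x13a, while -0.102 becomes -52, i.e. 0x3cc after masking with 0x3ff
 * (10-bit two's complement). MXR_CSC_CT() then packs three such fields
 * into bits [29:20], [19:10] and [9:0] of one coefficient register.
 */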

/* YCbCr value, used for mixer background color configuration. */
#define MXR_YCBCR_VAL(y, cb, cr) (((y) << 16) | ((cb) << 8) | ((cr) << 0))
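/* Note: MXR_YCBCR_VAL(0, 128, 128) is YCbCr black, used below as background. */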

/* The pixelformats that are natively supported by the mixer. */
#define MXR_FORMAT_RGB565	4
#define MXR_FORMAT_ARGB1555	5
#define MXR_FORMAT_ARGB4444	6
#define MXR_FORMAT_ARGB8888	7

enum mixer_version_id {
	MXR_VER_0_0_0_16,
	MXR_VER_16_0_33_0,
	MXR_VER_128_0_0_184,
};

enum mixer_flag_bits {
	MXR_BIT_POWERED,
	MXR_BIT_VSYNC,
	MXR_BIT_INTERLACE,
	MXR_BIT_VP_ENABLED,
	MXR_BIT_HAS_SCLK,
};

static const uint32_t mixer_formats[] = {
	DRM_FORMAT_XRGB4444,
	DRM_FORMAT_ARGB4444,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
};

static const uint32_t vp_formats[] = {
	DRM_FORMAT_NV12,
	DRM_FORMAT_NV21,
};

struct mixer_context {
	struct platform_device *pdev;
	struct device *dev;
	struct drm_device *drm_dev;
	struct exynos_drm_crtc *crtc;
	struct exynos_drm_plane planes[MIXER_WIN_NR];
	unsigned long flags;

	int irq;
	void __iomem *mixer_regs;
	void __iomem *vp_regs;
	spinlock_t reg_slock;
	struct clk *mixer;
	struct clk *vp;
	struct clk *hdmi;
	struct clk *sclk_mixer;
	struct clk *sclk_hdmi;
	struct clk *mout_mixer;
	enum mixer_version_id mxr_ver;
	int scan_value;
};

struct mixer_drv_data {
	enum mixer_version_id version;
	bool is_vp_enabled;
	bool has_sclk;
};

static const struct exynos_drm_plane_config plane_configs[MIXER_WIN_NR] = {
	{
		.zpos = 0,
		.type = DRM_PLANE_TYPE_PRIMARY,
		.pixel_formats = mixer_formats,
		.num_pixel_formats = ARRAY_SIZE(mixer_formats),
		.capabilities = EXYNOS_DRM_PLANE_CAP_DOUBLE |
				EXYNOS_DRM_PLANE_CAP_ZPOS,
	}, {
		.zpos = 1,
		.type = DRM_PLANE_TYPE_CURSOR,
		.pixel_formats = mixer_formats,
		.num_pixel_formats = ARRAY_SIZE(mixer_formats),
		.capabilities = EXYNOS_DRM_PLANE_CAP_DOUBLE |
				EXYNOS_DRM_PLANE_CAP_ZPOS,
	}, {
		.zpos = 2,
		.type = DRM_PLANE_TYPE_OVERLAY,
		.pixel_formats = vp_formats,
		.num_pixel_formats = ARRAY_SIZE(vp_formats),
		.capabilities = EXYNOS_DRM_PLANE_CAP_SCALE |
				EXYNOS_DRM_PLANE_CAP_ZPOS |
				EXYNOS_DRM_PLANE_CAP_TILE,
	},
};

static const u8 filter_y_horiz_tap8[] = {
	0, -1, -1, -1, -1, -1, -1, -1,
	-1, -1, -1, -1, -1, 0, 0, 0,
	0, 2, 4, 5, 6, 6, 6, 6,
	6, 5, 5, 4, 3, 2, 1, 1,
	0, -6, -12, -16, -18, -20, -21, -20,
	-20, -18, -16, -13, -10, -8, -5, -2,
	127, 126, 125, 121, 114, 107, 99, 89,
	79, 68, 57, 46, 35, 25, 16, 8,
};

static const u8 filter_y_vert_tap4[] = {
	0, -3, -6, -8, -8, -8, -8, -7,
	-6, -5, -4, -3, -2, -1, -1, 0,
	127, 126, 124, 118, 111, 102, 92, 81,
	70, 59, 48, 37, 27, 19, 11, 5,
	0, 5, 11, 19, 27, 37, 48, 59,
	70, 81, 92, 102, 111, 118, 124, 126,
	0, 0, -1, -1, -2, -3, -4, -5,
	-6, -7, -8, -8, -8, -8, -6, -3,
};

static const u8 filter_cr_horiz_tap4[] = {
	0, -3, -6, -8, -8, -8, -8, -7,
	-6, -5, -4, -3, -2, -1, -1, 0,
	127, 126, 124, 118, 111, 102, 92, 81,
	70, 59, 48, 37, 27, 19, 11, 5,
};

static inline u32 vp_reg_read(struct mixer_context *ctx, u32 reg_id)
{
	return readl(ctx->vp_regs + reg_id);
}

static inline void vp_reg_write(struct mixer_context *ctx, u32 reg_id,
				u32 val)
{
	writel(val, ctx->vp_regs + reg_id);
}

static inline void vp_reg_writemask(struct mixer_context *ctx, u32 reg_id,
				u32 val, u32 mask)
{
	u32 old = vp_reg_read(ctx, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, ctx->vp_regs + reg_id);
}

static inline u32 mixer_reg_read(struct mixer_context *ctx, u32 reg_id)
{
	return readl(ctx->mixer_regs + reg_id);
}

static inline void mixer_reg_write(struct mixer_context *ctx, u32 reg_id,
				u32 val)
{
	writel(val, ctx->mixer_regs + reg_id);
}

static inline void mixer_reg_writemask(struct mixer_context *ctx,
				u32 reg_id, u32 val, u32 mask)
{
	u32 old = mixer_reg_read(ctx, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, ctx->mixer_regs + reg_id);
}

static void mixer_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEBUG_KMS(#reg_id " = %08x\n", \
		(u32)readl(ctx->mixer_regs + reg_id)); \
} while (0)

	DUMPREG(MXR_STATUS);
	DUMPREG(MXR_CFG);
	DUMPREG(MXR_INT_EN);
	DUMPREG(MXR_INT_STATUS);

	DUMPREG(MXR_LAYER_CFG);
	DUMPREG(MXR_VIDEO_CFG);

	DUMPREG(MXR_GRAPHIC0_CFG);
	DUMPREG(MXR_GRAPHIC0_BASE);
	DUMPREG(MXR_GRAPHIC0_SPAN);
	DUMPREG(MXR_GRAPHIC0_WH);
	DUMPREG(MXR_GRAPHIC0_SXY);
	DUMPREG(MXR_GRAPHIC0_DXY);

	DUMPREG(MXR_GRAPHIC1_CFG);
	DUMPREG(MXR_GRAPHIC1_BASE);
	DUMPREG(MXR_GRAPHIC1_SPAN);
	DUMPREG(MXR_GRAPHIC1_WH);
	DUMPREG(MXR_GRAPHIC1_SXY);
	DUMPREG(MXR_GRAPHIC1_DXY);
#undef DUMPREG
}

static void vp_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEBUG_KMS(#reg_id " = %08x\n", \
		(u32) readl(ctx->vp_regs + reg_id)); \
} while (0)

	DUMPREG(VP_ENABLE);
	DUMPREG(VP_SRESET);
	DUMPREG(VP_SHADOW_UPDATE);
	DUMPREG(VP_FIELD_ID);
	DUMPREG(VP_MODE);
	DUMPREG(VP_IMG_SIZE_Y);
	DUMPREG(VP_IMG_SIZE_C);
	DUMPREG(VP_PER_RATE_CTRL);
	DUMPREG(VP_TOP_Y_PTR);
	DUMPREG(VP_BOT_Y_PTR);
	DUMPREG(VP_TOP_C_PTR);
	DUMPREG(VP_BOT_C_PTR);
	DUMPREG(VP_ENDIAN_MODE);
	DUMPREG(VP_SRC_H_POSITION);
	DUMPREG(VP_SRC_V_POSITION);
	DUMPREG(VP_SRC_WIDTH);
	DUMPREG(VP_SRC_HEIGHT);
	DUMPREG(VP_DST_H_POSITION);
	DUMPREG(VP_DST_V_POSITION);
	DUMPREG(VP_DST_WIDTH);
	DUMPREG(VP_DST_HEIGHT);
	DUMPREG(VP_H_RATIO);
	DUMPREG(VP_V_RATIO);
#undef DUMPREG
}

static inline void vp_filter_set(struct mixer_context *ctx,
		int reg_id, const u8 *data, unsigned int size)
{
	/* assure 4-byte alignment */
	BUG_ON(size & 3);
	for (; size; size -= 4, reg_id += 4, data += 4) {
		u32 val = (data[0] << 24) | (data[1] << 16) |
			(data[2] << 8) | data[3];
		vp_reg_write(ctx, reg_id, val);
	}
}

static void vp_default_filter(struct mixer_context *ctx)
{
	vp_filter_set(ctx, VP_POLY8_Y0_LL,
		filter_y_horiz_tap8, sizeof(filter_y_horiz_tap8));
	vp_filter_set(ctx, VP_POLY4_Y0_LL,
		filter_y_vert_tap4, sizeof(filter_y_vert_tap4));
	vp_filter_set(ctx, VP_POLY4_C0_LL,
		filter_cr_horiz_tap4, sizeof(filter_cr_horiz_tap4));
}

static void mixer_cfg_gfx_blend(struct mixer_context *ctx, unsigned int win,
				bool alpha)
{
	u32 val;

	val = MXR_GRP_CFG_COLOR_KEY_DISABLE; /* no blank key */
	if (alpha) {
		/* blending based on pixel alpha */
		val |= MXR_GRP_CFG_BLEND_PRE_MUL;
		val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
	}
	mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
			val, MXR_GRP_CFG_MISC_MASK);
}

static void mixer_cfg_vp_blend(struct mixer_context *ctx)
{
	u32 val;

	/*
	 * No blending at the moment since the NV12/NV21 pixelformats don't
	 * have an alpha channel. However the mixer supports a global alpha
	 * value for a layer. Once this functionality is exposed, we can
	 * support blending of the video layer through this.
	 */
	val = 0;
	mixer_reg_write(ctx, MXR_VIDEO_CFG, val);
}

static void mixer_vsync_set_update(struct mixer_context *ctx, bool enable)
{
	/* block update on vsync */
	mixer_reg_writemask(ctx, MXR_STATUS, enable ?
			MXR_STATUS_SYNC_ENABLE : 0, MXR_STATUS_SYNC_ENABLE);

	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
		vp_reg_write(ctx, VP_SHADOW_UPDATE, enable ?
			VP_SHADOW_UPDATE_ENABLE : 0);
}

static void mixer_cfg_scan(struct mixer_context *ctx, int width, int height)
{
	u32 val;

	/* choosing between interlace and progressive mode */
	val = test_bit(MXR_BIT_INTERLACE, &ctx->flags) ?
		MXR_CFG_SCAN_INTERLACE : MXR_CFG_SCAN_PROGRESSIVE;

	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
		mixer_reg_write(ctx, MXR_RESOLUTION,
			MXR_MXR_RES_HEIGHT(height) | MXR_MXR_RES_WIDTH(width));
	else
		val |= ctx->scan_value;

	mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_SCAN_MASK);
}

static void mixer_cfg_rgb_fmt(struct mixer_context *ctx, unsigned int height)
{
	u32 val;

	switch (height) {
	case 480:
	case 576:
		val = MXR_CFG_RGB601_0_255;
		break;
	case 720:
	case 1080:
	default:
		val = MXR_CFG_RGB709_16_235;
		/* Configure the BT.709 CSC matrix for full range RGB. */
		mixer_reg_write(ctx, MXR_CM_COEFF_Y,
			MXR_CSC_CT( 0.184, 0.614, 0.063) |
			MXR_CM_COEFF_RGB_FULL);
		mixer_reg_write(ctx, MXR_CM_COEFF_CB,
			MXR_CSC_CT(-0.102, -0.338, 0.440));
		mixer_reg_write(ctx, MXR_CM_COEFF_CR,
			MXR_CSC_CT( 0.440, -0.399, -0.040));
		break;
	}

	mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_RGB_FMT_MASK);
}

static void mixer_cfg_layer(struct mixer_context *ctx, unsigned int win,
			    unsigned int priority, bool enable)
{
	u32 val = enable ? ~0 : 0;

	switch (win) {
	case 0:
		mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP0_ENABLE);
		mixer_reg_writemask(ctx, MXR_LAYER_CFG,
				    MXR_LAYER_CFG_GRP0_VAL(priority),
				    MXR_LAYER_CFG_GRP0_MASK);
		break;
	case 1:
		mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP1_ENABLE);
		mixer_reg_writemask(ctx, MXR_LAYER_CFG,
				    MXR_LAYER_CFG_GRP1_VAL(priority),
				    MXR_LAYER_CFG_GRP1_MASK);
		break;
	case VP_DEFAULT_WIN:
		if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
			vp_reg_writemask(ctx, VP_ENABLE, val, VP_ENABLE_ON);
			mixer_reg_writemask(ctx, MXR_CFG, val,
					    MXR_CFG_VP_ENABLE);
			mixer_reg_writemask(ctx, MXR_LAYER_CFG,
					    MXR_LAYER_CFG_VP_VAL(priority),
					    MXR_LAYER_CFG_VP_MASK);
		}
		break;
	}
}

static void mixer_run(struct mixer_context *ctx)
{
	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_REG_RUN);
}

static void mixer_stop(struct mixer_context *ctx)
{
	int timeout = 20;

	mixer_reg_writemask(ctx, MXR_STATUS, 0, MXR_STATUS_REG_RUN);

	while (!(mixer_reg_read(ctx, MXR_STATUS) & MXR_STATUS_REG_IDLE) &&
	       --timeout)
		usleep_range(10000, 12000);
}

static void mixer_commit(struct mixer_context *ctx)
{
	struct drm_display_mode *mode = &ctx->crtc->base.state->adjusted_mode;

	mixer_cfg_scan(ctx, mode->hdisplay, mode->vdisplay);
	mixer_cfg_rgb_fmt(ctx, mode->vdisplay);
	mixer_run(ctx);
}

static void vp_video_buffer(struct mixer_context *ctx,
			    struct exynos_drm_plane *plane)
{
	struct exynos_drm_plane_state *state =
				to_exynos_plane_state(plane->base.state);
	struct drm_framebuffer *fb = state->base.fb;
	unsigned int priority = state->base.normalized_zpos + 1;
	unsigned long flags;
	dma_addr_t luma_addr[2], chroma_addr[2];
	bool is_tiled, is_nv21;
	u32 val;

	is_nv21 = (fb->format->format == DRM_FORMAT_NV21);
	is_tiled = (fb->modifier == DRM_FORMAT_MOD_SAMSUNG_64_32_TILE);

	luma_addr[0] = exynos_drm_fb_dma_addr(fb, 0);
	chroma_addr[0] = exynos_drm_fb_dma_addr(fb, 1);

	if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
		if (is_tiled) {
			luma_addr[1] = luma_addr[0] + 0x40;
			chroma_addr[1] = chroma_addr[0] + 0x40;
		} else {
			luma_addr[1] = luma_addr[0] + fb->pitches[0];
			chroma_addr[1] = chroma_addr[0] + fb->pitches[0];
		}
	} else {
		luma_addr[1] = 0;
		chroma_addr[1] = 0;
	}

	spin_lock_irqsave(&ctx->reg_slock, flags);

	/* interlace or progressive scan mode */
	val = (test_bit(MXR_BIT_INTERLACE, &ctx->flags) ? ~0 : 0);
	vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_LINE_SKIP);

	/* setup format */
	val = (is_nv21 ? VP_MODE_NV21 : VP_MODE_NV12);
	val |= (is_tiled ? VP_MODE_MEM_TILED : VP_MODE_MEM_LINEAR);
	vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_FMT_MASK);

	/* setting size of input image */
	vp_reg_write(ctx, VP_IMG_SIZE_Y, VP_IMG_HSIZE(fb->pitches[0]) |
		VP_IMG_VSIZE(fb->height));
	/* chroma plane for NV12/NV21 is half the height of the luma plane */
	vp_reg_write(ctx, VP_IMG_SIZE_C, VP_IMG_HSIZE(fb->pitches[0]) |
		VP_IMG_VSIZE(fb->height / 2));

	vp_reg_write(ctx, VP_SRC_WIDTH, state->src.w);
	vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h);
	vp_reg_write(ctx, VP_SRC_H_POSITION,
			VP_SRC_H_POSITION_VAL(state->src.x));
	vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y);

	vp_reg_write(ctx, VP_DST_WIDTH, state->crtc.w);
	vp_reg_write(ctx, VP_DST_H_POSITION, state->crtc.x);
	if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
		vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h / 2);
		vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y / 2);
	} else {
		vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h);
		vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y);
	}

	vp_reg_write(ctx, VP_H_RATIO, state->h_ratio);
	vp_reg_write(ctx, VP_V_RATIO, state->v_ratio);

	vp_reg_write(ctx, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);

	/* set buffer address to vp */
	vp_reg_write(ctx, VP_TOP_Y_PTR, luma_addr[0]);
	vp_reg_write(ctx, VP_BOT_Y_PTR, luma_addr[1]);
	vp_reg_write(ctx, VP_TOP_C_PTR, chroma_addr[0]);
	vp_reg_write(ctx, VP_BOT_C_PTR, chroma_addr[1]);

	mixer_cfg_layer(ctx, plane->index, priority, true);
	mixer_cfg_vp_blend(ctx);

	spin_unlock_irqrestore(&ctx->reg_slock, flags);

	mixer_regs_dump(ctx);
	vp_regs_dump(ctx);
}

static void mixer_layer_update(struct mixer_context *ctx)
{
	mixer_reg_writemask(ctx, MXR_CFG, ~0, MXR_CFG_LAYER_UPDATE);
}

static void mixer_graph_buffer(struct mixer_context *ctx,
			       struct exynos_drm_plane *plane)
{
	struct exynos_drm_plane_state *state =
				to_exynos_plane_state(plane->base.state);
	struct drm_framebuffer *fb = state->base.fb;
	unsigned int priority = state->base.normalized_zpos + 1;
	unsigned long flags;
	unsigned int win = plane->index;
	unsigned int x_ratio = 0, y_ratio = 0;
	unsigned int dst_x_offset, dst_y_offset;
	dma_addr_t dma_addr;
	unsigned int fmt;
	u32 val;

	switch (fb->format->format) {
	case DRM_FORMAT_XRGB4444:
	case DRM_FORMAT_ARGB4444:
		fmt = MXR_FORMAT_ARGB4444;
		break;
	case DRM_FORMAT_XRGB1555:
	case DRM_FORMAT_ARGB1555:
		fmt = MXR_FORMAT_ARGB1555;
		break;
	case DRM_FORMAT_RGB565:
		fmt = MXR_FORMAT_RGB565;
		break;
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
	default:
		fmt = MXR_FORMAT_ARGB8888;
		break;
	}

	/* ratio is already checked by common plane code */
	x_ratio = state->h_ratio == (1 << 15);
	y_ratio = state->v_ratio == (1 << 15);

	dst_x_offset = state->crtc.x;
	dst_y_offset = state->crtc.y;

	/* translate dma address base s.t. the source image offset is zero */
	dma_addr = exynos_drm_fb_dma_addr(fb, 0)
		+ (state->src.x * fb->format->cpp[0])
		+ (state->src.y * fb->pitches[0]);

	spin_lock_irqsave(&ctx->reg_slock, flags);

	/* setup format */
	mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
		MXR_GRP_CFG_FORMAT_VAL(fmt), MXR_GRP_CFG_FORMAT_MASK);

	/* setup geometry */
	mixer_reg_write(ctx, MXR_GRAPHIC_SPAN(win),
			fb->pitches[0] / fb->format->cpp[0]);

	val = MXR_GRP_WH_WIDTH(state->src.w);
	val |= MXR_GRP_WH_HEIGHT(state->src.h);
	val |= MXR_GRP_WH_H_SCALE(x_ratio);
	val |= MXR_GRP_WH_V_SCALE(y_ratio);
	mixer_reg_write(ctx, MXR_GRAPHIC_WH(win), val);

	/* setup offsets in display image */
	val = MXR_GRP_DXY_DX(dst_x_offset);
	val |= MXR_GRP_DXY_DY(dst_y_offset);
	mixer_reg_write(ctx, MXR_GRAPHIC_DXY(win), val);

	/* set buffer address to mixer */
	mixer_reg_write(ctx, MXR_GRAPHIC_BASE(win), dma_addr);

	mixer_cfg_layer(ctx, win, priority, true);
	mixer_cfg_gfx_blend(ctx, win, fb->format->has_alpha);

	/* layer update mandatory for mixer 16.0.33.0 */
	if (ctx->mxr_ver == MXR_VER_16_0_33_0 ||
	    ctx->mxr_ver == MXR_VER_128_0_0_184)
		mixer_layer_update(ctx);

	spin_unlock_irqrestore(&ctx->reg_slock, flags);

	mixer_regs_dump(ctx);
}

static void vp_win_reset(struct mixer_context *ctx)
{
	unsigned int tries = 100;

	vp_reg_write(ctx, VP_SRESET, VP_SRESET_PROCESSING);
	while (--tries) {
		/* waiting until VP_SRESET_PROCESSING is 0 */
		if (~vp_reg_read(ctx, VP_SRESET) & VP_SRESET_PROCESSING)
			break;
		mdelay(10);
	}
	WARN(tries == 0, "failed to reset Video Processor\n");
}

static void mixer_win_reset(struct mixer_context *ctx)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->reg_slock, flags);

	mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_DST_HDMI, MXR_CFG_DST_MASK);

	/* set output in RGB888 mode */
	mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_OUT_RGB888, MXR_CFG_OUT_MASK);

	/* 16 beat burst in DMA */
	mixer_reg_writemask(ctx, MXR_STATUS, MXR_STATUS_16_BURST,
		MXR_STATUS_BURST_MASK);

	/* reset default layer priority */
	mixer_reg_write(ctx, MXR_LAYER_CFG, 0);

	/* set all background colors to RGB (0,0,0) */
	mixer_reg_write(ctx, MXR_BG_COLOR0, MXR_YCBCR_VAL(0, 128, 128));
	mixer_reg_write(ctx, MXR_BG_COLOR1, MXR_YCBCR_VAL(0, 128, 128));
	mixer_reg_write(ctx, MXR_BG_COLOR2, MXR_YCBCR_VAL(0, 128, 128));

	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
		/* configuration of Video Processor Registers */
		vp_win_reset(ctx);
		vp_default_filter(ctx);
	}

	/* disable all layers */
	mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP0_ENABLE);
	mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP1_ENABLE);
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
		mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_VP_ENABLE);

	/* set all source image offsets to zero */
	mixer_reg_write(ctx, MXR_GRAPHIC_SXY(0), 0);
	mixer_reg_write(ctx, MXR_GRAPHIC_SXY(1), 0);

	spin_unlock_irqrestore(&ctx->reg_slock, flags);
}

static irqreturn_t mixer_irq_handler(int irq, void *arg)
{
	struct mixer_context *ctx = arg;
	u32 val, base, shadow;

	spin_lock(&ctx->reg_slock);

	/* read interrupt status for handling and clearing flags for VSYNC */
	val = mixer_reg_read(ctx, MXR_INT_STATUS);

	/* handling VSYNC */
	if (val & MXR_INT_STATUS_VSYNC) {
		/* the vsync interrupt uses different bits for read and clear */
		val |= MXR_INT_CLEAR_VSYNC;
		val &= ~MXR_INT_STATUS_VSYNC;

		/* interlaced scan needs to check the shadow registers */
		if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
			base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(0));
			shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(0));
			if (base != shadow)
				goto out;

			base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(1));
			shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(1));
			if (base != shadow)
				goto out;
		}

		drm_crtc_handle_vblank(&ctx->crtc->base);
	}

out:
	/* clear interrupts */
	mixer_reg_write(ctx, MXR_INT_STATUS, val);

	spin_unlock(&ctx->reg_slock);

	return IRQ_HANDLED;
}

static int mixer_resources_init(struct mixer_context *mixer_ctx)
{
	struct device *dev = &mixer_ctx->pdev->dev;
	struct resource *res;
	int ret;

	spin_lock_init(&mixer_ctx->reg_slock);

	mixer_ctx->mixer = devm_clk_get(dev, "mixer");
	if (IS_ERR(mixer_ctx->mixer)) {
		dev_err(dev, "failed to get clock 'mixer'\n");
		return -ENODEV;
	}

	mixer_ctx->hdmi = devm_clk_get(dev, "hdmi");
	if (IS_ERR(mixer_ctx->hdmi)) {
		dev_err(dev, "failed to get clock 'hdmi'\n");
		return PTR_ERR(mixer_ctx->hdmi);
	}

	mixer_ctx->sclk_hdmi = devm_clk_get(dev, "sclk_hdmi");
	if (IS_ERR(mixer_ctx->sclk_hdmi)) {
		dev_err(dev, "failed to get clock 'sclk_hdmi'\n");
		return -ENODEV;
	}

	res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_MEM, 0);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_ctx->mixer_regs = devm_ioremap(dev, res->start,
					     resource_size(res));
	if (mixer_ctx->mixer_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_IRQ, 0);
	if (res == NULL) {
		dev_err(dev, "get interrupt resource failed.\n");
		return -ENXIO;
	}

	ret = devm_request_irq(dev, res->start, mixer_irq_handler,
			       0, "drm_mixer", mixer_ctx);
	if (ret) {
		dev_err(dev, "request interrupt failed.\n");
		return ret;
	}
	mixer_ctx->irq = res->start;

	return 0;
}

static int vp_resources_init(struct mixer_context *mixer_ctx)
{
	struct device *dev = &mixer_ctx->pdev->dev;
	struct resource *res;

	mixer_ctx->vp = devm_clk_get(dev, "vp");
	if (IS_ERR(mixer_ctx->vp)) {
		dev_err(dev, "failed to get clock 'vp'\n");
		return -ENODEV;
	}

	if (test_bit(MXR_BIT_HAS_SCLK, &mixer_ctx->flags)) {
		mixer_ctx->sclk_mixer = devm_clk_get(dev, "sclk_mixer");
		if (IS_ERR(mixer_ctx->sclk_mixer)) {
			dev_err(dev, "failed to get clock 'sclk_mixer'\n");
			return -ENODEV;
		}
		mixer_ctx->mout_mixer = devm_clk_get(dev, "mout_mixer");
		if (IS_ERR(mixer_ctx->mout_mixer)) {
			dev_err(dev, "failed to get clock 'mout_mixer'\n");
			return -ENODEV;
		}

		if (mixer_ctx->sclk_hdmi && mixer_ctx->mout_mixer)
			clk_set_parent(mixer_ctx->mout_mixer,
				       mixer_ctx->sclk_hdmi);
	}

	res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_MEM, 1);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_ctx->vp_regs = devm_ioremap(dev, res->start,
					  resource_size(res));
	if (mixer_ctx->vp_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	return 0;
}

static int mixer_initialize(struct mixer_context *mixer_ctx,
			struct drm_device *drm_dev)
{
	int ret;
	struct exynos_drm_private *priv;

	priv = drm_dev->dev_private;
	mixer_ctx->drm_dev = drm_dev;

	/* acquire resources: regs, irqs, clocks */
	ret = mixer_resources_init(mixer_ctx);
	if (ret) {
		DRM_ERROR("mixer_resources_init failed ret=%d\n", ret);
		return ret;
	}

	if (test_bit(MXR_BIT_VP_ENABLED, &mixer_ctx->flags)) {
		/* acquire vp resources: regs, irqs, clocks */
		ret = vp_resources_init(mixer_ctx);
		if (ret) {
			DRM_ERROR("vp_resources_init failed ret=%d\n", ret);
			return ret;
		}
	}

	return drm_iommu_attach_device(drm_dev, mixer_ctx->dev);
}

static void mixer_ctx_remove(struct mixer_context *mixer_ctx)
{
	drm_iommu_detach_device(mixer_ctx->drm_dev, mixer_ctx->dev);
}

static int mixer_enable_vblank(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	__set_bit(MXR_BIT_VSYNC, &mixer_ctx->flags);
	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return 0;

	/* enable vsync interrupt */
	mixer_reg_writemask(mixer_ctx, MXR_INT_STATUS, ~0, MXR_INT_CLEAR_VSYNC);
	mixer_reg_writemask(mixer_ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);

	return 0;
}

static void mixer_disable_vblank(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	__clear_bit(MXR_BIT_VSYNC, &mixer_ctx->flags);

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	/* disable vsync interrupt */
	mixer_reg_writemask(mixer_ctx, MXR_INT_STATUS, ~0, MXR_INT_CLEAR_VSYNC);
	mixer_reg_writemask(mixer_ctx, MXR_INT_EN, 0, MXR_INT_EN_VSYNC);
}

static void mixer_atomic_begin(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	mixer_vsync_set_update(mixer_ctx, false);
}

static void mixer_update_plane(struct exynos_drm_crtc *crtc,
			       struct exynos_drm_plane *plane)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	DRM_DEBUG_KMS("win: %d\n", plane->index);

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	if (plane->index == VP_DEFAULT_WIN)
		vp_video_buffer(mixer_ctx, plane);
	else
		mixer_graph_buffer(mixer_ctx, plane);
}

static void mixer_disable_plane(struct exynos_drm_crtc *crtc,
				struct exynos_drm_plane *plane)
{
	struct mixer_context *mixer_ctx = crtc->ctx;
	unsigned long flags;

	DRM_DEBUG_KMS("win: %d\n", plane->index);

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	spin_lock_irqsave(&mixer_ctx->reg_slock, flags);
	mixer_cfg_layer(mixer_ctx, plane->index, 0, false);
	spin_unlock_irqrestore(&mixer_ctx->reg_slock, flags);
}

static void mixer_atomic_flush(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	mixer_vsync_set_update(mixer_ctx, true);
	exynos_crtc_handle_event(crtc);
}

static void mixer_enable(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *ctx = crtc->ctx;

	if (test_bit(MXR_BIT_POWERED, &ctx->flags))
		return;

	pm_runtime_get_sync(ctx->dev);

	exynos_drm_pipe_clk_enable(crtc, true);

	mixer_vsync_set_update(ctx, false);

	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_SOFT_RESET);

	if (test_bit(MXR_BIT_VSYNC, &ctx->flags)) {
		mixer_reg_writemask(ctx, MXR_INT_STATUS, ~0,
					MXR_INT_CLEAR_VSYNC);
		mixer_reg_writemask(ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);
	}
	mixer_win_reset(ctx);

	mixer_commit(ctx);

	mixer_vsync_set_update(ctx, true);

	set_bit(MXR_BIT_POWERED, &ctx->flags);
}

static void mixer_disable(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *ctx = crtc->ctx;
	int i;

	if (!test_bit(MXR_BIT_POWERED, &ctx->flags))
		return;

	mixer_stop(ctx);
	mixer_regs_dump(ctx);

	for (i = 0; i < MIXER_WIN_NR; i++)
		mixer_disable_plane(crtc, &ctx->planes[i]);

	exynos_drm_pipe_clk_enable(crtc, false);

	pm_runtime_put(ctx->dev);

	clear_bit(MXR_BIT_POWERED, &ctx->flags);
}

static int mixer_mode_valid(struct exynos_drm_crtc *crtc,
		const struct drm_display_mode *mode)
{
	struct mixer_context *ctx = crtc->ctx;
	u32 w = mode->hdisplay, h = mode->vdisplay;

	DRM_DEBUG_KMS("xres=%d, yres=%d, refresh=%d, intl=%d\n", w, h,
		mode->vrefresh, !!(mode->flags & DRM_MODE_FLAG_INTERLACE));

	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
		return MODE_OK;

	if ((w >= 464 && w <= 720 && h >= 261 && h <= 576) ||
	    (w >= 1024 && w <= 1280 && h >= 576 && h <= 720) ||
	    (w >= 1664 && w <= 1920 && h >= 936 && h <= 1080))
		return MODE_OK;

	if ((w == 1024 && h == 768) ||
	    (w == 1366 && h == 768) ||
	    (w == 1280 && h == 1024))
		return MODE_OK;

	return MODE_BAD;
}

static bool mixer_mode_fixup(struct exynos_drm_crtc *crtc,
		       const struct drm_display_mode *mode,
		       struct drm_display_mode *adjusted_mode)
{
	struct mixer_context *ctx = crtc->ctx;
	int width = mode->hdisplay, height = mode->vdisplay, i;

	struct {
		int hdisplay, vdisplay, htotal, vtotal, scan_val;
	} static const modes[] = {
		{ 720, 480, 858, 525, MXR_CFG_SCAN_NTSC | MXR_CFG_SCAN_SD },
		{ 720, 576, 864, 625, MXR_CFG_SCAN_PAL | MXR_CFG_SCAN_SD },
		{ 1280, 720, 1650, 750, MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD },
		{ 1920, 1080, 2200, 1125, MXR_CFG_SCAN_HD_1080 |
			MXR_CFG_SCAN_HD }
	};

	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		__set_bit(MXR_BIT_INTERLACE, &ctx->flags);
	else
		__clear_bit(MXR_BIT_INTERLACE, &ctx->flags);

	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
		return true;

	for (i = 0; i < ARRAY_SIZE(modes); ++i)
		if (width <= modes[i].hdisplay && height <= modes[i].vdisplay) {
			ctx->scan_value = modes[i].scan_val;
			if (width < modes[i].hdisplay ||
			    height < modes[i].vdisplay) {
				adjusted_mode->hdisplay = modes[i].hdisplay;
				adjusted_mode->hsync_start = modes[i].hdisplay;
				adjusted_mode->hsync_end = modes[i].htotal;
				adjusted_mode->htotal = modes[i].htotal;
				adjusted_mode->vdisplay = modes[i].vdisplay;
				adjusted_mode->vsync_start = modes[i].vdisplay;
				adjusted_mode->vsync_end = modes[i].vtotal;
				adjusted_mode->vtotal = modes[i].vtotal;
			}

			return true;
		}

	return false;
}

static const struct exynos_drm_crtc_ops mixer_crtc_ops = {
	.enable = mixer_enable,
	.disable = mixer_disable,
	.enable_vblank = mixer_enable_vblank,
	.disable_vblank = mixer_disable_vblank,
	.atomic_begin = mixer_atomic_begin,
	.update_plane = mixer_update_plane,
	.disable_plane = mixer_disable_plane,
	.atomic_flush = mixer_atomic_flush,
	.mode_valid = mixer_mode_valid,
	.mode_fixup = mixer_mode_fixup,
};

static const struct mixer_drv_data exynos5420_mxr_drv_data = {
	.version = MXR_VER_128_0_0_184,
	.is_vp_enabled = 0,
};

static const struct mixer_drv_data exynos5250_mxr_drv_data = {
	.version = MXR_VER_16_0_33_0,
	.is_vp_enabled = 0,
};

static const struct mixer_drv_data exynos4212_mxr_drv_data = {
	.version = MXR_VER_0_0_0_16,
	.is_vp_enabled = 1,
};

static const struct mixer_drv_data exynos4210_mxr_drv_data = {
	.version = MXR_VER_0_0_0_16,
	.is_vp_enabled = 1,
	.has_sclk = 1,
};

static const struct of_device_id mixer_match_types[] = {
	{
		.compatible = "samsung,exynos4210-mixer",
		.data = &exynos4210_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos4212-mixer",
		.data = &exynos4212_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos5-mixer",
		.data = &exynos5250_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos5250-mixer",
		.data = &exynos5250_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos5420-mixer",
		.data = &exynos5420_mxr_drv_data,
	}, {
		/* end node */
	}
};
MODULE_DEVICE_TABLE(of, mixer_match_types);

static int mixer_bind(struct device *dev, struct device *manager, void *data)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);
	struct drm_device *drm_dev = data;
	struct exynos_drm_plane *exynos_plane;
	unsigned int i;
	int ret;

	ret = mixer_initialize(ctx, drm_dev);
	if (ret)
		return ret;

	for (i = 0; i < MIXER_WIN_NR; i++) {
		if (i == VP_DEFAULT_WIN && !test_bit(MXR_BIT_VP_ENABLED,
						     &ctx->flags))
			continue;

		ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
					&plane_configs[i]);
		if (ret)
			return ret;
	}

	exynos_plane = &ctx->planes[DEFAULT_WIN];
	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
			EXYNOS_DISPLAY_TYPE_HDMI, &mixer_crtc_ops, ctx);
	if (IS_ERR(ctx->crtc)) {
		mixer_ctx_remove(ctx);
		ret = PTR_ERR(ctx->crtc);
		goto free_ctx;
	}

	return 0;

free_ctx:
	devm_kfree(dev, ctx);
	return ret;
}

static void mixer_unbind(struct device *dev, struct device *master, void *data)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);

	mixer_ctx_remove(ctx);
}

static const struct component_ops mixer_component_ops = {
	.bind = mixer_bind,
	.unbind = mixer_unbind,
};

static int mixer_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	const struct mixer_drv_data *drv;
	struct mixer_context *ctx;
	int ret;

	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_ERROR("failed to alloc mixer context.\n");
		return -ENOMEM;
	}

	drv = of_device_get_match_data(dev);

	ctx->pdev = pdev;
	ctx->dev = dev;
	ctx->mxr_ver = drv->version;

	if (drv->is_vp_enabled)
		__set_bit(MXR_BIT_VP_ENABLED, &ctx->flags);
	if (drv->has_sclk)
		__set_bit(MXR_BIT_HAS_SCLK, &ctx->flags);

	platform_set_drvdata(pdev, ctx);

	ret = component_add(&pdev->dev, &mixer_component_ops);
	if (!ret)
		pm_runtime_enable(dev);

	return ret;
}

static int mixer_remove(struct platform_device *pdev)
{
	pm_runtime_disable(&pdev->dev);

	component_del(&pdev->dev, &mixer_component_ops);

	return 0;
}

static int __maybe_unused exynos_mixer_suspend(struct device *dev)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);

	clk_disable_unprepare(ctx->hdmi);
	clk_disable_unprepare(ctx->mixer);
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
		clk_disable_unprepare(ctx->vp);
		if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags))
			clk_disable_unprepare(ctx->sclk_mixer);
	}

	return 0;
}

static int __maybe_unused exynos_mixer_resume(struct device *dev)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(ctx->mixer);
	if (ret < 0) {
		DRM_ERROR("Failed to prepare_enable the mixer clk [%d]\n", ret);
		return ret;
	}
	ret = clk_prepare_enable(ctx->hdmi);
	if (ret < 0) {
		DRM_ERROR("Failed to prepare_enable the hdmi clk [%d]\n", ret);
		return ret;
	}
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
		ret = clk_prepare_enable(ctx->vp);
		if (ret < 0) {
			DRM_ERROR("Failed to prepare_enable the vp clk [%d]\n",
				  ret);
			return ret;
		}
		if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags)) {
			ret = clk_prepare_enable(ctx->sclk_mixer);
			if (ret < 0) {
				DRM_ERROR("Failed to prepare_enable the "
					  "sclk_mixer clk [%d]\n",
					  ret);
				return ret;
			}
		}
	}

	return 0;
}

static const struct dev_pm_ops exynos_mixer_pm_ops = {
	SET_RUNTIME_PM_OPS(exynos_mixer_suspend, exynos_mixer_resume, NULL)
};

struct platform_driver mixer_driver = {
	.driver = {
		.name = "exynos-mixer",
		.owner = THIS_MODULE,
		.pm = &exynos_mixer_pm_ops,
		.of_match_table = mixer_match_types,
	},
	.probe = mixer_probe,
	.remove = mixer_remove,
};