exynos_mixer.c

/*
 * Copyright (C) 2011 Samsung Electronics Co.Ltd
 * Authors:
 * Seung-Woo Kim <sw0312.kim@samsung.com>
 * Inki Dae <inki.dae@samsung.com>
 * Joonyoung Shim <jy0922.shim@samsung.com>
 *
 * Based on drivers/media/video/s5p-tv/mixer_reg.c
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 */

#include <drm/drmP.h>
#include "regs-mixer.h"
#include "regs-vp.h"

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/wait.h>
#include <linux/i2c.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/clk.h>
#include <linux/regulator/consumer.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/component.h>

#include <drm/exynos_drm.h>

#include "exynos_drm_drv.h"
#include "exynos_drm_crtc.h"
#include "exynos_drm_fb.h"
#include "exynos_drm_plane.h"
#include "exynos_drm_iommu.h"

#define MIXER_WIN_NR 3
#define VP_DEFAULT_WIN 2

/*
 * Mixer color space conversion coefficient triplet.
 * Used for CSC from RGB to YCbCr.
 * Each coefficient is a 10-bit fixed point number with
 * sign and no integer part, i.e.
 * [0:8] = fractional part (representing a value y = x / 2^9)
 * [9] = sign
 * Negative values are encoded with two's complement.
 */
#define MXR_CSC_C(x) ((int)((x) * 512.0) & 0x3ff)
#define MXR_CSC_CT(a0, a1, a2) \
        ((MXR_CSC_C(a0) << 20) | (MXR_CSC_C(a1) << 10) | (MXR_CSC_C(a2) << 0))
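
/*
 * Worked example of the encoding described above: MXR_CSC_C(0.184)
 * evaluates to (int)(0.184 * 512) = 94 = 0x05e, while MXR_CSC_C(-0.102)
 * evaluates to -52, which the 0x3ff mask turns into the two's-complement
 * value 0x3cc.
 */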

/* YCbCr value, used for mixer background color configuration. */
#define MXR_YCBCR_VAL(y, cb, cr) (((y) << 16) | ((cb) << 8) | ((cr) << 0))

/* The pixelformats that are natively supported by the mixer. */
#define MXR_FORMAT_RGB565 4
#define MXR_FORMAT_ARGB1555 5
#define MXR_FORMAT_ARGB4444 6
#define MXR_FORMAT_ARGB8888 7

enum mixer_version_id {
        MXR_VER_0_0_0_16,
        MXR_VER_16_0_33_0,
        MXR_VER_128_0_0_184,
};

enum mixer_flag_bits {
        MXR_BIT_POWERED,
        MXR_BIT_VSYNC,
        MXR_BIT_INTERLACE,
        MXR_BIT_VP_ENABLED,
        MXR_BIT_HAS_SCLK,
};

static const uint32_t mixer_formats[] = {
        DRM_FORMAT_XRGB4444,
        DRM_FORMAT_ARGB4444,
        DRM_FORMAT_XRGB1555,
        DRM_FORMAT_ARGB1555,
        DRM_FORMAT_RGB565,
        DRM_FORMAT_XRGB8888,
        DRM_FORMAT_ARGB8888,
};

static const uint32_t vp_formats[] = {
        DRM_FORMAT_NV12,
        DRM_FORMAT_NV21,
};

struct mixer_context {
        struct platform_device *pdev;
        struct device *dev;
        struct drm_device *drm_dev;
        struct exynos_drm_crtc *crtc;
        struct exynos_drm_plane planes[MIXER_WIN_NR];
        unsigned long flags;

        int irq;
        void __iomem *mixer_regs;
        void __iomem *vp_regs;
        spinlock_t reg_slock;
        struct clk *mixer;
        struct clk *vp;
        struct clk *hdmi;
        struct clk *sclk_mixer;
        struct clk *sclk_hdmi;
        struct clk *mout_mixer;
        enum mixer_version_id mxr_ver;
        int scan_value;
};

struct mixer_drv_data {
        enum mixer_version_id version;
        bool is_vp_enabled;
        bool has_sclk;
};

static const struct exynos_drm_plane_config plane_configs[MIXER_WIN_NR] = {
        {
                .zpos = 0,
                .type = DRM_PLANE_TYPE_PRIMARY,
                .pixel_formats = mixer_formats,
                .num_pixel_formats = ARRAY_SIZE(mixer_formats),
                .capabilities = EXYNOS_DRM_PLANE_CAP_DOUBLE |
                                EXYNOS_DRM_PLANE_CAP_ZPOS |
                                EXYNOS_DRM_PLANE_CAP_PIX_BLEND |
                                EXYNOS_DRM_PLANE_CAP_WIN_BLEND,
        }, {
                .zpos = 1,
                .type = DRM_PLANE_TYPE_CURSOR,
                .pixel_formats = mixer_formats,
                .num_pixel_formats = ARRAY_SIZE(mixer_formats),
                .capabilities = EXYNOS_DRM_PLANE_CAP_DOUBLE |
                                EXYNOS_DRM_PLANE_CAP_ZPOS |
                                EXYNOS_DRM_PLANE_CAP_PIX_BLEND |
                                EXYNOS_DRM_PLANE_CAP_WIN_BLEND,
        }, {
                .zpos = 2,
                .type = DRM_PLANE_TYPE_OVERLAY,
                .pixel_formats = vp_formats,
                .num_pixel_formats = ARRAY_SIZE(vp_formats),
                .capabilities = EXYNOS_DRM_PLANE_CAP_SCALE |
                                EXYNOS_DRM_PLANE_CAP_ZPOS |
                                EXYNOS_DRM_PLANE_CAP_TILE |
                                EXYNOS_DRM_PLANE_CAP_WIN_BLEND,
        },
};
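
/*
 * Note: windows 0 and 1 above are the mixer's two graphics (RGB) layers,
 * while window 2 (VP_DEFAULT_WIN) is backed by the video processor and
 * therefore only accepts the NV12/NV21 formats and is the only window
 * that can scale.
 */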

static const u8 filter_y_horiz_tap8[] = {
        0, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, -1, 0, 0, 0,
        0, 2, 4, 5, 6, 6, 6, 6,
        6, 5, 5, 4, 3, 2, 1, 1,
        0, -6, -12, -16, -18, -20, -21, -20,
        -20, -18, -16, -13, -10, -8, -5, -2,
        127, 126, 125, 121, 114, 107, 99, 89,
        79, 68, 57, 46, 35, 25, 16, 8,
};

static const u8 filter_y_vert_tap4[] = {
        0, -3, -6, -8, -8, -8, -8, -7,
        -6, -5, -4, -3, -2, -1, -1, 0,
        127, 126, 124, 118, 111, 102, 92, 81,
        70, 59, 48, 37, 27, 19, 11, 5,
        0, 5, 11, 19, 27, 37, 48, 59,
        70, 81, 92, 102, 111, 118, 124, 126,
        0, 0, -1, -1, -2, -3, -4, -5,
        -6, -7, -8, -8, -8, -8, -6, -3,
};

static const u8 filter_cr_horiz_tap4[] = {
        0, -3, -6, -8, -8, -8, -8, -7,
        -6, -5, -4, -3, -2, -1, -1, 0,
        127, 126, 124, 118, 111, 102, 92, 81,
        70, 59, 48, 37, 27, 19, 11, 5,
};
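
/*
 * The three tables above hold the default polyphase scaler coefficients
 * (8-tap horizontal luma, 4-tap vertical luma, 4-tap horizontal chroma)
 * that vp_default_filter() below packs into the video processor's
 * VP_POLY* register banks.
 */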

static inline u32 vp_reg_read(struct mixer_context *ctx, u32 reg_id)
{
        return readl(ctx->vp_regs + reg_id);
}

static inline void vp_reg_write(struct mixer_context *ctx, u32 reg_id,
                u32 val)
{
        writel(val, ctx->vp_regs + reg_id);
}

static inline void vp_reg_writemask(struct mixer_context *ctx, u32 reg_id,
                u32 val, u32 mask)
{
        u32 old = vp_reg_read(ctx, reg_id);

        val = (val & mask) | (old & ~mask);
        writel(val, ctx->vp_regs + reg_id);
}

static inline u32 mixer_reg_read(struct mixer_context *ctx, u32 reg_id)
{
        return readl(ctx->mixer_regs + reg_id);
}

static inline void mixer_reg_write(struct mixer_context *ctx, u32 reg_id,
                u32 val)
{
        writel(val, ctx->mixer_regs + reg_id);
}

static inline void mixer_reg_writemask(struct mixer_context *ctx,
                u32 reg_id, u32 val, u32 mask)
{
        u32 old = mixer_reg_read(ctx, reg_id);

        val = (val & mask) | (old & ~mask);
        writel(val, ctx->mixer_regs + reg_id);
}

static void mixer_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
        DRM_DEBUG_KMS(#reg_id " = %08x\n", \
                (u32)readl(ctx->mixer_regs + reg_id)); \
} while (0)

        DUMPREG(MXR_STATUS);
        DUMPREG(MXR_CFG);
        DUMPREG(MXR_INT_EN);
        DUMPREG(MXR_INT_STATUS);

        DUMPREG(MXR_LAYER_CFG);
        DUMPREG(MXR_VIDEO_CFG);

        DUMPREG(MXR_GRAPHIC0_CFG);
        DUMPREG(MXR_GRAPHIC0_BASE);
        DUMPREG(MXR_GRAPHIC0_SPAN);
        DUMPREG(MXR_GRAPHIC0_WH);
        DUMPREG(MXR_GRAPHIC0_SXY);
        DUMPREG(MXR_GRAPHIC0_DXY);

        DUMPREG(MXR_GRAPHIC1_CFG);
        DUMPREG(MXR_GRAPHIC1_BASE);
        DUMPREG(MXR_GRAPHIC1_SPAN);
        DUMPREG(MXR_GRAPHIC1_WH);
        DUMPREG(MXR_GRAPHIC1_SXY);
        DUMPREG(MXR_GRAPHIC1_DXY);
#undef DUMPREG
}

static void vp_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
        DRM_DEBUG_KMS(#reg_id " = %08x\n", \
                (u32) readl(ctx->vp_regs + reg_id)); \
} while (0)

        DUMPREG(VP_ENABLE);
        DUMPREG(VP_SRESET);
        DUMPREG(VP_SHADOW_UPDATE);
        DUMPREG(VP_FIELD_ID);
        DUMPREG(VP_MODE);
        DUMPREG(VP_IMG_SIZE_Y);
        DUMPREG(VP_IMG_SIZE_C);
        DUMPREG(VP_PER_RATE_CTRL);
        DUMPREG(VP_TOP_Y_PTR);
        DUMPREG(VP_BOT_Y_PTR);
        DUMPREG(VP_TOP_C_PTR);
        DUMPREG(VP_BOT_C_PTR);
        DUMPREG(VP_ENDIAN_MODE);
        DUMPREG(VP_SRC_H_POSITION);
        DUMPREG(VP_SRC_V_POSITION);
        DUMPREG(VP_SRC_WIDTH);
        DUMPREG(VP_SRC_HEIGHT);
        DUMPREG(VP_DST_H_POSITION);
        DUMPREG(VP_DST_V_POSITION);
        DUMPREG(VP_DST_WIDTH);
        DUMPREG(VP_DST_HEIGHT);
        DUMPREG(VP_H_RATIO);
        DUMPREG(VP_V_RATIO);
#undef DUMPREG
}

static inline void vp_filter_set(struct mixer_context *ctx,
                int reg_id, const u8 *data, unsigned int size)
{
        /* assure 4-byte align */
        BUG_ON(size & 3);
        for (; size; size -= 4, reg_id += 4, data += 4) {
                u32 val = (data[0] << 24) | (data[1] << 16) |
                        (data[2] << 8) | data[3];
                vp_reg_write(ctx, reg_id, val);
        }
}

static void vp_default_filter(struct mixer_context *ctx)
{
        vp_filter_set(ctx, VP_POLY8_Y0_LL,
                filter_y_horiz_tap8, sizeof(filter_y_horiz_tap8));
        vp_filter_set(ctx, VP_POLY4_Y0_LL,
                filter_y_vert_tap4, sizeof(filter_y_vert_tap4));
        vp_filter_set(ctx, VP_POLY4_C0_LL,
                filter_cr_horiz_tap4, sizeof(filter_cr_horiz_tap4));
}
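
/*
 * The blend helpers below take the 16-bit alpha from the DRM plane state
 * (DRM_BLEND_ALPHA_OPAQUE == 0xffff); shifting it right by eight bits
 * gives the 8-bit per-window alpha that is ORed into the mixer's graphic
 * and video config registers.
 */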

static void mixer_cfg_gfx_blend(struct mixer_context *ctx, unsigned int win,
                unsigned int pixel_alpha, unsigned int alpha)
{
        u32 win_alpha = alpha >> 8;
        u32 val;

        val = MXR_GRP_CFG_COLOR_KEY_DISABLE; /* no blank key */
        switch (pixel_alpha) {
        case DRM_MODE_BLEND_PIXEL_NONE:
                break;
        case DRM_MODE_BLEND_COVERAGE:
                val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
                break;
        case DRM_MODE_BLEND_PREMULTI:
        default:
                val |= MXR_GRP_CFG_BLEND_PRE_MUL;
                val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
                break;
        }

        if (alpha != DRM_BLEND_ALPHA_OPAQUE) {
                val |= MXR_GRP_CFG_WIN_BLEND_EN;
                val |= win_alpha;
        }
        mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
                            val, MXR_GRP_CFG_MISC_MASK);
}

static void mixer_cfg_vp_blend(struct mixer_context *ctx, unsigned int alpha)
{
        u32 win_alpha = alpha >> 8;
        u32 val = 0;

        if (alpha != DRM_BLEND_ALPHA_OPAQUE) {
                val |= MXR_VID_CFG_BLEND_EN;
                val |= win_alpha;
        }
        mixer_reg_write(ctx, MXR_VIDEO_CFG, val);
}

static void mixer_vsync_set_update(struct mixer_context *ctx, bool enable)
{
        /* block update on vsync */
        mixer_reg_writemask(ctx, MXR_STATUS, enable ?
                        MXR_STATUS_SYNC_ENABLE : 0, MXR_STATUS_SYNC_ENABLE);

        if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
                vp_reg_write(ctx, VP_SHADOW_UPDATE, enable ?
                        VP_SHADOW_UPDATE_ENABLE : 0);
}

static void mixer_cfg_scan(struct mixer_context *ctx, int width, int height)
{
        u32 val;

        /* choosing between interlace and progressive mode */
        val = test_bit(MXR_BIT_INTERLACE, &ctx->flags) ?
                MXR_CFG_SCAN_INTERLACE : MXR_CFG_SCAN_PROGRESSIVE;

        if (ctx->mxr_ver == MXR_VER_128_0_0_184)
                mixer_reg_write(ctx, MXR_RESOLUTION,
                        MXR_MXR_RES_HEIGHT(height) | MXR_MXR_RES_WIDTH(width));
        else
                val |= ctx->scan_value;

        mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_SCAN_MASK);
}
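
/*
 * The RGB format below is selected purely from the vertical resolution:
 * 480/576 lines program MXR_CFG_RGB601_0_255, everything else programs
 * MXR_CFG_RGB709_16_235 together with an explicit BT.709 CSC matrix.
 */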

static void mixer_cfg_rgb_fmt(struct mixer_context *ctx, unsigned int height)
{
        u32 val;

        switch (height) {
        case 480:
        case 576:
                val = MXR_CFG_RGB601_0_255;
                break;
        case 720:
        case 1080:
        default:
                val = MXR_CFG_RGB709_16_235;
                /* Configure the BT.709 CSC matrix for full range RGB. */
                mixer_reg_write(ctx, MXR_CM_COEFF_Y,
                        MXR_CSC_CT( 0.184, 0.614, 0.063) |
                        MXR_CM_COEFF_RGB_FULL);
                mixer_reg_write(ctx, MXR_CM_COEFF_CB,
                        MXR_CSC_CT(-0.102, -0.338, 0.440));
                mixer_reg_write(ctx, MXR_CM_COEFF_CR,
                        MXR_CSC_CT( 0.440, -0.399, -0.040));
                break;
        }

        mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_RGB_FMT_MASK);
}

static void mixer_cfg_layer(struct mixer_context *ctx, unsigned int win,
                            unsigned int priority, bool enable)
{
        u32 val = enable ? ~0 : 0;

        switch (win) {
        case 0:
                mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP0_ENABLE);
                mixer_reg_writemask(ctx, MXR_LAYER_CFG,
                                    MXR_LAYER_CFG_GRP0_VAL(priority),
                                    MXR_LAYER_CFG_GRP0_MASK);
                break;
        case 1:
                mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP1_ENABLE);
                mixer_reg_writemask(ctx, MXR_LAYER_CFG,
                                    MXR_LAYER_CFG_GRP1_VAL(priority),
                                    MXR_LAYER_CFG_GRP1_MASK);
                break;
        case VP_DEFAULT_WIN:
                if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
                        vp_reg_writemask(ctx, VP_ENABLE, val, VP_ENABLE_ON);
                        mixer_reg_writemask(ctx, MXR_CFG, val,
                                            MXR_CFG_VP_ENABLE);
                        mixer_reg_writemask(ctx, MXR_LAYER_CFG,
                                            MXR_LAYER_CFG_VP_VAL(priority),
                                            MXR_LAYER_CFG_VP_MASK);
                }
                break;
        }
}

static void mixer_run(struct mixer_context *ctx)
{
        mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_REG_RUN);
}

static void mixer_stop(struct mixer_context *ctx)
{
        int timeout = 20;

        mixer_reg_writemask(ctx, MXR_STATUS, 0, MXR_STATUS_REG_RUN);

        while (!(mixer_reg_read(ctx, MXR_STATUS) & MXR_STATUS_REG_IDLE) &&
               --timeout)
                usleep_range(10000, 12000);
}
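
/*
 * With 20 iterations of 10-12 ms sleeps, mixer_stop() above polls for
 * MXR_STATUS_REG_IDLE for roughly 200-240 ms at most and then gives up
 * without reporting an error.
 */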

static void mixer_commit(struct mixer_context *ctx)
{
        struct drm_display_mode *mode = &ctx->crtc->base.state->adjusted_mode;

        mixer_cfg_scan(ctx, mode->hdisplay, mode->vdisplay);
        mixer_cfg_rgb_fmt(ctx, mode->vdisplay);
        mixer_run(ctx);
}

static void vp_video_buffer(struct mixer_context *ctx,
                            struct exynos_drm_plane *plane)
{
        struct exynos_drm_plane_state *state =
                                to_exynos_plane_state(plane->base.state);
        struct drm_framebuffer *fb = state->base.fb;
        unsigned int priority = state->base.normalized_zpos + 1;
        unsigned long flags;
        dma_addr_t luma_addr[2], chroma_addr[2];
        bool is_tiled, is_nv21;
        u32 val;

        is_nv21 = (fb->format->format == DRM_FORMAT_NV21);
        is_tiled = (fb->modifier == DRM_FORMAT_MOD_SAMSUNG_64_32_TILE);

        luma_addr[0] = exynos_drm_fb_dma_addr(fb, 0);
        chroma_addr[0] = exynos_drm_fb_dma_addr(fb, 1);

        if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
                if (is_tiled) {
                        luma_addr[1] = luma_addr[0] + 0x40;
                        chroma_addr[1] = chroma_addr[0] + 0x40;
                } else {
                        luma_addr[1] = luma_addr[0] + fb->pitches[0];
                        chroma_addr[1] = chroma_addr[0] + fb->pitches[1];
                }
        } else {
                luma_addr[1] = 0;
                chroma_addr[1] = 0;
        }

        spin_lock_irqsave(&ctx->reg_slock, flags);

        vp_reg_write(ctx, VP_SHADOW_UPDATE, 1);
        /* interlace or progressive scan mode */
        val = (test_bit(MXR_BIT_INTERLACE, &ctx->flags) ? ~0 : 0);
        vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_LINE_SKIP);

        /* setup format */
        val = (is_nv21 ? VP_MODE_NV21 : VP_MODE_NV12);
        val |= (is_tiled ? VP_MODE_MEM_TILED : VP_MODE_MEM_LINEAR);
        vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_FMT_MASK);

        /* setting size of input image */
        vp_reg_write(ctx, VP_IMG_SIZE_Y, VP_IMG_HSIZE(fb->pitches[0]) |
                VP_IMG_VSIZE(fb->height));
        /* chroma plane for NV12/NV21 is half the height of the luma plane */
        vp_reg_write(ctx, VP_IMG_SIZE_C, VP_IMG_HSIZE(fb->pitches[1]) |
                VP_IMG_VSIZE(fb->height / 2));

        vp_reg_write(ctx, VP_SRC_WIDTH, state->src.w);
        vp_reg_write(ctx, VP_SRC_H_POSITION,
                        VP_SRC_H_POSITION_VAL(state->src.x));
        vp_reg_write(ctx, VP_DST_WIDTH, state->crtc.w);
        vp_reg_write(ctx, VP_DST_H_POSITION, state->crtc.x);

        if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
                vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h / 2);
                vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y / 2);
                vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h / 2);
                vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y / 2);
        } else {
                vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h);
                vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y);
                vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h);
                vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y);
        }

        vp_reg_write(ctx, VP_H_RATIO, state->h_ratio);
        vp_reg_write(ctx, VP_V_RATIO, state->v_ratio);

        vp_reg_write(ctx, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);

        /* set buffer address to vp */
        vp_reg_write(ctx, VP_TOP_Y_PTR, luma_addr[0]);
        vp_reg_write(ctx, VP_BOT_Y_PTR, luma_addr[1]);
        vp_reg_write(ctx, VP_TOP_C_PTR, chroma_addr[0]);
        vp_reg_write(ctx, VP_BOT_C_PTR, chroma_addr[1]);

        mixer_cfg_layer(ctx, plane->index, priority, true);
        mixer_cfg_vp_blend(ctx, state->base.alpha);

        spin_unlock_irqrestore(&ctx->reg_slock, flags);

        mixer_regs_dump(ctx);
        vp_regs_dump(ctx);
}

static void mixer_layer_update(struct mixer_context *ctx)
{
        mixer_reg_writemask(ctx, MXR_CFG, ~0, MXR_CFG_LAYER_UPDATE);
}

static void mixer_graph_buffer(struct mixer_context *ctx,
                               struct exynos_drm_plane *plane)
{
        struct exynos_drm_plane_state *state =
                                to_exynos_plane_state(plane->base.state);
        struct drm_framebuffer *fb = state->base.fb;
        unsigned int priority = state->base.normalized_zpos + 1;
        unsigned long flags;
        unsigned int win = plane->index;
        unsigned int x_ratio = 0, y_ratio = 0;
        unsigned int dst_x_offset, dst_y_offset;
        unsigned int pixel_alpha;
        dma_addr_t dma_addr;
        unsigned int fmt;
        u32 val;

        if (fb->format->has_alpha)
                pixel_alpha = state->base.pixel_blend_mode;
        else
                pixel_alpha = DRM_MODE_BLEND_PIXEL_NONE;

        switch (fb->format->format) {
        case DRM_FORMAT_XRGB4444:
        case DRM_FORMAT_ARGB4444:
                fmt = MXR_FORMAT_ARGB4444;
                break;

        case DRM_FORMAT_XRGB1555:
        case DRM_FORMAT_ARGB1555:
                fmt = MXR_FORMAT_ARGB1555;
                break;

        case DRM_FORMAT_RGB565:
                fmt = MXR_FORMAT_RGB565;
                break;

        case DRM_FORMAT_XRGB8888:
        case DRM_FORMAT_ARGB8888:
        default:
                fmt = MXR_FORMAT_ARGB8888;
                break;
        }

        /* ratio is already checked by common plane code */
        x_ratio = state->h_ratio == (1 << 15);
        y_ratio = state->v_ratio == (1 << 15);

        dst_x_offset = state->crtc.x;
        dst_y_offset = state->crtc.y;

        /* translate dma address base s.t. the source image offset is zero */
        dma_addr = exynos_drm_fb_dma_addr(fb, 0)
                + (state->src.x * fb->format->cpp[0])
                + (state->src.y * fb->pitches[0]);

        spin_lock_irqsave(&ctx->reg_slock, flags);

        /* setup format */
        mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
                MXR_GRP_CFG_FORMAT_VAL(fmt), MXR_GRP_CFG_FORMAT_MASK);

        /* setup geometry */
        mixer_reg_write(ctx, MXR_GRAPHIC_SPAN(win),
                        fb->pitches[0] / fb->format->cpp[0]);

        val = MXR_GRP_WH_WIDTH(state->src.w);
        val |= MXR_GRP_WH_HEIGHT(state->src.h);
        val |= MXR_GRP_WH_H_SCALE(x_ratio);
        val |= MXR_GRP_WH_V_SCALE(y_ratio);
        mixer_reg_write(ctx, MXR_GRAPHIC_WH(win), val);

        /* setup offsets in display image */
        val = MXR_GRP_DXY_DX(dst_x_offset);
        val |= MXR_GRP_DXY_DY(dst_y_offset);
        mixer_reg_write(ctx, MXR_GRAPHIC_DXY(win), val);

        /* set buffer address to mixer */
        mixer_reg_write(ctx, MXR_GRAPHIC_BASE(win), dma_addr);

        mixer_cfg_layer(ctx, win, priority, true);
        mixer_cfg_gfx_blend(ctx, win, pixel_alpha, state->base.alpha);

        /* layer update mandatory for mixer 16.0.33.0 */
        if (ctx->mxr_ver == MXR_VER_16_0_33_0 ||
            ctx->mxr_ver == MXR_VER_128_0_0_184)
                mixer_layer_update(ctx);

        spin_unlock_irqrestore(&ctx->reg_slock, flags);

        mixer_regs_dump(ctx);
}

static void vp_win_reset(struct mixer_context *ctx)
{
        unsigned int tries = 100;

        vp_reg_write(ctx, VP_SRESET, VP_SRESET_PROCESSING);
        while (--tries) {
                /* waiting until VP_SRESET_PROCESSING is 0 */
                if (~vp_reg_read(ctx, VP_SRESET) & VP_SRESET_PROCESSING)
                        break;
                mdelay(10);
        }
        WARN(tries == 0, "failed to reset Video Processor\n");
}

static void mixer_win_reset(struct mixer_context *ctx)
{
        unsigned long flags;

        spin_lock_irqsave(&ctx->reg_slock, flags);

        mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_DST_HDMI, MXR_CFG_DST_MASK);

        /* set output in RGB888 mode */
        mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_OUT_RGB888, MXR_CFG_OUT_MASK);

        /* 16 beat burst in DMA */
        mixer_reg_writemask(ctx, MXR_STATUS, MXR_STATUS_16_BURST,
                MXR_STATUS_BURST_MASK);

        /* reset default layer priority */
        mixer_reg_write(ctx, MXR_LAYER_CFG, 0);

        /* set all background colors to RGB (0,0,0) */
        mixer_reg_write(ctx, MXR_BG_COLOR0, MXR_YCBCR_VAL(0, 128, 128));
        mixer_reg_write(ctx, MXR_BG_COLOR1, MXR_YCBCR_VAL(0, 128, 128));
        mixer_reg_write(ctx, MXR_BG_COLOR2, MXR_YCBCR_VAL(0, 128, 128));

        if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
                /* configuration of Video Processor Registers */
                vp_win_reset(ctx);
                vp_default_filter(ctx);
        }

        /* disable all layers */
        mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP0_ENABLE);
        mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP1_ENABLE);
        if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
                mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_VP_ENABLE);

        /* set all source image offsets to zero */
        mixer_reg_write(ctx, MXR_GRAPHIC_SXY(0), 0);
        mixer_reg_write(ctx, MXR_GRAPHIC_SXY(1), 0);

        spin_unlock_irqrestore(&ctx->reg_slock, flags);
}

static irqreturn_t mixer_irq_handler(int irq, void *arg)
{
        struct mixer_context *ctx = arg;
        u32 val, base, shadow;

        spin_lock(&ctx->reg_slock);

        /* read interrupt status for handling and clearing flags for VSYNC */
        val = mixer_reg_read(ctx, MXR_INT_STATUS);

        /* handling VSYNC */
        if (val & MXR_INT_STATUS_VSYNC) {
                /* vsync interrupt use different bit for read and clear */
                val |= MXR_INT_CLEAR_VSYNC;
                val &= ~MXR_INT_STATUS_VSYNC;

                /* interlace scan need to check shadow register */
                if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
                        if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags) &&
                            vp_reg_read(ctx, VP_SHADOW_UPDATE))
                                goto out;

                        base = mixer_reg_read(ctx, MXR_CFG);
                        shadow = mixer_reg_read(ctx, MXR_CFG_S);
                        if (base != shadow)
                                goto out;

                        base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(0));
                        shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(0));
                        if (base != shadow)
                                goto out;

                        base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(1));
                        shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(1));
                        if (base != shadow)
                                goto out;
                }

                drm_crtc_handle_vblank(&ctx->crtc->base);
        }

out:
        /* clear interrupts */
        mixer_reg_write(ctx, MXR_INT_STATUS, val);

        spin_unlock(&ctx->reg_slock);

        return IRQ_HANDLED;
}

static int mixer_resources_init(struct mixer_context *mixer_ctx)
{
        struct device *dev = &mixer_ctx->pdev->dev;
        struct resource *res;
        int ret;

        spin_lock_init(&mixer_ctx->reg_slock);

        mixer_ctx->mixer = devm_clk_get(dev, "mixer");
        if (IS_ERR(mixer_ctx->mixer)) {
                dev_err(dev, "failed to get clock 'mixer'\n");
                return -ENODEV;
        }

        mixer_ctx->hdmi = devm_clk_get(dev, "hdmi");
        if (IS_ERR(mixer_ctx->hdmi)) {
                dev_err(dev, "failed to get clock 'hdmi'\n");
                return PTR_ERR(mixer_ctx->hdmi);
        }

        mixer_ctx->sclk_hdmi = devm_clk_get(dev, "sclk_hdmi");
        if (IS_ERR(mixer_ctx->sclk_hdmi)) {
                dev_err(dev, "failed to get clock 'sclk_hdmi'\n");
                return -ENODEV;
        }

        res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_MEM, 0);
        if (res == NULL) {
                dev_err(dev, "get memory resource failed.\n");
                return -ENXIO;
        }

        mixer_ctx->mixer_regs = devm_ioremap(dev, res->start,
                                             resource_size(res));
        if (mixer_ctx->mixer_regs == NULL) {
                dev_err(dev, "register mapping failed.\n");
                return -ENXIO;
        }

        res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_IRQ, 0);
        if (res == NULL) {
                dev_err(dev, "get interrupt resource failed.\n");
                return -ENXIO;
        }

        ret = devm_request_irq(dev, res->start, mixer_irq_handler,
                               0, "drm_mixer", mixer_ctx);
        if (ret) {
                dev_err(dev, "request interrupt failed.\n");
                return ret;
        }
        mixer_ctx->irq = res->start;

        return 0;
}

static int vp_resources_init(struct mixer_context *mixer_ctx)
{
        struct device *dev = &mixer_ctx->pdev->dev;
        struct resource *res;

        mixer_ctx->vp = devm_clk_get(dev, "vp");
        if (IS_ERR(mixer_ctx->vp)) {
                dev_err(dev, "failed to get clock 'vp'\n");
                return -ENODEV;
        }

        if (test_bit(MXR_BIT_HAS_SCLK, &mixer_ctx->flags)) {
                mixer_ctx->sclk_mixer = devm_clk_get(dev, "sclk_mixer");
                if (IS_ERR(mixer_ctx->sclk_mixer)) {
                        dev_err(dev, "failed to get clock 'sclk_mixer'\n");
                        return -ENODEV;
                }
                mixer_ctx->mout_mixer = devm_clk_get(dev, "mout_mixer");
                if (IS_ERR(mixer_ctx->mout_mixer)) {
                        dev_err(dev, "failed to get clock 'mout_mixer'\n");
                        return -ENODEV;
                }

                if (mixer_ctx->sclk_hdmi && mixer_ctx->mout_mixer)
                        clk_set_parent(mixer_ctx->mout_mixer,
                                       mixer_ctx->sclk_hdmi);
        }

        res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_MEM, 1);
        if (res == NULL) {
                dev_err(dev, "get memory resource failed.\n");
                return -ENXIO;
        }

        mixer_ctx->vp_regs = devm_ioremap(dev, res->start,
                                          resource_size(res));
        if (mixer_ctx->vp_regs == NULL) {
                dev_err(dev, "register mapping failed.\n");
                return -ENXIO;
        }

        return 0;
}

static int mixer_initialize(struct mixer_context *mixer_ctx,
                            struct drm_device *drm_dev)
{
        int ret;

        mixer_ctx->drm_dev = drm_dev;

        /* acquire resources: regs, irqs, clocks */
        ret = mixer_resources_init(mixer_ctx);
        if (ret) {
                DRM_ERROR("mixer_resources_init failed ret=%d\n", ret);
                return ret;
        }

        if (test_bit(MXR_BIT_VP_ENABLED, &mixer_ctx->flags)) {
                /* acquire vp resources: regs, irqs, clocks */
                ret = vp_resources_init(mixer_ctx);
                if (ret) {
                        DRM_ERROR("vp_resources_init failed ret=%d\n", ret);
                        return ret;
                }
        }

        return drm_iommu_attach_device(drm_dev, mixer_ctx->dev);
}

static void mixer_ctx_remove(struct mixer_context *mixer_ctx)
{
        drm_iommu_detach_device(mixer_ctx->drm_dev, mixer_ctx->dev);
}

static int mixer_enable_vblank(struct exynos_drm_crtc *crtc)
{
        struct mixer_context *mixer_ctx = crtc->ctx;

        __set_bit(MXR_BIT_VSYNC, &mixer_ctx->flags);
        if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
                return 0;

        /* enable vsync interrupt */
        mixer_reg_writemask(mixer_ctx, MXR_INT_STATUS, ~0, MXR_INT_CLEAR_VSYNC);
        mixer_reg_writemask(mixer_ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);

        return 0;
}

static void mixer_disable_vblank(struct exynos_drm_crtc *crtc)
{
        struct mixer_context *mixer_ctx = crtc->ctx;

        __clear_bit(MXR_BIT_VSYNC, &mixer_ctx->flags);

        if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
                return;

        /* disable vsync interrupt */
        mixer_reg_writemask(mixer_ctx, MXR_INT_STATUS, ~0, MXR_INT_CLEAR_VSYNC);
        mixer_reg_writemask(mixer_ctx, MXR_INT_EN, 0, MXR_INT_EN_VSYNC);
}

static void mixer_atomic_begin(struct exynos_drm_crtc *crtc)
{
        struct mixer_context *mixer_ctx = crtc->ctx;

        if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
                return;

        mixer_vsync_set_update(mixer_ctx, false);
}

static void mixer_update_plane(struct exynos_drm_crtc *crtc,
                               struct exynos_drm_plane *plane)
{
        struct mixer_context *mixer_ctx = crtc->ctx;

        DRM_DEBUG_KMS("win: %d\n", plane->index);

        if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
                return;

        if (plane->index == VP_DEFAULT_WIN)
                vp_video_buffer(mixer_ctx, plane);
        else
                mixer_graph_buffer(mixer_ctx, plane);
}

static void mixer_disable_plane(struct exynos_drm_crtc *crtc,
                                struct exynos_drm_plane *plane)
{
        struct mixer_context *mixer_ctx = crtc->ctx;
        unsigned long flags;

        DRM_DEBUG_KMS("win: %d\n", plane->index);

        if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
                return;

        spin_lock_irqsave(&mixer_ctx->reg_slock, flags);
        mixer_cfg_layer(mixer_ctx, plane->index, 0, false);
        spin_unlock_irqrestore(&mixer_ctx->reg_slock, flags);
}

static void mixer_atomic_flush(struct exynos_drm_crtc *crtc)
{
        struct mixer_context *mixer_ctx = crtc->ctx;

        if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
                return;

        mixer_vsync_set_update(mixer_ctx, true);
        exynos_crtc_handle_event(crtc);
}

static void mixer_enable(struct exynos_drm_crtc *crtc)
{
        struct mixer_context *ctx = crtc->ctx;

        if (test_bit(MXR_BIT_POWERED, &ctx->flags))
                return;

        pm_runtime_get_sync(ctx->dev);

        exynos_drm_pipe_clk_enable(crtc, true);

        mixer_vsync_set_update(ctx, false);

        mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_SOFT_RESET);

        if (test_bit(MXR_BIT_VSYNC, &ctx->flags)) {
                mixer_reg_writemask(ctx, MXR_INT_STATUS, ~0,
                                    MXR_INT_CLEAR_VSYNC);
                mixer_reg_writemask(ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);
        }
        mixer_win_reset(ctx);

        mixer_commit(ctx);

        mixer_vsync_set_update(ctx, true);

        set_bit(MXR_BIT_POWERED, &ctx->flags);
}

static void mixer_disable(struct exynos_drm_crtc *crtc)
{
        struct mixer_context *ctx = crtc->ctx;
        int i;

        if (!test_bit(MXR_BIT_POWERED, &ctx->flags))
                return;

        mixer_stop(ctx);
        mixer_regs_dump(ctx);

        for (i = 0; i < MIXER_WIN_NR; i++)
                mixer_disable_plane(crtc, &ctx->planes[i]);

        exynos_drm_pipe_clk_enable(crtc, false);

        pm_runtime_put(ctx->dev);

        clear_bit(MXR_BIT_POWERED, &ctx->flags);
}

static int mixer_mode_valid(struct exynos_drm_crtc *crtc,
                            const struct drm_display_mode *mode)
{
        struct mixer_context *ctx = crtc->ctx;
        u32 w = mode->hdisplay, h = mode->vdisplay;

        DRM_DEBUG_KMS("xres=%d, yres=%d, refresh=%d, intl=%d\n", w, h,
                      mode->vrefresh, !!(mode->flags & DRM_MODE_FLAG_INTERLACE));

        if (ctx->mxr_ver == MXR_VER_128_0_0_184)
                return MODE_OK;

        if ((w >= 464 && w <= 720 && h >= 261 && h <= 576) ||
            (w >= 1024 && w <= 1280 && h >= 576 && h <= 720) ||
            (w >= 1664 && w <= 1920 && h >= 936 && h <= 1080))
                return MODE_OK;

        if ((w == 1024 && h == 768) ||
            (w == 1366 && h == 768) ||
            (w == 1280 && h == 1024))
                return MODE_OK;

        return MODE_BAD;
}

static bool mixer_mode_fixup(struct exynos_drm_crtc *crtc,
                             const struct drm_display_mode *mode,
                             struct drm_display_mode *adjusted_mode)
{
        struct mixer_context *ctx = crtc->ctx;
        int width = mode->hdisplay, height = mode->vdisplay, i;

        struct {
                int hdisplay, vdisplay, htotal, vtotal, scan_val;
        } static const modes[] = {
                { 720, 480, 858, 525, MXR_CFG_SCAN_NTSC | MXR_CFG_SCAN_SD },
                { 720, 576, 864, 625, MXR_CFG_SCAN_PAL | MXR_CFG_SCAN_SD },
                { 1280, 720, 1650, 750, MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD },
                { 1920, 1080, 2200, 1125, MXR_CFG_SCAN_HD_1080 |
                        MXR_CFG_SCAN_HD }
        };

        if (mode->flags & DRM_MODE_FLAG_INTERLACE)
                __set_bit(MXR_BIT_INTERLACE, &ctx->flags);
        else
                __clear_bit(MXR_BIT_INTERLACE, &ctx->flags);

        if (ctx->mxr_ver == MXR_VER_128_0_0_184)
                return true;

        for (i = 0; i < ARRAY_SIZE(modes); ++i)
                if (width <= modes[i].hdisplay && height <= modes[i].vdisplay) {
                        ctx->scan_value = modes[i].scan_val;
                        if (width < modes[i].hdisplay ||
                            height < modes[i].vdisplay) {
                                adjusted_mode->hdisplay = modes[i].hdisplay;
                                adjusted_mode->hsync_start = modes[i].hdisplay;
                                adjusted_mode->hsync_end = modes[i].htotal;
                                adjusted_mode->htotal = modes[i].htotal;
                                adjusted_mode->vdisplay = modes[i].vdisplay;
                                adjusted_mode->vsync_start = modes[i].vdisplay;
                                adjusted_mode->vsync_end = modes[i].vtotal;
                                adjusted_mode->vtotal = modes[i].vtotal;
                        }

                        return true;
                }

        return false;
}
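
/*
 * Note: on the pre-5420 mixers (everything except MXR_VER_128_0_0_184),
 * mixer_mode_fixup() above snaps any smaller mode up to the first of the
 * four fixed scan timings that can contain it, so the CRTC always runs
 * with one of the NTSC/PAL/720p/1080p geometries.
 */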

static const struct exynos_drm_crtc_ops mixer_crtc_ops = {
        .enable = mixer_enable,
        .disable = mixer_disable,
        .enable_vblank = mixer_enable_vblank,
        .disable_vblank = mixer_disable_vblank,
        .atomic_begin = mixer_atomic_begin,
        .update_plane = mixer_update_plane,
        .disable_plane = mixer_disable_plane,
        .atomic_flush = mixer_atomic_flush,
        .mode_valid = mixer_mode_valid,
        .mode_fixup = mixer_mode_fixup,
};

static const struct mixer_drv_data exynos5420_mxr_drv_data = {
        .version = MXR_VER_128_0_0_184,
        .is_vp_enabled = 0,
};

static const struct mixer_drv_data exynos5250_mxr_drv_data = {
        .version = MXR_VER_16_0_33_0,
        .is_vp_enabled = 0,
};

static const struct mixer_drv_data exynos4212_mxr_drv_data = {
        .version = MXR_VER_0_0_0_16,
        .is_vp_enabled = 1,
};

static const struct mixer_drv_data exynos4210_mxr_drv_data = {
        .version = MXR_VER_0_0_0_16,
        .is_vp_enabled = 1,
        .has_sclk = 1,
};

static const struct of_device_id mixer_match_types[] = {
        {
                .compatible = "samsung,exynos4210-mixer",
                .data = &exynos4210_mxr_drv_data,
        }, {
                .compatible = "samsung,exynos4212-mixer",
                .data = &exynos4212_mxr_drv_data,
        }, {
                .compatible = "samsung,exynos5-mixer",
                .data = &exynos5250_mxr_drv_data,
        }, {
                .compatible = "samsung,exynos5250-mixer",
                .data = &exynos5250_mxr_drv_data,
        }, {
                .compatible = "samsung,exynos5420-mixer",
                .data = &exynos5420_mxr_drv_data,
        }, {
                /* end node */
        }
};
MODULE_DEVICE_TABLE(of, mixer_match_types);

static int mixer_bind(struct device *dev, struct device *manager, void *data)
{
        struct mixer_context *ctx = dev_get_drvdata(dev);
        struct drm_device *drm_dev = data;
        struct exynos_drm_plane *exynos_plane;
        unsigned int i;
        int ret;

        ret = mixer_initialize(ctx, drm_dev);
        if (ret)
                return ret;

        for (i = 0; i < MIXER_WIN_NR; i++) {
                if (i == VP_DEFAULT_WIN && !test_bit(MXR_BIT_VP_ENABLED,
                                                     &ctx->flags))
                        continue;

                ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
                                        &plane_configs[i]);
                if (ret)
                        return ret;
        }

        exynos_plane = &ctx->planes[DEFAULT_WIN];
        ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
                        EXYNOS_DISPLAY_TYPE_HDMI, &mixer_crtc_ops, ctx);
        if (IS_ERR(ctx->crtc)) {
                mixer_ctx_remove(ctx);
                ret = PTR_ERR(ctx->crtc);
                goto free_ctx;
        }

        return 0;

free_ctx:
        devm_kfree(dev, ctx);
        return ret;
}

static void mixer_unbind(struct device *dev, struct device *master, void *data)
{
        struct mixer_context *ctx = dev_get_drvdata(dev);

        mixer_ctx_remove(ctx);
}

static const struct component_ops mixer_component_ops = {
        .bind = mixer_bind,
        .unbind = mixer_unbind,
};

static int mixer_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        const struct mixer_drv_data *drv;
        struct mixer_context *ctx;
        int ret;

        ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
        if (!ctx) {
                DRM_ERROR("failed to alloc mixer context.\n");
                return -ENOMEM;
        }

        drv = of_device_get_match_data(dev);

        ctx->pdev = pdev;
        ctx->dev = dev;
        ctx->mxr_ver = drv->version;

        if (drv->is_vp_enabled)
                __set_bit(MXR_BIT_VP_ENABLED, &ctx->flags);
        if (drv->has_sclk)
                __set_bit(MXR_BIT_HAS_SCLK, &ctx->flags);

        platform_set_drvdata(pdev, ctx);

        ret = component_add(&pdev->dev, &mixer_component_ops);
        if (!ret)
                pm_runtime_enable(dev);

        return ret;
}

static int mixer_remove(struct platform_device *pdev)
{
        pm_runtime_disable(&pdev->dev);

        component_del(&pdev->dev, &mixer_component_ops);

        return 0;
}

static int __maybe_unused exynos_mixer_suspend(struct device *dev)
{
        struct mixer_context *ctx = dev_get_drvdata(dev);

        clk_disable_unprepare(ctx->hdmi);
        clk_disable_unprepare(ctx->mixer);
        if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
                clk_disable_unprepare(ctx->vp);
                if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags))
                        clk_disable_unprepare(ctx->sclk_mixer);
        }

        return 0;
}

static int __maybe_unused exynos_mixer_resume(struct device *dev)
{
        struct mixer_context *ctx = dev_get_drvdata(dev);
        int ret;

        ret = clk_prepare_enable(ctx->mixer);
        if (ret < 0) {
                DRM_ERROR("Failed to prepare_enable the mixer clk [%d]\n", ret);
                return ret;
        }
        ret = clk_prepare_enable(ctx->hdmi);
        if (ret < 0) {
                DRM_ERROR("Failed to prepare_enable the hdmi clk [%d]\n", ret);
                return ret;
        }
        if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
                ret = clk_prepare_enable(ctx->vp);
                if (ret < 0) {
                        DRM_ERROR("Failed to prepare_enable the vp clk [%d]\n",
                                  ret);
                        return ret;
                }
                if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags)) {
                        ret = clk_prepare_enable(ctx->sclk_mixer);
                        if (ret < 0) {
                                DRM_ERROR("Failed to prepare_enable the " \
                                           "sclk_mixer clk [%d]\n",
                                          ret);
                                return ret;
                        }
                }
        }

        return 0;
}

static const struct dev_pm_ops exynos_mixer_pm_ops = {
        SET_RUNTIME_PM_OPS(exynos_mixer_suspend, exynos_mixer_resume, NULL)
        SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
                                pm_runtime_force_resume)
};

struct platform_driver mixer_driver = {
        .driver = {
                .name = "exynos-mixer",
                .owner = THIS_MODULE,
                .pm = &exynos_mixer_pm_ops,
                .of_match_table = mixer_match_types,
        },
        .probe = mixer_probe,
        .remove = mixer_remove,
};