zx_vou.c

/*
 * Copyright 2016 Linaro Ltd.
 * Copyright 2016 ZTE Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/of_address.h>

#include <video/videomode.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_of.h>
#include <drm/drm_plane_helper.h>
#include <drm/drmP.h>

#include "zx_drm_drv.h"
#include "zx_plane.h"
#include "zx_vou.h"
#include "zx_vou_regs.h"
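
/* Number of graphics (GL) and video (VL) overlay layers handled by the OSD */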
#define GL_NUM	2
#define VL_NUM	3
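
/* The VOU provides two display channels, main and aux, each backing one CRTC */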
enum vou_chn_type {
        VOU_CHN_MAIN,
        VOU_CHN_AUX,
};
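
/* Per-channel timing register offsets within the TIMING_CTRL block */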
struct zx_crtc_regs {
        u32 fir_active;
        u32 fir_htiming;
        u32 fir_vtiming;
        u32 sec_vtiming;
        u32 timing_shift;
        u32 timing_pi_shift;
};

static const struct zx_crtc_regs main_crtc_regs = {
        .fir_active = FIR_MAIN_ACTIVE,
        .fir_htiming = FIR_MAIN_H_TIMING,
        .fir_vtiming = FIR_MAIN_V_TIMING,
        .sec_vtiming = SEC_MAIN_V_TIMING,
        .timing_shift = TIMING_MAIN_SHIFT,
        .timing_pi_shift = TIMING_MAIN_PI_SHIFT,
};

static const struct zx_crtc_regs aux_crtc_regs = {
        .fir_active = FIR_AUX_ACTIVE,
        .fir_htiming = FIR_AUX_H_TIMING,
        .fir_vtiming = FIR_AUX_V_TIMING,
        .sec_vtiming = SEC_AUX_V_TIMING,
        .timing_shift = TIMING_AUX_SHIFT,
        .timing_pi_shift = TIMING_AUX_PI_SHIFT,
};
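
/* Per-channel bit fields spread across the timing and VOU control registers */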
struct zx_crtc_bits {
        u32 polarity_mask;
        u32 polarity_shift;
        u32 int_frame_mask;
        u32 tc_enable;
        u32 sec_vactive_shift;
        u32 sec_vactive_mask;
        u32 interlace_select;
        u32 pi_enable;
        u32 div_vga_shift;
        u32 div_pic_shift;
        u32 div_tvenc_shift;
        u32 div_hdmi_pnx_shift;
        u32 div_hdmi_shift;
        u32 div_inf_shift;
        u32 div_layer_shift;
};

static const struct zx_crtc_bits main_crtc_bits = {
        .polarity_mask = MAIN_POL_MASK,
        .polarity_shift = MAIN_POL_SHIFT,
        .int_frame_mask = TIMING_INT_MAIN_FRAME,
        .tc_enable = MAIN_TC_EN,
        .sec_vactive_shift = SEC_VACT_MAIN_SHIFT,
        .sec_vactive_mask = SEC_VACT_MAIN_MASK,
        .interlace_select = MAIN_INTERLACE_SEL,
        .pi_enable = MAIN_PI_EN,
        .div_vga_shift = VGA_MAIN_DIV_SHIFT,
        .div_pic_shift = PIC_MAIN_DIV_SHIFT,
        .div_tvenc_shift = TVENC_MAIN_DIV_SHIFT,
        .div_hdmi_pnx_shift = HDMI_MAIN_PNX_DIV_SHIFT,
        .div_hdmi_shift = HDMI_MAIN_DIV_SHIFT,
        .div_inf_shift = INF_MAIN_DIV_SHIFT,
        .div_layer_shift = LAYER_MAIN_DIV_SHIFT,
};

static const struct zx_crtc_bits aux_crtc_bits = {
        .polarity_mask = AUX_POL_MASK,
        .polarity_shift = AUX_POL_SHIFT,
        .int_frame_mask = TIMING_INT_AUX_FRAME,
        .tc_enable = AUX_TC_EN,
        .sec_vactive_shift = SEC_VACT_AUX_SHIFT,
        .sec_vactive_mask = SEC_VACT_AUX_MASK,
        .interlace_select = AUX_INTERLACE_SEL,
        .pi_enable = AUX_PI_EN,
        .div_vga_shift = VGA_AUX_DIV_SHIFT,
        .div_pic_shift = PIC_AUX_DIV_SHIFT,
        .div_tvenc_shift = TVENC_AUX_DIV_SHIFT,
        .div_hdmi_pnx_shift = HDMI_AUX_PNX_DIV_SHIFT,
        .div_hdmi_shift = HDMI_AUX_DIV_SHIFT,
        .div_inf_shift = INF_AUX_DIV_SHIFT,
        .div_layer_shift = LAYER_AUX_DIV_SHIFT,
};
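
/* Software state for one CRTC: DRM object, primary plane, channel registers and pixel clock */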
struct zx_crtc {
        struct drm_crtc crtc;
        struct drm_plane *primary;
        struct zx_vou_hw *vou;
        void __iomem *chnreg;
        const struct zx_crtc_regs *regs;
        const struct zx_crtc_bits *bits;
        enum vou_chn_type chn_type;
        struct clk *pixclk;
};

#define to_zx_crtc(x) container_of(x, struct zx_crtc, crtc)
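
/* Enable, channel-select and clock-select bits for one graphics or video layer */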
struct vou_layer_bits {
        u32 enable;
        u32 chnsel;
        u32 clksel;
};

static const struct vou_layer_bits zx_gl_bits[GL_NUM] = {
        {
                .enable = OSD_CTRL0_GL0_EN,
                .chnsel = OSD_CTRL0_GL0_SEL,
                .clksel = VOU_CLK_GL0_SEL,
        }, {
                .enable = OSD_CTRL0_GL1_EN,
                .chnsel = OSD_CTRL0_GL1_SEL,
                .clksel = VOU_CLK_GL1_SEL,
        },
};

static const struct vou_layer_bits zx_vl_bits[VL_NUM] = {
        {
                .enable = OSD_CTRL0_VL0_EN,
                .chnsel = OSD_CTRL0_VL0_SEL,
                .clksel = VOU_CLK_VL0_SEL,
        }, {
                .enable = OSD_CTRL0_VL1_EN,
                .chnsel = OSD_CTRL0_VL1_SEL,
                .clksel = VOU_CLK_VL1_SEL,
        }, {
                .enable = OSD_CTRL0_VL2_EN,
                .chnsel = OSD_CTRL0_VL2_SEL,
                .clksel = VOU_CLK_VL2_SEL,
        },
};
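
/* Top-level VOU hardware state: register regions, clocks and the two CRTCs */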
struct zx_vou_hw {
        struct device *dev;
        void __iomem *osd;
        void __iomem *timing;
        void __iomem *vouctl;
        void __iomem *otfppu;
        void __iomem *dtrc;
        struct clk *axi_clk;
        struct clk *ppu_clk;
        struct clk *main_clk;
        struct clk *aux_clk;
        struct zx_crtc *main_crtc;
        struct zx_crtc *aux_crtc;
};

enum vou_inf_data_sel {
        VOU_YUV444 = 0,
        VOU_RGB_101010 = 1,
        VOU_RGB_888 = 2,
        VOU_RGB_666 = 3,
};

struct vou_inf {
        enum vou_inf_id id;
        enum vou_inf_data_sel data_sel;
        u32 clocks_en_bits;
        u32 clocks_sel_bits;
};

static struct vou_inf vou_infs[] = {
        [VOU_HDMI] = {
                .data_sel = VOU_YUV444,
                .clocks_en_bits = BIT(24) | BIT(18) | BIT(6),
                .clocks_sel_bits = BIT(13) | BIT(2),
        },
        [VOU_TV_ENC] = {
                .data_sel = VOU_YUV444,
                .clocks_en_bits = BIT(15),
                .clocks_sel_bits = BIT(11) | BIT(0),
        },
};

static inline struct zx_vou_hw *crtc_to_vou(struct drm_crtc *crtc)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);

        return zcrtc->vou;
}

void vou_inf_hdmi_audio_sel(struct drm_crtc *crtc,
                            enum vou_inf_hdmi_audio aud)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        struct zx_vou_hw *vou = zcrtc->vou;

        zx_writel_mask(vou->vouctl + VOU_INF_HDMI_CTRL, VOU_HDMI_AUD_MASK, aud);
}
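
/*
 * Enable the given output interface (HDMI or TV encoder): select its data
 * format and source channel, turn on its clocks, and finally enable it.
 */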
void vou_inf_enable(enum vou_inf_id id, struct drm_crtc *crtc)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        struct zx_vou_hw *vou = zcrtc->vou;
        struct vou_inf *inf = &vou_infs[id];
        bool is_main = zcrtc->chn_type == VOU_CHN_MAIN;
        u32 data_sel_shift = id << 1;

        /* Select data format */
        zx_writel_mask(vou->vouctl + VOU_INF_DATA_SEL, 0x3 << data_sel_shift,
                       inf->data_sel << data_sel_shift);

        /* Select channel */
        zx_writel_mask(vou->vouctl + VOU_INF_CH_SEL, 0x1 << id,
                       zcrtc->chn_type << id);

        /* Select interface clocks */
        zx_writel_mask(vou->vouctl + VOU_CLK_SEL, inf->clocks_sel_bits,
                       is_main ? 0 : inf->clocks_sel_bits);

        /* Enable interface clocks */
        zx_writel_mask(vou->vouctl + VOU_CLK_EN, inf->clocks_en_bits,
                       inf->clocks_en_bits);

        /* Enable the device */
        zx_writel_mask(vou->vouctl + VOU_INF_EN, 1 << id, 1 << id);
}

void vou_inf_disable(enum vou_inf_id id, struct drm_crtc *crtc)
{
        struct zx_vou_hw *vou = crtc_to_vou(crtc);
        struct vou_inf *inf = &vou_infs[id];

        /* Disable the device */
        zx_writel_mask(vou->vouctl + VOU_INF_EN, 1 << id, 0);

        /* Disable interface clocks */
        zx_writel_mask(vou->vouctl + VOU_CLK_EN, inf->clocks_en_bits, 0);
}
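
/*
 * Program a set of clock dividers for the CRTC's channel.  Each divider
 * occupies a 3-bit field; the update flag is toggled around the writes so
 * the new values take effect together.
 *
 * A hedged usage sketch from an encoder driver's point of view; the divider
 * values and the "encoder" context below are illustrative only:
 *
 *	struct vou_div_config cfgs[] = {
 *		{ .id = VOU_DIV_HDMI, .val = 0 },
 *		{ .id = VOU_DIV_PIC,  .val = 0 },
 *	};
 *	zx_vou_config_dividers(encoder->crtc, cfgs, ARRAY_SIZE(cfgs));
 */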
void zx_vou_config_dividers(struct drm_crtc *crtc,
                            struct vou_div_config *configs, int num)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        struct zx_vou_hw *vou = zcrtc->vou;
        const struct zx_crtc_bits *bits = zcrtc->bits;
        int i;

        /* Clear update flag bit */
        zx_writel_mask(vou->vouctl + VOU_DIV_PARA, DIV_PARA_UPDATE, 0);

        for (i = 0; i < num; i++) {
                struct vou_div_config *cfg = configs + i;
                u32 reg, shift;

                switch (cfg->id) {
                case VOU_DIV_VGA:
                        reg = VOU_CLK_SEL;
                        shift = bits->div_vga_shift;
                        break;
                case VOU_DIV_PIC:
                        reg = VOU_CLK_SEL;
                        shift = bits->div_pic_shift;
                        break;
                case VOU_DIV_TVENC:
                        reg = VOU_DIV_PARA;
                        shift = bits->div_tvenc_shift;
                        break;
                case VOU_DIV_HDMI_PNX:
                        reg = VOU_DIV_PARA;
                        shift = bits->div_hdmi_pnx_shift;
                        break;
                case VOU_DIV_HDMI:
                        reg = VOU_DIV_PARA;
                        shift = bits->div_hdmi_shift;
                        break;
                case VOU_DIV_INF:
                        reg = VOU_DIV_PARA;
                        shift = bits->div_inf_shift;
                        break;
                case VOU_DIV_LAYER:
                        reg = VOU_DIV_PARA;
                        shift = bits->div_layer_shift;
                        break;
                default:
                        continue;
                }

                /* Each divider occupies 3 bits */
                zx_writel_mask(vou->vouctl + reg, 0x7 << shift,
                               cfg->val << shift);
        }

        /* Set update flag bit so that the new dividers take effect */
        zx_writel_mask(vou->vouctl + VOU_DIV_PARA, DIV_PARA_UPDATE,
                       DIV_PARA_UPDATE);
}

static inline void vou_chn_set_update(struct zx_crtc *zcrtc)
{
        zx_writel(zcrtc->chnreg + CHN_UPDATE, 1);
}
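
/*
 * Program the CRTC timing from the adjusted mode: active size, h/v timing,
 * sync polarities and scan mode, then enable the channel and its pixel clock.
 */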
static void zx_crtc_enable(struct drm_crtc *crtc)
{
        struct drm_display_mode *mode = &crtc->state->adjusted_mode;
        bool interlaced = mode->flags & DRM_MODE_FLAG_INTERLACE;
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        struct zx_vou_hw *vou = zcrtc->vou;
        const struct zx_crtc_regs *regs = zcrtc->regs;
        const struct zx_crtc_bits *bits = zcrtc->bits;
        struct videomode vm;
        u32 scan_mask;
        u32 pol = 0;
        u32 val;
        int ret;

        drm_display_mode_to_videomode(mode, &vm);

        /* Set up timing parameters */
        val = V_ACTIVE((interlaced ? vm.vactive / 2 : vm.vactive) - 1);
        val |= H_ACTIVE(vm.hactive - 1);
        zx_writel(vou->timing + regs->fir_active, val);

        val = SYNC_WIDE(vm.hsync_len - 1);
        val |= BACK_PORCH(vm.hback_porch - 1);
        val |= FRONT_PORCH(vm.hfront_porch - 1);
        zx_writel(vou->timing + regs->fir_htiming, val);

        val = SYNC_WIDE(vm.vsync_len - 1);
        val |= BACK_PORCH(vm.vback_porch - 1);
        val |= FRONT_PORCH(vm.vfront_porch - 1);
        zx_writel(vou->timing + regs->fir_vtiming, val);

        if (interlaced) {
                u32 shift = bits->sec_vactive_shift;
                u32 mask = bits->sec_vactive_mask;

                val = zx_readl(vou->timing + SEC_V_ACTIVE);
                val &= ~mask;
                val |= ((vm.vactive / 2 - 1) << shift) & mask;
                zx_writel(vou->timing + SEC_V_ACTIVE, val);

                val = SYNC_WIDE(vm.vsync_len - 1);
                /*
                 * The vback_porch for the second field needs to be one
                 * greater than the value used for the first field.
                 */
                val |= BACK_PORCH(vm.vback_porch);
                val |= FRONT_PORCH(vm.vfront_porch - 1);
                zx_writel(vou->timing + regs->sec_vtiming, val);
        }

        /* Set up polarities */
        if (vm.flags & DISPLAY_FLAGS_VSYNC_LOW)
                pol |= 1 << POL_VSYNC_SHIFT;
        if (vm.flags & DISPLAY_FLAGS_HSYNC_LOW)
                pol |= 1 << POL_HSYNC_SHIFT;

        zx_writel_mask(vou->timing + TIMING_CTRL, bits->polarity_mask,
                       pol << bits->polarity_shift);

        /* Set up the SHIFT register, following what the ZTE BSP does */
        val = H_SHIFT_VAL;
        if (interlaced)
                val |= V_SHIFT_VAL << 16;
        zx_writel(vou->timing + regs->timing_shift, val);
        zx_writel(vou->timing + regs->timing_pi_shift, H_PI_SHIFT_VAL);

        /* Progressive or interlace scan select */
        scan_mask = bits->interlace_select | bits->pi_enable;
        zx_writel_mask(vou->timing + SCAN_CTRL, scan_mask,
                       interlaced ? scan_mask : 0);

        /* Enable TIMING_CTRL */
        zx_writel_mask(vou->timing + TIMING_TC_ENABLE, bits->tc_enable,
                       bits->tc_enable);

        /* Configure channel screen size */
        zx_writel_mask(zcrtc->chnreg + CHN_CTRL1, CHN_SCREEN_W_MASK,
                       vm.hactive << CHN_SCREEN_W_SHIFT);
        zx_writel_mask(zcrtc->chnreg + CHN_CTRL1, CHN_SCREEN_H_MASK,
                       vm.vactive << CHN_SCREEN_H_SHIFT);

        /* Configure channel interlace buffer control */
        zx_writel_mask(zcrtc->chnreg + CHN_INTERLACE_BUF_CTRL, CHN_INTERLACE_EN,
                       interlaced ? CHN_INTERLACE_EN : 0);

        /* Update channel */
        vou_chn_set_update(zcrtc);

        /* Enable channel */
        zx_writel_mask(zcrtc->chnreg + CHN_CTRL0, CHN_ENABLE, CHN_ENABLE);

        drm_crtc_vblank_on(crtc);

        ret = clk_set_rate(zcrtc->pixclk, mode->clock * 1000);
        if (ret) {
                DRM_DEV_ERROR(vou->dev, "failed to set pixclk rate: %d\n", ret);
                return;
        }

        ret = clk_prepare_enable(zcrtc->pixclk);
        if (ret)
                DRM_DEV_ERROR(vou->dev, "failed to enable pixclk: %d\n", ret);
}

static void zx_crtc_disable(struct drm_crtc *crtc)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        const struct zx_crtc_bits *bits = zcrtc->bits;
        struct zx_vou_hw *vou = zcrtc->vou;

        clk_disable_unprepare(zcrtc->pixclk);

        drm_crtc_vblank_off(crtc);

        /* Disable channel */
        zx_writel_mask(zcrtc->chnreg + CHN_CTRL0, CHN_ENABLE, 0);

        /* Disable TIMING_CTRL */
        zx_writel_mask(vou->timing + TIMING_TC_ENABLE, bits->tc_enable, 0);
}
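
/* Arm or send the pending vblank event when the atomic state is flushed */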
static void zx_crtc_atomic_flush(struct drm_crtc *crtc,
                                 struct drm_crtc_state *old_state)
{
        struct drm_pending_vblank_event *event = crtc->state->event;

        if (!event)
                return;

        crtc->state->event = NULL;

        spin_lock_irq(&crtc->dev->event_lock);
        if (drm_crtc_vblank_get(crtc) == 0)
                drm_crtc_arm_vblank_event(crtc, event);
        else
                drm_crtc_send_vblank_event(crtc, event);
        spin_unlock_irq(&crtc->dev->event_lock);
}

static const struct drm_crtc_helper_funcs zx_crtc_helper_funcs = {
        .enable = zx_crtc_enable,
        .disable = zx_crtc_disable,
        .atomic_flush = zx_crtc_atomic_flush,
};

static int zx_vou_enable_vblank(struct drm_crtc *crtc)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        struct zx_vou_hw *vou = crtc_to_vou(crtc);
        u32 int_frame_mask = zcrtc->bits->int_frame_mask;

        zx_writel_mask(vou->timing + TIMING_INT_CTRL, int_frame_mask,
                       int_frame_mask);

        return 0;
}

static void zx_vou_disable_vblank(struct drm_crtc *crtc)
{
        struct zx_crtc *zcrtc = to_zx_crtc(crtc);
        struct zx_vou_hw *vou = crtc_to_vou(crtc);

        zx_writel_mask(vou->timing + TIMING_INT_CTRL,
                       zcrtc->bits->int_frame_mask, 0);
}

static const struct drm_crtc_funcs zx_crtc_funcs = {
        .destroy = drm_crtc_cleanup,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .reset = drm_atomic_helper_crtc_reset,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
        .enable_vblank = zx_vou_enable_vblank,
        .disable_vblank = zx_vou_disable_vblank,
};
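
/*
 * Create a CRTC with its primary graphics-layer plane for the given channel
 * (main or aux), wiring up the channel registers, bit fields and pixel clock.
 */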
static int zx_crtc_init(struct drm_device *drm, struct zx_vou_hw *vou,
                        enum vou_chn_type chn_type)
{
        struct device *dev = vou->dev;
        struct zx_plane *zplane;
        struct zx_crtc *zcrtc;
        int ret;

        zcrtc = devm_kzalloc(dev, sizeof(*zcrtc), GFP_KERNEL);
        if (!zcrtc)
                return -ENOMEM;

        zcrtc->vou = vou;
        zcrtc->chn_type = chn_type;

        zplane = devm_kzalloc(dev, sizeof(*zplane), GFP_KERNEL);
        if (!zplane)
                return -ENOMEM;

        zplane->dev = dev;

        if (chn_type == VOU_CHN_MAIN) {
                zplane->layer = vou->osd + MAIN_GL_OFFSET;
                zplane->csc = vou->osd + MAIN_CSC_OFFSET;
                zplane->hbsc = vou->osd + MAIN_HBSC_OFFSET;
                zplane->rsz = vou->otfppu + MAIN_RSZ_OFFSET;
                zplane->bits = &zx_gl_bits[0];
                zcrtc->chnreg = vou->osd + OSD_MAIN_CHN;
                zcrtc->regs = &main_crtc_regs;
                zcrtc->bits = &main_crtc_bits;
        } else {
                zplane->layer = vou->osd + AUX_GL_OFFSET;
                zplane->csc = vou->osd + AUX_CSC_OFFSET;
                zplane->hbsc = vou->osd + AUX_HBSC_OFFSET;
                zplane->rsz = vou->otfppu + AUX_RSZ_OFFSET;
                zplane->bits = &zx_gl_bits[1];
                zcrtc->chnreg = vou->osd + OSD_AUX_CHN;
                zcrtc->regs = &aux_crtc_regs;
                zcrtc->bits = &aux_crtc_bits;
        }

        zcrtc->pixclk = devm_clk_get(dev, (chn_type == VOU_CHN_MAIN) ?
                                     "main_wclk" : "aux_wclk");
        if (IS_ERR(zcrtc->pixclk)) {
                ret = PTR_ERR(zcrtc->pixclk);
                DRM_DEV_ERROR(dev, "failed to get pix clk: %d\n", ret);
                return ret;
        }

        ret = zx_plane_init(drm, zplane, DRM_PLANE_TYPE_PRIMARY);
        if (ret) {
                DRM_DEV_ERROR(dev, "failed to init primary plane: %d\n", ret);
                return ret;
        }

        zcrtc->primary = &zplane->plane;

        ret = drm_crtc_init_with_planes(drm, &zcrtc->crtc, zcrtc->primary, NULL,
                                        &zx_crtc_funcs, NULL);
        if (ret) {
                DRM_DEV_ERROR(dev, "failed to init drm crtc: %d\n", ret);
                return ret;
        }

        drm_crtc_helper_add(&zcrtc->crtc, &zx_crtc_helper_funcs);

        if (chn_type == VOU_CHN_MAIN)
                vou->main_crtc = zcrtc;
        else
                vou->aux_crtc = zcrtc;

        return 0;
}
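
/*
 * Route the plane's layer to the channel of its CRTC (chnsel and clksel are
 * cleared for the main channel, set for the aux one), then enable the layer
 * in OSD_CTRL0.
 */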
void zx_vou_layer_enable(struct drm_plane *plane)
{
        struct zx_crtc *zcrtc = to_zx_crtc(plane->state->crtc);
        struct zx_vou_hw *vou = zcrtc->vou;
        struct zx_plane *zplane = to_zx_plane(plane);
        const struct vou_layer_bits *bits = zplane->bits;

        if (zcrtc->chn_type == VOU_CHN_MAIN) {
                zx_writel_mask(vou->osd + OSD_CTRL0, bits->chnsel, 0);
                zx_writel_mask(vou->vouctl + VOU_CLK_SEL, bits->clksel, 0);
        } else {
                zx_writel_mask(vou->osd + OSD_CTRL0, bits->chnsel,
                               bits->chnsel);
                zx_writel_mask(vou->vouctl + VOU_CLK_SEL, bits->clksel,
                               bits->clksel);
        }

        zx_writel_mask(vou->osd + OSD_CTRL0, bits->enable, bits->enable);
}

void zx_vou_layer_disable(struct drm_plane *plane)
{
        struct zx_crtc *zcrtc = to_zx_crtc(plane->crtc);
        struct zx_vou_hw *vou = zcrtc->vou;
        struct zx_plane *zplane = to_zx_plane(plane);
        const struct vou_layer_bits *bits = zplane->bits;

        zx_writel_mask(vou->osd + OSD_CTRL0, bits->enable, 0);
}
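
/* Register the video-layer overlay planes (VL1 and VL2; VL0 is skipped below) */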
static void zx_overlay_init(struct drm_device *drm, struct zx_vou_hw *vou)
{
        struct device *dev = vou->dev;
        struct zx_plane *zplane;
        int i;
        int ret;

        /*
         * VL0 has some quirks on scaling support which need special handling.
         * Let's leave it out for now.
         */
        for (i = 1; i < VL_NUM; i++) {
                zplane = devm_kzalloc(dev, sizeof(*zplane), GFP_KERNEL);
                if (!zplane) {
                        DRM_DEV_ERROR(dev, "failed to allocate zplane %d\n", i);
                        return;
                }

                zplane->layer = vou->osd + OSD_VL_OFFSET(i);
                zplane->hbsc = vou->osd + HBSC_VL_OFFSET(i);
                zplane->rsz = vou->otfppu + RSZ_VL_OFFSET(i);
                zplane->bits = &zx_vl_bits[i];

                ret = zx_plane_init(drm, zplane, DRM_PLANE_TYPE_OVERLAY);
                if (ret) {
                        DRM_DEV_ERROR(dev, "failed to init overlay %d\n", i);
                        continue;
                }
        }
}

static inline void zx_osd_int_update(struct zx_crtc *zcrtc)
{
        struct drm_crtc *crtc = &zcrtc->crtc;
        struct drm_plane *plane;

        vou_chn_set_update(zcrtc);

        drm_for_each_plane_mask(plane, crtc->dev, crtc->state->plane_mask)
                zx_plane_set_update(plane);
}

static irqreturn_t vou_irq_handler(int irq, void *dev_id)
{
        struct zx_vou_hw *vou = dev_id;
        u32 state;

        /* Handle TIMING_CTRL frame interrupts */
        state = zx_readl(vou->timing + TIMING_INT_STATE);
        zx_writel(vou->timing + TIMING_INT_STATE, state);

        if (state & TIMING_INT_MAIN_FRAME)
                drm_crtc_handle_vblank(&vou->main_crtc->crtc);

        if (state & TIMING_INT_AUX_FRAME)
                drm_crtc_handle_vblank(&vou->aux_crtc->crtc);

        /* Handle OSD interrupts */
        state = zx_readl(vou->osd + OSD_INT_STA);
        zx_writel(vou->osd + OSD_INT_CLRSTA, state);

        if (state & OSD_INT_MAIN_UPT)
                zx_osd_int_update(vou->main_crtc);

        if (state & OSD_INT_AUX_UPT)
                zx_osd_int_update(vou->aux_crtc);

        if (state & OSD_INT_ERROR)
                DRM_DEV_ERROR(vou->dev, "OSD ERROR: 0x%08x!\n", state);

        return IRQ_HANDLED;
}

static void vou_dtrc_init(struct zx_vou_hw *vou)
{
        /* Clear bit for bypass by ID */
        zx_writel_mask(vou->dtrc + DTRC_DETILE_CTRL,
                       TILE2RASTESCAN_BYPASS_MODE, 0);

        /* Select ARIDR mode */
        zx_writel_mask(vou->dtrc + DTRC_DETILE_CTRL, DETILE_ARIDR_MODE_MASK,
                       DETILE_ARID_IN_ARIDR);

        /* Bypass decompression for both frames */
        zx_writel_mask(vou->dtrc + DTRC_F0_CTRL, DTRC_DECOMPRESS_BYPASS,
                       DTRC_DECOMPRESS_BYPASS);
        zx_writel_mask(vou->dtrc + DTRC_F1_CTRL, DTRC_DECOMPRESS_BYPASS,
                       DTRC_DECOMPRESS_BYPASS);

        /* Set up ARID register */
        zx_writel(vou->dtrc + DTRC_ARID, DTRC_ARID3(0xf) | DTRC_ARID2(0xe) |
                  DTRC_ARID1(0xf) | DTRC_ARID0(0xe));
}

static void vou_hw_init(struct zx_vou_hw *vou)
{
        /* Release reset for all VOU modules */
        zx_writel(vou->vouctl + VOU_SOFT_RST, ~0);

        /* Enable clock auto-gating for all VOU modules */
        zx_writel(vou->vouctl + VOU_CLK_REQEN, ~0);

        /* Enable all VOU module clocks */
        zx_writel(vou->vouctl + VOU_CLK_EN, ~0);

        /* Clear both OSD and TIMING_CTRL interrupt state */
        zx_writel(vou->osd + OSD_INT_CLRSTA, ~0);
        zx_writel(vou->timing + TIMING_INT_STATE, ~0);

        /* Enable OSD and TIMING_CTRL interrupts */
        zx_writel(vou->osd + OSD_INT_MSK, OSD_INT_ENABLE);
        zx_writel(vou->timing + TIMING_INT_CTRL, TIMING_INT_ENABLE);

        /* Select GPC as input to gl/vl scaler as a sane default setting */
        zx_writel(vou->otfppu + OTFPPU_RSZ_DATA_SOURCE, 0x2a);

        /*
         * The channel and layer logic needs to be reset at the start of each
         * frame for the VOU to work properly.
         */
        zx_writel_mask(vou->osd + OSD_RST_CLR, RST_PER_FRAME, RST_PER_FRAME);

        vou_dtrc_init(vou);
}
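
/*
 * Component bind: map the OSD, TIMING_CTRL, DTRC, VOU_CTRL and OTFPPU register
 * regions, enable the bus clocks, initialize the hardware, request the IRQ,
 * and create both CRTCs plus the overlay planes.
 */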
static int zx_crtc_bind(struct device *dev, struct device *master, void *data)
{
        struct platform_device *pdev = to_platform_device(dev);
        struct drm_device *drm = data;
        struct zx_vou_hw *vou;
        struct resource *res;
        int irq;
        int ret;

        vou = devm_kzalloc(dev, sizeof(*vou), GFP_KERNEL);
        if (!vou)
                return -ENOMEM;

        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "osd");
        vou->osd = devm_ioremap_resource(dev, res);
        if (IS_ERR(vou->osd)) {
                ret = PTR_ERR(vou->osd);
                DRM_DEV_ERROR(dev, "failed to remap osd region: %d\n", ret);
                return ret;
        }

        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "timing_ctrl");
        vou->timing = devm_ioremap_resource(dev, res);
        if (IS_ERR(vou->timing)) {
                ret = PTR_ERR(vou->timing);
                DRM_DEV_ERROR(dev, "failed to remap timing_ctrl region: %d\n",
                              ret);
                return ret;
        }

        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "dtrc");
        vou->dtrc = devm_ioremap_resource(dev, res);
        if (IS_ERR(vou->dtrc)) {
                ret = PTR_ERR(vou->dtrc);
                DRM_DEV_ERROR(dev, "failed to remap dtrc region: %d\n", ret);
                return ret;
        }

        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "vou_ctrl");
        vou->vouctl = devm_ioremap_resource(dev, res);
        if (IS_ERR(vou->vouctl)) {
                ret = PTR_ERR(vou->vouctl);
                DRM_DEV_ERROR(dev, "failed to remap vou_ctrl region: %d\n",
                              ret);
                return ret;
        }

        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "otfppu");
        vou->otfppu = devm_ioremap_resource(dev, res);
        if (IS_ERR(vou->otfppu)) {
                ret = PTR_ERR(vou->otfppu);
                DRM_DEV_ERROR(dev, "failed to remap otfppu region: %d\n", ret);
                return ret;
        }

        irq = platform_get_irq(pdev, 0);
        if (irq < 0)
                return irq;

        vou->axi_clk = devm_clk_get(dev, "aclk");
        if (IS_ERR(vou->axi_clk)) {
                ret = PTR_ERR(vou->axi_clk);
                DRM_DEV_ERROR(dev, "failed to get axi_clk: %d\n", ret);
                return ret;
        }

        vou->ppu_clk = devm_clk_get(dev, "ppu_wclk");
        if (IS_ERR(vou->ppu_clk)) {
                ret = PTR_ERR(vou->ppu_clk);
                DRM_DEV_ERROR(dev, "failed to get ppu_clk: %d\n", ret);
                return ret;
        }

        ret = clk_prepare_enable(vou->axi_clk);
        if (ret) {
                DRM_DEV_ERROR(dev, "failed to enable axi_clk: %d\n", ret);
                return ret;
        }

        ret = clk_prepare_enable(vou->ppu_clk);
        if (ret) {
                DRM_DEV_ERROR(dev, "failed to enable ppu_clk: %d\n", ret);
                goto disable_axi_clk;
        }

        vou->dev = dev;
        dev_set_drvdata(dev, vou);

        vou_hw_init(vou);

        ret = devm_request_irq(dev, irq, vou_irq_handler, 0, "zx_vou", vou);
        if (ret < 0) {
                DRM_DEV_ERROR(dev, "failed to request vou irq: %d\n", ret);
                goto disable_ppu_clk;
        }

        ret = zx_crtc_init(drm, vou, VOU_CHN_MAIN);
        if (ret) {
                DRM_DEV_ERROR(dev, "failed to init main channel crtc: %d\n",
                              ret);
                goto disable_ppu_clk;
        }

        ret = zx_crtc_init(drm, vou, VOU_CHN_AUX);
        if (ret) {
                DRM_DEV_ERROR(dev, "failed to init aux channel crtc: %d\n",
                              ret);
                goto disable_ppu_clk;
        }

        zx_overlay_init(drm, vou);

        return 0;

disable_ppu_clk:
        clk_disable_unprepare(vou->ppu_clk);
disable_axi_clk:
        clk_disable_unprepare(vou->axi_clk);
        return ret;
}

static void zx_crtc_unbind(struct device *dev, struct device *master,
                           void *data)
{
        struct zx_vou_hw *vou = dev_get_drvdata(dev);

        clk_disable_unprepare(vou->axi_clk);
        clk_disable_unprepare(vou->ppu_clk);
}

static const struct component_ops zx_crtc_component_ops = {
        .bind = zx_crtc_bind,
        .unbind = zx_crtc_unbind,
};

static int zx_crtc_probe(struct platform_device *pdev)
{
        return component_add(&pdev->dev, &zx_crtc_component_ops);
}

static int zx_crtc_remove(struct platform_device *pdev)
{
        component_del(&pdev->dev, &zx_crtc_component_ops);
        return 0;
}

static const struct of_device_id zx_crtc_of_match[] = {
        { .compatible = "zte,zx296718-dpc", },
        { /* end */ },
};
MODULE_DEVICE_TABLE(of, zx_crtc_of_match);

struct platform_driver zx_crtc_driver = {
        .probe = zx_crtc_probe,
        .remove = zx_crtc_remove,
        .driver = {
                .name = "zx-crtc",
                .of_match_table = zx_crtc_of_match,
        },
};