malidp_planes.c
/*
 * (C) COPYRIGHT 2016 ARM Limited. All rights reserved.
 * Author: Liviu Dudau <Liviu.Dudau@arm.com>
 *
 * This program is free software and is provided to you under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation, and any use by you of this program is subject to the terms
 * of such GNU licence.
 *
 * ARM Mali DP plane manipulation routines.
 */

#include <linux/iommu.h>

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_print.h>

#include "malidp_hw.h"
#include "malidp_drv.h"

/* Layer specific register offsets */
#define MALIDP_LAYER_FORMAT 0x000
#define   LAYER_FORMAT_MASK 0x3f
#define MALIDP_LAYER_CONTROL 0x004
#define   LAYER_ENABLE (1 << 0)
#define   LAYER_FLOWCFG_MASK 7
#define   LAYER_FLOWCFG(x) (((x) & LAYER_FLOWCFG_MASK) << 1)
#define     LAYER_FLOWCFG_SCALE_SE 3
#define   LAYER_ROT_OFFSET 8
#define   LAYER_H_FLIP (1 << 10)
#define   LAYER_V_FLIP (1 << 11)
#define   LAYER_ROT_MASK (0xf << 8)
#define   LAYER_COMP_MASK (0x3 << 12)
#define   LAYER_COMP_PIXEL (0x3 << 12)
#define   LAYER_COMP_PLANE (0x2 << 12)
#define   LAYER_PMUL_ENABLE (0x1 << 14)
#define   LAYER_ALPHA_OFFSET (16)
#define   LAYER_ALPHA_MASK (0xff)
#define   LAYER_ALPHA(x) (((x) & LAYER_ALPHA_MASK) << LAYER_ALPHA_OFFSET)
#define MALIDP_LAYER_COMPOSE 0x008
#define MALIDP_LAYER_SIZE 0x00c
#define   LAYER_H_VAL(x) (((x) & 0x1fff) << 0)
#define   LAYER_V_VAL(x) (((x) & 0x1fff) << 16)
#define MALIDP_LAYER_COMP_SIZE 0x010
#define MALIDP_LAYER_OFFSET 0x014
#define MALIDP550_LS_ENABLE 0x01c
#define MALIDP550_LS_R1_IN_SIZE 0x020

/*
 * This 4-entry look-up-table is used to determine the full 8-bit alpha value
 * for formats with 1- or 2-bit alpha channels.
 * We set it to give 100%/0% opacity for 1-bit formats and 100%/66%/33%/0%
 * opacity for 2-bit formats.
 */
#define MALIDP_ALPHA_LUT 0xffaa5500
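
/*
 * Worked example (added note; byte ordering assumed from the comment above):
 * entry n of the LUT is byte n of MALIDP_ALPHA_LUT, so 0xffaa5500 yields
 * 0x00, 0x55, 0xaa, 0xff for 2-bit alpha values 0..3 (0%, ~33%, ~66%, 100%);
 * a 1-bit alpha channel presumably selects entries 0 and 3, giving the
 * 0%/100% opacity mentioned above.
 */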

/* page sizes the MMU prefetcher can support */
#define MALIDP_MMU_PREFETCH_PARTIAL_PGSIZES (SZ_4K | SZ_64K)
#define MALIDP_MMU_PREFETCH_FULL_PGSIZES (SZ_1M | SZ_2M)

/* readahead for partial-frame prefetch */
#define MALIDP_MMU_PREFETCH_READAHEAD 8

static void malidp_de_plane_destroy(struct drm_plane *plane)
{
        struct malidp_plane *mp = to_malidp_plane(plane);

        drm_plane_cleanup(plane);
        kfree(mp);
}

/*
 * Replicate what the default ->reset hook does: free the state pointer and
 * allocate a new empty object. We just need enough space to store
 * a malidp_plane_state instead of a drm_plane_state.
 */
static void malidp_plane_reset(struct drm_plane *plane)
{
        struct malidp_plane_state *state = to_malidp_plane_state(plane->state);

        if (state)
                __drm_atomic_helper_plane_destroy_state(&state->base);
        kfree(state);
        plane->state = NULL;
        state = kzalloc(sizeof(*state), GFP_KERNEL);
        if (state)
                __drm_atomic_helper_plane_reset(plane, &state->base);
}

static struct
drm_plane_state *malidp_duplicate_plane_state(struct drm_plane *plane)
{
        struct malidp_plane_state *state, *m_state;

        if (!plane->state)
                return NULL;

        state = kmalloc(sizeof(*state), GFP_KERNEL);
        if (!state)
                return NULL;

        m_state = to_malidp_plane_state(plane->state);
        __drm_atomic_helper_plane_duplicate_state(plane, &state->base);
        state->rotmem_size = m_state->rotmem_size;
        state->format = m_state->format;
        state->n_planes = m_state->n_planes;

        state->mmu_prefetch_mode = m_state->mmu_prefetch_mode;
        state->mmu_prefetch_pgsize = m_state->mmu_prefetch_pgsize;

        return &state->base;
}

static void malidp_destroy_plane_state(struct drm_plane *plane,
                                       struct drm_plane_state *state)
{
        struct malidp_plane_state *m_state = to_malidp_plane_state(state);

        __drm_atomic_helper_plane_destroy_state(state);
        kfree(m_state);
}

static const char * const prefetch_mode_names[] = {
        [MALIDP_PREFETCH_MODE_NONE] = "MMU_PREFETCH_NONE",
        [MALIDP_PREFETCH_MODE_PARTIAL] = "MMU_PREFETCH_PARTIAL",
        [MALIDP_PREFETCH_MODE_FULL] = "MMU_PREFETCH_FULL",
};

static void malidp_plane_atomic_print_state(struct drm_printer *p,
                                            const struct drm_plane_state *state)
{
        struct malidp_plane_state *ms = to_malidp_plane_state(state);

        drm_printf(p, "\trotmem_size=%u\n", ms->rotmem_size);
        drm_printf(p, "\tformat_id=%u\n", ms->format);
        drm_printf(p, "\tn_planes=%u\n", ms->n_planes);
        drm_printf(p, "\tmmu_prefetch_mode=%s\n",
                   prefetch_mode_names[ms->mmu_prefetch_mode]);
        drm_printf(p, "\tmmu_prefetch_pgsize=%d\n", ms->mmu_prefetch_pgsize);
}

static const struct drm_plane_funcs malidp_de_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = malidp_de_plane_destroy,
        .reset = malidp_plane_reset,
        .atomic_duplicate_state = malidp_duplicate_plane_state,
        .atomic_destroy_state = malidp_destroy_plane_state,
        .atomic_print_state = malidp_plane_atomic_print_state,
};

static int malidp_se_check_scaling(struct malidp_plane *mp,
                                   struct drm_plane_state *state)
{
        struct drm_crtc_state *crtc_state =
                drm_atomic_get_existing_crtc_state(state->state, state->crtc);
        struct malidp_crtc_state *mc;
        u32 src_w, src_h;
        int ret;

        if (!crtc_state)
                return -EINVAL;

        mc = to_malidp_crtc_state(crtc_state);

        ret = drm_atomic_helper_check_plane_state(state, crtc_state,
                                                  0, INT_MAX, true, true);
        if (ret)
                return ret;

        if (state->rotation & MALIDP_ROTATED_MASK) {
                src_w = state->src_h >> 16;
                src_h = state->src_w >> 16;
        } else {
                src_w = state->src_w >> 16;
                src_h = state->src_h >> 16;
        }

        if ((state->crtc_w == src_w) && (state->crtc_h == src_h)) {
                /* Scaling not necessary for this plane. */
                mc->scaled_planes_mask &= ~(mp->layer->id);
                return 0;
        }

        if (mp->layer->id & (DE_SMART | DE_GRAPHICS2))
                return -EINVAL;

        mc->scaled_planes_mask |= mp->layer->id;
        /* Defer scaling requirements calculation to the crtc check. */
        return 0;
}

static u32 malidp_get_pgsize_bitmap(struct malidp_plane *mp)
{
        u32 pgsize_bitmap = 0;

        if (iommu_present(&platform_bus_type)) {
                struct iommu_domain *mmu_dom =
                        iommu_get_domain_for_dev(mp->base.dev->dev);

                if (mmu_dom)
                        pgsize_bitmap = mmu_dom->pgsize_bitmap;
        }

        return pgsize_bitmap;
}

/*
 * Check if the framebuffer is entirely made up of pages at least pgsize in
 * size. Only a heuristic: assumes that each scatterlist entry has been aligned
 * to the largest page size smaller than its length and that the MMU maps to
 * the largest page size possible.
 */
static bool malidp_check_pages_threshold(struct malidp_plane_state *ms,
                                         u32 pgsize)
{
        int i;

        for (i = 0; i < ms->n_planes; i++) {
                struct drm_gem_object *obj;
                struct drm_gem_cma_object *cma_obj;
                struct sg_table *sgt;
                struct scatterlist *sgl;

                obj = drm_gem_fb_get_obj(ms->base.fb, i);
                cma_obj = to_drm_gem_cma_obj(obj);

                if (cma_obj->sgt)
                        sgt = cma_obj->sgt;
                else
                        sgt = obj->dev->driver->gem_prime_get_sg_table(obj);

                if (!sgt)
                        return false;

                sgl = sgt->sgl;

                while (sgl) {
                        if (sgl->length < pgsize) {
                                if (!cma_obj->sgt)
                                        kfree(sgt);
                                return false;
                        }

                        sgl = sg_next(sgl);
                }

                if (!cma_obj->sgt)
                        kfree(sgt);
        }

        return true;
}
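
/*
 * Informal example of the heuristic above (added note, values illustrative):
 * a check for pgsize = SZ_1M only succeeds if every scatterlist entry of every
 * plane's backing object is at least 1 MiB long; a buffer imported as
 * scattered 4 KiB chunks fails it, and the caller below then falls back to a
 * smaller prefetch page size or disables prefetch altogether.
 */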

/*
 * Check if it is possible to enable partial-frame MMU prefetch given the
 * current format, AFBC state and rotation.
 */
static bool malidp_partial_prefetch_supported(u32 format, u64 modifier,
                                              unsigned int rotation)
{
        bool afbc, sparse;

        /* rotation and horizontal flip not supported for partial prefetch */
        if (rotation & (DRM_MODE_ROTATE_90 | DRM_MODE_ROTATE_180 |
                        DRM_MODE_ROTATE_270 | DRM_MODE_REFLECT_X))
                return false;

        afbc = modifier & DRM_FORMAT_MOD_ARM_AFBC(0);
        sparse = modifier & AFBC_FORMAT_MOD_SPARSE;

        switch (format) {
        case DRM_FORMAT_ARGB2101010:
        case DRM_FORMAT_RGBA1010102:
        case DRM_FORMAT_BGRA1010102:
        case DRM_FORMAT_ARGB8888:
        case DRM_FORMAT_RGBA8888:
        case DRM_FORMAT_BGRA8888:
        case DRM_FORMAT_XRGB8888:
        case DRM_FORMAT_XBGR8888:
        case DRM_FORMAT_RGBX8888:
        case DRM_FORMAT_BGRX8888:
        case DRM_FORMAT_RGB888:
        case DRM_FORMAT_RGBA5551:
        case DRM_FORMAT_RGB565:
                /* always supported */
                return true;

        case DRM_FORMAT_ABGR2101010:
        case DRM_FORMAT_ABGR8888:
        case DRM_FORMAT_ABGR1555:
        case DRM_FORMAT_BGR565:
                /* supported, but if AFBC then must be sparse mode */
                return (!afbc) || (afbc && sparse);

        case DRM_FORMAT_BGR888:
                /* supported, but not for AFBC */
                return !afbc;

        case DRM_FORMAT_YUYV:
        case DRM_FORMAT_UYVY:
        case DRM_FORMAT_NV12:
        case DRM_FORMAT_YUV420:
                /* not supported */
                return false;

        default:
                return false;
        }
}

/*
 * Select the preferred MMU prefetch mode. Full-frame prefetch is preferred as
 * long as the framebuffer is all large pages. Otherwise partial-frame prefetch
 * is selected as long as it is supported for the current format. The selected
 * page size for prefetch is returned in pgsize_bitmap.
 */
static enum mmu_prefetch_mode malidp_mmu_prefetch_select_mode
                (struct malidp_plane_state *ms, u32 *pgsize_bitmap)
{
        u32 pgsizes;

        /* get the full-frame prefetch page size(s) supported by the MMU */
        pgsizes = *pgsize_bitmap & MALIDP_MMU_PREFETCH_FULL_PGSIZES;

        while (pgsizes) {
                u32 largest_pgsize = 1 << __fls(pgsizes);

                if (malidp_check_pages_threshold(ms, largest_pgsize)) {
                        *pgsize_bitmap = largest_pgsize;
                        return MALIDP_PREFETCH_MODE_FULL;
                }

                pgsizes -= largest_pgsize;
        }

        /* get the partial-frame prefetch page size(s) supported by the MMU */
        pgsizes = *pgsize_bitmap & MALIDP_MMU_PREFETCH_PARTIAL_PGSIZES;

        if (malidp_partial_prefetch_supported(ms->base.fb->format->format,
                                              ms->base.fb->modifier,
                                              ms->base.rotation)) {
                /* partial prefetch using the smallest page size */
                *pgsize_bitmap = 1 << __ffs(pgsizes);
                return MALIDP_PREFETCH_MODE_PARTIAL;
        }

        *pgsize_bitmap = 0;
        return MALIDP_PREFETCH_MODE_NONE;
}
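
/*
 * Worked example (added note, values assumed for illustration): if the IOMMU
 * reports pgsize_bitmap = SZ_4K | SZ_2M and every scatterlist entry of the
 * framebuffer is >= 2 MiB, full-frame prefetch is selected with pgsize SZ_2M.
 * If the buffer is more fragmented but the format/modifier/rotation allow it,
 * the code falls back to partial-frame prefetch with the smallest supported
 * partial page size (here SZ_4K); otherwise prefetch is disabled entirely.
 */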

static u32 malidp_calc_mmu_control_value(enum mmu_prefetch_mode mode,
                                         u8 readahead, u8 n_planes, u32 pgsize)
{
        u32 mmu_ctrl = 0;

        if (mode != MALIDP_PREFETCH_MODE_NONE) {
                mmu_ctrl |= MALIDP_MMU_CTRL_EN;

                if (mode == MALIDP_PREFETCH_MODE_PARTIAL) {
                        mmu_ctrl |= MALIDP_MMU_CTRL_MODE;
                        mmu_ctrl |= MALIDP_MMU_CTRL_PP_NUM_REQ(readahead);
                }

                if (pgsize == SZ_64K || pgsize == SZ_2M) {
                        int i;

                        for (i = 0; i < n_planes; i++)
                                mmu_ctrl |= MALIDP_MMU_CTRL_PX_PS(i);
                }
        }

        return mmu_ctrl;
}
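
/*
 * Note (added summary of the function above): the returned register value ORs
 * together the prefetch enable bit, the partial-prefetch mode and read-ahead
 * request count when partial mode is selected, and a per-plane "large page
 * size" flag when the chosen page size is 64 KiB or 2 MiB. The
 * MALIDP_MMU_CTRL_* field encodings come from the driver headers included
 * above (presumably malidp_hw.h).
 */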

static void malidp_de_prefetch_settings(struct malidp_plane *mp,
                                        struct malidp_plane_state *ms)
{
        if (!mp->layer->mmu_ctrl_offset)
                return;

        /* get the page sizes supported by the MMU */
        ms->mmu_prefetch_pgsize = malidp_get_pgsize_bitmap(mp);

        ms->mmu_prefetch_mode =
                malidp_mmu_prefetch_select_mode(ms, &ms->mmu_prefetch_pgsize);
}

static int malidp_de_plane_check(struct drm_plane *plane,
                                 struct drm_plane_state *state)
{
        struct malidp_plane *mp = to_malidp_plane(plane);
        struct malidp_plane_state *ms = to_malidp_plane_state(state);
        bool rotated = state->rotation & MALIDP_ROTATED_MASK;
        struct drm_framebuffer *fb;
        u16 pixel_alpha = state->pixel_blend_mode;
        int i, ret;

        if (!state->crtc || !state->fb)
                return 0;

        fb = state->fb;

        ms->format = malidp_hw_get_format_id(&mp->hwdev->hw->map,
                                             mp->layer->id,
                                             fb->format->format);
        if (ms->format == MALIDP_INVALID_FORMAT_ID)
                return -EINVAL;

        ms->n_planes = fb->format->num_planes;
        for (i = 0; i < ms->n_planes; i++) {
                u8 alignment = malidp_hw_get_pitch_align(mp->hwdev, rotated);

                if (fb->pitches[i] & (alignment - 1)) {
                        DRM_DEBUG_KMS("Invalid pitch %u for plane %d\n",
                                      fb->pitches[i], i);
                        return -EINVAL;
                }
        }

        if ((state->crtc_w > mp->hwdev->max_line_size) ||
            (state->crtc_h > mp->hwdev->max_line_size) ||
            (state->crtc_w < mp->hwdev->min_line_size) ||
            (state->crtc_h < mp->hwdev->min_line_size))
                return -EINVAL;

        /*
         * DP550/650 video layers can accept 3 plane formats only if
         * fb->pitches[1] == fb->pitches[2] since they don't have a
         * third plane stride register.
         */
        if (ms->n_planes == 3 &&
            !(mp->hwdev->hw->features & MALIDP_DEVICE_LV_HAS_3_STRIDES) &&
            (state->fb->pitches[1] != state->fb->pitches[2]))
                return -EINVAL;

        ret = malidp_se_check_scaling(mp, state);
        if (ret)
                return ret;

        /* validate the rotation constraints for each layer */
        if (state->rotation != DRM_MODE_ROTATE_0) {
                if (mp->layer->rot == ROTATE_NONE)
                        return -EINVAL;
                if ((mp->layer->rot == ROTATE_COMPRESSED) && !(fb->modifier))
                        return -EINVAL;
                /*
                 * packed RGB888 / BGR888 can't be rotated or flipped
                 * unless they are stored in a compressed way
                 */
                if ((fb->format->format == DRM_FORMAT_RGB888 ||
                     fb->format->format == DRM_FORMAT_BGR888) && !(fb->modifier))
                        return -EINVAL;
        }

        ms->rotmem_size = 0;
        if (state->rotation & MALIDP_ROTATED_MASK) {
                int val;

                val = mp->hwdev->hw->rotmem_required(mp->hwdev, state->crtc_w,
                                                     state->crtc_h,
                                                     fb->format->format);
                if (val < 0)
                        return val;

                ms->rotmem_size = val;
        }

        /* HW can't support plane + pixel blending */
        if ((state->alpha != DRM_BLEND_ALPHA_OPAQUE) &&
            (pixel_alpha != DRM_MODE_BLEND_PIXEL_NONE) &&
            fb->format->has_alpha)
                return -EINVAL;

        malidp_de_prefetch_settings(mp, ms);

        return 0;
}

static void malidp_de_set_plane_pitches(struct malidp_plane *mp,
                                        int num_planes, unsigned int pitches[3])
{
        int i;
        int num_strides = num_planes;

        if (!mp->layer->stride_offset)
                return;

        if (num_planes == 3)
                num_strides = (mp->hwdev->hw->features &
                               MALIDP_DEVICE_LV_HAS_3_STRIDES) ? 3 : 2;

        for (i = 0; i < num_strides; ++i)
                malidp_hw_write(mp->hwdev, pitches[i],
                                mp->layer->base +
                                mp->layer->stride_offset + i * 4);
}

static const s16
malidp_yuv2rgb_coeffs[][DRM_COLOR_RANGE_MAX][MALIDP_COLORADJ_NUM_COEFFS] = {
        [DRM_COLOR_YCBCR_BT601][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
                1192, 0, 1634,
                1192, -401, -832,
                1192, 2066, 0,
                64, 512, 512
        },
        [DRM_COLOR_YCBCR_BT601][DRM_COLOR_YCBCR_FULL_RANGE] = {
                1024, 0, 1436,
                1024, -352, -731,
                1024, 1815, 0,
                0, 512, 512
        },
        [DRM_COLOR_YCBCR_BT709][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
                1192, 0, 1836,
                1192, -218, -546,
                1192, 2163, 0,
                64, 512, 512
        },
        [DRM_COLOR_YCBCR_BT709][DRM_COLOR_YCBCR_FULL_RANGE] = {
                1024, 0, 1613,
                1024, -192, -479,
                1024, 1900, 0,
                0, 512, 512
        },
        [DRM_COLOR_YCBCR_BT2020][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
                1024, 0, 1476,
                1024, -165, -572,
                1024, 1884, 0,
                0, 512, 512
        },
        [DRM_COLOR_YCBCR_BT2020][DRM_COLOR_YCBCR_FULL_RANGE] = {
                1024, 0, 1510,
                1024, -168, -585,
                1024, 1927, 0,
                0, 512, 512
        }
};
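
/*
 * Note (added explanation; the interpretation is inferred from the values
 * above, not stated by the original comments): the 3x3 coefficients appear to
 * be signed Q10 fixed point (1024 == 1.0) and the final row looks like the
 * Y/Cb/Cr input offsets. For BT.601 limited range, 1192/1024 ~= 1.164 and
 * 1634/1024 ~= 1.596, matching the usual R = 1.164 * (Y - 16) + 1.596 *
 * (Cr - 128) conversion, with 64/512/512 being those offsets expressed in
 * 10-bit units (16 << 2, 128 << 2).
 */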

static void malidp_de_set_color_encoding(struct malidp_plane *plane,
                                         enum drm_color_encoding enc,
                                         enum drm_color_range range)
{
        unsigned int i;

        for (i = 0; i < MALIDP_COLORADJ_NUM_COEFFS; i++) {
                /* coefficients are signed, two's complement values */
                malidp_hw_write(plane->hwdev, malidp_yuv2rgb_coeffs[enc][range][i],
                                plane->layer->base + plane->layer->yuv2rgb_offset +
                                i * 4);
        }
}

static void malidp_de_set_mmu_control(struct malidp_plane *mp,
                                      struct malidp_plane_state *ms)
{
        u32 mmu_ctrl;

        /* check hardware supports MMU prefetch */
        if (!mp->layer->mmu_ctrl_offset)
                return;

        mmu_ctrl = malidp_calc_mmu_control_value(ms->mmu_prefetch_mode,
                                                 MALIDP_MMU_PREFETCH_READAHEAD,
                                                 ms->n_planes,
                                                 ms->mmu_prefetch_pgsize);

        malidp_hw_write(mp->hwdev, mmu_ctrl,
                        mp->layer->base + mp->layer->mmu_ctrl_offset);
}
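
/*
 * Note (added summary, not part of the original driver comments): in
 * malidp_de_plane_update() below, the rotation field of LAYER_CONTROL is
 * written as ilog2() of the DRM_MODE_ROTATE_* bit, i.e. ROTATE_0..ROTATE_270
 * map to 0..3 (the angle divided by 90), while X/Y reflections are carried by
 * the separate LAYER_H_FLIP and LAYER_V_FLIP bits.
 */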

static void malidp_de_plane_update(struct drm_plane *plane,
                                   struct drm_plane_state *old_state)
{
        struct malidp_plane *mp;
        struct malidp_plane_state *ms = to_malidp_plane_state(plane->state);
        struct drm_plane_state *state = plane->state;
        u16 pixel_alpha = state->pixel_blend_mode;
        u8 plane_alpha = state->alpha >> 8;
        u32 src_w, src_h, dest_w, dest_h, val;
        int i;

        mp = to_malidp_plane(plane);

        /* convert src values from Q16 fixed point to integer */
        src_w = state->src_w >> 16;
        src_h = state->src_h >> 16;
        dest_w = state->crtc_w;
        dest_h = state->crtc_h;

        val = malidp_hw_read(mp->hwdev, mp->layer->base);
        val = (val & ~LAYER_FORMAT_MASK) | ms->format;
        malidp_hw_write(mp->hwdev, val, mp->layer->base);

        for (i = 0; i < ms->n_planes; i++) {
                /* calculate the offset for the layer's plane registers */
                u16 ptr = mp->layer->ptr + (i << 4);
                dma_addr_t fb_addr = drm_fb_cma_get_gem_addr(state->fb,
                                                             state, i);

                malidp_hw_write(mp->hwdev, lower_32_bits(fb_addr), ptr);
                malidp_hw_write(mp->hwdev, upper_32_bits(fb_addr), ptr + 4);
        }

        malidp_de_set_mmu_control(mp, ms);

        malidp_de_set_plane_pitches(mp, ms->n_planes,
                                    state->fb->pitches);

        if ((plane->state->color_encoding != old_state->color_encoding) ||
            (plane->state->color_range != old_state->color_range))
                malidp_de_set_color_encoding(mp, plane->state->color_encoding,
                                             plane->state->color_range);

        malidp_hw_write(mp->hwdev, LAYER_H_VAL(src_w) | LAYER_V_VAL(src_h),
                        mp->layer->base + MALIDP_LAYER_SIZE);

        malidp_hw_write(mp->hwdev, LAYER_H_VAL(dest_w) | LAYER_V_VAL(dest_h),
                        mp->layer->base + MALIDP_LAYER_COMP_SIZE);

        malidp_hw_write(mp->hwdev, LAYER_H_VAL(state->crtc_x) |
                        LAYER_V_VAL(state->crtc_y),
                        mp->layer->base + MALIDP_LAYER_OFFSET);

        if (mp->layer->id == DE_SMART) {
                /*
                 * Enable the first rectangle in the SMART layer to be
                 * able to use it as a drm plane.
                 */
                malidp_hw_write(mp->hwdev, 1,
                                mp->layer->base + MALIDP550_LS_ENABLE);
                malidp_hw_write(mp->hwdev,
                                LAYER_H_VAL(src_w) | LAYER_V_VAL(src_h),
                                mp->layer->base + MALIDP550_LS_R1_IN_SIZE);
        }

        /* first clear the rotation bits */
        val = malidp_hw_read(mp->hwdev, mp->layer->base + MALIDP_LAYER_CONTROL);
        val &= ~LAYER_ROT_MASK;

        /* setup the rotation and axis flip bits */
        if (state->rotation & DRM_MODE_ROTATE_MASK)
                val |= ilog2(plane->state->rotation & DRM_MODE_ROTATE_MASK) <<
                       LAYER_ROT_OFFSET;
        if (state->rotation & DRM_MODE_REFLECT_X)
                val |= LAYER_H_FLIP;
        if (state->rotation & DRM_MODE_REFLECT_Y)
                val |= LAYER_V_FLIP;

        val &= ~(LAYER_COMP_MASK | LAYER_PMUL_ENABLE | LAYER_ALPHA(0xff));

        if (state->alpha != DRM_BLEND_ALPHA_OPAQUE) {
                val |= LAYER_COMP_PLANE;
        } else if (state->fb->format->has_alpha) {
                /* We only care about blend mode if the format has alpha */
                switch (pixel_alpha) {
                case DRM_MODE_BLEND_PREMULTI:
                        val |= LAYER_COMP_PIXEL | LAYER_PMUL_ENABLE;
                        break;
                case DRM_MODE_BLEND_COVERAGE:
                        val |= LAYER_COMP_PIXEL;
                        break;
                }
        }
        val |= LAYER_ALPHA(plane_alpha);

        val &= ~LAYER_FLOWCFG(LAYER_FLOWCFG_MASK);
        if (state->crtc) {
                struct malidp_crtc_state *m =
                        to_malidp_crtc_state(state->crtc->state);

                if (m->scaler_config.scale_enable &&
                    m->scaler_config.plane_src_id == mp->layer->id)
                        val |= LAYER_FLOWCFG(LAYER_FLOWCFG_SCALE_SE);
        }

        /* set the 'enable layer' bit */
        val |= LAYER_ENABLE;

        malidp_hw_write(mp->hwdev, val,
                        mp->layer->base + MALIDP_LAYER_CONTROL);
}

static void malidp_de_plane_disable(struct drm_plane *plane,
                                    struct drm_plane_state *state)
{
        struct malidp_plane *mp = to_malidp_plane(plane);

        malidp_hw_clearbits(mp->hwdev,
                            LAYER_ENABLE | LAYER_FLOWCFG(LAYER_FLOWCFG_MASK),
                            mp->layer->base + MALIDP_LAYER_CONTROL);
}

static const struct drm_plane_helper_funcs malidp_de_plane_helper_funcs = {
        .atomic_check = malidp_de_plane_check,
        .atomic_update = malidp_de_plane_update,
        .atomic_disable = malidp_de_plane_disable,
};

int malidp_de_planes_init(struct drm_device *drm)
{
        struct malidp_drm *malidp = drm->dev_private;
        const struct malidp_hw_regmap *map = &malidp->dev->hw->map;
        struct malidp_plane *plane = NULL;
        enum drm_plane_type plane_type;
        unsigned long crtcs = 1 << drm->mode_config.num_crtc;
        unsigned long flags = DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_90 | DRM_MODE_ROTATE_180 |
                              DRM_MODE_ROTATE_270 | DRM_MODE_REFLECT_X | DRM_MODE_REFLECT_Y;
        unsigned int blend_caps = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
                                  BIT(DRM_MODE_BLEND_PREMULTI) |
                                  BIT(DRM_MODE_BLEND_COVERAGE);
        u32 *formats;
        int ret, i, j, n;

        formats = kcalloc(map->n_pixel_formats, sizeof(*formats), GFP_KERNEL);
        if (!formats) {
                ret = -ENOMEM;
                goto cleanup;
        }

        for (i = 0; i < map->n_layers; i++) {
                u8 id = map->layers[i].id;

                plane = kzalloc(sizeof(*plane), GFP_KERNEL);
                if (!plane) {
                        ret = -ENOMEM;
                        goto cleanup;
                }

                /* build the list of DRM supported formats based on the map */
                for (n = 0, j = 0; j < map->n_pixel_formats; j++) {
                        if ((map->pixel_formats[j].layer & id) == id)
                                formats[n++] = map->pixel_formats[j].format;
                }

                plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
                                        DRM_PLANE_TYPE_OVERLAY;
                ret = drm_universal_plane_init(drm, &plane->base, crtcs,
                                               &malidp_de_plane_funcs, formats,
                                               n, NULL, plane_type, NULL);
                if (ret < 0)
                        goto cleanup;

                drm_plane_helper_add(&plane->base,
                                     &malidp_de_plane_helper_funcs);

                plane->hwdev = malidp->dev;
                plane->layer = &map->layers[i];

                drm_plane_create_alpha_property(&plane->base);
                drm_plane_create_blend_mode_property(&plane->base, blend_caps);

                if (id == DE_SMART) {
                        /* Skip the features which the SMART layer doesn't have. */
                        continue;
                }

                drm_plane_create_rotation_property(&plane->base, DRM_MODE_ROTATE_0, flags);
                malidp_hw_write(malidp->dev, MALIDP_ALPHA_LUT,
                                plane->layer->base + MALIDP_LAYER_COMPOSE);

                /* Attach the YUV->RGB property only to video layers */
                if (id & (DE_VIDEO1 | DE_VIDEO2)) {
                        /* default encoding for YUV->RGB is BT601 NARROW */
                        enum drm_color_encoding enc = DRM_COLOR_YCBCR_BT601;
                        enum drm_color_range range = DRM_COLOR_YCBCR_LIMITED_RANGE;

                        ret = drm_plane_create_color_properties(&plane->base,
                                        BIT(DRM_COLOR_YCBCR_BT601) | \
                                        BIT(DRM_COLOR_YCBCR_BT709) | \
                                        BIT(DRM_COLOR_YCBCR_BT2020),
                                        BIT(DRM_COLOR_YCBCR_LIMITED_RANGE) | \
                                        BIT(DRM_COLOR_YCBCR_FULL_RANGE),
                                        enc, range);
                        if (!ret)
                                /* program the HW registers */
                                malidp_de_set_color_encoding(plane, enc, range);
                        else
                                DRM_WARN("Failed to create video layer %d color properties\n", id);
                }
        }

        kfree(formats);

        return 0;

cleanup:
        kfree(formats);

        return ret;
}