
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * camss-vfe-4-1.c
  4. *
  5. * Qualcomm MSM Camera Subsystem - VFE (Video Front End) Module v4.1
  6. *
  7. * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
  8. * Copyright (C) 2015-2018 Linaro Ltd.
  9. */
  10. #include <linux/interrupt.h>
  11. #include <linux/io.h>
  12. #include <linux/iopoll.h>
  13. #include "camss-vfe.h"
  14. #define VFE_0_HW_VERSION 0x000
  15. #define VFE_0_GLOBAL_RESET_CMD 0x00c
  16. #define VFE_0_GLOBAL_RESET_CMD_CORE BIT(0)
  17. #define VFE_0_GLOBAL_RESET_CMD_CAMIF BIT(1)
  18. #define VFE_0_GLOBAL_RESET_CMD_BUS BIT(2)
  19. #define VFE_0_GLOBAL_RESET_CMD_BUS_BDG BIT(3)
  20. #define VFE_0_GLOBAL_RESET_CMD_REGISTER BIT(4)
  21. #define VFE_0_GLOBAL_RESET_CMD_TIMER BIT(5)
  22. #define VFE_0_GLOBAL_RESET_CMD_PM BIT(6)
  23. #define VFE_0_GLOBAL_RESET_CMD_BUS_MISR BIT(7)
  24. #define VFE_0_GLOBAL_RESET_CMD_TESTGEN BIT(8)
  25. #define VFE_0_MODULE_CFG 0x018
  26. #define VFE_0_MODULE_CFG_DEMUX BIT(2)
  27. #define VFE_0_MODULE_CFG_CHROMA_UPSAMPLE BIT(3)
  28. #define VFE_0_MODULE_CFG_SCALE_ENC BIT(23)
  29. #define VFE_0_MODULE_CFG_CROP_ENC BIT(27)
  30. #define VFE_0_CORE_CFG 0x01c
  31. #define VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR 0x4
  32. #define VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB 0x5
  33. #define VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY 0x6
  34. #define VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY 0x7
  35. #define VFE_0_IRQ_CMD 0x024
  36. #define VFE_0_IRQ_CMD_GLOBAL_CLEAR BIT(0)
  37. #define VFE_0_IRQ_MASK_0 0x028
  38. #define VFE_0_IRQ_MASK_0_CAMIF_SOF BIT(0)
  39. #define VFE_0_IRQ_MASK_0_CAMIF_EOF BIT(1)
  40. #define VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n) BIT((n) + 5)
  41. #define VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(n) \
  42. ((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n))
  43. #define VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(n) BIT((n) + 8)
  44. #define VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(n) BIT((n) + 25)
  45. #define VFE_0_IRQ_MASK_0_RESET_ACK BIT(31)
  46. #define VFE_0_IRQ_MASK_1 0x02c
  47. #define VFE_0_IRQ_MASK_1_CAMIF_ERROR BIT(0)
  48. #define VFE_0_IRQ_MASK_1_VIOLATION BIT(7)
  49. #define VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK BIT(8)
  50. #define VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(n) BIT((n) + 9)
  51. #define VFE_0_IRQ_MASK_1_RDIn_SOF(n) BIT((n) + 29)
  52. #define VFE_0_IRQ_CLEAR_0 0x030
  53. #define VFE_0_IRQ_CLEAR_1 0x034
  54. #define VFE_0_IRQ_STATUS_0 0x038
  55. #define VFE_0_IRQ_STATUS_0_CAMIF_SOF BIT(0)
  56. #define VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n) BIT((n) + 5)
  57. #define VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(n) \
  58. ((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n))
  59. #define VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(n) BIT((n) + 8)
  60. #define VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(n) BIT((n) + 25)
  61. #define VFE_0_IRQ_STATUS_0_RESET_ACK BIT(31)
  62. #define VFE_0_IRQ_STATUS_1 0x03c
  63. #define VFE_0_IRQ_STATUS_1_VIOLATION BIT(7)
  64. #define VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK BIT(8)
  65. #define VFE_0_IRQ_STATUS_1_RDIn_SOF(n) BIT((n) + 29)
  66. #define VFE_0_IRQ_COMPOSITE_MASK_0 0x40
  67. #define VFE_0_VIOLATION_STATUS 0x48
  68. #define VFE_0_BUS_CMD 0x4c
  69. #define VFE_0_BUS_CMD_Mx_RLD_CMD(x) BIT(x)
  70. #define VFE_0_BUS_CFG 0x050
  71. #define VFE_0_BUS_XBAR_CFG_x(x) (0x58 + 0x4 * ((x) / 2))
  72. #define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN BIT(1)
  73. #define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA (0x3 << 4)
  74. #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT 8
  75. #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA 0
  76. #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 5
  77. #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 6
  78. #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 7
  79. #define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(n) (0x06c + 0x24 * (n))
  80. #define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT 0
  81. #define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT 1
  82. #define VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(n) (0x070 + 0x24 * (n))
  83. #define VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(n) (0x074 + 0x24 * (n))
  84. #define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(n) (0x078 + 0x24 * (n))
  85. #define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT 2
  86. #define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK (0x1f << 2)
  87. #define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(n) (0x07c + 0x24 * (n))
  88. #define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT 16
  89. #define VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(n) (0x080 + 0x24 * (n))
  90. #define VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(n) (0x084 + 0x24 * (n))
  91. #define VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(n) \
  92. (0x088 + 0x24 * (n))
  93. #define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(n) \
  94. (0x08c + 0x24 * (n))
  95. #define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF 0xffffffff
  96. #define VFE_0_BUS_PING_PONG_STATUS 0x268
  97. #define VFE_0_BUS_BDG_CMD 0x2c0
  98. #define VFE_0_BUS_BDG_CMD_HALT_REQ 1
  99. #define VFE_0_BUS_BDG_QOS_CFG_0 0x2c4
  100. #define VFE_0_BUS_BDG_QOS_CFG_0_CFG 0xaaa5aaa5
  101. #define VFE_0_BUS_BDG_QOS_CFG_1 0x2c8
  102. #define VFE_0_BUS_BDG_QOS_CFG_2 0x2cc
  103. #define VFE_0_BUS_BDG_QOS_CFG_3 0x2d0
  104. #define VFE_0_BUS_BDG_QOS_CFG_4 0x2d4
  105. #define VFE_0_BUS_BDG_QOS_CFG_5 0x2d8
  106. #define VFE_0_BUS_BDG_QOS_CFG_6 0x2dc
  107. #define VFE_0_BUS_BDG_QOS_CFG_7 0x2e0
  108. #define VFE_0_BUS_BDG_QOS_CFG_7_CFG 0x0001aaa5
  109. #define VFE_0_RDI_CFG_x(x) (0x2e8 + (0x4 * (x)))
  110. #define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT 28
  111. #define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK (0xf << 28)
  112. #define VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT 4
  113. #define VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK (0xf << 4)
  114. #define VFE_0_RDI_CFG_x_RDI_EN_BIT BIT(2)
  115. #define VFE_0_RDI_CFG_x_MIPI_EN_BITS 0x3
  116. #define VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(r) BIT(16 + (r))
  117. #define VFE_0_CAMIF_CMD 0x2f4
  118. #define VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY 0
  119. #define VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY 1
  120. #define VFE_0_CAMIF_CMD_NO_CHANGE 3
  121. #define VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS BIT(2)
  122. #define VFE_0_CAMIF_CFG 0x2f8
  123. #define VFE_0_CAMIF_CFG_VFE_OUTPUT_EN BIT(6)
  124. #define VFE_0_CAMIF_FRAME_CFG 0x300
  125. #define VFE_0_CAMIF_WINDOW_WIDTH_CFG 0x304
  126. #define VFE_0_CAMIF_WINDOW_HEIGHT_CFG 0x308
  127. #define VFE_0_CAMIF_SUBSAMPLE_CFG_0 0x30c
  128. #define VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN 0x314
  129. #define VFE_0_CAMIF_STATUS 0x31c
  130. #define VFE_0_CAMIF_STATUS_HALT BIT(31)
  131. #define VFE_0_REG_UPDATE 0x378
  132. #define VFE_0_REG_UPDATE_RDIn(n) BIT(1 + (n))
  133. #define VFE_0_REG_UPDATE_line_n(n) \
  134. ((n) == VFE_LINE_PIX ? 1 : VFE_0_REG_UPDATE_RDIn(n))
  135. #define VFE_0_DEMUX_CFG 0x424
  136. #define VFE_0_DEMUX_CFG_PERIOD 0x3
  137. #define VFE_0_DEMUX_GAIN_0 0x428
  138. #define VFE_0_DEMUX_GAIN_0_CH0_EVEN (0x80 << 0)
  139. #define VFE_0_DEMUX_GAIN_0_CH0_ODD (0x80 << 16)
  140. #define VFE_0_DEMUX_GAIN_1 0x42c
  141. #define VFE_0_DEMUX_GAIN_1_CH1 (0x80 << 0)
  142. #define VFE_0_DEMUX_GAIN_1_CH2 (0x80 << 16)
  143. #define VFE_0_DEMUX_EVEN_CFG 0x438
  144. #define VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV 0x9cac
  145. #define VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU 0xac9c
  146. #define VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY 0xc9ca
  147. #define VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY 0xcac9
  148. #define VFE_0_DEMUX_ODD_CFG 0x43c
  149. #define VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV 0x9cac
  150. #define VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU 0xac9c
  151. #define VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY 0xc9ca
  152. #define VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY 0xcac9
  153. #define VFE_0_SCALE_ENC_Y_CFG 0x75c
  154. #define VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE 0x760
  155. #define VFE_0_SCALE_ENC_Y_H_PHASE 0x764
  156. #define VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE 0x76c
  157. #define VFE_0_SCALE_ENC_Y_V_PHASE 0x770
  158. #define VFE_0_SCALE_ENC_CBCR_CFG 0x778
  159. #define VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE 0x77c
  160. #define VFE_0_SCALE_ENC_CBCR_H_PHASE 0x780
  161. #define VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE 0x790
  162. #define VFE_0_SCALE_ENC_CBCR_V_PHASE 0x794
  163. #define VFE_0_CROP_ENC_Y_WIDTH 0x854
  164. #define VFE_0_CROP_ENC_Y_HEIGHT 0x858
  165. #define VFE_0_CROP_ENC_CBCR_WIDTH 0x85c
  166. #define VFE_0_CROP_ENC_CBCR_HEIGHT 0x860
  167. #define VFE_0_CLAMP_ENC_MAX_CFG 0x874
  168. #define VFE_0_CLAMP_ENC_MAX_CFG_CH0 (0xff << 0)
  169. #define VFE_0_CLAMP_ENC_MAX_CFG_CH1 (0xff << 8)
  170. #define VFE_0_CLAMP_ENC_MAX_CFG_CH2 (0xff << 16)
  171. #define VFE_0_CLAMP_ENC_MIN_CFG 0x878
  172. #define VFE_0_CLAMP_ENC_MIN_CFG_CH0 (0x0 << 0)
  173. #define VFE_0_CLAMP_ENC_MIN_CFG_CH1 (0x0 << 8)
  174. #define VFE_0_CLAMP_ENC_MIN_CFG_CH2 (0x0 << 16)
  175. #define VFE_0_CGC_OVERRIDE_1 0x974
  176. #define VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(x) BIT(x)
  177. #define CAMIF_TIMEOUT_SLEEP_US 1000
  178. #define CAMIF_TIMEOUT_ALL_US 1000000
  179. #define MSM_VFE_VFE0_UB_SIZE 1023
  180. #define MSM_VFE_VFE0_UB_SIZE_RDI (MSM_VFE_VFE0_UB_SIZE / 3)
  181. static void vfe_hw_version_read(struct vfe_device *vfe, struct device *dev)
  182. {
  183. u32 hw_version = readl_relaxed(vfe->base + VFE_0_HW_VERSION);
  184. dev_dbg(dev, "VFE HW Version = 0x%08x\n", hw_version);
  185. }
  186. static u16 vfe_get_ub_size(u8 vfe_id)
  187. {
  188. if (vfe_id == 0)
  189. return MSM_VFE_VFE0_UB_SIZE_RDI;
  190. return 0;
  191. }
  192. static inline void vfe_reg_clr(struct vfe_device *vfe, u32 reg, u32 clr_bits)
  193. {
  194. u32 bits = readl_relaxed(vfe->base + reg);
  195. writel_relaxed(bits & ~clr_bits, vfe->base + reg);
  196. }
  197. static inline void vfe_reg_set(struct vfe_device *vfe, u32 reg, u32 set_bits)
  198. {
  199. u32 bits = readl_relaxed(vfe->base + reg);
  200. writel_relaxed(bits | set_bits, vfe->base + reg);
  201. }
  202. static void vfe_global_reset(struct vfe_device *vfe)
  203. {
  204. u32 reset_bits = VFE_0_GLOBAL_RESET_CMD_TESTGEN |
  205. VFE_0_GLOBAL_RESET_CMD_BUS_MISR |
  206. VFE_0_GLOBAL_RESET_CMD_PM |
  207. VFE_0_GLOBAL_RESET_CMD_TIMER |
  208. VFE_0_GLOBAL_RESET_CMD_REGISTER |
  209. VFE_0_GLOBAL_RESET_CMD_BUS_BDG |
  210. VFE_0_GLOBAL_RESET_CMD_BUS |
  211. VFE_0_GLOBAL_RESET_CMD_CAMIF |
  212. VFE_0_GLOBAL_RESET_CMD_CORE;
  213. writel_relaxed(reset_bits, vfe->base + VFE_0_GLOBAL_RESET_CMD);
  214. }
  215. static void vfe_halt_request(struct vfe_device *vfe)
  216. {
  217. writel_relaxed(VFE_0_BUS_BDG_CMD_HALT_REQ,
  218. vfe->base + VFE_0_BUS_BDG_CMD);
  219. }
  220. static void vfe_halt_clear(struct vfe_device *vfe)
  221. {
  222. writel_relaxed(0x0, vfe->base + VFE_0_BUS_BDG_CMD);
  223. }
  224. static void vfe_wm_enable(struct vfe_device *vfe, u8 wm, u8 enable)
  225. {
  226. if (enable)
  227. vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
  228. 1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
  229. else
  230. vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
  231. 1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
  232. }
  233. static void vfe_wm_frame_based(struct vfe_device *vfe, u8 wm, u8 enable)
  234. {
  235. if (enable)
  236. vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
  237. 1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
  238. else
  239. vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
  240. 1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
  241. }
  242. #define CALC_WORD(width, M, N) (((width) * (M) + (N) - 1) / (N))
  243. static int vfe_word_per_line(u32 format, u32 pixel_per_line)
  244. {
  245. int val = 0;
  246. switch (format) {
  247. case V4L2_PIX_FMT_NV12:
  248. case V4L2_PIX_FMT_NV21:
  249. case V4L2_PIX_FMT_NV16:
  250. case V4L2_PIX_FMT_NV61:
  251. val = CALC_WORD(pixel_per_line, 1, 8);
  252. break;
  253. case V4L2_PIX_FMT_YUYV:
  254. case V4L2_PIX_FMT_YVYU:
  255. case V4L2_PIX_FMT_UYVY:
  256. case V4L2_PIX_FMT_VYUY:
  257. val = CALC_WORD(pixel_per_line, 2, 8);
  258. break;
  259. }
  260. return val;
  261. }
  262. static void vfe_get_wm_sizes(struct v4l2_pix_format_mplane *pix, u8 plane,
  263. u16 *width, u16 *height, u16 *bytesperline)
  264. {
  265. switch (pix->pixelformat) {
  266. case V4L2_PIX_FMT_NV12:
  267. case V4L2_PIX_FMT_NV21:
  268. *width = pix->width;
  269. *height = pix->height;
  270. *bytesperline = pix->plane_fmt[0].bytesperline;
  271. if (plane == 1)
  272. *height /= 2;
  273. break;
  274. case V4L2_PIX_FMT_NV16:
  275. case V4L2_PIX_FMT_NV61:
  276. *width = pix->width;
  277. *height = pix->height;
  278. *bytesperline = pix->plane_fmt[0].bytesperline;
  279. break;
  280. }
  281. }
/*
 * vfe_wm_line_based - Program image size and buffer config for a write master
 * @vfe: VFE device
 * @wm: write master index
 * @pix: active multi-planar format (only used when enabling)
 * @plane: plane index this write master serves
 * @enable: program real sizes, or zero both registers to disable
 */
static void vfe_wm_line_based(struct vfe_device *vfe, u32 wm,
			      struct v4l2_pix_format_mplane *pix,
			      u8 plane, u32 enable)
{
	u32 reg;

	if (enable) {
		u16 width = 0, height = 0, bytesperline = 0, wpl;

		vfe_get_wm_sizes(pix, plane, &width, &height, &bytesperline);

		/* IMAGE_SIZE: lines-1 in [15:0], (words/2)-1 in [31:16] */
		wpl = vfe_word_per_line(pix->pixelformat, width);

		reg = height - 1;
		reg |= ((wpl + 1) / 2 - 1) << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));

		/* BUFFER_CFG: stride is derived from bytesperline, not width */
		wpl = vfe_word_per_line(pix->pixelformat, bytesperline);

		reg = 0x3;	/* low bits per HW layout — meaning not documented here */
		reg |= (height - 1) << 4;
		reg |= wpl << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	} else {
		/* Disable: clear both size and buffer configuration */
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	}
}
  308. static void vfe_wm_set_framedrop_period(struct vfe_device *vfe, u8 wm, u8 per)
  309. {
  310. u32 reg;
  311. reg = readl_relaxed(vfe->base +
  312. VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
  313. reg &= ~(VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK);
  314. reg |= (per << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT)
  315. & VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK;
  316. writel_relaxed(reg,
  317. vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
  318. }
  319. static void vfe_wm_set_framedrop_pattern(struct vfe_device *vfe, u8 wm,
  320. u32 pattern)
  321. {
  322. writel_relaxed(pattern,
  323. vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(wm));
  324. }
  325. static void vfe_wm_set_ub_cfg(struct vfe_device *vfe, u8 wm,
  326. u16 offset, u16 depth)
  327. {
  328. u32 reg;
  329. reg = (offset << VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) |
  330. depth;
  331. writel_relaxed(reg, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(wm));
  332. }
/*
 * vfe_bus_reload_wm - Issue a reload command for write master @wm
 *
 * The barriers ensure prior register writes reach the device before the
 * reload command, and that the command itself is posted before returning.
 */
static void vfe_bus_reload_wm(struct vfe_device *vfe, u8 wm)
{
	wmb();
	writel_relaxed(VFE_0_BUS_CMD_Mx_RLD_CMD(wm), vfe->base + VFE_0_BUS_CMD);
	wmb();
}
  339. static void vfe_wm_set_ping_addr(struct vfe_device *vfe, u8 wm, u32 addr)
  340. {
  341. writel_relaxed(addr,
  342. vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(wm));
  343. }
  344. static void vfe_wm_set_pong_addr(struct vfe_device *vfe, u8 wm, u32 addr)
  345. {
  346. writel_relaxed(addr,
  347. vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(wm));
  348. }
  349. static int vfe_wm_get_ping_pong_status(struct vfe_device *vfe, u8 wm)
  350. {
  351. u32 reg;
  352. reg = readl_relaxed(vfe->base + VFE_0_BUS_PING_PONG_STATUS);
  353. return (reg >> wm) & 0x1;
  354. }
  355. static void vfe_bus_enable_wr_if(struct vfe_device *vfe, u8 enable)
  356. {
  357. if (enable)
  358. writel_relaxed(0x10000009, vfe->base + VFE_0_BUS_CFG);
  359. else
  360. writel_relaxed(0, vfe->base + VFE_0_BUS_CFG);
  361. }
/*
 * vfe_bus_connect_wm_to_rdi - Route RDI input @id to write master @wm
 * @vfe: VFE device
 * @wm: write master index
 * @id: RDI line id (VFE_LINE_RDI0..RDI2)
 */
static void vfe_bus_connect_wm_to_rdi(struct vfe_device *vfe, u8 wm,
				      enum vfe_line_id id)
{
	u32 reg;

	/* MIPI enable and per-RDI frame-based bits live in RDI_CFG(0) */
	reg = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	reg |= VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), reg);

	/* Enable this RDI and select its stream (3 * id per HW encoding) */
	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	reg |= ((3 * id) << VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT) &
		VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id), reg);

	/* Crossbar: single-stream source value matching the RDI */
	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	/* XBAR regs are shared per WM pair; odd WMs use the upper 16 bits */
	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_set(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}
  392. static void vfe_wm_set_subsample(struct vfe_device *vfe, u8 wm)
  393. {
  394. writel_relaxed(VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF,
  395. vfe->base +
  396. VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(wm));
  397. }
/*
 * vfe_bus_disconnect_wm_from_rdi - Undo vfe_bus_connect_wm_to_rdi()
 * @vfe: VFE device
 * @wm: write master index
 * @id: RDI line id
 *
 * Clears exactly the bits the connect path set; other RDIs sharing the
 * same registers are left untouched.
 */
static void vfe_bus_disconnect_wm_from_rdi(struct vfe_device *vfe, u8 wm,
					   enum vfe_line_id id)
{
	u32 reg;

	/* Drop this RDI's frame-based enable from the shared RDI_CFG(0) */
	reg = VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(0), reg);

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id), reg);

	/* Recompute the crossbar value that connect programmed, to clear it */
	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	/* Odd write masters use the upper 16 bits of the shared XBAR reg */
	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_clr(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}
/*
 * vfe_set_xbar_cfg - Configure the bus crossbar for a PIX output
 * @vfe: VFE device
 * @output: output whose write masters are being routed
 * @enable: set (connect) or clear (disconnect) the crossbar bits
 */
static void vfe_set_xbar_cfg(struct vfe_device *vfe, struct vfe_output *output,
			     u8 enable)
{
	struct vfe_line *line = container_of(output, struct vfe_line, output);
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	unsigned int i;

	for (i = 0; i < output->wm_num; i++) {
		if (i == 0) {
			/* First WM carries luma as a single stream */
			reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA <<
				VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		} else if (i == 1) {
			/* Second WM is pair-streamed (chroma); NV12/NV16 use
			 * the swapped inter/intra ordering.
			 */
			reg = VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;
			if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV16)
				reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;
		} else {
			/* On current devices output->wm_num is always <= 2 */
			break;
		}

		/* XBAR regs are shared per WM pair; odd WMs use bits 31:16 */
		if (output->wm_idx[i] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
				    reg);
	}
}
/*
 * vfe_set_realign_cfg - Chroma realign configuration
 *
 * Intentionally empty: VFE 4.1 programs no realign registers here.
 * NOTE(review): presumably kept to satisfy a shared hw-ops interface
 * defined outside this file — confirm against camss-vfe.h.
 */
static void vfe_set_realign_cfg(struct vfe_device *vfe, struct vfe_line *line,
				u8 enable)
{
	/* empty */
}
  461. static void vfe_set_rdi_cid(struct vfe_device *vfe, enum vfe_line_id id, u8 cid)
  462. {
  463. vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id),
  464. VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK);
  465. vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id),
  466. cid << VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT);
  467. }
/*
 * vfe_reg_update - Trigger a register update for @line_id
 *
 * Pending update bits are accumulated in the vfe->reg_update shadow so
 * updates requested for other lines are re-asserted together with this
 * one. The barriers order the shadow update and the MMIO write.
 */
static void vfe_reg_update(struct vfe_device *vfe, enum vfe_line_id line_id)
{
	vfe->reg_update |= VFE_0_REG_UPDATE_line_n(line_id);
	wmb();
	writel_relaxed(vfe->reg_update, vfe->base + VFE_0_REG_UPDATE);
	wmb();
}
  475. static inline void vfe_reg_update_clear(struct vfe_device *vfe,
  476. enum vfe_line_id line_id)
  477. {
  478. vfe->reg_update &= ~VFE_0_REG_UPDATE_line_n(line_id);
  479. }
  480. static void vfe_enable_irq_wm_line(struct vfe_device *vfe, u8 wm,
  481. enum vfe_line_id line_id, u8 enable)
  482. {
  483. u32 irq_en0 = VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(wm) |
  484. VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
  485. u32 irq_en1 = VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(wm) |
  486. VFE_0_IRQ_MASK_1_RDIn_SOF(line_id);
  487. if (enable) {
  488. vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
  489. vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
  490. } else {
  491. vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
  492. vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
  493. }
  494. }
/*
 * vfe_enable_irq_pix_line - Enable/disable IRQs for a PIX line
 * @vfe: VFE device
 * @comp: composite group index used for the output's write masters
 * @line_id: line whose output is affected
 * @enable: set or clear the masks
 */
static void vfe_enable_irq_pix_line(struct vfe_device *vfe, u8 comp,
				    enum vfe_line_id line_id, u8 enable)
{
	struct vfe_output *output = &vfe->line[line_id].output;
	unsigned int i;
	u32 irq_en0;
	u32 irq_en1;
	u32 comp_mask = 0;

	irq_en0 = VFE_0_IRQ_MASK_0_CAMIF_SOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_CAMIF_EOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(comp);
	irq_en0 |= VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	irq_en1 = VFE_0_IRQ_MASK_1_CAMIF_ERROR;
	for (i = 0; i < output->wm_num; i++) {
		irq_en1 |= VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(
							output->wm_idx[i]);
		/* Composite mask holds 8 bits (one per WM) per group */
		comp_mask |= (1 << output->wm_idx[i]) << comp * 8;
	}

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	}
}
  523. static void vfe_enable_irq_common(struct vfe_device *vfe)
  524. {
  525. u32 irq_en0 = VFE_0_IRQ_MASK_0_RESET_ACK;
  526. u32 irq_en1 = VFE_0_IRQ_MASK_1_VIOLATION |
  527. VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK;
  528. vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
  529. vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
  530. }
/*
 * vfe_set_demux_cfg - Program the demux block for the sink media bus format
 * @vfe: VFE device
 * @line: line whose sink format selects the even/odd sampling patterns
 */
static void vfe_set_demux_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val, even_cfg, odd_cfg;

	writel_relaxed(VFE_0_DEMUX_CFG_PERIOD, vfe->base + VFE_0_DEMUX_CFG);

	/* Unity gain (0x80) on all channels, even and odd samples */
	val = VFE_0_DEMUX_GAIN_0_CH0_EVEN | VFE_0_DEMUX_GAIN_0_CH0_ODD;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_0);

	val = VFE_0_DEMUX_GAIN_1_CH1 | VFE_0_DEMUX_GAIN_1_CH2;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_1);

	/* Pick the sample pattern matching the component order on the bus;
	 * unknown formats fall back to UYVY.
	 */
	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV;
		break;
	case MEDIA_BUS_FMT_YVYU8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU;
		break;
	case MEDIA_BUS_FMT_UYVY8_2X8:
	default:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY;
		break;
	case MEDIA_BUS_FMT_VYUY8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY;
		break;
	}

	writel_relaxed(even_cfg, vfe->base + VFE_0_DEMUX_EVEN_CFG);
	writel_relaxed(odd_cfg, vfe->base + VFE_0_DEMUX_ODD_CFG);
}
  561. static inline u8 vfe_calc_interp_reso(u16 input, u16 output)
  562. {
  563. if (input / output >= 16)
  564. return 0;
  565. if (input / output >= 8)
  566. return 1;
  567. if (input / output >= 4)
  568. return 2;
  569. return 3;
  570. }
  571. static void vfe_set_scale_cfg(struct vfe_device *vfe, struct vfe_line *line)
  572. {
  573. u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
  574. u32 reg;
  575. u16 input, output;
  576. u8 interp_reso;
  577. u32 phase_mult;
  578. writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_Y_CFG);
  579. input = line->fmt[MSM_VFE_PAD_SINK].width;
  580. output = line->compose.width;
  581. reg = (output << 16) | input;
  582. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE);
  583. interp_reso = vfe_calc_interp_reso(input, output);
  584. phase_mult = input * (1 << (13 + interp_reso)) / output;
  585. reg = (interp_reso << 20) | phase_mult;
  586. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_PHASE);
  587. input = line->fmt[MSM_VFE_PAD_SINK].height;
  588. output = line->compose.height;
  589. reg = (output << 16) | input;
  590. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE);
  591. interp_reso = vfe_calc_interp_reso(input, output);
  592. phase_mult = input * (1 << (13 + interp_reso)) / output;
  593. reg = (interp_reso << 20) | phase_mult;
  594. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_PHASE);
  595. writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_CBCR_CFG);
  596. input = line->fmt[MSM_VFE_PAD_SINK].width;
  597. output = line->compose.width / 2;
  598. reg = (output << 16) | input;
  599. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE);
  600. interp_reso = vfe_calc_interp_reso(input, output);
  601. phase_mult = input * (1 << (13 + interp_reso)) / output;
  602. reg = (interp_reso << 20) | phase_mult;
  603. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_PHASE);
  604. input = line->fmt[MSM_VFE_PAD_SINK].height;
  605. output = line->compose.height;
  606. if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21)
  607. output = line->compose.height / 2;
  608. reg = (output << 16) | input;
  609. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE);
  610. interp_reso = vfe_calc_interp_reso(input, output);
  611. phase_mult = input * (1 << (13 + interp_reso)) / output;
  612. reg = (interp_reso << 20) | phase_mult;
  613. writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_PHASE);
  614. }
/*
 * vfe_set_crop_cfg - Program the encoder crop windows for luma and chroma
 * @vfe: VFE device
 * @line: line providing the crop rectangle and active pixel format
 *
 * Each register takes (first << 16) | last, with inclusive coordinates.
 */
static void vfe_set_crop_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 first, last;

	/* Luma horizontal crop */
	first = line->crop.left;
	last = line->crop.left + line->crop.width - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_WIDTH);

	/* Luma vertical crop */
	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_HEIGHT);

	/* Chroma is horizontally subsampled by 2 for all supported formats */
	first = line->crop.left / 2;
	last = line->crop.left / 2 + line->crop.width / 2 - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21) {
		/* 4:2:0 formats are vertically subsampled as well */
		first = line->crop.top / 2;
		last = line->crop.top / 2 + line->crop.height / 2 - 1;
	}
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_HEIGHT);
}
  641. static void vfe_set_clamp_cfg(struct vfe_device *vfe)
  642. {
  643. u32 val = VFE_0_CLAMP_ENC_MAX_CFG_CH0 |
  644. VFE_0_CLAMP_ENC_MAX_CFG_CH1 |
  645. VFE_0_CLAMP_ENC_MAX_CFG_CH2;
  646. writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MAX_CFG);
  647. val = VFE_0_CLAMP_ENC_MIN_CFG_CH0 |
  648. VFE_0_CLAMP_ENC_MIN_CFG_CH1 |
  649. VFE_0_CLAMP_ENC_MIN_CFG_CH2;
  650. writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MIN_CFG);
  651. }
  652. static void vfe_set_qos(struct vfe_device *vfe)
  653. {
  654. u32 val = VFE_0_BUS_BDG_QOS_CFG_0_CFG;
  655. u32 val7 = VFE_0_BUS_BDG_QOS_CFG_7_CFG;
  656. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_0);
  657. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_1);
  658. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_2);
  659. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_3);
  660. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_4);
  661. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_5);
  662. writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_6);
  663. writel_relaxed(val7, vfe->base + VFE_0_BUS_BDG_QOS_CFG_7);
  664. }
/*
 * vfe_set_ds - DS configuration
 *
 * Intentionally empty: VFE 4.1 programs no DS registers.
 * NOTE(review): presumably kept for a shared hw-ops interface defined
 * outside this file — confirm against camss-vfe.h.
 */
static void vfe_set_ds(struct vfe_device *vfe)
{
	/* empty */
}
  669. static void vfe_set_cgc_override(struct vfe_device *vfe, u8 wm, u8 enable)
  670. {
  671. u32 val = VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(wm);
  672. if (enable)
  673. vfe_reg_set(vfe, VFE_0_CGC_OVERRIDE_1, val);
  674. else
  675. vfe_reg_clr(vfe, VFE_0_CGC_OVERRIDE_1, val);
  676. wmb();
  677. }
/*
 * vfe_set_camif_cfg - Configure the CAMIF input for the line's sink format
 * @vfe: VFE device
 * @line: line providing the sink media bus format and dimensions
 */
static void vfe_set_camif_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val;

	/* Map media bus component order to the core pixel pattern;
	 * unknown codes fall back to CbYCrY (UYVY).
	 */
	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR;
		break;
	case MEDIA_BUS_FMT_YVYU8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB;
		break;
	case MEDIA_BUS_FMT_UYVY8_2X8:
	default:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY;
		break;
	case MEDIA_BUS_FMT_VYUY8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY;
		break;
	}

	writel_relaxed(val, vfe->base + VFE_0_CORE_CFG);

	/* Frame config: width is doubled — YUV 4:2:2 is 2 bytes/pixel */
	val = line->fmt[MSM_VFE_PAD_SINK].width * 2;
	val |= line->fmt[MSM_VFE_PAD_SINK].height << 16;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_FRAME_CFG);

	/* Window coordinates are inclusive, hence the -1 */
	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_WIDTH_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_HEIGHT_CFG);

	/* No subsampling: pass every line and every IRQ */
	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_SUBSAMPLE_CFG_0);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN);

	val = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), val);

	val = VFE_0_CAMIF_CFG_VFE_OUTPUT_EN;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_CFG);
}
  713. static void vfe_set_camif_cmd(struct vfe_device *vfe, u8 enable)
  714. {
  715. u32 cmd;
  716. cmd = VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS | VFE_0_CAMIF_CMD_NO_CHANGE;
  717. writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
  718. wmb();
  719. if (enable)
  720. cmd = VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY;
  721. else
  722. cmd = VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY;
  723. writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
  724. }
  725. static void vfe_set_module_cfg(struct vfe_device *vfe, u8 enable)
  726. {
  727. u32 val = VFE_0_MODULE_CFG_DEMUX |
  728. VFE_0_MODULE_CFG_CHROMA_UPSAMPLE |
  729. VFE_0_MODULE_CFG_SCALE_ENC |
  730. VFE_0_MODULE_CFG_CROP_ENC;
  731. if (enable)
  732. writel_relaxed(val, vfe->base + VFE_0_MODULE_CFG);
  733. else
  734. writel_relaxed(0x0, vfe->base + VFE_0_MODULE_CFG);
  735. }
  736. static int vfe_camif_wait_for_stop(struct vfe_device *vfe, struct device *dev)
  737. {
  738. u32 val;
  739. int ret;
  740. ret = readl_poll_timeout(vfe->base + VFE_0_CAMIF_STATUS,
  741. val,
  742. (val & VFE_0_CAMIF_STATUS_HALT),
  743. CAMIF_TIMEOUT_SLEEP_US,
  744. CAMIF_TIMEOUT_ALL_US);
  745. if (ret < 0)
  746. dev_err(dev, "%s: camif stop timeout\n", __func__);
  747. return ret;
  748. }
/*
 * vfe_isr_read - read and clear both VFE interrupt status registers
 * @vfe: VFE device
 * @value0: output for IRQ status register 0
 * @value1: output for IRQ status register 1
 *
 * The status bits just read are written back to the corresponding clear
 * registers, and a global clear command then latches the clears.
 */
static void vfe_isr_read(struct vfe_device *vfe, u32 *value0, u32 *value1)
{
	*value0 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_0);
	*value1 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_1);
	writel_relaxed(*value0, vfe->base + VFE_0_IRQ_CLEAR_0);
	writel_relaxed(*value1, vfe->base + VFE_0_IRQ_CLEAR_1);
	/* per-register clears must land before the global clear command */
	wmb();
	writel_relaxed(VFE_0_IRQ_CMD_GLOBAL_CLEAR, vfe->base + VFE_0_IRQ_CMD);
}
  758. static void vfe_violation_read(struct vfe_device *vfe)
  759. {
  760. u32 violation = readl_relaxed(vfe->base + VFE_0_VIOLATION_STATUS);
  761. pr_err_ratelimited("VFE: violation = 0x%08x\n", violation);
  762. }
/*
 * vfe_isr - VFE module interrupt handler
 * @irq: Interrupt line
 * @dev: VFE device
 *
 * Return IRQ_HANDLED on success
 */
  770. static irqreturn_t vfe_isr(int irq, void *dev)
  771. {
  772. struct vfe_device *vfe = dev;
  773. u32 value0, value1;
  774. int i, j;
  775. vfe->ops->isr_read(vfe, &value0, &value1);
  776. trace_printk("VFE: status0 = 0x%08x, status1 = 0x%08x\n",
  777. value0, value1);
  778. if (value0 & VFE_0_IRQ_STATUS_0_RESET_ACK)
  779. vfe->isr_ops.reset_ack(vfe);
  780. if (value1 & VFE_0_IRQ_STATUS_1_VIOLATION)
  781. vfe->ops->violation_read(vfe);
  782. if (value1 & VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK)
  783. vfe->isr_ops.halt_ack(vfe);
  784. for (i = VFE_LINE_RDI0; i <= VFE_LINE_PIX; i++)
  785. if (value0 & VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(i))
  786. vfe->isr_ops.reg_update(vfe, i);
  787. if (value0 & VFE_0_IRQ_STATUS_0_CAMIF_SOF)
  788. vfe->isr_ops.sof(vfe, VFE_LINE_PIX);
  789. for (i = VFE_LINE_RDI0; i <= VFE_LINE_RDI2; i++)
  790. if (value1 & VFE_0_IRQ_STATUS_1_RDIn_SOF(i))
  791. vfe->isr_ops.sof(vfe, i);
  792. for (i = 0; i < MSM_VFE_COMPOSITE_IRQ_NUM; i++)
  793. if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(i)) {
  794. vfe->isr_ops.comp_done(vfe, i);
  795. for (j = 0; j < ARRAY_SIZE(vfe->wm_output_map); j++)
  796. if (vfe->wm_output_map[j] == VFE_LINE_PIX)
  797. value0 &= ~VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(j);
  798. }
  799. for (i = 0; i < MSM_VFE_IMAGE_MASTERS_NUM; i++)
  800. if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(i))
  801. vfe->isr_ops.wm_done(vfe, i);
  802. return IRQ_HANDLED;
  803. }
/* Hardware operation callbacks for the VFE (video front end) version 4.1 */
const struct vfe_hw_ops vfe_ops_4_1 = {
	.hw_version_read = vfe_hw_version_read,
	.get_ub_size = vfe_get_ub_size,
	.global_reset = vfe_global_reset,
	.halt_request = vfe_halt_request,
	.halt_clear = vfe_halt_clear,
	.wm_enable = vfe_wm_enable,
	.wm_frame_based = vfe_wm_frame_based,
	.wm_line_based = vfe_wm_line_based,
	.wm_set_framedrop_period = vfe_wm_set_framedrop_period,
	.wm_set_framedrop_pattern = vfe_wm_set_framedrop_pattern,
	.wm_set_ub_cfg = vfe_wm_set_ub_cfg,
	.bus_reload_wm = vfe_bus_reload_wm,
	.wm_set_ping_addr = vfe_wm_set_ping_addr,
	.wm_set_pong_addr = vfe_wm_set_pong_addr,
	.wm_get_ping_pong_status = vfe_wm_get_ping_pong_status,
	.bus_enable_wr_if = vfe_bus_enable_wr_if,
	.bus_connect_wm_to_rdi = vfe_bus_connect_wm_to_rdi,
	.wm_set_subsample = vfe_wm_set_subsample,
	.bus_disconnect_wm_from_rdi = vfe_bus_disconnect_wm_from_rdi,
	.set_xbar_cfg = vfe_set_xbar_cfg,
	.set_realign_cfg = vfe_set_realign_cfg,
	.set_rdi_cid = vfe_set_rdi_cid,
	.reg_update = vfe_reg_update,
	.reg_update_clear = vfe_reg_update_clear,
	.enable_irq_wm_line = vfe_enable_irq_wm_line,
	.enable_irq_pix_line = vfe_enable_irq_pix_line,
	.enable_irq_common = vfe_enable_irq_common,
	.set_demux_cfg = vfe_set_demux_cfg,
	.set_scale_cfg = vfe_set_scale_cfg,
	.set_crop_cfg = vfe_set_crop_cfg,
	.set_clamp_cfg = vfe_set_clamp_cfg,
	.set_qos = vfe_set_qos,
	.set_ds = vfe_set_ds,
	.set_cgc_override = vfe_set_cgc_override,
	.set_camif_cfg = vfe_set_camif_cfg,
	.set_camif_cmd = vfe_set_camif_cmd,
	.set_module_cfg = vfe_set_module_cfg,
	.camif_wait_for_stop = vfe_camif_wait_for_stop,
	.isr_read = vfe_isr_read,
	.violation_read = vfe_violation_read,
	.isr = vfe_isr,
};