dcn10_resource.c

/*
 * Copyright 2016 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dm_services.h"
#include "dc.h"
#include "resource.h"
#include "include/irq_service_interface.h"
#include "dcn10/dcn10_resource.h"
#include "dcn10/dcn10_ipp.h"
#include "dcn10/dcn10_mpc.h"
#include "irq/dcn10/irq_service_dcn10.h"
#include "dcn10/dcn10_dpp.h"
#include "dcn10/dcn10_timing_generator.h"
#include "dcn10/dcn10_hw_sequencer.h"
#include "dce110/dce110_hw_sequencer.h"
#include "dcn10/dcn10_opp.h"
#include "dce/dce_link_encoder.h"
#include "dce/dce_stream_encoder.h"
#include "dce/dce_clocks.h"
#include "dce/dce_clock_source.h"
#include "dce/dce_audio.h"
#include "dce/dce_hwseq.h"
#include "../virtual/virtual_stream_encoder.h"
#include "dce110/dce110_resource.h"
#include "dce112/dce112_resource.h"
#include "dcn10_hubp.h"
#include "dcn10_hubbub.h"
#include "vega10/soc15ip.h"
#include "raven1/DCN/dcn_1_0_offset.h"
#include "raven1/DCN/dcn_1_0_sh_mask.h"
#include "raven1/NBIO/nbio_7_0_offset.h"
#include "raven1/MMHUB/mmhub_9_1_offset.h"
#include "raven1/MMHUB/mmhub_9_1_sh_mask.h"
#include "reg_helper.h"
#include "dce/dce_abm.h"
#include "dce/dce_dmcu.h"
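
/* Fallback DP PHY internal control register offsets, used only when the
 * register headers included above do not already define them.
 */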
#ifndef mmDP0_DP_DPHY_INTERNAL_CTRL
	#define mmDP0_DP_DPHY_INTERNAL_CTRL		0x210f
	#define mmDP0_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP1_DP_DPHY_INTERNAL_CTRL		0x220f
	#define mmDP1_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP2_DP_DPHY_INTERNAL_CTRL		0x230f
	#define mmDP2_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP3_DP_DPHY_INTERNAL_CTRL		0x240f
	#define mmDP3_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP4_DP_DPHY_INTERNAL_CTRL		0x250f
	#define mmDP4_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP5_DP_DPHY_INTERNAL_CTRL		0x260f
	#define mmDP5_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP6_DP_DPHY_INTERNAL_CTRL		0x270f
	#define mmDP6_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
#endif

enum dcn10_clk_src_array_id {
	DCN10_CLK_SRC_PLL0,
	DCN10_CLK_SRC_PLL1,
	DCN10_CLK_SRC_PLL2,
	DCN10_CLK_SRC_PLL3,
	DCN10_CLK_SRC_TOTAL
};

/* begin *********************
 * macros to expand register list macro defined in HW object header file */

/* DCN */
#define BASE_INNER(seg) \
	DCE_BASE__INST0_SEG ## seg

#define BASE(seg) \
	BASE_INNER(seg)

#define SR(reg_name)\
	.reg_name = BASE(mm ## reg_name ## _BASE_IDX) + \
			mm ## reg_name

#define SRI(reg_name, block, id)\
	.reg_name = BASE(mm ## block ## id ## _ ## reg_name ## _BASE_IDX) + \
			mm ## block ## id ## _ ## reg_name

#define SRII(reg_name, block, id)\
	.reg_name[id] = BASE(mm ## block ## id ## _ ## reg_name ## _BASE_IDX) + \
			mm ## block ## id ## _ ## reg_name

/* NBIO */
#define NBIO_BASE_INNER(seg) \
	NBIF_BASE__INST0_SEG ## seg

#define NBIO_BASE(seg) \
	NBIO_BASE_INNER(seg)

#define NBIO_SR(reg_name)\
	.reg_name = NBIO_BASE(mm ## reg_name ## _BASE_IDX) + \
			mm ## reg_name

/* MMHUB */
#define MMHUB_BASE_INNER(seg) \
	MMHUB_BASE__INST0_SEG ## seg

#define MMHUB_BASE(seg) \
	MMHUB_BASE_INNER(seg)

#define MMHUB_SR(reg_name)\
	.reg_name = MMHUB_BASE(mm ## reg_name ## _BASE_IDX) + \
			mm ## reg_name

/* macros to expand register list macro defined in HW object header file
 * end *********************/
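
/* Example expansion, for illustration, using the fallback BASE_IDX values
 * defined above:
 *   SRI(DP_DPHY_INTERNAL_CTRL, DP, 0)
 *     -> .DP_DPHY_INTERNAL_CTRL = BASE(mmDP0_DP_DPHY_INTERNAL_CTRL_BASE_IDX)
 *                                 + mmDP0_DP_DPHY_INTERNAL_CTRL
 *     -> .DP_DPHY_INTERNAL_CTRL = DCE_BASE__INST0_SEG2 + 0x210f
 * The two-level BASE()/BASE_INNER() indirection is what lets the _BASE_IDX
 * argument expand to its numeric value before token pasting.
 */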

static const struct dce_dmcu_registers dmcu_regs = {
	DMCU_DCN10_REG_LIST()
};

static const struct dce_dmcu_shift dmcu_shift = {
	DMCU_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dce_dmcu_mask dmcu_mask = {
	DMCU_MASK_SH_LIST_DCN10(_MASK)
};

static const struct dce_abm_registers abm_regs = {
	ABM_DCN10_REG_LIST(0)
};

static const struct dce_abm_shift abm_shift = {
	ABM_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dce_abm_mask abm_mask = {
	ABM_MASK_SH_LIST_DCN10(_MASK)
};

#define stream_enc_regs(id)\
[id] = {\
	SE_DCN_REG_LIST(id),\
	.TMDS_CNTL = 0,\
	.AFMT_AVI_INFO0 = 0,\
	.AFMT_AVI_INFO1 = 0,\
	.AFMT_AVI_INFO2 = 0,\
	.AFMT_AVI_INFO3 = 0,\
}

static const struct dce110_stream_enc_registers stream_enc_regs[] = {
	stream_enc_regs(0),
	stream_enc_regs(1),
	stream_enc_regs(2),
	stream_enc_regs(3),
};

static const struct dce_stream_encoder_shift se_shift = {
	SE_COMMON_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dce_stream_encoder_mask se_mask = {
	SE_COMMON_MASK_SH_LIST_DCN10(_MASK),
	.AFMT_GENERIC0_UPDATE = 0,
	.AFMT_GENERIC2_UPDATE = 0,
	.DP_DYN_RANGE = 0,
	.DP_YCBCR_RANGE = 0,
	.HDMI_AVI_INFO_SEND = 0,
	.HDMI_AVI_INFO_CONT = 0,
	.HDMI_AVI_INFO_LINE = 0,
	.DP_SEC_AVI_ENABLE = 0,
	.AFMT_AVI_INFO_VERSION = 0
};

#define audio_regs(id)\
[id] = {\
	AUD_COMMON_REG_LIST(id)\
}

static const struct dce_audio_registers audio_regs[] = {
	audio_regs(0),
	audio_regs(1),
	audio_regs(2),
	audio_regs(3),
};

#define DCE120_AUD_COMMON_MASK_SH_LIST(mask_sh)\
	SF(AZF0ENDPOINT0_AZALIA_F0_CODEC_ENDPOINT_INDEX, AZALIA_ENDPOINT_REG_INDEX, mask_sh),\
	SF(AZF0ENDPOINT0_AZALIA_F0_CODEC_ENDPOINT_DATA, AZALIA_ENDPOINT_REG_DATA, mask_sh),\
	AUD_COMMON_MASK_SH_LIST_BASE(mask_sh)

static const struct dce_audio_shift audio_shift = {
	DCE120_AUD_COMMON_MASK_SH_LIST(__SHIFT)
};

static const struct dce_aduio_mask audio_mask = {
	DCE120_AUD_COMMON_MASK_SH_LIST(_MASK)
};

#define aux_regs(id)\
[id] = {\
	AUX_REG_LIST(id)\
}

static const struct dce110_link_enc_aux_registers link_enc_aux_regs[] = {
	aux_regs(0),
	aux_regs(1),
	aux_regs(2),
	aux_regs(3),
	aux_regs(4),
	aux_regs(5)
};

#define hpd_regs(id)\
[id] = {\
	HPD_REG_LIST(id)\
}

static const struct dce110_link_enc_hpd_registers link_enc_hpd_regs[] = {
	hpd_regs(0),
	hpd_regs(1),
	hpd_regs(2),
	hpd_regs(3),
	hpd_regs(4),
	hpd_regs(5)
};

#define link_regs(id)\
[id] = {\
	LE_DCN10_REG_LIST(id), \
	SRI(DP_DPHY_INTERNAL_CTRL, DP, id) \
}

static const struct dce110_link_enc_registers link_enc_regs[] = {
	link_regs(0),
	link_regs(1),
	link_regs(2),
	link_regs(3),
	link_regs(4),
	link_regs(5),
	link_regs(6),
};

#define ipp_regs(id)\
[id] = {\
	IPP_REG_LIST_DCN10(id),\
}

static const struct dcn10_ipp_registers ipp_regs[] = {
	ipp_regs(0),
	ipp_regs(1),
	ipp_regs(2),
	ipp_regs(3),
};

static const struct dcn10_ipp_shift ipp_shift = {
	IPP_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn10_ipp_mask ipp_mask = {
	IPP_MASK_SH_LIST_DCN10(_MASK),
};

#define opp_regs(id)\
[id] = {\
	OPP_REG_LIST_DCN10(id),\
}

static const struct dcn10_opp_registers opp_regs[] = {
	opp_regs(0),
	opp_regs(1),
	opp_regs(2),
	opp_regs(3),
};

static const struct dcn10_opp_shift opp_shift = {
	OPP_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn10_opp_mask opp_mask = {
	OPP_MASK_SH_LIST_DCN10(_MASK),
};

#define tf_regs(id)\
[id] = {\
	TF_REG_LIST_DCN10(id),\
}

static const struct dcn_dpp_registers tf_regs[] = {
	tf_regs(0),
	tf_regs(1),
	tf_regs(2),
	tf_regs(3),
};

static const struct dcn_dpp_shift tf_shift = {
	TF_REG_LIST_SH_MASK_DCN10(__SHIFT)
};

static const struct dcn_dpp_mask tf_mask = {
	TF_REG_LIST_SH_MASK_DCN10(_MASK),
};

static const struct dcn_mpc_registers mpc_regs = {
	MPC_COMMON_REG_LIST_DCN1_0(0),
	MPC_COMMON_REG_LIST_DCN1_0(1),
	MPC_COMMON_REG_LIST_DCN1_0(2),
	MPC_COMMON_REG_LIST_DCN1_0(3),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(0),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(1),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(2),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(3)
};

static const struct dcn_mpc_shift mpc_shift = {
	MPC_COMMON_MASK_SH_LIST_DCN1_0(__SHIFT)
};

static const struct dcn_mpc_mask mpc_mask = {
	MPC_COMMON_MASK_SH_LIST_DCN1_0(_MASK),
};

#define tg_regs(id)\
[id] = {TG_COMMON_REG_LIST_DCN1_0(id)}

static const struct dcn_tg_registers tg_regs[] = {
	tg_regs(0),
	tg_regs(1),
	tg_regs(2),
	tg_regs(3),
};

static const struct dcn_tg_shift tg_shift = {
	TG_COMMON_MASK_SH_LIST_DCN1_0(__SHIFT)
};

static const struct dcn_tg_mask tg_mask = {
	TG_COMMON_MASK_SH_LIST_DCN1_0(_MASK)
};

static const struct bios_registers bios_regs = {
	NBIO_SR(BIOS_SCRATCH_6)
};

#define hubp_regs(id)\
[id] = {\
	HUBP_REG_LIST_DCN10(id)\
}

static const struct dcn_mi_registers hubp_regs[] = {
	hubp_regs(0),
	hubp_regs(1),
	hubp_regs(2),
	hubp_regs(3),
};

static const struct dcn_mi_shift hubp_shift = {
	HUBP_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn_mi_mask hubp_mask = {
	HUBP_MASK_SH_LIST_DCN10(_MASK)
};

static const struct dcn_hubbub_registers hubbub_reg = {
	HUBBUB_REG_LIST_DCN10(0)
};

static const struct dcn_hubbub_shift hubbub_shift = {
	HUBBUB_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn_hubbub_mask hubbub_mask = {
	HUBBUB_MASK_SH_LIST_DCN10(_MASK)
};

#define clk_src_regs(index, pllid)\
[index] = {\
	CS_COMMON_REG_LIST_DCN1_0(index, pllid),\
}

static const struct dce110_clk_src_regs clk_src_regs[] = {
	clk_src_regs(0, A),
	clk_src_regs(1, B),
	clk_src_regs(2, C),
	clk_src_regs(3, D)
};

static const struct dce110_clk_src_shift cs_shift = {
	CS_COMMON_MASK_SH_LIST_DCN1_0(__SHIFT)
};

static const struct dce110_clk_src_mask cs_mask = {
	CS_COMMON_MASK_SH_LIST_DCN1_0(_MASK)
};

static const struct resource_caps res_cap = {
	.num_timing_generator = 4,
	.num_video_plane = 4,
	.num_audio = 4,
	.num_stream_encoder = 4,
	.num_pll = 4,
};

static const struct dc_debug debug_defaults_drv = {
	.sanity_checks = true,
	.disable_dmcu = true,
	.force_abm_enable = false,
	.timing_trace = false,
	.clock_trace = true,
	.min_disp_clk_khz = 300000,
	.disable_pplib_clock_request = true,
	.disable_pplib_wm_range = false,
	.pplib_wm_report_mode = WM_REPORT_DEFAULT,
	.pipe_split_policy = MPC_SPLIT_AVOID_MULT_DISP,
	.force_single_disp_pipe_split = true,
	.disable_dcc = DCC_ENABLE,
	.voltage_align_fclk = true,
	.disable_stereo_support = true,
	.vsr_support = true,
	.performance_trace = false,
};

static const struct dc_debug debug_defaults_diags = {
	.disable_dmcu = true,
	.force_abm_enable = false,
	.timing_trace = true,
	.clock_trace = true,
	.disable_stutter = true,
	.disable_pplib_clock_request = true,
	.disable_pplib_wm_range = true
};

static void dcn10_dpp_destroy(struct dpp **dpp)
{
	kfree(TO_DCN10_DPP(*dpp));
	*dpp = NULL;
}

static struct dpp *dcn10_dpp_create(
	struct dc_context *ctx,
	uint32_t inst)
{
	struct dcn10_dpp *dpp =
		kzalloc(sizeof(struct dcn10_dpp), GFP_KERNEL);

	if (!dpp)
		return NULL;

	dpp1_construct(dpp, ctx, inst,
			&tf_regs[inst], &tf_shift, &tf_mask);
	return &dpp->base;
}

static struct input_pixel_processor *dcn10_ipp_create(
	struct dc_context *ctx, uint32_t inst)
{
	struct dcn10_ipp *ipp =
		kzalloc(sizeof(struct dcn10_ipp), GFP_KERNEL);

	if (!ipp) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dcn10_ipp_construct(ipp, ctx, inst,
			&ipp_regs[inst], &ipp_shift, &ipp_mask);
	return &ipp->base;
}

static struct output_pixel_processor *dcn10_opp_create(
	struct dc_context *ctx, uint32_t inst)
{
	struct dcn10_opp *opp =
		kzalloc(sizeof(struct dcn10_opp), GFP_KERNEL);

	if (!opp) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dcn10_opp_construct(opp, ctx, inst,
			&opp_regs[inst], &opp_shift, &opp_mask);
	return &opp->base;
}

static struct mpc *dcn10_mpc_create(struct dc_context *ctx)
{
	struct dcn10_mpc *mpc10 = kzalloc(sizeof(struct dcn10_mpc),
					  GFP_KERNEL);

	if (!mpc10)
		return NULL;

	dcn10_mpc_construct(mpc10, ctx,
			&mpc_regs,
			&mpc_shift,
			&mpc_mask,
			4);

	return &mpc10->base;
}

static struct hubbub *dcn10_hubbub_create(struct dc_context *ctx)
{
	struct hubbub *hubbub = kzalloc(sizeof(struct hubbub),
					GFP_KERNEL);

	if (!hubbub)
		return NULL;

	hubbub1_construct(hubbub, ctx,
			&hubbub_reg,
			&hubbub_shift,
			&hubbub_mask);

	return hubbub;
}

static struct timing_generator *dcn10_timing_generator_create(
		struct dc_context *ctx,
		uint32_t instance)
{
	struct dcn10_timing_generator *tgn10 =
		kzalloc(sizeof(struct dcn10_timing_generator), GFP_KERNEL);

	if (!tgn10)
		return NULL;

	tgn10->base.inst = instance;
	tgn10->base.ctx = ctx;
	tgn10->tg_regs = &tg_regs[instance];
	tgn10->tg_shift = &tg_shift;
	tgn10->tg_mask = &tg_mask;

	dcn10_timing_generator_init(tgn10);
	return &tgn10->base;
}

static const struct encoder_feature_support link_enc_feature = {
	.max_hdmi_deep_color = COLOR_DEPTH_121212,
	.max_hdmi_pixel_clock = 600000,
	.ycbcr420_supported = true,
	.flags.bits.IS_HBR2_CAPABLE = true,
	.flags.bits.IS_HBR3_CAPABLE = true,
	.flags.bits.IS_TPS3_CAPABLE = true,
	.flags.bits.IS_TPS4_CAPABLE = true,
	.flags.bits.IS_YCBCR_CAPABLE = true
};

struct link_encoder *dcn10_link_encoder_create(
	const struct encoder_init_data *enc_init_data)
{
	struct dce110_link_encoder *enc110 =
		kzalloc(sizeof(struct dce110_link_encoder), GFP_KERNEL);

	if (!enc110)
		return NULL;

	dce110_link_encoder_construct(enc110,
			enc_init_data,
			&link_enc_feature,
			&link_enc_regs[enc_init_data->transmitter],
			&link_enc_aux_regs[enc_init_data->channel - 1],
			&link_enc_hpd_regs[enc_init_data->hpd_source]);
	return &enc110->base;
}

struct clock_source *dcn10_clock_source_create(
	struct dc_context *ctx,
	struct dc_bios *bios,
	enum clock_source_id id,
	const struct dce110_clk_src_regs *regs,
	bool dp_clk_src)
{
	struct dce110_clk_src *clk_src =
		kzalloc(sizeof(struct dce110_clk_src), GFP_KERNEL);

	if (!clk_src)
		return NULL;

	if (dce110_clk_src_construct(clk_src, ctx, bios, id,
			regs, &cs_shift, &cs_mask)) {
		clk_src->base.dp_clk_src = dp_clk_src;
		return &clk_src->base;
	}

	BREAK_TO_DEBUGGER();
	return NULL;
}

static void read_dce_straps(
	struct dc_context *ctx,
	struct resource_straps *straps)
{
	generic_reg_get(ctx, mmDC_PINSTRAPS + BASE(mmDC_PINSTRAPS_BASE_IDX),
		FN(DC_PINSTRAPS, DC_PINSTRAPS_AUDIO), &straps->dc_pinstraps_audio);
}

static struct audio *create_audio(
	struct dc_context *ctx, unsigned int inst)
{
	return dce_audio_create(ctx, inst,
			&audio_regs[inst], &audio_shift, &audio_mask);
}

static struct stream_encoder *dcn10_stream_encoder_create(
	enum engine_id eng_id,
	struct dc_context *ctx)
{
	struct dce110_stream_encoder *enc110 =
		kzalloc(sizeof(struct dce110_stream_encoder), GFP_KERNEL);

	if (!enc110)
		return NULL;

	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
			&stream_enc_regs[eng_id],
			&se_shift, &se_mask);
	return &enc110->base;
}

static const struct dce_hwseq_registers hwseq_reg = {
	HWSEQ_DCN1_REG_LIST()
};

static const struct dce_hwseq_shift hwseq_shift = {
	HWSEQ_DCN1_MASK_SH_LIST(__SHIFT)
};

static const struct dce_hwseq_mask hwseq_mask = {
	HWSEQ_DCN1_MASK_SH_LIST(_MASK)
};

static struct dce_hwseq *dcn10_hwseq_create(
	struct dc_context *ctx)
{
	struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL);

	if (hws) {
		hws->ctx = ctx;
		hws->regs = &hwseq_reg;
		hws->shifts = &hwseq_shift;
		hws->masks = &hwseq_mask;
		hws->wa.DEGVIDCN10_253 = true;
	}
	return hws;
}

static const struct resource_create_funcs res_create_funcs = {
	.read_dce_straps = read_dce_straps,
	.create_audio = create_audio,
	.create_stream_encoder = dcn10_stream_encoder_create,
	.create_hwseq = dcn10_hwseq_create,
};

static const struct resource_create_funcs res_create_maximus_funcs = {
	.read_dce_straps = NULL,
	.create_audio = NULL,
	.create_stream_encoder = NULL,
	.create_hwseq = dcn10_hwseq_create,
};

void dcn10_clock_source_destroy(struct clock_source **clk_src)
{
	kfree(TO_DCE110_CLK_SRC(*clk_src));
	*clk_src = NULL;
}

static struct pp_smu_funcs_rv *dcn10_pp_smu_create(struct dc_context *ctx)
{
	struct pp_smu_funcs_rv *pp_smu = kzalloc(sizeof(*pp_smu), GFP_KERNEL);

	if (!pp_smu)
		return pp_smu;

	dm_pp_get_funcs_rv(ctx, pp_smu);
	return pp_smu;
}

static void destruct(struct dcn10_resource_pool *pool)
{
	unsigned int i;

	for (i = 0; i < pool->base.stream_enc_count; i++) {
		if (pool->base.stream_enc[i] != NULL) {
			/* TODO: free dcn version of stream encoder once implemented
			 * rather than using virtual stream encoder
			 */
			kfree(pool->base.stream_enc[i]);
			pool->base.stream_enc[i] = NULL;
		}
	}

	if (pool->base.mpc != NULL) {
		kfree(TO_DCN10_MPC(pool->base.mpc));
		pool->base.mpc = NULL;
	}

	if (pool->base.hubbub != NULL) {
		kfree(pool->base.hubbub);
		pool->base.hubbub = NULL;
	}

	for (i = 0; i < pool->base.pipe_count; i++) {
		if (pool->base.opps[i] != NULL)
			pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);

		if (pool->base.dpps[i] != NULL)
			dcn10_dpp_destroy(&pool->base.dpps[i]);

		if (pool->base.ipps[i] != NULL)
			pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);

		if (pool->base.hubps[i] != NULL) {
			kfree(TO_DCN10_HUBP(pool->base.hubps[i]));
			pool->base.hubps[i] = NULL;
		}

		if (pool->base.irqs != NULL) {
			dal_irq_service_destroy(&pool->base.irqs);
		}

		if (pool->base.timing_generators[i] != NULL) {
			kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
			pool->base.timing_generators[i] = NULL;
		}
	}

	for (i = 0; i < pool->base.stream_enc_count; i++)
		kfree(pool->base.stream_enc[i]);

	for (i = 0; i < pool->base.audio_count; i++) {
		if (pool->base.audios[i])
			dce_aud_destroy(&pool->base.audios[i]);
	}

	for (i = 0; i < pool->base.clk_src_count; i++) {
		if (pool->base.clock_sources[i] != NULL) {
			dcn10_clock_source_destroy(&pool->base.clock_sources[i]);
			pool->base.clock_sources[i] = NULL;
		}
	}

	if (pool->base.dp_clock_source != NULL) {
		dcn10_clock_source_destroy(&pool->base.dp_clock_source);
		pool->base.dp_clock_source = NULL;
	}

	if (pool->base.abm != NULL)
		dce_abm_destroy(&pool->base.abm);

	if (pool->base.dmcu != NULL)
		dce_dmcu_destroy(&pool->base.dmcu);

	if (pool->base.display_clock != NULL)
		dce_disp_clk_destroy(&pool->base.display_clock);

	kfree(pool->base.pp_smu);
}

static struct hubp *dcn10_hubp_create(
	struct dc_context *ctx,
	uint32_t inst)
{
	struct dcn10_hubp *hubp1 =
		kzalloc(sizeof(struct dcn10_hubp), GFP_KERNEL);

	if (!hubp1)
		return NULL;

	dcn10_hubp_construct(hubp1, ctx, inst,
			&hubp_regs[inst], &hubp_shift, &hubp_mask);
	return &hubp1->base;
}
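
/* Derive pixel clock parameters from the stream timing.  Note that for
 * YCbCr 4:2:2 the color depth used for the clock calculation is forced to
 * COLOR_DEPTH_888, and for YCbCr 4:2:0 the requested pixel clock is halved.
 */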
static void get_pixel_clock_parameters(
	const struct pipe_ctx *pipe_ctx,
	struct pixel_clk_params *pixel_clk_params)
{
	const struct dc_stream_state *stream = pipe_ctx->stream;

	pixel_clk_params->requested_pix_clk = stream->timing.pix_clk_khz;
	pixel_clk_params->encoder_object_id = stream->sink->link->link_enc->id;
	pixel_clk_params->signal_type = pipe_ctx->stream->signal;
	pixel_clk_params->controller_id = pipe_ctx->pipe_idx + 1;
	/* TODO: un-hardcode */
	pixel_clk_params->requested_sym_clk = LINK_RATE_LOW *
		LINK_RATE_REF_FREQ_IN_KHZ;
	pixel_clk_params->flags.ENABLE_SS = 0;
	pixel_clk_params->color_depth =
		stream->timing.display_color_depth;
	pixel_clk_params->flags.DISPLAY_BLANKED = 1;
	pixel_clk_params->pixel_encoding = stream->timing.pixel_encoding;

	if (stream->timing.pixel_encoding == PIXEL_ENCODING_YCBCR422)
		pixel_clk_params->color_depth = COLOR_DEPTH_888;

	if (stream->timing.pixel_encoding == PIXEL_ENCODING_YCBCR420)
		pixel_clk_params->requested_pix_clk /= 2;
}

static void build_clamping_params(struct dc_stream_state *stream)
{
	stream->clamping.clamping_level = CLAMPING_FULL_RANGE;
	stream->clamping.c_depth = stream->timing.display_color_depth;
	stream->clamping.pixel_encoding = stream->timing.pixel_encoding;
}

static void build_pipe_hw_param(struct pipe_ctx *pipe_ctx)
{
	get_pixel_clock_parameters(pipe_ctx, &pipe_ctx->stream_res.pix_clk_params);

	pipe_ctx->clock_source->funcs->get_pix_clk_dividers(
		pipe_ctx->clock_source,
		&pipe_ctx->stream_res.pix_clk_params,
		&pipe_ctx->pll_settings);

	pipe_ctx->stream->clamping.pixel_encoding = pipe_ctx->stream->timing.pixel_encoding;

	resource_build_bit_depth_reduction_params(pipe_ctx->stream,
			&pipe_ctx->stream->bit_depth_params);
	build_clamping_params(pipe_ctx->stream);
}

static enum dc_status build_mapped_resource(
		const struct dc *dc,
		struct dc_state *context,
		struct dc_stream_state *stream)
{
	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream);

	/* TODO: this no longer seems needed */
	/* if (old_context && resource_is_stream_unchanged(old_context, stream)) {
		if (stream != NULL && old_context->streams[i] != NULL) {
			todo: shouldn't have to copy missing parameter here
			resource_build_bit_depth_reduction_params(stream,
					&stream->bit_depth_params);
			stream->clamping.pixel_encoding =
					stream->timing.pixel_encoding;
			resource_build_bit_depth_reduction_params(stream,
					&stream->bit_depth_params);
			build_clamping_params(stream);
			continue;
		}
	}
	*/

	if (!pipe_ctx)
		return DC_ERROR_UNEXPECTED;

	build_pipe_hw_param(pipe_ctx);
	return DC_OK;
}

enum dc_status dcn10_add_stream_to_ctx(
		struct dc *dc,
		struct dc_state *new_ctx,
		struct dc_stream_state *dc_stream)
{
	enum dc_status result = DC_ERROR_UNEXPECTED;

	result = resource_map_pool_resources(dc, new_ctx, dc_stream);

	if (result == DC_OK)
		result = resource_map_phy_clock_resources(dc, new_ctx, dc_stream);

	if (result == DC_OK)
		result = build_mapped_resource(dc, new_ctx, dc_stream);

	return result;
}

enum dc_status dcn10_validate_guaranteed(
		struct dc *dc,
		struct dc_stream_state *dc_stream,
		struct dc_state *context)
{
	enum dc_status result = DC_ERROR_UNEXPECTED;

	context->streams[0] = dc_stream;
	dc_stream_retain(context->streams[0]);
	context->stream_count++;

	result = resource_map_pool_resources(dc, context, dc_stream);

	if (result == DC_OK)
		result = resource_map_phy_clock_resources(dc, context, dc_stream);

	if (result == DC_OK)
		result = build_mapped_resource(dc, context, dc_stream);

	if (result == DC_OK) {
		validate_guaranteed_copy_streams(
				context, dc->caps.max_streams);
		result = resource_build_scaling_params_for_context(dc, context);
	}

	if (result == DC_OK && !dcn_validate_bandwidth(dc, context))
		return DC_FAIL_BANDWIDTH_VALIDATE;

	return result;
}

static struct pipe_ctx *dcn10_acquire_idle_pipe_for_layer(
		struct dc_state *context,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
{
	struct resource_context *res_ctx = &context->res_ctx;
	struct pipe_ctx *head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
	struct pipe_ctx *idle_pipe = find_idle_secondary_pipe(res_ctx, pool);

	if (!head_pipe) {
		ASSERT(0);
		return NULL;
	}

	if (!idle_pipe)
		return NULL;

	idle_pipe->stream = head_pipe->stream;
	idle_pipe->stream_res.tg = head_pipe->stream_res.tg;
	idle_pipe->stream_res.opp = head_pipe->stream_res.opp;

	idle_pipe->plane_res.hubp = pool->hubps[idle_pipe->pipe_idx];
	idle_pipe->plane_res.ipp = pool->ipps[idle_pipe->pipe_idx];
	idle_pipe->plane_res.dpp = pool->dpps[idle_pipe->pipe_idx];

	return idle_pipe;
}

enum dcc_control {
	dcc_control__256_256_xxx,
	dcc_control__128_128_xxx,
	dcc_control__256_64_64,
};

enum segment_order {
	segment_order__na,
	segment_order__contiguous,
	segment_order__non_contiguous,
};
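
/* Helpers for get_dcc_compression_cap() below: whether DCC (delta color
 * compression) is supported for a given pixel format and swizzle mode, and
 * what request size the detile buffer allows.
 */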
static bool dcc_support_pixel_format(
		enum surface_pixel_format format,
		unsigned int *bytes_per_element)
{
	/* DML: get_bytes_per_element */
	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		*bytes_per_element = 2;
		return true;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
		*bytes_per_element = 4;
		return true;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
		*bytes_per_element = 8;
		return true;
	default:
		return false;
	}
}

static bool dcc_support_swizzle(
		enum swizzle_mode_values swizzle,
		unsigned int bytes_per_element,
		enum segment_order *segment_order_horz,
		enum segment_order *segment_order_vert)
{
	bool standard_swizzle = false;
	bool display_swizzle = false;

	switch (swizzle) {
	case DC_SW_4KB_S:
	case DC_SW_64KB_S:
	case DC_SW_VAR_S:
	case DC_SW_4KB_S_X:
	case DC_SW_64KB_S_X:
	case DC_SW_VAR_S_X:
		standard_swizzle = true;
		break;
	case DC_SW_4KB_D:
	case DC_SW_64KB_D:
	case DC_SW_VAR_D:
	case DC_SW_4KB_D_X:
	case DC_SW_64KB_D_X:
	case DC_SW_VAR_D_X:
		display_swizzle = true;
		break;
	default:
		break;
	}

	if (bytes_per_element == 1 && standard_swizzle) {
		*segment_order_horz = segment_order__contiguous;
		*segment_order_vert = segment_order__na;
		return true;
	}
	if (bytes_per_element == 2 && standard_swizzle) {
		*segment_order_horz = segment_order__non_contiguous;
		*segment_order_vert = segment_order__contiguous;
		return true;
	}
	if (bytes_per_element == 4 && standard_swizzle) {
		*segment_order_horz = segment_order__non_contiguous;
		*segment_order_vert = segment_order__contiguous;
		return true;
	}
	if (bytes_per_element == 8 && standard_swizzle) {
		*segment_order_horz = segment_order__na;
		*segment_order_vert = segment_order__contiguous;
		return true;
	}
	if (bytes_per_element == 8 && display_swizzle) {
		*segment_order_horz = segment_order__contiguous;
		*segment_order_vert = segment_order__non_contiguous;
		return true;
	}

	return false;
}

static void get_blk256_size(unsigned int *blk256_width, unsigned int *blk256_height,
		unsigned int bytes_per_element)
{
	/* copied from DML; might want to refactor so this is shared with DML */
	/* DML : get_blk256_size */
	if (bytes_per_element == 1) {
		*blk256_width = 16;
		*blk256_height = 16;
	} else if (bytes_per_element == 2) {
		*blk256_width = 16;
		*blk256_height = 8;
	} else if (bytes_per_element == 4) {
		*blk256_width = 8;
		*blk256_height = 8;
	} else if (bytes_per_element == 8) {
		*blk256_width = 8;
		*blk256_height = 4;
	}
}
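
/* Choose between full 256B and half 128B requests for each access direction:
 * a full 256B request is kept only when two swaths of the given dimension fit
 * in the 164KB detile buffer; otherwise fall back to 128B requests.
 */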
static void det_request_size(
		unsigned int height,
		unsigned int width,
		unsigned int bpe,
		bool *req128_horz_wc,
		bool *req128_vert_wc)
{
	unsigned int detile_buf_size = 164 * 1024;  /* 164KB for DCN1.0 */
	unsigned int blk256_height = 0;
	unsigned int blk256_width = 0;
	unsigned int swath_bytes_horz_wc, swath_bytes_vert_wc;

	get_blk256_size(&blk256_width, &blk256_height, bpe);

	swath_bytes_horz_wc = height * blk256_height * bpe;
	swath_bytes_vert_wc = width * blk256_width * bpe;

	*req128_horz_wc = (2 * swath_bytes_horz_wc <= detile_buf_size) ?
			false : /* full 256B request */
			true; /* half 128B request */

	*req128_vert_wc = (2 * swath_bytes_vert_wc <= detile_buf_size) ?
			false : /* full 256B request */
			true; /* half 128B request */
}
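
/* Map the request size and segment order onto one of the three dcc_control
 * modes: 256/256 when full 256B requests are possible, 128/128 when the 128B
 * segments are contiguous, and 256/64 (independent 64B blocks) otherwise.
 */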
static bool get_dcc_compression_cap(const struct dc *dc,
		const struct dc_dcc_surface_param *input,
		struct dc_surface_dcc_cap *output)
{
	/* implement section 1.6.2.1 of DCN1_Programming_Guide.docx */
	enum dcc_control dcc_control;
	unsigned int bpe;
	enum segment_order segment_order_horz, segment_order_vert;
	bool req128_horz_wc, req128_vert_wc;

	memset(output, 0, sizeof(*output));

	if (dc->debug.disable_dcc == DCC_DISABLE)
		return false;

	if (!dcc_support_pixel_format(input->format,
			&bpe))
		return false;

	if (!dcc_support_swizzle(input->swizzle_mode, bpe,
			&segment_order_horz, &segment_order_vert))
		return false;

	det_request_size(input->surface_size.height, input->surface_size.width,
			bpe, &req128_horz_wc, &req128_vert_wc);

	if (!req128_horz_wc && !req128_vert_wc) {
		dcc_control = dcc_control__256_256_xxx;
	} else if (input->scan == SCAN_DIRECTION_HORIZONTAL) {
		if (!req128_horz_wc)
			dcc_control = dcc_control__256_256_xxx;
		else if (segment_order_horz == segment_order__contiguous)
			dcc_control = dcc_control__128_128_xxx;
		else
			dcc_control = dcc_control__256_64_64;
	} else if (input->scan == SCAN_DIRECTION_VERTICAL) {
		if (!req128_vert_wc)
			dcc_control = dcc_control__256_256_xxx;
		else if (segment_order_vert == segment_order__contiguous)
			dcc_control = dcc_control__128_128_xxx;
		else
			dcc_control = dcc_control__256_64_64;
	} else {
		if ((req128_horz_wc &&
			segment_order_horz == segment_order__non_contiguous) ||
			(req128_vert_wc &&
			segment_order_vert == segment_order__non_contiguous))
			/* access_dir not known, must use most constraining */
			dcc_control = dcc_control__256_64_64;
		else
			/* req128 is true for either horz or vert,
			 * but the segment order is contiguous
			 */
			dcc_control = dcc_control__128_128_xxx;
	}

	if (dc->debug.disable_dcc == DCC_HALF_REQ_DISALBE &&
		dcc_control != dcc_control__256_256_xxx)
		return false;

	switch (dcc_control) {
	case dcc_control__256_256_xxx:
		output->grph.rgb.max_uncompressed_blk_size = 256;
		output->grph.rgb.max_compressed_blk_size = 256;
		output->grph.rgb.independent_64b_blks = false;
		break;
	case dcc_control__128_128_xxx:
		output->grph.rgb.max_uncompressed_blk_size = 128;
		output->grph.rgb.max_compressed_blk_size = 128;
		output->grph.rgb.independent_64b_blks = false;
		break;
	case dcc_control__256_64_64:
		output->grph.rgb.max_uncompressed_blk_size = 256;
		output->grph.rgb.max_compressed_blk_size = 64;
		output->grph.rgb.independent_64b_blks = true;
		break;
	}

	output->capable = true;
	output->const_color_support = false;

	return true;
}

static void dcn10_destroy_resource_pool(struct resource_pool **pool)
{
	struct dcn10_resource_pool *dcn10_pool = TO_DCN10_RES_POOL(*pool);

	destruct(dcn10_pool);
	kfree(dcn10_pool);
	*pool = NULL;
}

static enum dc_status dcn10_validate_plane(const struct dc_plane_state *plane_state, struct dc_caps *caps)
{
	if (plane_state->format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN
			&& caps->max_video_width != 0
			&& plane_state->src_rect.width > caps->max_video_width)
		return DC_FAIL_SURFACE_VALIDATE;

	return DC_OK;
}

static struct dc_cap_funcs cap_funcs = {
	.get_dcc_compression_cap = get_dcc_compression_cap
};

static struct resource_funcs dcn10_res_pool_funcs = {
	.destroy = dcn10_destroy_resource_pool,
	.link_enc_create = dcn10_link_encoder_create,
	.validate_guaranteed = dcn10_validate_guaranteed,
	.validate_bandwidth = dcn_validate_bandwidth,
	.acquire_idle_pipe_for_layer = dcn10_acquire_idle_pipe_for_layer,
	.validate_plane = dcn10_validate_plane,
	.add_stream_to_ctx = dcn10_add_stream_to_ctx
};
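
/* Each bit set in CC_DC_PIPE_DIS marks a fused-off (disabled) pipe;
 * construct() skips those register instances when creating the HW blocks.
 */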
static uint32_t read_pipe_fuses(struct dc_context *ctx)
{
	uint32_t value = dm_read_reg_soc15(ctx, mmCC_DC_PIPE_DIS, 0);
	/* RV1 supports at most 4 pipes */
	value = value & 0xf;
	return value;
}
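
/* Build the DCN 1.0 resource pool: clock sources, display clock, DMCU/ABM,
 * bandwidth-calculation defaults, the IRQ service, the per-pipe HW blocks
 * (skipping fused-off pipes), MPC, HUBBUB and the HW sequencer.
 */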
static bool construct(
	uint8_t num_virtual_links,
	struct dc *dc,
	struct dcn10_resource_pool *pool)
{
	int i;
	int j;
	struct dc_context *ctx = dc->ctx;
	uint32_t pipe_fuses = read_pipe_fuses(ctx);

	ctx->dc_bios->regs = &bios_regs;

	pool->base.res_cap = &res_cap;
	pool->base.funcs = &dcn10_res_pool_funcs;

	/*
	 * TODO fill in from actual raven resource when we create
	 * more than a virtual encoder
	 */

	/*************************************************
	 *  Resource + asic cap hardcoding               *
	 *************************************************/
	pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;

	/* max pipe num for ASIC before checking pipe fuses */
	pool->base.pipe_count = pool->base.res_cap->num_timing_generator;

	dc->caps.max_video_width = 3840;
	dc->caps.max_downscale_ratio = 200;
	dc->caps.i2c_speed_in_khz = 100;
	dc->caps.max_cursor_size = 256;
	dc->caps.max_slave_planes = 1;
	dc->caps.is_apu = true;

	if (dc->ctx->dce_environment == DCE_ENV_PRODUCTION_DRV)
		dc->debug = debug_defaults_drv;
	else
		dc->debug = debug_defaults_diags;

	/*************************************************
	 *  Create resources                             *
	 *************************************************/
	pool->base.clock_sources[DCN10_CLK_SRC_PLL0] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL0,
				&clk_src_regs[0], false);
	pool->base.clock_sources[DCN10_CLK_SRC_PLL1] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL1,
				&clk_src_regs[1], false);
	pool->base.clock_sources[DCN10_CLK_SRC_PLL2] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL2,
				&clk_src_regs[2], false);
	pool->base.clock_sources[DCN10_CLK_SRC_PLL3] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL3,
				&clk_src_regs[3], false);

	pool->base.clk_src_count = DCN10_CLK_SRC_TOTAL;

	pool->base.dp_clock_source =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_ID_DP_DTO,
				/* TODO: do not reuse phy_pll registers */
				&clk_src_regs[0], true);

	for (i = 0; i < pool->base.clk_src_count; i++) {
		if (pool->base.clock_sources[i] == NULL) {
			dm_error("DC: failed to create clock sources!\n");
			BREAK_TO_DEBUGGER();
			goto fail;
		}
	}

	if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
		pool->base.display_clock = dce120_disp_clk_create(ctx);
		if (pool->base.display_clock == NULL) {
			dm_error("DC: failed to create display clock!\n");
			BREAK_TO_DEBUGGER();
			goto fail;
		}
	}

	pool->base.dmcu = dcn10_dmcu_create(ctx,
			&dmcu_regs,
			&dmcu_shift,
			&dmcu_mask);
	if (pool->base.dmcu == NULL) {
		dm_error("DC: failed to create dmcu!\n");
		BREAK_TO_DEBUGGER();
		goto fail;
	}

	pool->base.abm = dce_abm_create(ctx,
			&abm_regs,
			&abm_shift,
			&abm_mask);
	if (pool->base.abm == NULL) {
		dm_error("DC: failed to create abm!\n");
		BREAK_TO_DEBUGGER();
		goto fail;
	}

	dml_init_instance(&dc->dml, DML_PROJECT_RAVEN1);
	memcpy(dc->dcn_ip, &dcn10_ip_defaults, sizeof(dcn10_ip_defaults));
	memcpy(dc->dcn_soc, &dcn10_soc_defaults, sizeof(dcn10_soc_defaults));

	if (ASICREV_IS_RV1_F0(dc->ctx->asic_id.hw_internal_rev)) {
		dc->dcn_soc->urgent_latency = 3;
		dc->debug.disable_dmcu = true;
		dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9 = 41.60f;
	}

	dc->dcn_soc->number_of_channels = dc->ctx->asic_id.vram_width / ddr4_dram_width;
	ASSERT(dc->dcn_soc->number_of_channels < 3);
	if (dc->dcn_soc->number_of_channels == 0) /* old sbios bug */
		dc->dcn_soc->number_of_channels = 2;

	if (dc->dcn_soc->number_of_channels == 1) {
		dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9 = 19.2f;
		dc->dcn_soc->fabric_and_dram_bandwidth_vnom0p8 = 17.066f;
		dc->dcn_soc->fabric_and_dram_bandwidth_vmid0p72 = 14.933f;
		dc->dcn_soc->fabric_and_dram_bandwidth_vmin0p65 = 12.8f;
		if (ASICREV_IS_RV1_F0(dc->ctx->asic_id.hw_internal_rev)) {
			dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9 = 20.80f;
		}
	}

	pool->base.pp_smu = dcn10_pp_smu_create(ctx);

	if (!dc->debug.disable_pplib_clock_request)
		dcn_bw_update_from_pplib(dc);
	dcn_bw_sync_calcs_and_dml(dc);
	if (!dc->debug.disable_pplib_wm_range) {
		dc->res_pool = &pool->base;
		dcn_bw_notify_pplib_of_wm_ranges(dc);
	}

	{
		struct irq_service_init_data init_data;
		init_data.ctx = dc->ctx;
		pool->base.irqs = dal_irq_service_dcn10_create(&init_data);
		if (!pool->base.irqs)
			goto fail;
	}
	/* index into the valid pipe resources */
	j = 0;
	/* mem input -> ipp -> dpp -> opp -> TG */
	for (i = 0; i < pool->base.pipe_count; i++) {
		/* if pipe is disabled, skip instance of HW pipe,
		 * i.e., skip ASIC register instance
		 */
		if ((pipe_fuses & (1 << i)) != 0)
			continue;

		pool->base.hubps[j] = dcn10_hubp_create(ctx, i);
		if (pool->base.hubps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create memory input!\n");
			goto fail;
		}

		pool->base.ipps[j] = dcn10_ipp_create(ctx, i);
		if (pool->base.ipps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create input pixel processor!\n");
			goto fail;
		}

		pool->base.dpps[j] = dcn10_dpp_create(ctx, i);
		if (pool->base.dpps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create dpp!\n");
			goto fail;
		}

		pool->base.opps[j] = dcn10_opp_create(ctx, i);
		if (pool->base.opps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create output pixel processor!\n");
			goto fail;
		}

		pool->base.timing_generators[j] = dcn10_timing_generator_create(
				ctx, i);
		if (pool->base.timing_generators[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error("DC: failed to create tg!\n");
			goto fail;
		}
		/* check next valid pipe */
		j++;
	}

	/* valid pipe num */
	pool->base.pipe_count = j;

	/* Within the DML lib this is hard-coded to 4; if any ASIC pipe is
	 * fused off, the value is updated here.
	 */
	dc->dml.ip.max_num_dpp = pool->base.pipe_count;
	dc->dcn_ip->max_num_dpp = pool->base.pipe_count;

	pool->base.mpc = dcn10_mpc_create(ctx);
	if (pool->base.mpc == NULL) {
		BREAK_TO_DEBUGGER();
		dm_error("DC: failed to create mpc!\n");
		goto fail;
	}

	pool->base.hubbub = dcn10_hubbub_create(ctx);
	if (pool->base.hubbub == NULL) {
		BREAK_TO_DEBUGGER();
		dm_error("DC: failed to create hubbub!\n");
		goto fail;
	}

	if (!resource_construct(num_virtual_links, dc, &pool->base,
			(!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment) ?
			&res_create_funcs : &res_create_maximus_funcs)))
		goto fail;

	dcn10_hw_sequencer_construct(dc);
	dc->caps.max_planes = pool->base.pipe_count;

	dc->cap_funcs = cap_funcs;

	return true;

fail:
	destruct(pool);
	return false;
}

struct resource_pool *dcn10_create_resource_pool(
		uint8_t num_virtual_links,
		struct dc *dc)
{
	struct dcn10_resource_pool *pool =
		kzalloc(sizeof(struct dcn10_resource_pool), GFP_KERNEL);

	if (!pool)
		return NULL;

	if (construct(num_virtual_links, dc, pool))
		return &pool->base;

	BREAK_TO_DEBUGGER();
	return NULL;
}