/*
 * Copyright 2016 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dm_services.h"
#include "dc.h"

#include "resource.h"
#include "include/irq_service_interface.h"
#include "dcn10/dcn10_resource.h"

#include "dcn10/dcn10_ipp.h"
#include "dcn10/dcn10_mpc.h"
#include "irq/dcn10/irq_service_dcn10.h"
#include "dcn10/dcn10_dpp.h"
#include "dcn10_optc.h"
#include "dcn10/dcn10_hw_sequencer.h"
#include "dce110/dce110_hw_sequencer.h"
#include "dcn10/dcn10_opp.h"
#include "dce/dce_link_encoder.h"
#include "dce/dce_stream_encoder.h"
#include "dce/dce_clocks.h"
#include "dce/dce_clock_source.h"
#include "dce/dce_audio.h"
#include "dce/dce_hwseq.h"
#include "../virtual/virtual_stream_encoder.h"
#include "dce110/dce110_resource.h"
#include "dce112/dce112_resource.h"
#include "dcn10_hubp.h"
#include "dcn10_hubbub.h"

#include "soc15ip.h"
#include "dcn/dcn_1_0_offset.h"
#include "dcn/dcn_1_0_sh_mask.h"
#include "nbio/nbio_7_0_offset.h"
#include "mmhub/mmhub_9_1_offset.h"
#include "mmhub/mmhub_9_1_sh_mask.h"

#include "reg_helper.h"
#include "dce/dce_abm.h"
#include "dce/dce_dmcu.h"

#ifndef mmDP0_DP_DPHY_INTERNAL_CTRL
	#define mmDP0_DP_DPHY_INTERNAL_CTRL		0x210f
	#define mmDP0_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP1_DP_DPHY_INTERNAL_CTRL		0x220f
	#define mmDP1_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP2_DP_DPHY_INTERNAL_CTRL		0x230f
	#define mmDP2_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP3_DP_DPHY_INTERNAL_CTRL		0x240f
	#define mmDP3_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP4_DP_DPHY_INTERNAL_CTRL		0x250f
	#define mmDP4_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP5_DP_DPHY_INTERNAL_CTRL		0x260f
	#define mmDP5_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
	#define mmDP6_DP_DPHY_INTERNAL_CTRL		0x270f
	#define mmDP6_DP_DPHY_INTERNAL_CTRL_BASE_IDX	2
#endif
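
/*
 * The #ifndef block above provides fallback offsets for the per-PHY
 * DP_DPHY_INTERNAL_CTRL registers in case the included register headers do
 * not define them; the link encoder register lists further down rely on
 * these names, and all instances sit in base-address segment 2.
 */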

enum dcn10_clk_src_array_id {
	DCN10_CLK_SRC_PLL0,
	DCN10_CLK_SRC_PLL1,
	DCN10_CLK_SRC_PLL2,
	DCN10_CLK_SRC_PLL3,
	DCN10_CLK_SRC_TOTAL
};

/* begin *********************
 * macros to expand the register list macros defined in the HW object header files */

/* DCN */
#define BASE_INNER(seg) \
	DCE_BASE__INST0_SEG ## seg

#define BASE(seg) \
	BASE_INNER(seg)

#define SR(reg_name)\
	.reg_name = BASE(mm ## reg_name ## _BASE_IDX) + \
			mm ## reg_name

#define SRI(reg_name, block, id)\
	.reg_name = BASE(mm ## block ## id ## _ ## reg_name ## _BASE_IDX) + \
			mm ## block ## id ## _ ## reg_name

#define SRII(reg_name, block, id)\
	.reg_name[id] = BASE(mm ## block ## id ## _ ## reg_name ## _BASE_IDX) + \
			mm ## block ## id ## _ ## reg_name

/* NBIO */
#define NBIO_BASE_INNER(seg) \
	NBIF_BASE__INST0_SEG ## seg

#define NBIO_BASE(seg) \
	NBIO_BASE_INNER(seg)

#define NBIO_SR(reg_name)\
	.reg_name = NBIO_BASE(mm ## reg_name ## _BASE_IDX) + \
			mm ## reg_name

/* MMHUB */
#define MMHUB_BASE_INNER(seg) \
	MMHUB_BASE__INST0_SEG ## seg

#define MMHUB_BASE(seg) \
	MMHUB_BASE_INNER(seg)

#define MMHUB_SR(reg_name)\
	.reg_name = MMHUB_BASE(mm ## reg_name ## _BASE_IDX) + \
			mm ## reg_name

/* macros to expand the register list macros defined in the HW object header files
 * end *********************/
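
/*
 * Illustrative expansion (editor's sketch): with the fallback defines above,
 * SRI(DP_DPHY_INTERNAL_CTRL, DP, 0) in the link encoder register list
 * expands to
 *
 *	.DP_DPHY_INTERNAL_CTRL = DCE_BASE__INST0_SEG2
 *				 + mmDP0_DP_DPHY_INTERNAL_CTRL
 *
 * i.e. the per-instance register offset from the hardware headers added to
 * the base address of the segment selected by its _BASE_IDX define.  SR()
 * and SRII() work the same way for single registers and register arrays.
 */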

static const struct dce_dmcu_registers dmcu_regs = {
	DMCU_DCN10_REG_LIST()
};

static const struct dce_dmcu_shift dmcu_shift = {
	DMCU_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dce_dmcu_mask dmcu_mask = {
	DMCU_MASK_SH_LIST_DCN10(_MASK)
};

static const struct dce_abm_registers abm_regs = {
	ABM_DCN10_REG_LIST(0)
};

static const struct dce_abm_shift abm_shift = {
	ABM_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dce_abm_mask abm_mask = {
	ABM_MASK_SH_LIST_DCN10(_MASK)
};

#define stream_enc_regs(id)\
[id] = {\
	SE_DCN_REG_LIST(id),\
	.TMDS_CNTL = 0,\
	.AFMT_AVI_INFO0 = 0,\
	.AFMT_AVI_INFO1 = 0,\
	.AFMT_AVI_INFO2 = 0,\
	.AFMT_AVI_INFO3 = 0,\
}

static const struct dce110_stream_enc_registers stream_enc_regs[] = {
	stream_enc_regs(0),
	stream_enc_regs(1),
	stream_enc_regs(2),
	stream_enc_regs(3),
};

static const struct dce_stream_encoder_shift se_shift = {
	SE_COMMON_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dce_stream_encoder_mask se_mask = {
	SE_COMMON_MASK_SH_LIST_DCN10(_MASK),
	.AFMT_GENERIC0_UPDATE = 0,
	.AFMT_GENERIC2_UPDATE = 0,
	.DP_DYN_RANGE = 0,
	.DP_YCBCR_RANGE = 0,
	.HDMI_AVI_INFO_SEND = 0,
	.HDMI_AVI_INFO_CONT = 0,
	.HDMI_AVI_INFO_LINE = 0,
	.DP_SEC_AVI_ENABLE = 0,
	.AFMT_AVI_INFO_VERSION = 0
};
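
/*
 * The stream_enc_regs() entries and se_mask fields that are explicitly set
 * to 0 above (TMDS_CNTL, AFMT_AVI_INFO0..3, the HDMI/DP AVI-infoframe bits)
 * appear to cover registers and fields that DCN1.0 does not use; zeroing
 * them keeps the shared DCE110 stream-encoder table layout intact while
 * leaving those entries unused on this ASIC.
 */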

#define audio_regs(id)\
[id] = {\
	AUD_COMMON_REG_LIST(id)\
}

static const struct dce_audio_registers audio_regs[] = {
	audio_regs(0),
	audio_regs(1),
	audio_regs(2),
	audio_regs(3),
};

#define DCE120_AUD_COMMON_MASK_SH_LIST(mask_sh)\
	SF(AZF0ENDPOINT0_AZALIA_F0_CODEC_ENDPOINT_INDEX, AZALIA_ENDPOINT_REG_INDEX, mask_sh),\
	SF(AZF0ENDPOINT0_AZALIA_F0_CODEC_ENDPOINT_DATA, AZALIA_ENDPOINT_REG_DATA, mask_sh),\
	AUD_COMMON_MASK_SH_LIST_BASE(mask_sh)

static const struct dce_audio_shift audio_shift = {
	DCE120_AUD_COMMON_MASK_SH_LIST(__SHIFT)
};

static const struct dce_aduio_mask audio_mask = {
	DCE120_AUD_COMMON_MASK_SH_LIST(_MASK)
};

#define aux_regs(id)\
[id] = {\
	AUX_REG_LIST(id)\
}

static const struct dce110_link_enc_aux_registers link_enc_aux_regs[] = {
	aux_regs(0),
	aux_regs(1),
	aux_regs(2),
	aux_regs(3),
	aux_regs(4),
	aux_regs(5)
};

#define hpd_regs(id)\
[id] = {\
	HPD_REG_LIST(id)\
}

static const struct dce110_link_enc_hpd_registers link_enc_hpd_regs[] = {
	hpd_regs(0),
	hpd_regs(1),
	hpd_regs(2),
	hpd_regs(3),
	hpd_regs(4),
	hpd_regs(5)
};

#define link_regs(id)\
[id] = {\
	LE_DCN10_REG_LIST(id), \
	SRI(DP_DPHY_INTERNAL_CTRL, DP, id) \
}

static const struct dce110_link_enc_registers link_enc_regs[] = {
	link_regs(0),
	link_regs(1),
	link_regs(2),
	link_regs(3),
	link_regs(4),
	link_regs(5),
	link_regs(6),
};

#define ipp_regs(id)\
[id] = {\
	IPP_REG_LIST_DCN10(id),\
}

static const struct dcn10_ipp_registers ipp_regs[] = {
	ipp_regs(0),
	ipp_regs(1),
	ipp_regs(2),
	ipp_regs(3),
};

static const struct dcn10_ipp_shift ipp_shift = {
	IPP_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn10_ipp_mask ipp_mask = {
	IPP_MASK_SH_LIST_DCN10(_MASK),
};

#define opp_regs(id)\
[id] = {\
	OPP_REG_LIST_DCN10(id),\
}

static const struct dcn10_opp_registers opp_regs[] = {
	opp_regs(0),
	opp_regs(1),
	opp_regs(2),
	opp_regs(3),
};

static const struct dcn10_opp_shift opp_shift = {
	OPP_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn10_opp_mask opp_mask = {
	OPP_MASK_SH_LIST_DCN10(_MASK),
};

#define tf_regs(id)\
[id] = {\
	TF_REG_LIST_DCN10(id),\
}

static const struct dcn_dpp_registers tf_regs[] = {
	tf_regs(0),
	tf_regs(1),
	tf_regs(2),
	tf_regs(3),
};

static const struct dcn_dpp_shift tf_shift = {
	TF_REG_LIST_SH_MASK_DCN10(__SHIFT)
};

static const struct dcn_dpp_mask tf_mask = {
	TF_REG_LIST_SH_MASK_DCN10(_MASK),
};

static const struct dcn_mpc_registers mpc_regs = {
	MPC_COMMON_REG_LIST_DCN1_0(0),
	MPC_COMMON_REG_LIST_DCN1_0(1),
	MPC_COMMON_REG_LIST_DCN1_0(2),
	MPC_COMMON_REG_LIST_DCN1_0(3),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(0),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(1),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(2),
	MPC_OUT_MUX_COMMON_REG_LIST_DCN1_0(3)
};

static const struct dcn_mpc_shift mpc_shift = {
	MPC_COMMON_MASK_SH_LIST_DCN1_0(__SHIFT)
};

static const struct dcn_mpc_mask mpc_mask = {
	MPC_COMMON_MASK_SH_LIST_DCN1_0(_MASK),
};

#define tg_regs(id)\
[id] = {TG_COMMON_REG_LIST_DCN1_0(id)}

static const struct dcn_optc_registers tg_regs[] = {
	tg_regs(0),
	tg_regs(1),
	tg_regs(2),
	tg_regs(3),
};

static const struct dcn_optc_shift tg_shift = {
	TG_COMMON_MASK_SH_LIST_DCN1_0(__SHIFT)
};

static const struct dcn_optc_mask tg_mask = {
	TG_COMMON_MASK_SH_LIST_DCN1_0(_MASK)
};

static const struct bios_registers bios_regs = {
	NBIO_SR(BIOS_SCRATCH_6)
};

#define hubp_regs(id)\
[id] = {\
	HUBP_REG_LIST_DCN10(id)\
}

static const struct dcn_mi_registers hubp_regs[] = {
	hubp_regs(0),
	hubp_regs(1),
	hubp_regs(2),
	hubp_regs(3),
};

static const struct dcn_mi_shift hubp_shift = {
	HUBP_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn_mi_mask hubp_mask = {
	HUBP_MASK_SH_LIST_DCN10(_MASK)
};

static const struct dcn_hubbub_registers hubbub_reg = {
	HUBBUB_REG_LIST_DCN10(0)
};

static const struct dcn_hubbub_shift hubbub_shift = {
	HUBBUB_MASK_SH_LIST_DCN10(__SHIFT)
};

static const struct dcn_hubbub_mask hubbub_mask = {
	HUBBUB_MASK_SH_LIST_DCN10(_MASK)
};

#define clk_src_regs(index, pllid)\
[index] = {\
	CS_COMMON_REG_LIST_DCN1_0(index, pllid),\
}

static const struct dce110_clk_src_regs clk_src_regs[] = {
	clk_src_regs(0, A),
	clk_src_regs(1, B),
	clk_src_regs(2, C),
	clk_src_regs(3, D)
};

static const struct dce110_clk_src_shift cs_shift = {
	CS_COMMON_MASK_SH_LIST_DCN1_0(__SHIFT)
};

static const struct dce110_clk_src_mask cs_mask = {
	CS_COMMON_MASK_SH_LIST_DCN1_0(_MASK)
};

static const struct resource_caps res_cap = {
	.num_timing_generator = 4,
	.num_video_plane = 4,
	.num_audio = 4,
	.num_stream_encoder = 4,
	.num_pll = 4,
};

static const struct dc_debug debug_defaults_drv = {
	.sanity_checks = true,
	.disable_dmcu = true,
	.force_abm_enable = false,
	.timing_trace = false,
	.clock_trace = true,
	.min_disp_clk_khz = 300000,
	.disable_pplib_clock_request = true,
	.disable_pplib_wm_range = false,
	.pplib_wm_report_mode = WM_REPORT_DEFAULT,
	.pipe_split_policy = MPC_SPLIT_AVOID_MULT_DISP,
	.force_single_disp_pipe_split = true,
	.disable_dcc = DCC_ENABLE,
	.voltage_align_fclk = true,
	.disable_stereo_support = true,
	.vsr_support = true,
	.performance_trace = false,
};

static const struct dc_debug debug_defaults_diags = {
	.disable_dmcu = true,
	.force_abm_enable = false,
	.timing_trace = true,
	.clock_trace = true,
	.disable_stutter = true,
	.disable_pplib_clock_request = true,
	.disable_pplib_wm_range = true
};

static void dcn10_dpp_destroy(struct dpp **dpp)
{
	kfree(TO_DCN10_DPP(*dpp));
	*dpp = NULL;
}

static struct dpp *dcn10_dpp_create(
	struct dc_context *ctx,
	uint32_t inst)
{
	struct dcn10_dpp *dpp =
		kzalloc(sizeof(struct dcn10_dpp), GFP_KERNEL);

	if (!dpp)
		return NULL;

	dpp1_construct(dpp, ctx, inst,
		       &tf_regs[inst], &tf_shift, &tf_mask);
	return &dpp->base;
}
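
/*
 * The block constructors below all follow the pattern illustrated by
 * dcn10_dpp_create(): kzalloc() the DCN1.0-specific wrapper, run the block's
 * construct function against the per-instance register/shift/mask tables
 * defined above, and return the embedded base object (NULL on allocation
 * failure).  destruct() later frees them through the matching TO_DCN10_*
 * container macros.
 */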

static struct input_pixel_processor *dcn10_ipp_create(
	struct dc_context *ctx, uint32_t inst)
{
	struct dcn10_ipp *ipp =
		kzalloc(sizeof(struct dcn10_ipp), GFP_KERNEL);

	if (!ipp) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dcn10_ipp_construct(ipp, ctx, inst,
			&ipp_regs[inst], &ipp_shift, &ipp_mask);
	return &ipp->base;
}

static struct output_pixel_processor *dcn10_opp_create(
	struct dc_context *ctx, uint32_t inst)
{
	struct dcn10_opp *opp =
		kzalloc(sizeof(struct dcn10_opp), GFP_KERNEL);

	if (!opp) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dcn10_opp_construct(opp, ctx, inst,
			&opp_regs[inst], &opp_shift, &opp_mask);
	return &opp->base;
}

static struct mpc *dcn10_mpc_create(struct dc_context *ctx)
{
	struct dcn10_mpc *mpc10 = kzalloc(sizeof(struct dcn10_mpc),
					  GFP_KERNEL);

	if (!mpc10)
		return NULL;

	dcn10_mpc_construct(mpc10, ctx,
			&mpc_regs,
			&mpc_shift,
			&mpc_mask,
			4);

	return &mpc10->base;
}

static struct hubbub *dcn10_hubbub_create(struct dc_context *ctx)
{
	struct hubbub *hubbub = kzalloc(sizeof(struct hubbub),
					GFP_KERNEL);

	if (!hubbub)
		return NULL;

	hubbub1_construct(hubbub, ctx,
			&hubbub_reg,
			&hubbub_shift,
			&hubbub_mask);

	return hubbub;
}

static struct timing_generator *dcn10_timing_generator_create(
		struct dc_context *ctx,
		uint32_t instance)
{
	struct optc *tgn10 =
		kzalloc(sizeof(struct optc), GFP_KERNEL);

	if (!tgn10)
		return NULL;

	tgn10->base.inst = instance;
	tgn10->base.ctx = ctx;

	tgn10->tg_regs = &tg_regs[instance];
	tgn10->tg_shift = &tg_shift;
	tgn10->tg_mask = &tg_mask;

	dcn10_timing_generator_init(tgn10);

	return &tgn10->base;
}

static const struct encoder_feature_support link_enc_feature = {
	.max_hdmi_deep_color = COLOR_DEPTH_121212,
	.max_hdmi_pixel_clock = 600000,
	.ycbcr420_supported = true,
	.flags.bits.IS_HBR2_CAPABLE = true,
	.flags.bits.IS_HBR3_CAPABLE = true,
	.flags.bits.IS_TPS3_CAPABLE = true,
	.flags.bits.IS_TPS4_CAPABLE = true,
	.flags.bits.IS_YCBCR_CAPABLE = true
};

struct link_encoder *dcn10_link_encoder_create(
	const struct encoder_init_data *enc_init_data)
{
	struct dce110_link_encoder *enc110 =
		kzalloc(sizeof(struct dce110_link_encoder), GFP_KERNEL);

	if (!enc110)
		return NULL;

	dce110_link_encoder_construct(enc110,
			enc_init_data,
			&link_enc_feature,
			&link_enc_regs[enc_init_data->transmitter],
			&link_enc_aux_regs[enc_init_data->channel - 1],
			&link_enc_hpd_regs[enc_init_data->hpd_source]);

	return &enc110->base;
}

struct clock_source *dcn10_clock_source_create(
	struct dc_context *ctx,
	struct dc_bios *bios,
	enum clock_source_id id,
	const struct dce110_clk_src_regs *regs,
	bool dp_clk_src)
{
	struct dce110_clk_src *clk_src =
		kzalloc(sizeof(struct dce110_clk_src), GFP_KERNEL);

	if (!clk_src)
		return NULL;

	if (dce110_clk_src_construct(clk_src, ctx, bios, id,
			regs, &cs_shift, &cs_mask)) {
		clk_src->base.dp_clk_src = dp_clk_src;
		return &clk_src->base;
	}

	BREAK_TO_DEBUGGER();
	return NULL;
}

static void read_dce_straps(
	struct dc_context *ctx,
	struct resource_straps *straps)
{
	generic_reg_get(ctx, mmDC_PINSTRAPS + BASE(mmDC_PINSTRAPS_BASE_IDX),
		FN(DC_PINSTRAPS, DC_PINSTRAPS_AUDIO), &straps->dc_pinstraps_audio);
}

static struct audio *create_audio(
		struct dc_context *ctx, unsigned int inst)
{
	return dce_audio_create(ctx, inst,
			&audio_regs[inst], &audio_shift, &audio_mask);
}

static struct stream_encoder *dcn10_stream_encoder_create(
	enum engine_id eng_id,
	struct dc_context *ctx)
{
	struct dce110_stream_encoder *enc110 =
		kzalloc(sizeof(struct dce110_stream_encoder), GFP_KERNEL);

	if (!enc110)
		return NULL;

	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
			&stream_enc_regs[eng_id],
			&se_shift, &se_mask);
	return &enc110->base;
}

static const struct dce_hwseq_registers hwseq_reg = {
	HWSEQ_DCN1_REG_LIST()
};

static const struct dce_hwseq_shift hwseq_shift = {
	HWSEQ_DCN1_MASK_SH_LIST(__SHIFT)
};

static const struct dce_hwseq_mask hwseq_mask = {
	HWSEQ_DCN1_MASK_SH_LIST(_MASK)
};

static struct dce_hwseq *dcn10_hwseq_create(
	struct dc_context *ctx)
{
	struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL);

	if (hws) {
		hws->ctx = ctx;
		hws->regs = &hwseq_reg;
		hws->shifts = &hwseq_shift;
		hws->masks = &hwseq_mask;
		hws->wa.DEGVIDCN10_253 = true;
		hws->wa.false_optc_underflow = true;
	}
	return hws;
}

static const struct resource_create_funcs res_create_funcs = {
	.read_dce_straps = read_dce_straps,
	.create_audio = create_audio,
	.create_stream_encoder = dcn10_stream_encoder_create,
	.create_hwseq = dcn10_hwseq_create,
};

static const struct resource_create_funcs res_create_maximus_funcs = {
	.read_dce_straps = NULL,
	.create_audio = NULL,
	.create_stream_encoder = NULL,
	.create_hwseq = dcn10_hwseq_create,
};
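
/*
 * res_create_maximus_funcs is the diagnostics/FPGA (Maximus) variant: no
 * real audio or stream encoders are created there, so the shared resource
 * construction falls back to virtual stream encoders (see the TODO in
 * destruct()).  construct() below chooses between the two tables based on
 * IS_FPGA_MAXIMUS_DC().
 */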

void dcn10_clock_source_destroy(struct clock_source **clk_src)
{
	kfree(TO_DCE110_CLK_SRC(*clk_src));
	*clk_src = NULL;
}

static struct pp_smu_funcs_rv *dcn10_pp_smu_create(struct dc_context *ctx)
{
	struct pp_smu_funcs_rv *pp_smu = kzalloc(sizeof(*pp_smu), GFP_KERNEL);

	if (!pp_smu)
		return pp_smu;

	dm_pp_get_funcs_rv(ctx, pp_smu);
	return pp_smu;
}

static void destruct(struct dcn10_resource_pool *pool)
{
	unsigned int i;

	for (i = 0; i < pool->base.stream_enc_count; i++) {
		if (pool->base.stream_enc[i] != NULL) {
			/* TODO: free dcn version of stream encoder once implemented
			 * rather than using virtual stream encoder
			 */
			kfree(pool->base.stream_enc[i]);
			pool->base.stream_enc[i] = NULL;
		}
	}

	if (pool->base.mpc != NULL) {
		kfree(TO_DCN10_MPC(pool->base.mpc));
		pool->base.mpc = NULL;
	}

	if (pool->base.hubbub != NULL) {
		kfree(pool->base.hubbub);
		pool->base.hubbub = NULL;
	}

	for (i = 0; i < pool->base.pipe_count; i++) {
		if (pool->base.opps[i] != NULL)
			pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);

		if (pool->base.dpps[i] != NULL)
			dcn10_dpp_destroy(&pool->base.dpps[i]);

		if (pool->base.ipps[i] != NULL)
			pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);

		if (pool->base.hubps[i] != NULL) {
			kfree(TO_DCN10_HUBP(pool->base.hubps[i]));
			pool->base.hubps[i] = NULL;
		}

		if (pool->base.irqs != NULL) {
			dal_irq_service_destroy(&pool->base.irqs);
		}

		if (pool->base.timing_generators[i] != NULL) {
			kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
			pool->base.timing_generators[i] = NULL;
		}
	}

	for (i = 0; i < pool->base.stream_enc_count; i++)
		kfree(pool->base.stream_enc[i]);

	for (i = 0; i < pool->base.audio_count; i++) {
		if (pool->base.audios[i])
			dce_aud_destroy(&pool->base.audios[i]);
	}

	for (i = 0; i < pool->base.clk_src_count; i++) {
		if (pool->base.clock_sources[i] != NULL) {
			dcn10_clock_source_destroy(&pool->base.clock_sources[i]);
			pool->base.clock_sources[i] = NULL;
		}
	}

	if (pool->base.dp_clock_source != NULL) {
		dcn10_clock_source_destroy(&pool->base.dp_clock_source);
		pool->base.dp_clock_source = NULL;
	}

	if (pool->base.abm != NULL)
		dce_abm_destroy(&pool->base.abm);

	if (pool->base.dmcu != NULL)
		dce_dmcu_destroy(&pool->base.dmcu);

	if (pool->base.display_clock != NULL)
		dce_disp_clk_destroy(&pool->base.display_clock);

	kfree(pool->base.pp_smu);
}
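
/*
 * Two quirks in destruct() worth noting: the trailing
 * kfree(pool->base.stream_enc[i]) loop is redundant but harmless, since the
 * first loop already freed the encoders and NULLed the pointers (kfree(NULL)
 * is a no-op); and the dal_irq_service_destroy() call sits inside the
 * per-pipe loop but only does real work once, as the destroy helper is
 * expected to clear pool->base.irqs.
 */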

static struct hubp *dcn10_hubp_create(
	struct dc_context *ctx,
	uint32_t inst)
{
	struct dcn10_hubp *hubp1 =
		kzalloc(sizeof(struct dcn10_hubp), GFP_KERNEL);

	if (!hubp1)
		return NULL;

	dcn10_hubp_construct(hubp1, ctx, inst,
			     &hubp_regs[inst], &hubp_shift, &hubp_mask);
	return &hubp1->base;
}

static void get_pixel_clock_parameters(
	const struct pipe_ctx *pipe_ctx,
	struct pixel_clk_params *pixel_clk_params)
{
	const struct dc_stream_state *stream = pipe_ctx->stream;

	pixel_clk_params->requested_pix_clk = stream->timing.pix_clk_khz;
	pixel_clk_params->encoder_object_id = stream->sink->link->link_enc->id;
	pixel_clk_params->signal_type = pipe_ctx->stream->signal;
	pixel_clk_params->controller_id = pipe_ctx->pipe_idx + 1;
	/* TODO: un-hardcode */
	pixel_clk_params->requested_sym_clk = LINK_RATE_LOW *
			LINK_RATE_REF_FREQ_IN_KHZ;
	pixel_clk_params->flags.ENABLE_SS = 0;
	pixel_clk_params->color_depth =
			stream->timing.display_color_depth;
	pixel_clk_params->flags.DISPLAY_BLANKED = 1;
	pixel_clk_params->pixel_encoding = stream->timing.pixel_encoding;

	if (stream->timing.pixel_encoding == PIXEL_ENCODING_YCBCR422)
		pixel_clk_params->color_depth = COLOR_DEPTH_888;

	if (stream->timing.pixel_encoding == PIXEL_ENCODING_YCBCR420)
		pixel_clk_params->requested_pix_clk /= 2;
}

static void build_clamping_params(struct dc_stream_state *stream)
{
	stream->clamping.clamping_level = CLAMPING_FULL_RANGE;
	stream->clamping.c_depth = stream->timing.display_color_depth;
	stream->clamping.pixel_encoding = stream->timing.pixel_encoding;
}

static void build_pipe_hw_param(struct pipe_ctx *pipe_ctx)
{
	get_pixel_clock_parameters(pipe_ctx, &pipe_ctx->stream_res.pix_clk_params);

	pipe_ctx->clock_source->funcs->get_pix_clk_dividers(
		pipe_ctx->clock_source,
		&pipe_ctx->stream_res.pix_clk_params,
		&pipe_ctx->pll_settings);

	pipe_ctx->stream->clamping.pixel_encoding = pipe_ctx->stream->timing.pixel_encoding;

	resource_build_bit_depth_reduction_params(pipe_ctx->stream,
			&pipe_ctx->stream->bit_depth_params);
	build_clamping_params(pipe_ctx->stream);
}

static enum dc_status build_mapped_resource(
		const struct dc *dc,
		struct dc_state *context,
		struct dc_stream_state *stream)
{
	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream);

	/* TODO: Seems unneeded anymore */
	/* if (old_context && resource_is_stream_unchanged(old_context, stream)) {
			if (stream != NULL && old_context->streams[i] != NULL) {
				todo: shouldn't have to copy missing parameter here
				resource_build_bit_depth_reduction_params(stream,
						&stream->bit_depth_params);
				stream->clamping.pixel_encoding =
						stream->timing.pixel_encoding;

				resource_build_bit_depth_reduction_params(stream,
						&stream->bit_depth_params);
				build_clamping_params(stream);

				continue;
			}
		}
	*/

	if (!pipe_ctx)
		return DC_ERROR_UNEXPECTED;

	build_pipe_hw_param(pipe_ctx);

	return DC_OK;
}

enum dc_status dcn10_add_stream_to_ctx(
		struct dc *dc,
		struct dc_state *new_ctx,
		struct dc_stream_state *dc_stream)
{
	enum dc_status result = DC_ERROR_UNEXPECTED;

	result = resource_map_pool_resources(dc, new_ctx, dc_stream);

	if (result == DC_OK)
		result = resource_map_phy_clock_resources(dc, new_ctx, dc_stream);

	if (result == DC_OK)
		result = build_mapped_resource(dc, new_ctx, dc_stream);

	return result;
}

enum dc_status dcn10_validate_guaranteed(
		struct dc *dc,
		struct dc_stream_state *dc_stream,
		struct dc_state *context)
{
	enum dc_status result = DC_ERROR_UNEXPECTED;

	context->streams[0] = dc_stream;
	dc_stream_retain(context->streams[0]);
	context->stream_count++;

	result = resource_map_pool_resources(dc, context, dc_stream);

	if (result == DC_OK)
		result = resource_map_phy_clock_resources(dc, context, dc_stream);

	if (result == DC_OK)
		result = build_mapped_resource(dc, context, dc_stream);

	if (result == DC_OK) {
		validate_guaranteed_copy_streams(
				context, dc->caps.max_streams);
		result = resource_build_scaling_params_for_context(dc, context);
	}

	if (result == DC_OK && !dcn_validate_bandwidth(dc, context))
		return DC_FAIL_BANDWIDTH_VALIDATE;

	return result;
}

static struct pipe_ctx *dcn10_acquire_idle_pipe_for_layer(
		struct dc_state *context,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
{
	struct resource_context *res_ctx = &context->res_ctx;
	struct pipe_ctx *head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
	struct pipe_ctx *idle_pipe = find_idle_secondary_pipe(res_ctx, pool);

	if (!head_pipe) {
		ASSERT(0);
		return NULL;
	}

	if (!idle_pipe)
		return NULL;

	idle_pipe->stream = head_pipe->stream;
	idle_pipe->stream_res.tg = head_pipe->stream_res.tg;
	idle_pipe->stream_res.opp = head_pipe->stream_res.opp;

	idle_pipe->plane_res.hubp = pool->hubps[idle_pipe->pipe_idx];
	idle_pipe->plane_res.ipp = pool->ipps[idle_pipe->pipe_idx];
	idle_pipe->plane_res.dpp = pool->dpps[idle_pipe->pipe_idx];

	return idle_pipe;
}
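
/*
 * Note on dcn10_acquire_idle_pipe_for_layer(): the acquired secondary pipe
 * shares the head pipe's stream resources (stream, timing generator, OPP)
 * but takes its own plane resources (HUBP/IPP/DPP) by pipe index, which is
 * what lets MPC pipe-split and multi-plane blending feed a single stream.
 */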

enum dcc_control {
	dcc_control__256_256_xxx,
	dcc_control__128_128_xxx,
	dcc_control__256_64_64,
};

enum segment_order {
	segment_order__na,
	segment_order__contiguous,
	segment_order__non_contiguous,
};

static bool dcc_support_pixel_format(
		enum surface_pixel_format format,
		unsigned int *bytes_per_element)
{
	/* DML: get_bytes_per_element */
	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		*bytes_per_element = 2;
		return true;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
		*bytes_per_element = 4;
		return true;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
		*bytes_per_element = 8;
		return true;
	default:
		return false;
	}
}

static bool dcc_support_swizzle(
		enum swizzle_mode_values swizzle,
		unsigned int bytes_per_element,
		enum segment_order *segment_order_horz,
		enum segment_order *segment_order_vert)
{
	bool standard_swizzle = false;
	bool display_swizzle = false;

	switch (swizzle) {
	case DC_SW_4KB_S:
	case DC_SW_64KB_S:
	case DC_SW_VAR_S:
	case DC_SW_4KB_S_X:
	case DC_SW_64KB_S_X:
	case DC_SW_VAR_S_X:
		standard_swizzle = true;
		break;
	case DC_SW_4KB_D:
	case DC_SW_64KB_D:
	case DC_SW_VAR_D:
	case DC_SW_4KB_D_X:
	case DC_SW_64KB_D_X:
	case DC_SW_VAR_D_X:
		display_swizzle = true;
		break;
	default:
		break;
	}

	if (bytes_per_element == 1 && standard_swizzle) {
		*segment_order_horz = segment_order__contiguous;
		*segment_order_vert = segment_order__na;
		return true;
	}
	if (bytes_per_element == 2 && standard_swizzle) {
		*segment_order_horz = segment_order__non_contiguous;
		*segment_order_vert = segment_order__contiguous;
		return true;
	}
	if (bytes_per_element == 4 && standard_swizzle) {
		*segment_order_horz = segment_order__non_contiguous;
		*segment_order_vert = segment_order__contiguous;
		return true;
	}
	if (bytes_per_element == 8 && standard_swizzle) {
		*segment_order_horz = segment_order__na;
		*segment_order_vert = segment_order__contiguous;
		return true;
	}
	if (bytes_per_element == 8 && display_swizzle) {
		*segment_order_horz = segment_order__contiguous;
		*segment_order_vert = segment_order__non_contiguous;
		return true;
	}

	return false;
}

static void get_blk256_size(unsigned int *blk256_width, unsigned int *blk256_height,
		unsigned int bytes_per_element)
{
	/* copied from DML; might want to refactor so DML can be leveraged directly */
	/* DML: get_blk256_size */
	if (bytes_per_element == 1) {
		*blk256_width = 16;
		*blk256_height = 16;
	} else if (bytes_per_element == 2) {
		*blk256_width = 16;
		*blk256_height = 8;
	} else if (bytes_per_element == 4) {
		*blk256_width = 8;
		*blk256_height = 8;
	} else if (bytes_per_element == 8) {
		*blk256_width = 8;
		*blk256_height = 4;
	}
}

static void det_request_size(
		unsigned int height,
		unsigned int width,
		unsigned int bpe,
		bool *req128_horz_wc,
		bool *req128_vert_wc)
{
	unsigned int detile_buf_size = 164 * 1024;  /* 164KB for DCN1.0 */

	unsigned int blk256_height = 0;
	unsigned int blk256_width = 0;
	unsigned int swath_bytes_horz_wc, swath_bytes_vert_wc;

	get_blk256_size(&blk256_width, &blk256_height, bpe);

	swath_bytes_horz_wc = height * blk256_height * bpe;
	swath_bytes_vert_wc = width * blk256_width * bpe;

	*req128_horz_wc = (2 * swath_bytes_horz_wc <= detile_buf_size) ?
			false : /* full 256B request */
			true; /* half 128B request */

	*req128_vert_wc = (2 * swath_bytes_vert_wc <= detile_buf_size) ?
			false : /* full 256B request */
			true; /* half 128B request */
}

static bool get_dcc_compression_cap(const struct dc *dc,
		const struct dc_dcc_surface_param *input,
		struct dc_surface_dcc_cap *output)
{
	/* implements section 1.6.2.1 of DCN1_Programming_Guide.docx */
	enum dcc_control dcc_control;
	unsigned int bpe;
	enum segment_order segment_order_horz, segment_order_vert;
	bool req128_horz_wc, req128_vert_wc;

	memset(output, 0, sizeof(*output));

	if (dc->debug.disable_dcc == DCC_DISABLE)
		return false;

	if (!dcc_support_pixel_format(input->format,
			&bpe))
		return false;

	if (!dcc_support_swizzle(input->swizzle_mode, bpe,
			&segment_order_horz, &segment_order_vert))
		return false;

	det_request_size(input->surface_size.height, input->surface_size.width,
			bpe, &req128_horz_wc, &req128_vert_wc);

	if (!req128_horz_wc && !req128_vert_wc) {
		dcc_control = dcc_control__256_256_xxx;
	} else if (input->scan == SCAN_DIRECTION_HORIZONTAL) {
		if (!req128_horz_wc)
			dcc_control = dcc_control__256_256_xxx;
		else if (segment_order_horz == segment_order__contiguous)
			dcc_control = dcc_control__128_128_xxx;
		else
			dcc_control = dcc_control__256_64_64;
	} else if (input->scan == SCAN_DIRECTION_VERTICAL) {
		if (!req128_vert_wc)
			dcc_control = dcc_control__256_256_xxx;
		else if (segment_order_vert == segment_order__contiguous)
			dcc_control = dcc_control__128_128_xxx;
		else
			dcc_control = dcc_control__256_64_64;
	} else {
		if ((req128_horz_wc &&
			segment_order_horz == segment_order__non_contiguous) ||
			(req128_vert_wc &&
			segment_order_vert == segment_order__non_contiguous))
			/* access_dir not known, must use most constraining */
			dcc_control = dcc_control__256_64_64;
		else
			/* req128 is true for either horz or vert,
			 * but segment_order is contiguous
			 */
			dcc_control = dcc_control__128_128_xxx;
	}

	if (dc->debug.disable_dcc == DCC_HALF_REQ_DISALBE &&
		dcc_control != dcc_control__256_256_xxx)
		return false;

	switch (dcc_control) {
	case dcc_control__256_256_xxx:
		output->grph.rgb.max_uncompressed_blk_size = 256;
		output->grph.rgb.max_compressed_blk_size = 256;
		output->grph.rgb.independent_64b_blks = false;
		break;
	case dcc_control__128_128_xxx:
		output->grph.rgb.max_uncompressed_blk_size = 128;
		output->grph.rgb.max_compressed_blk_size = 128;
		output->grph.rgb.independent_64b_blks = false;
		break;
	case dcc_control__256_64_64:
		output->grph.rgb.max_uncompressed_blk_size = 256;
		output->grph.rgb.max_compressed_blk_size = 64;
		output->grph.rgb.independent_64b_blks = true;
		break;
	}

	output->capable = true;
	output->const_color_support = false;

	return true;
}
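
/*
 * Worked example (editor's illustration): for a 3840x2160 ARGB8888 surface
 * (bpe = 4, so a 256B block is 8x8 pixels) the horizontal write-combine
 * swath is 2160 * 8 * 4 = 69120 bytes and the vertical one is
 * 3840 * 8 * 4 = 122880 bytes.  Doubled, only the vertical swath (245760 B)
 * exceeds the 164KB detile buffer, so req128_vert_wc is true while
 * req128_horz_wc stays false; with a horizontal scan direction the surface
 * therefore still gets dcc_control__256_256_xxx, i.e. 256B max
 * uncompressed/compressed blocks and no independent 64B blocks.
 */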

static void dcn10_destroy_resource_pool(struct resource_pool **pool)
{
	struct dcn10_resource_pool *dcn10_pool = TO_DCN10_RES_POOL(*pool);

	destruct(dcn10_pool);
	kfree(dcn10_pool);
	*pool = NULL;
}

static enum dc_status dcn10_validate_plane(const struct dc_plane_state *plane_state, struct dc_caps *caps)
{
	if (plane_state->format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN
			&& caps->max_video_width != 0
			&& plane_state->src_rect.width > caps->max_video_width)
		return DC_FAIL_SURFACE_VALIDATE;

	return DC_OK;
}

static struct dc_cap_funcs cap_funcs = {
	.get_dcc_compression_cap = get_dcc_compression_cap
};

static struct resource_funcs dcn10_res_pool_funcs = {
	.destroy = dcn10_destroy_resource_pool,
	.link_enc_create = dcn10_link_encoder_create,
	.validate_guaranteed = dcn10_validate_guaranteed,
	.validate_bandwidth = dcn_validate_bandwidth,
	.acquire_idle_pipe_for_layer = dcn10_acquire_idle_pipe_for_layer,
	.validate_plane = dcn10_validate_plane,
	.add_stream_to_ctx = dcn10_add_stream_to_ctx
};

static uint32_t read_pipe_fuses(struct dc_context *ctx)
{
	uint32_t value = dm_read_reg_soc15(ctx, mmCC_DC_PIPE_DIS, 0);
	/* RV1 supports a maximum of 4 pipes */
	value = value & 0xf;
	return value;
}
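
/*
 * Each bit set in CC_DC_PIPE_DIS marks a display pipe instance that has been
 * fused off; only the low four bits matter on Raven.  construct() below uses
 * this mask to skip the disabled hardware instances while packing the
 * remaining pipes into contiguous pool indices.
 */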

static bool construct(
	uint8_t num_virtual_links,
	struct dc *dc,
	struct dcn10_resource_pool *pool)
{
	int i;
	int j;
	struct dc_context *ctx = dc->ctx;
	uint32_t pipe_fuses = read_pipe_fuses(ctx);

	ctx->dc_bios->regs = &bios_regs;

	pool->base.res_cap = &res_cap;
	pool->base.funcs = &dcn10_res_pool_funcs;

	/*
	 * TODO: fill in from actual raven resource when we create
	 * more than virtual encoder
	 */

	/*************************************************
	 *  Resource + asic cap hardcoding               *
	 *************************************************/
	pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;

	/* max pipe num for ASIC before checking pipe fuses */
	pool->base.pipe_count = pool->base.res_cap->num_timing_generator;

	dc->caps.max_video_width = 3840;
	dc->caps.max_downscale_ratio = 200;
	dc->caps.i2c_speed_in_khz = 100;
	dc->caps.max_cursor_size = 256;
	dc->caps.max_slave_planes = 1;
	dc->caps.is_apu = true;

	if (dc->ctx->dce_environment == DCE_ENV_PRODUCTION_DRV)
		dc->debug = debug_defaults_drv;
	else
		dc->debug = debug_defaults_diags;

	/*************************************************
	 *  Create resources                             *
	 *************************************************/
	pool->base.clock_sources[DCN10_CLK_SRC_PLL0] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL0,
				&clk_src_regs[0], false);
	pool->base.clock_sources[DCN10_CLK_SRC_PLL1] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL1,
				&clk_src_regs[1], false);
	pool->base.clock_sources[DCN10_CLK_SRC_PLL2] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL2,
				&clk_src_regs[2], false);
	pool->base.clock_sources[DCN10_CLK_SRC_PLL3] =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_COMBO_PHY_PLL3,
				&clk_src_regs[3], false);

	pool->base.clk_src_count = DCN10_CLK_SRC_TOTAL;

	pool->base.dp_clock_source =
			dcn10_clock_source_create(ctx, ctx->dc_bios,
				CLOCK_SOURCE_ID_DP_DTO,
				/* todo: not reuse phy_pll registers */
				&clk_src_regs[0], true);

	for (i = 0; i < pool->base.clk_src_count; i++) {
		if (pool->base.clock_sources[i] == NULL) {
			dm_error("DC: failed to create clock sources!\n");
			BREAK_TO_DEBUGGER();
			goto fail;
		}
	}

	if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
		pool->base.display_clock = dce120_disp_clk_create(ctx);
		if (pool->base.display_clock == NULL) {
			dm_error("DC: failed to create display clock!\n");
			BREAK_TO_DEBUGGER();
			goto fail;
		}
	}

	pool->base.dmcu = dcn10_dmcu_create(ctx,
			&dmcu_regs,
			&dmcu_shift,
			&dmcu_mask);
	if (pool->base.dmcu == NULL) {
		dm_error("DC: failed to create dmcu!\n");
		BREAK_TO_DEBUGGER();
		goto fail;
	}

	pool->base.abm = dce_abm_create(ctx,
			&abm_regs,
			&abm_shift,
			&abm_mask);
	if (pool->base.abm == NULL) {
		dm_error("DC: failed to create abm!\n");
		BREAK_TO_DEBUGGER();
		goto fail;
	}

	dml_init_instance(&dc->dml, DML_PROJECT_RAVEN1);
	memcpy(dc->dcn_ip, &dcn10_ip_defaults, sizeof(dcn10_ip_defaults));
	memcpy(dc->dcn_soc, &dcn10_soc_defaults, sizeof(dcn10_soc_defaults));

	if (ASICREV_IS_RV1_F0(dc->ctx->asic_id.hw_internal_rev)) {
		dc->dcn_soc->urgent_latency = 3;
		dc->debug.disable_dmcu = true;
		dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9 = 41.60f;
	}

	dc->dcn_soc->number_of_channels = dc->ctx->asic_id.vram_width / ddr4_dram_width;
	ASSERT(dc->dcn_soc->number_of_channels < 3);
	if (dc->dcn_soc->number_of_channels == 0) /* old SBIOS bug */
		dc->dcn_soc->number_of_channels = 2;

	if (dc->dcn_soc->number_of_channels == 1) {
		dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9 = 19.2f;
		dc->dcn_soc->fabric_and_dram_bandwidth_vnom0p8 = 17.066f;
		dc->dcn_soc->fabric_and_dram_bandwidth_vmid0p72 = 14.933f;
		dc->dcn_soc->fabric_and_dram_bandwidth_vmin0p65 = 12.8f;
		if (ASICREV_IS_RV1_F0(dc->ctx->asic_id.hw_internal_rev)) {
			dc->dcn_soc->fabric_and_dram_bandwidth_vmax0p9 = 20.80f;
		}
	}

	pool->base.pp_smu = dcn10_pp_smu_create(ctx);

	if (!dc->debug.disable_pplib_clock_request)
		dcn_bw_update_from_pplib(dc);
	dcn_bw_sync_calcs_and_dml(dc);
	if (!dc->debug.disable_pplib_wm_range) {
		dc->res_pool = &pool->base;
		dcn_bw_notify_pplib_of_wm_ranges(dc);
	}

	{
		struct irq_service_init_data init_data;
		init_data.ctx = dc->ctx;
		pool->base.irqs = dal_irq_service_dcn10_create(&init_data);
		if (!pool->base.irqs)
			goto fail;
	}

	/* index to valid pipe resource */
	j = 0;
	/* mem input -> ipp -> dpp -> opp -> TG */
	for (i = 0; i < pool->base.pipe_count; i++) {
		/* if pipe is disabled, skip instance of HW pipe,
		 * i.e. skip ASIC register instance
		 */
		if ((pipe_fuses & (1 << i)) != 0)
			continue;

		pool->base.hubps[j] = dcn10_hubp_create(ctx, i);
		if (pool->base.hubps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create memory input!\n");
			goto fail;
		}

		pool->base.ipps[j] = dcn10_ipp_create(ctx, i);
		if (pool->base.ipps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create input pixel processor!\n");
			goto fail;
		}

		pool->base.dpps[j] = dcn10_dpp_create(ctx, i);
		if (pool->base.dpps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create dpp!\n");
			goto fail;
		}

		pool->base.opps[j] = dcn10_opp_create(ctx, i);
		if (pool->base.opps[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error(
				"DC: failed to create output pixel processor!\n");
			goto fail;
		}

		pool->base.timing_generators[j] = dcn10_timing_generator_create(
				ctx, i);
		if (pool->base.timing_generators[j] == NULL) {
			BREAK_TO_DEBUGGER();
			dm_error("DC: failed to create tg!\n");
			goto fail;
		}
		/* check next valid pipe */
		j++;
	}

	/* valid pipe num */
	pool->base.pipe_count = j;

	/* within the DML lib this is hard-coded to 4; if ASIC pipes are
	 * fused, the value may need to change
	 */
	dc->dml.ip.max_num_dpp = pool->base.pipe_count;
	dc->dcn_ip->max_num_dpp = pool->base.pipe_count;

	pool->base.mpc = dcn10_mpc_create(ctx);
	if (pool->base.mpc == NULL) {
		BREAK_TO_DEBUGGER();
		dm_error("DC: failed to create mpc!\n");
		goto fail;
	}

	pool->base.hubbub = dcn10_hubbub_create(ctx);
	if (pool->base.hubbub == NULL) {
		BREAK_TO_DEBUGGER();
		dm_error("DC: failed to create hubbub!\n");
		goto fail;
	}

	if (!resource_construct(num_virtual_links, dc, &pool->base,
			(!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment) ?
			&res_create_funcs : &res_create_maximus_funcs)))
		goto fail;

	dcn10_hw_sequencer_construct(dc);
	dc->caps.max_planes = pool->base.pipe_count;

	dc->cap_funcs = cap_funcs;

	return true;

fail:

	destruct(pool);

	return false;
}

struct resource_pool *dcn10_create_resource_pool(
		uint8_t num_virtual_links,
		struct dc *dc)
{
	struct dcn10_resource_pool *pool =
		kzalloc(sizeof(struct dcn10_resource_pool), GFP_KERNEL);

	if (!pool)
		return NULL;

	if (construct(num_virtual_links, dc, pool))
		return &pool->base;

	BREAK_TO_DEBUGGER();
	return NULL;
}
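
/*
 * Rough usage sketch (editor's illustration, not taken from this file): the
 * DC core selects this entry point when it detects a DCN1.0/Raven ASIC and
 * later tears the pool down through the destroy callback installed in
 * dcn10_res_pool_funcs, roughly:
 *
 *	struct resource_pool *pool = dcn10_create_resource_pool(1, dc);
 *
 *	if (pool) {
 *		...
 *		pool->funcs->destroy(&pool);
 *	}
 */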