clk-pll.c

  1. /*
  2. * Copyright (c) 2012, 2013, NVIDIA CORPORATION. All rights reserved.
  3. *
  4. * This program is free software; you can redistribute it and/or modify it
  5. * under the terms and conditions of the GNU General Public License,
  6. * version 2, as published by the Free Software Foundation.
  7. *
  8. * This program is distributed in the hope it will be useful, but WITHOUT
  9. * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  10. * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
  11. * more details.
  12. *
  13. * You should have received a copy of the GNU General Public License
  14. * along with this program. If not, see <http://www.gnu.org/licenses/>.
  15. */
  16. #include <linux/slab.h>
  17. #include <linux/io.h>
  18. #include <linux/delay.h>
  19. #include <linux/err.h>
  20. #include <linux/clk-provider.h>
  21. #include <linux/clk.h>
  22. #include "clk.h"
  23. #define PLL_BASE_BYPASS BIT(31)
  24. #define PLL_BASE_ENABLE BIT(30)
  25. #define PLL_BASE_REF_ENABLE BIT(29)
  26. #define PLL_BASE_OVERRIDE BIT(28)
  27. #define PLL_BASE_DIVP_SHIFT 20
  28. #define PLL_BASE_DIVP_WIDTH 3
  29. #define PLL_BASE_DIVN_SHIFT 8
  30. #define PLL_BASE_DIVN_WIDTH 10
  31. #define PLL_BASE_DIVM_SHIFT 0
  32. #define PLL_BASE_DIVM_WIDTH 5
  33. #define PLLU_POST_DIVP_MASK 0x1
  34. #define PLL_MISC_DCCON_SHIFT 20
  35. #define PLL_MISC_CPCON_SHIFT 8
  36. #define PLL_MISC_CPCON_WIDTH 4
  37. #define PLL_MISC_CPCON_MASK ((1 << PLL_MISC_CPCON_WIDTH) - 1)
  38. #define PLL_MISC_LFCON_SHIFT 4
  39. #define PLL_MISC_LFCON_WIDTH 4
  40. #define PLL_MISC_LFCON_MASK ((1 << PLL_MISC_LFCON_WIDTH) - 1)
  41. #define PLL_MISC_VCOCON_SHIFT 0
  42. #define PLL_MISC_VCOCON_WIDTH 4
  43. #define PLL_MISC_VCOCON_MASK ((1 << PLL_MISC_VCOCON_WIDTH) - 1)
  44. #define OUT_OF_TABLE_CPCON 8
  45. #define PMC_PLLP_WB0_OVERRIDE 0xf8
  46. #define PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE BIT(12)
  47. #define PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE BIT(11)
  48. #define PLL_POST_LOCK_DELAY 50
  49. #define PLLDU_LFCON_SET_DIVN 600
  50. #define PLLE_BASE_DIVCML_SHIFT 24
  51. #define PLLE_BASE_DIVCML_MASK 0xf
  52. #define PLLE_BASE_DIVP_SHIFT 16
  53. #define PLLE_BASE_DIVP_WIDTH 6
  54. #define PLLE_BASE_DIVN_SHIFT 8
  55. #define PLLE_BASE_DIVN_WIDTH 8
  56. #define PLLE_BASE_DIVM_SHIFT 0
  57. #define PLLE_BASE_DIVM_WIDTH 8
  58. #define PLLE_MISC_SETUP_BASE_SHIFT 16
  59. #define PLLE_MISC_SETUP_BASE_MASK (0xffff << PLLE_MISC_SETUP_BASE_SHIFT)
  60. #define PLLE_MISC_LOCK_ENABLE BIT(9)
  61. #define PLLE_MISC_READY BIT(15)
  62. #define PLLE_MISC_SETUP_EX_SHIFT 2
  63. #define PLLE_MISC_SETUP_EX_MASK (3 << PLLE_MISC_SETUP_EX_SHIFT)
  64. #define PLLE_MISC_SETUP_MASK (PLLE_MISC_SETUP_BASE_MASK | \
  65. PLLE_MISC_SETUP_EX_MASK)
  66. #define PLLE_MISC_SETUP_VALUE (7 << PLLE_MISC_SETUP_BASE_SHIFT)
  67. #define PLLE_SS_CTRL 0x68
  68. #define PLLE_SS_CNTL_BYPASS_SS BIT(10)
  69. #define PLLE_SS_CNTL_INTERP_RESET BIT(11)
  70. #define PLLE_SS_CNTL_SSC_BYP BIT(12)
  71. #define PLLE_SS_CNTL_CENTER BIT(14)
  72. #define PLLE_SS_CNTL_INVERT BIT(15)
  73. #define PLLE_SS_DISABLE (PLLE_SS_CNTL_BYPASS_SS | PLLE_SS_CNTL_INTERP_RESET |\
  74. PLLE_SS_CNTL_SSC_BYP)
  75. #define PLLE_SS_MAX_MASK 0x1ff
  76. #define PLLE_SS_MAX_VAL 0x25
  77. #define PLLE_SS_INC_MASK (0xff << 16)
  78. #define PLLE_SS_INC_VAL (0x1 << 16)
  79. #define PLLE_SS_INCINTRV_MASK (0x3f << 24)
  80. #define PLLE_SS_INCINTRV_VAL (0x20 << 24)
  81. #define PLLE_SS_COEFFICIENTS_MASK \
  82. (PLLE_SS_MAX_MASK | PLLE_SS_INC_MASK | PLLE_SS_INCINTRV_MASK)
  83. #define PLLE_SS_COEFFICIENTS_VAL \
  84. (PLLE_SS_MAX_VAL | PLLE_SS_INC_VAL | PLLE_SS_INCINTRV_VAL)
  85. #define PLLE_AUX_PLLP_SEL BIT(2)
  86. #define PLLE_AUX_USE_LOCKDET BIT(3)
  87. #define PLLE_AUX_ENABLE_SWCTL BIT(4)
  88. #define PLLE_AUX_SS_SWCTL BIT(6)
  89. #define PLLE_AUX_SEQ_ENABLE BIT(24)
  90. #define PLLE_AUX_SEQ_START_STATE BIT(25)
  91. #define PLLE_AUX_PLLRE_SEL BIT(28)
  92. #define XUSBIO_PLL_CFG0 0x51c
  93. #define XUSBIO_PLL_CFG0_PADPLL_RESET_SWCTL BIT(0)
  94. #define XUSBIO_PLL_CFG0_CLK_ENABLE_SWCTL BIT(2)
  95. #define XUSBIO_PLL_CFG0_PADPLL_USE_LOCKDET BIT(6)
  96. #define XUSBIO_PLL_CFG0_SEQ_ENABLE BIT(24)
  97. #define XUSBIO_PLL_CFG0_SEQ_START_STATE BIT(25)
  98. #define PLLE_MISC_PLLE_PTS BIT(8)
  99. #define PLLE_MISC_IDDQ_SW_VALUE BIT(13)
  100. #define PLLE_MISC_IDDQ_SW_CTRL BIT(14)
  101. #define PLLE_MISC_VREG_BG_CTRL_SHIFT 4
  102. #define PLLE_MISC_VREG_BG_CTRL_MASK (3 << PLLE_MISC_VREG_BG_CTRL_SHIFT)
  103. #define PLLE_MISC_VREG_CTRL_SHIFT 2
  104. #define PLLE_MISC_VREG_CTRL_MASK (2 << PLLE_MISC_VREG_CTRL_SHIFT)
  105. #define PLLCX_MISC_STROBE BIT(31)
  106. #define PLLCX_MISC_RESET BIT(30)
  107. #define PLLCX_MISC_SDM_DIV_SHIFT 28
  108. #define PLLCX_MISC_SDM_DIV_MASK (0x3 << PLLCX_MISC_SDM_DIV_SHIFT)
  109. #define PLLCX_MISC_FILT_DIV_SHIFT 26
  110. #define PLLCX_MISC_FILT_DIV_MASK (0x3 << PLLCX_MISC_FILT_DIV_SHIFT)
  111. #define PLLCX_MISC_ALPHA_SHIFT 18
  112. #define PLLCX_MISC_DIV_LOW_RANGE \
  113. ((0x1 << PLLCX_MISC_SDM_DIV_SHIFT) | \
  114. (0x1 << PLLCX_MISC_FILT_DIV_SHIFT))
  115. #define PLLCX_MISC_DIV_HIGH_RANGE \
  116. ((0x2 << PLLCX_MISC_SDM_DIV_SHIFT) | \
  117. (0x2 << PLLCX_MISC_FILT_DIV_SHIFT))
  118. #define PLLCX_MISC_COEF_LOW_RANGE \
  119. ((0x14 << PLLCX_MISC_KA_SHIFT) | (0x38 << PLLCX_MISC_KB_SHIFT))
  120. #define PLLCX_MISC_KA_SHIFT 2
  121. #define PLLCX_MISC_KB_SHIFT 9
  122. #define PLLCX_MISC_DEFAULT (PLLCX_MISC_COEF_LOW_RANGE | \
  123. (0x19 << PLLCX_MISC_ALPHA_SHIFT) | \
  124. PLLCX_MISC_DIV_LOW_RANGE | \
  125. PLLCX_MISC_RESET)
  126. #define PLLCX_MISC1_DEFAULT 0x000d2308
  127. #define PLLCX_MISC2_DEFAULT 0x30211200
  128. #define PLLCX_MISC3_DEFAULT 0x200
  129. #define PMC_SATA_PWRGT 0x1ac
  130. #define PMC_SATA_PWRGT_PLLE_IDDQ_VALUE BIT(5)
  131. #define PMC_SATA_PWRGT_PLLE_IDDQ_SWCTL BIT(4)
  132. #define PLLSS_MISC_KCP 0
  133. #define PLLSS_MISC_KVCO 0
  134. #define PLLSS_MISC_SETUP 0
  135. #define PLLSS_EN_SDM 0
  136. #define PLLSS_EN_SSC 0
  137. #define PLLSS_EN_DITHER2 0
  138. #define PLLSS_EN_DITHER 1
  139. #define PLLSS_SDM_RESET 0
  140. #define PLLSS_CLAMP 0
  141. #define PLLSS_SDM_SSC_MAX 0
  142. #define PLLSS_SDM_SSC_MIN 0
  143. #define PLLSS_SDM_SSC_STEP 0
  144. #define PLLSS_SDM_DIN 0
  145. #define PLLSS_MISC_DEFAULT ((PLLSS_MISC_KCP << 25) | \
  146. (PLLSS_MISC_KVCO << 24) | \
  147. PLLSS_MISC_SETUP)
  148. #define PLLSS_CFG_DEFAULT ((PLLSS_EN_SDM << 31) | \
  149. (PLLSS_EN_SSC << 30) | \
  150. (PLLSS_EN_DITHER2 << 29) | \
  151. (PLLSS_EN_DITHER << 28) | \
  152. (PLLSS_SDM_RESET) << 27 | \
  153. (PLLSS_CLAMP << 22))
  154. #define PLLSS_CTRL1_DEFAULT \
  155. ((PLLSS_SDM_SSC_MAX << 16) | PLLSS_SDM_SSC_MIN)
  156. #define PLLSS_CTRL2_DEFAULT \
  157. ((PLLSS_SDM_SSC_STEP << 16) | PLLSS_SDM_DIN)
  158. #define PLLSS_LOCK_OVERRIDE BIT(24)
  159. #define PLLSS_REF_SRC_SEL_SHIFT 25
  160. #define PLLSS_REF_SRC_SEL_MASK (3 << PLLSS_REF_SRC_SEL_SHIFT)
  161. #define pll_readl(offset, p) readl_relaxed(p->clk_base + offset)
  162. #define pll_readl_base(p) pll_readl(p->params->base_reg, p)
  163. #define pll_readl_misc(p) pll_readl(p->params->misc_reg, p)
  164. #define pll_override_readl(offset, p) readl_relaxed(p->pmc + offset)
  165. #define pll_writel(val, offset, p) writel_relaxed(val, p->clk_base + offset)
  166. #define pll_writel_base(val, p) pll_writel(val, p->params->base_reg, p)
  167. #define pll_writel_misc(val, p) pll_writel(val, p->params->misc_reg, p)
  168. #define pll_override_writel(val, offset, p) writel(val, p->pmc + offset)
  169. #define mask(w) ((1 << (w)) - 1)
  170. #define divm_mask(p) mask(p->params->div_nmp->divm_width)
  171. #define divn_mask(p) mask(p->params->div_nmp->divn_width)
  172. #define divp_mask(p) (p->params->flags & TEGRA_PLLU ? PLLU_POST_DIVP_MASK :\
  173. mask(p->params->div_nmp->divp_width))
  174. #define divm_shift(p) (p)->params->div_nmp->divm_shift
  175. #define divn_shift(p) (p)->params->div_nmp->divn_shift
  176. #define divp_shift(p) (p)->params->div_nmp->divp_shift
  177. #define divm_mask_shifted(p) (divm_mask(p) << divm_shift(p))
  178. #define divn_mask_shifted(p) (divn_mask(p) << divn_shift(p))
  179. #define divp_mask_shifted(p) (divp_mask(p) << divp_shift(p))
  180. #define divm_max(p) (divm_mask(p))
  181. #define divn_max(p) (divn_mask(p))
  182. #define divp_max(p) (1 << (divp_mask(p)))
  183. static struct div_nmp default_nmp = {
  184. .divn_shift = PLL_BASE_DIVN_SHIFT,
  185. .divn_width = PLL_BASE_DIVN_WIDTH,
  186. .divm_shift = PLL_BASE_DIVM_SHIFT,
  187. .divm_width = PLL_BASE_DIVM_WIDTH,
  188. .divp_shift = PLL_BASE_DIVP_SHIFT,
  189. .divp_width = PLL_BASE_DIVP_WIDTH,
  190. };
  191. static void clk_pll_enable_lock(struct tegra_clk_pll *pll)
  192. {
  193. u32 val;
  194. if (!(pll->params->flags & TEGRA_PLL_USE_LOCK))
  195. return;
  196. if (!(pll->params->flags & TEGRA_PLL_HAS_LOCK_ENABLE))
  197. return;
  198. val = pll_readl_misc(pll);
  199. val |= BIT(pll->params->lock_enable_bit_idx);
  200. pll_writel_misc(val, pll);
  201. }
  202. static int clk_pll_wait_for_lock(struct tegra_clk_pll *pll)
  203. {
  204. int i;
  205. u32 val, lock_mask;
  206. void __iomem *lock_addr;
  207. if (!(pll->params->flags & TEGRA_PLL_USE_LOCK)) {
  208. udelay(pll->params->lock_delay);
  209. return 0;
  210. }
  211. lock_addr = pll->clk_base;
  212. if (pll->params->flags & TEGRA_PLL_LOCK_MISC)
  213. lock_addr += pll->params->misc_reg;
  214. else
  215. lock_addr += pll->params->base_reg;
  216. lock_mask = pll->params->lock_mask;
  217. for (i = 0; i < pll->params->lock_delay; i++) {
  218. val = readl_relaxed(lock_addr);
  219. if ((val & lock_mask) == lock_mask) {
  220. udelay(PLL_POST_LOCK_DELAY);
  221. return 0;
  222. }
  223. udelay(2); /* timeout = 2 * lock time */
  224. }
  225. pr_err("%s: Timed out waiting for pll %s lock\n", __func__,
  226. __clk_get_name(pll->hw.clk));
  227. return -1;
  228. }
  229. static int clk_pll_is_enabled(struct clk_hw *hw)
  230. {
  231. struct tegra_clk_pll *pll = to_clk_pll(hw);
  232. u32 val;
  233. if (pll->params->flags & TEGRA_PLLM) {
  234. val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
  235. if (val & PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)
  236. return val & PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE ? 1 : 0;
  237. }
  238. val = pll_readl_base(pll);
  239. return val & PLL_BASE_ENABLE ? 1 : 0;
  240. }
  241. static void _clk_pll_enable(struct clk_hw *hw)
  242. {
  243. struct tegra_clk_pll *pll = to_clk_pll(hw);
  244. u32 val;
  245. clk_pll_enable_lock(pll);
  246. val = pll_readl_base(pll);
  247. if (pll->params->flags & TEGRA_PLL_BYPASS)
  248. val &= ~PLL_BASE_BYPASS;
  249. val |= PLL_BASE_ENABLE;
  250. pll_writel_base(val, pll);
  251. if (pll->params->flags & TEGRA_PLLM) {
  252. val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
  253. val |= PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE;
  254. writel_relaxed(val, pll->pmc + PMC_PLLP_WB0_OVERRIDE);
  255. }
  256. }
  257. static void _clk_pll_disable(struct clk_hw *hw)
  258. {
  259. struct tegra_clk_pll *pll = to_clk_pll(hw);
  260. u32 val;
  261. val = pll_readl_base(pll);
  262. if (pll->params->flags & TEGRA_PLL_BYPASS)
  263. val &= ~PLL_BASE_BYPASS;
  264. val &= ~PLL_BASE_ENABLE;
  265. pll_writel_base(val, pll);
  266. if (pll->params->flags & TEGRA_PLLM) {
  267. val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
  268. val &= ~PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE;
  269. writel_relaxed(val, pll->pmc + PMC_PLLP_WB0_OVERRIDE);
  270. }
  271. }
  272. static int clk_pll_enable(struct clk_hw *hw)
  273. {
  274. struct tegra_clk_pll *pll = to_clk_pll(hw);
  275. unsigned long flags = 0;
  276. int ret;
  277. if (pll->lock)
  278. spin_lock_irqsave(pll->lock, flags);
  279. _clk_pll_enable(hw);
  280. ret = clk_pll_wait_for_lock(pll);
  281. if (pll->lock)
  282. spin_unlock_irqrestore(pll->lock, flags);
  283. return ret;
  284. }
  285. static void clk_pll_disable(struct clk_hw *hw)
  286. {
  287. struct tegra_clk_pll *pll = to_clk_pll(hw);
  288. unsigned long flags = 0;
  289. if (pll->lock)
  290. spin_lock_irqsave(pll->lock, flags);
  291. _clk_pll_disable(hw);
  292. if (pll->lock)
  293. spin_unlock_irqrestore(pll->lock, flags);
  294. }
  295. static int _p_div_to_hw(struct clk_hw *hw, u8 p_div)
  296. {
  297. struct tegra_clk_pll *pll = to_clk_pll(hw);
  298. struct pdiv_map *p_tohw = pll->params->pdiv_tohw;
  299. if (p_tohw) {
  300. while (p_tohw->pdiv) {
  301. if (p_div <= p_tohw->pdiv)
  302. return p_tohw->hw_val;
  303. p_tohw++;
  304. }
  305. return -EINVAL;
  306. }
  307. return -EINVAL;
  308. }
  309. static int _hw_to_p_div(struct clk_hw *hw, u8 p_div_hw)
  310. {
  311. struct tegra_clk_pll *pll = to_clk_pll(hw);
  312. struct pdiv_map *p_tohw = pll->params->pdiv_tohw;
  313. if (p_tohw) {
  314. while (p_tohw->pdiv) {
  315. if (p_div_hw == p_tohw->hw_val)
  316. return p_tohw->pdiv;
  317. p_tohw++;
  318. }
  319. return -EINVAL;
  320. }
  321. return 1 << p_div_hw;
  322. }
  323. static int _get_table_rate(struct clk_hw *hw,
  324. struct tegra_clk_pll_freq_table *cfg,
  325. unsigned long rate, unsigned long parent_rate)
  326. {
  327. struct tegra_clk_pll *pll = to_clk_pll(hw);
  328. struct tegra_clk_pll_freq_table *sel;
  329. for (sel = pll->params->freq_table; sel->input_rate != 0; sel++)
  330. if (sel->input_rate == parent_rate &&
  331. sel->output_rate == rate)
  332. break;
  333. if (sel->input_rate == 0)
  334. return -EINVAL;
  335. cfg->input_rate = sel->input_rate;
  336. cfg->output_rate = sel->output_rate;
  337. cfg->m = sel->m;
  338. cfg->n = sel->n;
  339. cfg->p = sel->p;
  340. cfg->cpcon = sel->cpcon;
  341. return 0;
  342. }
  343. static int _calc_rate(struct clk_hw *hw, struct tegra_clk_pll_freq_table *cfg,
  344. unsigned long rate, unsigned long parent_rate)
  345. {
  346. struct tegra_clk_pll *pll = to_clk_pll(hw);
  347. unsigned long cfreq;
  348. u32 p_div = 0;
  349. int ret;
  350. switch (parent_rate) {
  351. case 12000000:
  352. case 26000000:
  353. cfreq = (rate <= 1000000 * 1000) ? 1000000 : 2000000;
  354. break;
  355. case 13000000:
  356. cfreq = (rate <= 1000000 * 1000) ? 1000000 : 2600000;
  357. break;
  358. case 16800000:
  359. case 19200000:
  360. cfreq = (rate <= 1200000 * 1000) ? 1200000 : 2400000;
  361. break;
  362. case 9600000:
  363. case 28800000:
  364. /*
  365. * PLL_P_OUT1 rate is not listed in PLLA table
  366. */
  367. cfreq = parent_rate/(parent_rate/1000000);
  368. break;
  369. default:
  370. pr_err("%s Unexpected reference rate %lu\n",
  371. __func__, parent_rate);
  372. BUG();
  373. }
  374. /* Raise VCO to guarantee 0.5% accuracy */
  375. for (cfg->output_rate = rate; cfg->output_rate < 200 * cfreq;
  376. cfg->output_rate <<= 1)
  377. p_div++;
  378. cfg->m = parent_rate / cfreq;
  379. cfg->n = cfg->output_rate / cfreq;
  380. cfg->cpcon = OUT_OF_TABLE_CPCON;
  381. if (cfg->m > divm_max(pll) || cfg->n > divn_max(pll) ||
  382. (1 << p_div) > divp_max(pll)
  383. || cfg->output_rate > pll->params->vco_max) {
  384. return -EINVAL;
  385. }
  386. cfg->output_rate >>= p_div;
  387. if (pll->params->pdiv_tohw) {
  388. ret = _p_div_to_hw(hw, 1 << p_div);
  389. if (ret < 0)
  390. return ret;
  391. else
  392. cfg->p = ret;
  393. } else
  394. cfg->p = p_div;
  395. return 0;
  396. }
  397. static void _update_pll_mnp(struct tegra_clk_pll *pll,
  398. struct tegra_clk_pll_freq_table *cfg)
  399. {
  400. u32 val;
  401. struct tegra_clk_pll_params *params = pll->params;
  402. struct div_nmp *div_nmp = params->div_nmp;
  403. if ((params->flags & TEGRA_PLLM) &&
  404. (pll_override_readl(PMC_PLLP_WB0_OVERRIDE, pll) &
  405. PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)) {
  406. val = pll_override_readl(params->pmc_divp_reg, pll);
  407. val &= ~(divp_mask(pll) << div_nmp->override_divp_shift);
  408. val |= cfg->p << div_nmp->override_divp_shift;
  409. pll_override_writel(val, params->pmc_divp_reg, pll);
  410. val = pll_override_readl(params->pmc_divnm_reg, pll);
  411. val &= ~(divm_mask(pll) << div_nmp->override_divm_shift) |
  412. ~(divn_mask(pll) << div_nmp->override_divn_shift);
  413. val |= (cfg->m << div_nmp->override_divm_shift) |
  414. (cfg->n << div_nmp->override_divn_shift);
  415. pll_override_writel(val, params->pmc_divnm_reg, pll);
  416. } else {
  417. val = pll_readl_base(pll);
  418. val &= ~(divm_mask_shifted(pll) | divn_mask_shifted(pll) |
  419. divp_mask_shifted(pll));
  420. val |= (cfg->m << divm_shift(pll)) |
  421. (cfg->n << divn_shift(pll)) |
  422. (cfg->p << divp_shift(pll));
  423. pll_writel_base(val, pll);
  424. }
  425. }
  426. static void _get_pll_mnp(struct tegra_clk_pll *pll,
  427. struct tegra_clk_pll_freq_table *cfg)
  428. {
  429. u32 val;
  430. struct tegra_clk_pll_params *params = pll->params;
  431. struct div_nmp *div_nmp = params->div_nmp;
  432. if ((params->flags & TEGRA_PLLM) &&
  433. (pll_override_readl(PMC_PLLP_WB0_OVERRIDE, pll) &
  434. PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)) {
  435. val = pll_override_readl(params->pmc_divp_reg, pll);
  436. cfg->p = (val >> div_nmp->override_divp_shift) & divp_mask(pll);
  437. val = pll_override_readl(params->pmc_divnm_reg, pll);
  438. cfg->m = (val >> div_nmp->override_divm_shift) & divm_mask(pll);
  439. cfg->n = (val >> div_nmp->override_divn_shift) & divn_mask(pll);
  440. } else {
  441. val = pll_readl_base(pll);
  442. cfg->m = (val >> div_nmp->divm_shift) & divm_mask(pll);
  443. cfg->n = (val >> div_nmp->divn_shift) & divn_mask(pll);
  444. cfg->p = (val >> div_nmp->divp_shift) & divp_mask(pll);
  445. }
  446. }
  447. static void _update_pll_cpcon(struct tegra_clk_pll *pll,
  448. struct tegra_clk_pll_freq_table *cfg,
  449. unsigned long rate)
  450. {
  451. u32 val;
  452. val = pll_readl_misc(pll);
  453. val &= ~(PLL_MISC_CPCON_MASK << PLL_MISC_CPCON_SHIFT);
  454. val |= cfg->cpcon << PLL_MISC_CPCON_SHIFT;
  455. if (pll->params->flags & TEGRA_PLL_SET_LFCON) {
  456. val &= ~(PLL_MISC_LFCON_MASK << PLL_MISC_LFCON_SHIFT);
  457. if (cfg->n >= PLLDU_LFCON_SET_DIVN)
  458. val |= 1 << PLL_MISC_LFCON_SHIFT;
  459. } else if (pll->params->flags & TEGRA_PLL_SET_DCCON) {
  460. val &= ~(1 << PLL_MISC_DCCON_SHIFT);
  461. if (rate >= (pll->params->vco_max >> 1))
  462. val |= 1 << PLL_MISC_DCCON_SHIFT;
  463. }
  464. pll_writel_misc(val, pll);
  465. }
  466. static int _program_pll(struct clk_hw *hw, struct tegra_clk_pll_freq_table *cfg,
  467. unsigned long rate)
  468. {
  469. struct tegra_clk_pll *pll = to_clk_pll(hw);
  470. int state, ret = 0;
  471. state = clk_pll_is_enabled(hw);
  472. if (state)
  473. _clk_pll_disable(hw);
  474. _update_pll_mnp(pll, cfg);
  475. if (pll->params->flags & TEGRA_PLL_HAS_CPCON)
  476. _update_pll_cpcon(pll, cfg, rate);
  477. if (state) {
  478. _clk_pll_enable(hw);
  479. ret = clk_pll_wait_for_lock(pll);
  480. }
  481. return ret;
  482. }
  483. static int clk_pll_set_rate(struct clk_hw *hw, unsigned long rate,
  484. unsigned long parent_rate)
  485. {
  486. struct tegra_clk_pll *pll = to_clk_pll(hw);
  487. struct tegra_clk_pll_freq_table cfg, old_cfg;
  488. unsigned long flags = 0;
  489. int ret = 0;
  490. if (pll->params->flags & TEGRA_PLL_FIXED) {
  491. if (rate != pll->params->fixed_rate) {
  492. pr_err("%s: Can not change %s fixed rate %lu to %lu\n",
  493. __func__, __clk_get_name(hw->clk),
  494. pll->params->fixed_rate, rate);
  495. return -EINVAL;
  496. }
  497. return 0;
  498. }
  499. if (_get_table_rate(hw, &cfg, rate, parent_rate) &&
  500. _calc_rate(hw, &cfg, rate, parent_rate)) {
  501. pr_err("%s: Failed to set %s rate %lu\n", __func__,
  502. __clk_get_name(hw->clk), rate);
  503. WARN_ON(1);
  504. return -EINVAL;
  505. }
  506. if (pll->lock)
  507. spin_lock_irqsave(pll->lock, flags);
  508. _get_pll_mnp(pll, &old_cfg);
  509. if (old_cfg.m != cfg.m || old_cfg.n != cfg.n || old_cfg.p != cfg.p)
  510. ret = _program_pll(hw, &cfg, rate);
  511. if (pll->lock)
  512. spin_unlock_irqrestore(pll->lock, flags);
  513. return ret;
  514. }
  515. static long clk_pll_round_rate(struct clk_hw *hw, unsigned long rate,
  516. unsigned long *prate)
  517. {
  518. struct tegra_clk_pll *pll = to_clk_pll(hw);
  519. struct tegra_clk_pll_freq_table cfg;
  520. if (pll->params->flags & TEGRA_PLL_FIXED)
  521. return pll->params->fixed_rate;
  522. /* PLLM is used for memory; we do not change rate */
  523. if (pll->params->flags & TEGRA_PLLM)
  524. return __clk_get_rate(hw->clk);
  525. if (_get_table_rate(hw, &cfg, rate, *prate) &&
  526. _calc_rate(hw, &cfg, rate, *prate))
  527. return -EINVAL;
  528. return cfg.output_rate;
  529. }
  530. static unsigned long clk_pll_recalc_rate(struct clk_hw *hw,
  531. unsigned long parent_rate)
  532. {
  533. struct tegra_clk_pll *pll = to_clk_pll(hw);
  534. struct tegra_clk_pll_freq_table cfg;
  535. u32 val;
  536. u64 rate = parent_rate;
  537. int pdiv;
  538. val = pll_readl_base(pll);
  539. if ((pll->params->flags & TEGRA_PLL_BYPASS) && (val & PLL_BASE_BYPASS))
  540. return parent_rate;
  541. if ((pll->params->flags & TEGRA_PLL_FIXED) &&
  542. !(val & PLL_BASE_OVERRIDE)) {
  543. struct tegra_clk_pll_freq_table sel;
  544. if (_get_table_rate(hw, &sel, pll->params->fixed_rate,
  545. parent_rate)) {
  546. pr_err("Clock %s has unknown fixed frequency\n",
  547. __clk_get_name(hw->clk));
  548. BUG();
  549. }
  550. return pll->params->fixed_rate;
  551. }
  552. _get_pll_mnp(pll, &cfg);
  553. pdiv = _hw_to_p_div(hw, cfg.p);
  554. if (pdiv < 0) {
  555. WARN_ON(1);
  556. pdiv = 1;
  557. }
  558. cfg.m *= pdiv;
  559. rate *= cfg.n;
  560. do_div(rate, cfg.m);
  561. return rate;
  562. }
  563. static int clk_plle_training(struct tegra_clk_pll *pll)
  564. {
  565. u32 val;
  566. unsigned long timeout;
  567. if (!pll->pmc)
  568. return -ENOSYS;
  569. /*
  570. * PLLE is already disabled, and setup cleared;
  571. * create falling edge on PLLE IDDQ input.
  572. */
  573. val = readl(pll->pmc + PMC_SATA_PWRGT);
  574. val |= PMC_SATA_PWRGT_PLLE_IDDQ_VALUE;
  575. writel(val, pll->pmc + PMC_SATA_PWRGT);
  576. val = readl(pll->pmc + PMC_SATA_PWRGT);
  577. val |= PMC_SATA_PWRGT_PLLE_IDDQ_SWCTL;
  578. writel(val, pll->pmc + PMC_SATA_PWRGT);
  579. val = readl(pll->pmc + PMC_SATA_PWRGT);
  580. val &= ~PMC_SATA_PWRGT_PLLE_IDDQ_VALUE;
  581. writel(val, pll->pmc + PMC_SATA_PWRGT);
  582. val = pll_readl_misc(pll);
  583. timeout = jiffies + msecs_to_jiffies(100);
  584. while (1) {
  585. val = pll_readl_misc(pll);
  586. if (val & PLLE_MISC_READY)
  587. break;
  588. if (time_after(jiffies, timeout)) {
  589. pr_err("%s: timeout waiting for PLLE\n", __func__);
  590. return -EBUSY;
  591. }
  592. udelay(300);
  593. }
  594. return 0;
  595. }
  596. static int clk_plle_enable(struct clk_hw *hw)
  597. {
  598. struct tegra_clk_pll *pll = to_clk_pll(hw);
  599. unsigned long input_rate = clk_get_rate(clk_get_parent(hw->clk));
  600. struct tegra_clk_pll_freq_table sel;
  601. u32 val;
  602. int err;
  603. if (_get_table_rate(hw, &sel, pll->params->fixed_rate, input_rate))
  604. return -EINVAL;
  605. clk_pll_disable(hw);
  606. val = pll_readl_misc(pll);
  607. val &= ~(PLLE_MISC_LOCK_ENABLE | PLLE_MISC_SETUP_MASK);
  608. pll_writel_misc(val, pll);
  609. val = pll_readl_misc(pll);
  610. if (!(val & PLLE_MISC_READY)) {
  611. err = clk_plle_training(pll);
  612. if (err)
  613. return err;
  614. }
  615. if (pll->params->flags & TEGRA_PLLE_CONFIGURE) {
  616. /* configure dividers */
  617. val = pll_readl_base(pll);
  618. val &= ~(divp_mask_shifted(pll) | divn_mask_shifted(pll) |
  619. divm_mask_shifted(pll));
  620. val &= ~(PLLE_BASE_DIVCML_MASK << PLLE_BASE_DIVCML_SHIFT);
  621. val |= sel.m << divm_shift(pll);
  622. val |= sel.n << divn_shift(pll);
  623. val |= sel.p << divp_shift(pll);
  624. val |= sel.cpcon << PLLE_BASE_DIVCML_SHIFT;
  625. pll_writel_base(val, pll);
  626. }
  627. val = pll_readl_misc(pll);
  628. val |= PLLE_MISC_SETUP_VALUE;
  629. val |= PLLE_MISC_LOCK_ENABLE;
  630. pll_writel_misc(val, pll);
  631. val = readl(pll->clk_base + PLLE_SS_CTRL);
  632. val &= ~PLLE_SS_COEFFICIENTS_MASK;
  633. val |= PLLE_SS_DISABLE;
  634. writel(val, pll->clk_base + PLLE_SS_CTRL);
  635. val = pll_readl_base(pll);
  636. val |= (PLL_BASE_BYPASS | PLL_BASE_ENABLE);
  637. pll_writel_base(val, pll);
  638. clk_pll_wait_for_lock(pll);
  639. return 0;
  640. }
  641. static unsigned long clk_plle_recalc_rate(struct clk_hw *hw,
  642. unsigned long parent_rate)
  643. {
  644. struct tegra_clk_pll *pll = to_clk_pll(hw);
  645. u32 val = pll_readl_base(pll);
  646. u32 divn = 0, divm = 0, divp = 0;
  647. u64 rate = parent_rate;
  648. divp = (val >> pll->params->div_nmp->divp_shift) & (divp_mask(pll));
  649. divn = (val >> pll->params->div_nmp->divn_shift) & (divn_mask(pll));
  650. divm = (val >> pll->params->div_nmp->divm_shift) & (divm_mask(pll));
  651. divm *= divp;
  652. rate *= divn;
  653. do_div(rate, divm);
  654. return rate;
  655. }
  656. const struct clk_ops tegra_clk_pll_ops = {
  657. .is_enabled = clk_pll_is_enabled,
  658. .enable = clk_pll_enable,
  659. .disable = clk_pll_disable,
  660. .recalc_rate = clk_pll_recalc_rate,
  661. .round_rate = clk_pll_round_rate,
  662. .set_rate = clk_pll_set_rate,
  663. };
  664. const struct clk_ops tegra_clk_plle_ops = {
  665. .recalc_rate = clk_plle_recalc_rate,
  666. .is_enabled = clk_pll_is_enabled,
  667. .disable = clk_pll_disable,
  668. .enable = clk_plle_enable,
  669. };
  670. #if defined(CONFIG_ARCH_TEGRA_114_SOC) || defined(CONFIG_ARCH_TEGRA_124_SOC)
  671. static int _pll_fixed_mdiv(struct tegra_clk_pll_params *pll_params,
  672. unsigned long parent_rate)
  673. {
  674. if (parent_rate > pll_params->cf_max)
  675. return 2;
  676. else
  677. return 1;
  678. }
  679. static unsigned long _clip_vco_min(unsigned long vco_min,
  680. unsigned long parent_rate)
  681. {
  682. return DIV_ROUND_UP(vco_min, parent_rate) * parent_rate;
  683. }
  684. static int _setup_dynamic_ramp(struct tegra_clk_pll_params *pll_params,
  685. void __iomem *clk_base,
  686. unsigned long parent_rate)
  687. {
  688. u32 val;
  689. u32 step_a, step_b;
  690. switch (parent_rate) {
  691. case 12000000:
  692. case 13000000:
  693. case 26000000:
  694. step_a = 0x2B;
  695. step_b = 0x0B;
  696. break;
  697. case 16800000:
  698. step_a = 0x1A;
  699. step_b = 0x09;
  700. break;
  701. case 19200000:
  702. step_a = 0x12;
  703. step_b = 0x08;
  704. break;
  705. default:
  706. pr_err("%s: Unexpected reference rate %lu\n",
  707. __func__, parent_rate);
  708. WARN_ON(1);
  709. return -EINVAL;
  710. }
  711. val = step_a << pll_params->stepa_shift;
  712. val |= step_b << pll_params->stepb_shift;
  713. writel_relaxed(val, clk_base + pll_params->dyn_ramp_reg);
  714. return 0;
  715. }
  716. static int clk_pll_iddq_enable(struct clk_hw *hw)
  717. {
  718. struct tegra_clk_pll *pll = to_clk_pll(hw);
  719. unsigned long flags = 0;
  720. u32 val;
  721. int ret;
  722. if (pll->lock)
  723. spin_lock_irqsave(pll->lock, flags);
  724. val = pll_readl(pll->params->iddq_reg, pll);
  725. val &= ~BIT(pll->params->iddq_bit_idx);
  726. pll_writel(val, pll->params->iddq_reg, pll);
  727. udelay(2);
  728. _clk_pll_enable(hw);
  729. ret = clk_pll_wait_for_lock(pll);
  730. if (pll->lock)
  731. spin_unlock_irqrestore(pll->lock, flags);
  732. return 0;
  733. }
  734. static void clk_pll_iddq_disable(struct clk_hw *hw)
  735. {
  736. struct tegra_clk_pll *pll = to_clk_pll(hw);
  737. unsigned long flags = 0;
  738. u32 val;
  739. if (pll->lock)
  740. spin_lock_irqsave(pll->lock, flags);
  741. _clk_pll_disable(hw);
  742. val = pll_readl(pll->params->iddq_reg, pll);
  743. val |= BIT(pll->params->iddq_bit_idx);
  744. pll_writel(val, pll->params->iddq_reg, pll);
  745. udelay(2);
  746. if (pll->lock)
  747. spin_unlock_irqrestore(pll->lock, flags);
  748. }
  749. static int _calc_dynamic_ramp_rate(struct clk_hw *hw,
  750. struct tegra_clk_pll_freq_table *cfg,
  751. unsigned long rate, unsigned long parent_rate)
  752. {
  753. struct tegra_clk_pll *pll = to_clk_pll(hw);
  754. unsigned int p;
  755. int p_div;
  756. if (!rate)
  757. return -EINVAL;
  758. p = DIV_ROUND_UP(pll->params->vco_min, rate);
  759. cfg->m = _pll_fixed_mdiv(pll->params, parent_rate);
  760. cfg->output_rate = rate * p;
  761. cfg->n = cfg->output_rate * cfg->m / parent_rate;
  762. p_div = _p_div_to_hw(hw, p);
  763. if (p_div < 0)
  764. return p_div;
  765. else
  766. cfg->p = p_div;
  767. if (cfg->n > divn_max(pll) || cfg->output_rate > pll->params->vco_max)
  768. return -EINVAL;
  769. return 0;
  770. }
  771. static int _pll_ramp_calc_pll(struct clk_hw *hw,
  772. struct tegra_clk_pll_freq_table *cfg,
  773. unsigned long rate, unsigned long parent_rate)
  774. {
  775. struct tegra_clk_pll *pll = to_clk_pll(hw);
  776. int err = 0, p_div;
  777. err = _get_table_rate(hw, cfg, rate, parent_rate);
  778. if (err < 0)
  779. err = _calc_dynamic_ramp_rate(hw, cfg, rate, parent_rate);
  780. else {
  781. if (cfg->m != _pll_fixed_mdiv(pll->params, parent_rate)) {
  782. WARN_ON(1);
  783. err = -EINVAL;
  784. goto out;
  785. }
  786. p_div = _p_div_to_hw(hw, cfg->p);
  787. if (p_div < 0)
  788. return p_div;
  789. else
  790. cfg->p = p_div;
  791. }
  792. if (cfg->p > pll->params->max_p)
  793. err = -EINVAL;
  794. out:
  795. return err;
  796. }
  797. static int clk_pllxc_set_rate(struct clk_hw *hw, unsigned long rate,
  798. unsigned long parent_rate)
  799. {
  800. struct tegra_clk_pll *pll = to_clk_pll(hw);
  801. struct tegra_clk_pll_freq_table cfg, old_cfg;
  802. unsigned long flags = 0;
  803. int ret = 0;
  804. ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
  805. if (ret < 0)
  806. return ret;
  807. if (pll->lock)
  808. spin_lock_irqsave(pll->lock, flags);
  809. _get_pll_mnp(pll, &old_cfg);
  810. if (old_cfg.m != cfg.m || old_cfg.n != cfg.n || old_cfg.p != cfg.p)
  811. ret = _program_pll(hw, &cfg, rate);
  812. if (pll->lock)
  813. spin_unlock_irqrestore(pll->lock, flags);
  814. return ret;
  815. }
  816. static long clk_pll_ramp_round_rate(struct clk_hw *hw, unsigned long rate,
  817. unsigned long *prate)
  818. {
  819. struct tegra_clk_pll_freq_table cfg;
  820. int ret = 0, p_div;
  821. u64 output_rate = *prate;
  822. ret = _pll_ramp_calc_pll(hw, &cfg, rate, *prate);
  823. if (ret < 0)
  824. return ret;
  825. p_div = _hw_to_p_div(hw, cfg.p);
  826. if (p_div < 0)
  827. return p_div;
  828. output_rate *= cfg.n;
  829. do_div(output_rate, cfg.m * p_div);
  830. return output_rate;
  831. }
  832. static int clk_pllm_set_rate(struct clk_hw *hw, unsigned long rate,
  833. unsigned long parent_rate)
  834. {
  835. struct tegra_clk_pll_freq_table cfg;
  836. struct tegra_clk_pll *pll = to_clk_pll(hw);
  837. unsigned long flags = 0;
  838. int state, ret = 0;
  839. if (pll->lock)
  840. spin_lock_irqsave(pll->lock, flags);
  841. state = clk_pll_is_enabled(hw);
  842. if (state) {
  843. if (rate != clk_get_rate(hw->clk)) {
  844. pr_err("%s: Cannot change active PLLM\n", __func__);
  845. ret = -EINVAL;
  846. goto out;
  847. }
  848. goto out;
  849. }
  850. ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
  851. if (ret < 0)
  852. goto out;
  853. _update_pll_mnp(pll, &cfg);
  854. out:
  855. if (pll->lock)
  856. spin_unlock_irqrestore(pll->lock, flags);
  857. return ret;
  858. }
  859. static void _pllcx_strobe(struct tegra_clk_pll *pll)
  860. {
  861. u32 val;
  862. val = pll_readl_misc(pll);
  863. val |= PLLCX_MISC_STROBE;
  864. pll_writel_misc(val, pll);
  865. udelay(2);
  866. val &= ~PLLCX_MISC_STROBE;
  867. pll_writel_misc(val, pll);
  868. }
  869. static int clk_pllc_enable(struct clk_hw *hw)
  870. {
  871. struct tegra_clk_pll *pll = to_clk_pll(hw);
  872. u32 val;
  873. int ret = 0;
  874. unsigned long flags = 0;
  875. if (pll->lock)
  876. spin_lock_irqsave(pll->lock, flags);
  877. _clk_pll_enable(hw);
  878. udelay(2);
  879. val = pll_readl_misc(pll);
  880. val &= ~PLLCX_MISC_RESET;
  881. pll_writel_misc(val, pll);
  882. udelay(2);
  883. _pllcx_strobe(pll);
  884. ret = clk_pll_wait_for_lock(pll);
  885. if (pll->lock)
  886. spin_unlock_irqrestore(pll->lock, flags);
  887. return ret;
  888. }
  889. static void _clk_pllc_disable(struct clk_hw *hw)
  890. {
  891. struct tegra_clk_pll *pll = to_clk_pll(hw);
  892. u32 val;
  893. _clk_pll_disable(hw);
  894. val = pll_readl_misc(pll);
  895. val |= PLLCX_MISC_RESET;
  896. pll_writel_misc(val, pll);
  897. udelay(2);
  898. }
  899. static void clk_pllc_disable(struct clk_hw *hw)
  900. {
  901. struct tegra_clk_pll *pll = to_clk_pll(hw);
  902. unsigned long flags = 0;
  903. if (pll->lock)
  904. spin_lock_irqsave(pll->lock, flags);
  905. _clk_pllc_disable(hw);
  906. if (pll->lock)
  907. spin_unlock_irqrestore(pll->lock, flags);
  908. }
  909. static int _pllcx_update_dynamic_coef(struct tegra_clk_pll *pll,
  910. unsigned long input_rate, u32 n)
  911. {
  912. u32 val, n_threshold;
  913. switch (input_rate) {
  914. case 12000000:
  915. n_threshold = 70;
  916. break;
  917. case 13000000:
  918. case 26000000:
  919. n_threshold = 71;
  920. break;
  921. case 16800000:
  922. n_threshold = 55;
  923. break;
  924. case 19200000:
  925. n_threshold = 48;
  926. break;
  927. default:
  928. pr_err("%s: Unexpected reference rate %lu\n",
  929. __func__, input_rate);
  930. return -EINVAL;
  931. }
  932. val = pll_readl_misc(pll);
  933. val &= ~(PLLCX_MISC_SDM_DIV_MASK | PLLCX_MISC_FILT_DIV_MASK);
  934. val |= n <= n_threshold ?
  935. PLLCX_MISC_DIV_LOW_RANGE : PLLCX_MISC_DIV_HIGH_RANGE;
  936. pll_writel_misc(val, pll);
  937. return 0;
  938. }
  939. static int clk_pllc_set_rate(struct clk_hw *hw, unsigned long rate,
  940. unsigned long parent_rate)
  941. {
  942. struct tegra_clk_pll_freq_table cfg, old_cfg;
  943. struct tegra_clk_pll *pll = to_clk_pll(hw);
  944. unsigned long flags = 0;
  945. int state, ret = 0;
  946. if (pll->lock)
  947. spin_lock_irqsave(pll->lock, flags);
  948. ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
  949. if (ret < 0)
  950. goto out;
  951. _get_pll_mnp(pll, &old_cfg);
  952. if (cfg.m != old_cfg.m) {
  953. WARN_ON(1);
  954. goto out;
  955. }
  956. if (old_cfg.n == cfg.n && old_cfg.p == cfg.p)
  957. goto out;
  958. state = clk_pll_is_enabled(hw);
  959. if (state)
  960. _clk_pllc_disable(hw);
  961. ret = _pllcx_update_dynamic_coef(pll, parent_rate, cfg.n);
  962. if (ret < 0)
  963. goto out;
  964. _update_pll_mnp(pll, &cfg);
  965. if (state)
  966. ret = clk_pllc_enable(hw);
  967. out:
  968. if (pll->lock)
  969. spin_unlock_irqrestore(pll->lock, flags);
  970. return ret;
  971. }
  972. static long _pllre_calc_rate(struct tegra_clk_pll *pll,
  973. struct tegra_clk_pll_freq_table *cfg,
  974. unsigned long rate, unsigned long parent_rate)
  975. {
  976. u16 m, n;
  977. u64 output_rate = parent_rate;
  978. m = _pll_fixed_mdiv(pll->params, parent_rate);
  979. n = rate * m / parent_rate;
  980. output_rate *= n;
  981. do_div(output_rate, m);
  982. if (cfg) {
  983. cfg->m = m;
  984. cfg->n = n;
  985. }
  986. return output_rate;
  987. }
  988. static int clk_pllre_set_rate(struct clk_hw *hw, unsigned long rate,
  989. unsigned long parent_rate)
  990. {
  991. struct tegra_clk_pll_freq_table cfg, old_cfg;
  992. struct tegra_clk_pll *pll = to_clk_pll(hw);
  993. unsigned long flags = 0;
  994. int state, ret = 0;
  995. if (pll->lock)
  996. spin_lock_irqsave(pll->lock, flags);
  997. _pllre_calc_rate(pll, &cfg, rate, parent_rate);
  998. _get_pll_mnp(pll, &old_cfg);
  999. cfg.p = old_cfg.p;
  1000. if (cfg.m != old_cfg.m || cfg.n != old_cfg.n) {
  1001. state = clk_pll_is_enabled(hw);
  1002. if (state)
  1003. _clk_pll_disable(hw);
  1004. _update_pll_mnp(pll, &cfg);
  1005. if (state) {
  1006. _clk_pll_enable(hw);
  1007. ret = clk_pll_wait_for_lock(pll);
  1008. }
  1009. }
  1010. if (pll->lock)
  1011. spin_unlock_irqrestore(pll->lock, flags);
  1012. return ret;
  1013. }
  1014. static unsigned long clk_pllre_recalc_rate(struct clk_hw *hw,
  1015. unsigned long parent_rate)
  1016. {
  1017. struct tegra_clk_pll_freq_table cfg;
  1018. struct tegra_clk_pll *pll = to_clk_pll(hw);
  1019. u64 rate = parent_rate;
  1020. _get_pll_mnp(pll, &cfg);
  1021. rate *= cfg.n;
  1022. do_div(rate, cfg.m);
  1023. return rate;
  1024. }
  1025. static long clk_pllre_round_rate(struct clk_hw *hw, unsigned long rate,
  1026. unsigned long *prate)
  1027. {
  1028. struct tegra_clk_pll *pll = to_clk_pll(hw);
  1029. return _pllre_calc_rate(pll, NULL, rate, *prate);
  1030. }
  1031. static int clk_plle_tegra114_enable(struct clk_hw *hw)
  1032. {
  1033. struct tegra_clk_pll *pll = to_clk_pll(hw);
  1034. struct tegra_clk_pll_freq_table sel;
  1035. u32 val;
  1036. int ret;
  1037. unsigned long flags = 0;
  1038. unsigned long input_rate = clk_get_rate(clk_get_parent(hw->clk));
  1039. if (_get_table_rate(hw, &sel, pll->params->fixed_rate, input_rate))
  1040. return -EINVAL;
  1041. if (pll->lock)
  1042. spin_lock_irqsave(pll->lock, flags);
  1043. val = pll_readl_base(pll);
  1044. val &= ~BIT(29); /* Disable lock override */
  1045. pll_writel_base(val, pll);
  1046. val = pll_readl(pll->params->aux_reg, pll);
  1047. val |= PLLE_AUX_ENABLE_SWCTL;
  1048. val &= ~PLLE_AUX_SEQ_ENABLE;
  1049. pll_writel(val, pll->params->aux_reg, pll);
  1050. udelay(1);
  1051. val = pll_readl_misc(pll);
  1052. val |= PLLE_MISC_LOCK_ENABLE;
  1053. val |= PLLE_MISC_IDDQ_SW_CTRL;
  1054. val &= ~PLLE_MISC_IDDQ_SW_VALUE;
  1055. val |= PLLE_MISC_PLLE_PTS;
  1056. val |= PLLE_MISC_VREG_BG_CTRL_MASK | PLLE_MISC_VREG_CTRL_MASK;
  1057. pll_writel_misc(val, pll);
  1058. udelay(5);
  1059. val = pll_readl(PLLE_SS_CTRL, pll);
  1060. val |= PLLE_SS_DISABLE;
  1061. pll_writel(val, PLLE_SS_CTRL, pll);
  1062. val = pll_readl_base(pll);
  1063. val &= ~(divp_mask_shifted(pll) | divn_mask_shifted(pll) |
  1064. divm_mask_shifted(pll));
  1065. val &= ~(PLLE_BASE_DIVCML_MASK << PLLE_BASE_DIVCML_SHIFT);
  1066. val |= sel.m << divm_shift(pll);
  1067. val |= sel.n << divn_shift(pll);
  1068. val |= sel.cpcon << PLLE_BASE_DIVCML_SHIFT;
  1069. pll_writel_base(val, pll);
  1070. udelay(1);
  1071. _clk_pll_enable(hw);
  1072. ret = clk_pll_wait_for_lock(pll);
  1073. if (ret < 0)
  1074. goto out;
  1075. val = pll_readl(PLLE_SS_CTRL, pll);
  1076. val &= ~(PLLE_SS_CNTL_CENTER | PLLE_SS_CNTL_INVERT);
  1077. val &= ~PLLE_SS_COEFFICIENTS_MASK;
  1078. val |= PLLE_SS_COEFFICIENTS_VAL;
  1079. pll_writel(val, PLLE_SS_CTRL, pll);
  1080. val &= ~(PLLE_SS_CNTL_SSC_BYP | PLLE_SS_CNTL_BYPASS_SS);
  1081. pll_writel(val, PLLE_SS_CTRL, pll);
  1082. udelay(1);
  1083. val &= ~PLLE_SS_CNTL_INTERP_RESET;
  1084. pll_writel(val, PLLE_SS_CTRL, pll);
  1085. udelay(1);
  1086. /* Enable hw control of xusb brick pll */
  1087. val = pll_readl_misc(pll);
  1088. val &= ~PLLE_MISC_IDDQ_SW_CTRL;
  1089. pll_writel_misc(val, pll);
  1090. val = pll_readl(pll->params->aux_reg, pll);
  1091. val |= (PLLE_AUX_USE_LOCKDET | PLLE_AUX_SEQ_START_STATE);
  1092. val &= ~(PLLE_AUX_ENABLE_SWCTL | PLLE_AUX_SS_SWCTL);
  1093. pll_writel(val, pll->params->aux_reg, pll);
  1094. udelay(1);
  1095. val |= PLLE_AUX_SEQ_ENABLE;
  1096. pll_writel(val, pll->params->aux_reg, pll);
  1097. val = pll_readl(XUSBIO_PLL_CFG0, pll);
  1098. val |= (XUSBIO_PLL_CFG0_PADPLL_USE_LOCKDET |
  1099. XUSBIO_PLL_CFG0_SEQ_START_STATE);
  1100. val &= ~(XUSBIO_PLL_CFG0_CLK_ENABLE_SWCTL |
  1101. XUSBIO_PLL_CFG0_PADPLL_RESET_SWCTL);
  1102. pll_writel(val, XUSBIO_PLL_CFG0, pll);
  1103. udelay(1);
  1104. val |= XUSBIO_PLL_CFG0_SEQ_ENABLE;
  1105. pll_writel(val, XUSBIO_PLL_CFG0, pll);
  1106. out:
  1107. if (pll->lock)
  1108. spin_unlock_irqrestore(pll->lock, flags);
  1109. return ret;
  1110. }
  1111. static void clk_plle_tegra114_disable(struct clk_hw *hw)
  1112. {
  1113. struct tegra_clk_pll *pll = to_clk_pll(hw);
  1114. unsigned long flags = 0;
  1115. u32 val;
  1116. if (pll->lock)
  1117. spin_lock_irqsave(pll->lock, flags);
  1118. _clk_pll_disable(hw);
  1119. val = pll_readl_misc(pll);
  1120. val |= PLLE_MISC_IDDQ_SW_CTRL | PLLE_MISC_IDDQ_SW_VALUE;
  1121. pll_writel_misc(val, pll);
  1122. udelay(1);
  1123. if (pll->lock)
  1124. spin_unlock_irqrestore(pll->lock, flags);
  1125. }
  1126. #endif
  1127. static struct tegra_clk_pll *_tegra_init_pll(void __iomem *clk_base,
  1128. void __iomem *pmc, struct tegra_clk_pll_params *pll_params,
  1129. spinlock_t *lock)
  1130. {
  1131. struct tegra_clk_pll *pll;
  1132. pll = kzalloc(sizeof(*pll), GFP_KERNEL);
  1133. if (!pll)
  1134. return ERR_PTR(-ENOMEM);
  1135. pll->clk_base = clk_base;
  1136. pll->pmc = pmc;
  1137. pll->params = pll_params;
  1138. pll->lock = lock;
  1139. if (!pll_params->div_nmp)
  1140. pll_params->div_nmp = &default_nmp;
  1141. return pll;
  1142. }
  1143. static struct clk *_tegra_clk_register_pll(struct tegra_clk_pll *pll,
  1144. const char *name, const char *parent_name, unsigned long flags,
  1145. const struct clk_ops *ops)
  1146. {
  1147. struct clk_init_data init;
  1148. init.name = name;
  1149. init.ops = ops;
  1150. init.flags = flags;
  1151. init.parent_names = (parent_name ? &parent_name : NULL);
  1152. init.num_parents = (parent_name ? 1 : 0);
  1153. /* Data in .init is copied by clk_register(), so stack variable OK */
  1154. pll->hw.init = &init;
  1155. return clk_register(NULL, &pll->hw);
  1156. }
  1157. struct clk *tegra_clk_register_pll(const char *name, const char *parent_name,
  1158. void __iomem *clk_base, void __iomem *pmc,
  1159. unsigned long flags, struct tegra_clk_pll_params *pll_params,
  1160. spinlock_t *lock)
  1161. {
  1162. struct tegra_clk_pll *pll;
  1163. struct clk *clk;
  1164. pll_params->flags |= TEGRA_PLL_BYPASS;
  1165. pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
  1166. pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
  1167. if (IS_ERR(pll))
  1168. return ERR_CAST(pll);
  1169. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1170. &tegra_clk_pll_ops);
  1171. if (IS_ERR(clk))
  1172. kfree(pll);
  1173. return clk;
  1174. }
  1175. static struct div_nmp pll_e_nmp = {
  1176. .divn_shift = PLLE_BASE_DIVN_SHIFT,
  1177. .divn_width = PLLE_BASE_DIVN_WIDTH,
  1178. .divm_shift = PLLE_BASE_DIVM_SHIFT,
  1179. .divm_width = PLLE_BASE_DIVM_WIDTH,
  1180. .divp_shift = PLLE_BASE_DIVP_SHIFT,
  1181. .divp_width = PLLE_BASE_DIVP_WIDTH,
  1182. };
  1183. struct clk *tegra_clk_register_plle(const char *name, const char *parent_name,
  1184. void __iomem *clk_base, void __iomem *pmc,
  1185. unsigned long flags, struct tegra_clk_pll_params *pll_params,
  1186. spinlock_t *lock)
  1187. {
  1188. struct tegra_clk_pll *pll;
  1189. struct clk *clk;
  1190. pll_params->flags |= TEGRA_PLL_LOCK_MISC | TEGRA_PLL_BYPASS;
  1191. pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
  1192. if (!pll_params->div_nmp)
  1193. pll_params->div_nmp = &pll_e_nmp;
  1194. pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
  1195. if (IS_ERR(pll))
  1196. return ERR_CAST(pll);
  1197. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1198. &tegra_clk_plle_ops);
  1199. if (IS_ERR(clk))
  1200. kfree(pll);
  1201. return clk;
  1202. }
  1203. #if defined(CONFIG_ARCH_TEGRA_114_SOC) || defined(CONFIG_ARCH_TEGRA_124_SOC)
  1204. static const struct clk_ops tegra_clk_pllxc_ops = {
  1205. .is_enabled = clk_pll_is_enabled,
  1206. .enable = clk_pll_iddq_enable,
  1207. .disable = clk_pll_iddq_disable,
  1208. .recalc_rate = clk_pll_recalc_rate,
  1209. .round_rate = clk_pll_ramp_round_rate,
  1210. .set_rate = clk_pllxc_set_rate,
  1211. };
  1212. static const struct clk_ops tegra_clk_pllm_ops = {
  1213. .is_enabled = clk_pll_is_enabled,
  1214. .enable = clk_pll_iddq_enable,
  1215. .disable = clk_pll_iddq_disable,
  1216. .recalc_rate = clk_pll_recalc_rate,
  1217. .round_rate = clk_pll_ramp_round_rate,
  1218. .set_rate = clk_pllm_set_rate,
  1219. };
  1220. static const struct clk_ops tegra_clk_pllc_ops = {
  1221. .is_enabled = clk_pll_is_enabled,
  1222. .enable = clk_pllc_enable,
  1223. .disable = clk_pllc_disable,
  1224. .recalc_rate = clk_pll_recalc_rate,
  1225. .round_rate = clk_pll_ramp_round_rate,
  1226. .set_rate = clk_pllc_set_rate,
  1227. };
  1228. static const struct clk_ops tegra_clk_pllre_ops = {
  1229. .is_enabled = clk_pll_is_enabled,
  1230. .enable = clk_pll_iddq_enable,
  1231. .disable = clk_pll_iddq_disable,
  1232. .recalc_rate = clk_pllre_recalc_rate,
  1233. .round_rate = clk_pllre_round_rate,
  1234. .set_rate = clk_pllre_set_rate,
  1235. };
  1236. static const struct clk_ops tegra_clk_plle_tegra114_ops = {
  1237. .is_enabled = clk_pll_is_enabled,
  1238. .enable = clk_plle_tegra114_enable,
  1239. .disable = clk_plle_tegra114_disable,
  1240. .recalc_rate = clk_pll_recalc_rate,
  1241. };
  1242. struct clk *tegra_clk_register_pllxc(const char *name, const char *parent_name,
  1243. void __iomem *clk_base, void __iomem *pmc,
  1244. unsigned long flags,
  1245. struct tegra_clk_pll_params *pll_params,
  1246. spinlock_t *lock)
  1247. {
  1248. struct tegra_clk_pll *pll;
  1249. struct clk *clk, *parent;
  1250. unsigned long parent_rate;
  1251. int err;
  1252. u32 val, val_iddq;
  1253. parent = __clk_lookup(parent_name);
  1254. if (!parent) {
  1255. WARN(1, "parent clk %s of %s must be registered first\n",
  1256. name, parent_name);
  1257. return ERR_PTR(-EINVAL);
  1258. }
  1259. if (!pll_params->pdiv_tohw)
  1260. return ERR_PTR(-EINVAL);
  1261. parent_rate = __clk_get_rate(parent);
  1262. pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);
  1263. err = _setup_dynamic_ramp(pll_params, clk_base, parent_rate);
  1264. if (err)
  1265. return ERR_PTR(err);
  1266. val = readl_relaxed(clk_base + pll_params->base_reg);
  1267. val_iddq = readl_relaxed(clk_base + pll_params->iddq_reg);
  1268. if (val & PLL_BASE_ENABLE)
  1269. WARN_ON(val_iddq & BIT(pll_params->iddq_bit_idx));
  1270. else {
  1271. val_iddq |= BIT(pll_params->iddq_bit_idx);
  1272. writel_relaxed(val_iddq, clk_base + pll_params->iddq_reg);
  1273. }
  1274. pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
  1275. pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
  1276. if (IS_ERR(pll))
  1277. return ERR_CAST(pll);
  1278. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1279. &tegra_clk_pllxc_ops);
  1280. if (IS_ERR(clk))
  1281. kfree(pll);
  1282. return clk;
  1283. }
  1284. struct clk *tegra_clk_register_pllre(const char *name, const char *parent_name,
  1285. void __iomem *clk_base, void __iomem *pmc,
  1286. unsigned long flags,
  1287. struct tegra_clk_pll_params *pll_params,
  1288. spinlock_t *lock, unsigned long parent_rate)
  1289. {
  1290. u32 val;
  1291. struct tegra_clk_pll *pll;
  1292. struct clk *clk;
  1293. pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE | TEGRA_PLL_LOCK_MISC;
  1294. pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);
  1295. pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
  1296. if (IS_ERR(pll))
  1297. return ERR_CAST(pll);
  1298. /* program minimum rate by default */
  1299. val = pll_readl_base(pll);
  1300. if (val & PLL_BASE_ENABLE)
  1301. WARN_ON(val & pll_params->iddq_bit_idx);
  1302. else {
  1303. int m;
  1304. m = _pll_fixed_mdiv(pll_params, parent_rate);
  1305. val = m << divm_shift(pll);
  1306. val |= (pll_params->vco_min / parent_rate) << divn_shift(pll);
  1307. pll_writel_base(val, pll);
  1308. }
  1309. /* disable lock override */
  1310. val = pll_readl_misc(pll);
  1311. val &= ~BIT(29);
  1312. pll_writel_misc(val, pll);
  1313. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1314. &tegra_clk_pllre_ops);
  1315. if (IS_ERR(clk))
  1316. kfree(pll);
  1317. return clk;
  1318. }
  1319. struct clk *tegra_clk_register_pllm(const char *name, const char *parent_name,
  1320. void __iomem *clk_base, void __iomem *pmc,
  1321. unsigned long flags,
  1322. struct tegra_clk_pll_params *pll_params,
  1323. spinlock_t *lock)
  1324. {
  1325. struct tegra_clk_pll *pll;
  1326. struct clk *clk, *parent;
  1327. unsigned long parent_rate;
  1328. if (!pll_params->pdiv_tohw)
  1329. return ERR_PTR(-EINVAL);
  1330. parent = __clk_lookup(parent_name);
  1331. if (!parent) {
  1332. WARN(1, "parent clk %s of %s must be registered first\n",
  1333. name, parent_name);
  1334. return ERR_PTR(-EINVAL);
  1335. }
  1336. parent_rate = __clk_get_rate(parent);
  1337. pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);
  1338. pll_params->flags |= TEGRA_PLL_BYPASS;
  1339. pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
  1340. pll_params->flags |= TEGRA_PLLM;
  1341. pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
  1342. if (IS_ERR(pll))
  1343. return ERR_CAST(pll);
  1344. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1345. &tegra_clk_pllm_ops);
  1346. if (IS_ERR(clk))
  1347. kfree(pll);
  1348. return clk;
  1349. }
  1350. struct clk *tegra_clk_register_pllc(const char *name, const char *parent_name,
  1351. void __iomem *clk_base, void __iomem *pmc,
  1352. unsigned long flags,
  1353. struct tegra_clk_pll_params *pll_params,
  1354. spinlock_t *lock)
  1355. {
  1356. struct clk *parent, *clk;
  1357. struct pdiv_map *p_tohw = pll_params->pdiv_tohw;
  1358. struct tegra_clk_pll *pll;
  1359. struct tegra_clk_pll_freq_table cfg;
  1360. unsigned long parent_rate;
  1361. if (!p_tohw)
  1362. return ERR_PTR(-EINVAL);
  1363. parent = __clk_lookup(parent_name);
  1364. if (!parent) {
  1365. WARN(1, "parent clk %s of %s must be registered first\n",
  1366. name, parent_name);
  1367. return ERR_PTR(-EINVAL);
  1368. }
  1369. parent_rate = __clk_get_rate(parent);
  1370. pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);
  1371. pll_params->flags |= TEGRA_PLL_BYPASS;
  1372. pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
  1373. if (IS_ERR(pll))
  1374. return ERR_CAST(pll);
  1375. /*
  1376. * Most of PLLC register fields are shadowed, and can not be read
  1377. * directly from PLL h/w. Hence, actual PLLC boot state is unknown.
  1378. * Initialize PLL to default state: disabled, reset; shadow registers
  1379. * loaded with default parameters; dividers are preset for half of
  1380. * minimum VCO rate (the latter assured that shadowed divider settings
  1381. * are within supported range).
  1382. */
  1383. cfg.m = _pll_fixed_mdiv(pll_params, parent_rate);
  1384. cfg.n = cfg.m * pll_params->vco_min / parent_rate;
  1385. while (p_tohw->pdiv) {
  1386. if (p_tohw->pdiv == 2) {
  1387. cfg.p = p_tohw->hw_val;
  1388. break;
  1389. }
  1390. p_tohw++;
  1391. }
  1392. if (!p_tohw->pdiv) {
  1393. WARN_ON(1);
  1394. return ERR_PTR(-EINVAL);
  1395. }
  1396. pll_writel_base(0, pll);
  1397. _update_pll_mnp(pll, &cfg);
  1398. pll_writel_misc(PLLCX_MISC_DEFAULT, pll);
  1399. pll_writel(PLLCX_MISC1_DEFAULT, pll_params->ext_misc_reg[0], pll);
  1400. pll_writel(PLLCX_MISC2_DEFAULT, pll_params->ext_misc_reg[1], pll);
  1401. pll_writel(PLLCX_MISC3_DEFAULT, pll_params->ext_misc_reg[2], pll);
  1402. _pllcx_update_dynamic_coef(pll, parent_rate, cfg.n);
  1403. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1404. &tegra_clk_pllc_ops);
  1405. if (IS_ERR(clk))
  1406. kfree(pll);
  1407. return clk;
  1408. }
  1409. struct clk *tegra_clk_register_plle_tegra114(const char *name,
  1410. const char *parent_name,
  1411. void __iomem *clk_base, unsigned long flags,
  1412. struct tegra_clk_pll_params *pll_params,
  1413. spinlock_t *lock)
  1414. {
  1415. struct tegra_clk_pll *pll;
  1416. struct clk *clk;
  1417. u32 val, val_aux;
  1418. pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
  1419. pll = _tegra_init_pll(clk_base, NULL, pll_params, lock);
  1420. if (IS_ERR(pll))
  1421. return ERR_CAST(pll);
  1422. /* ensure parent is set to pll_re_vco */
  1423. val = pll_readl_base(pll);
  1424. val_aux = pll_readl(pll_params->aux_reg, pll);
  1425. if (val & PLL_BASE_ENABLE) {
  1426. if ((val_aux & PLLE_AUX_PLLRE_SEL) ||
  1427. (val_aux & PLLE_AUX_PLLP_SEL))
  1428. WARN(1, "pll_e enabled with unsupported parent %s\n",
  1429. (val_aux & PLLE_AUX_PLLP_SEL) ? "pllp_out0" :
  1430. "pll_re_vco");
  1431. } else {
  1432. val_aux &= ~(PLLE_AUX_PLLRE_SEL | PLLE_AUX_PLLP_SEL);
  1433. pll_writel(val_aux, pll_params->aux_reg, pll);
  1434. }
  1435. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1436. &tegra_clk_plle_tegra114_ops);
  1437. if (IS_ERR(clk))
  1438. kfree(pll);
  1439. return clk;
  1440. }
  1441. #endif
  1442. #ifdef CONFIG_ARCH_TEGRA_124_SOC
  1443. static const struct clk_ops tegra_clk_pllss_ops = {
  1444. .is_enabled = clk_pll_is_enabled,
  1445. .enable = clk_pll_iddq_enable,
  1446. .disable = clk_pll_iddq_disable,
  1447. .recalc_rate = clk_pll_recalc_rate,
  1448. .round_rate = clk_pll_ramp_round_rate,
  1449. .set_rate = clk_pllxc_set_rate,
  1450. };
  1451. struct clk *tegra_clk_register_pllss(const char *name, const char *parent_name,
  1452. void __iomem *clk_base, unsigned long flags,
  1453. struct tegra_clk_pll_params *pll_params,
  1454. spinlock_t *lock)
  1455. {
  1456. struct tegra_clk_pll *pll;
  1457. struct clk *clk, *parent;
  1458. struct tegra_clk_pll_freq_table cfg;
  1459. unsigned long parent_rate;
  1460. u32 val;
  1461. int i;
  1462. if (!pll_params->div_nmp)
  1463. return ERR_PTR(-EINVAL);
  1464. parent = __clk_lookup(parent_name);
  1465. if (!parent) {
  1466. WARN(1, "parent clk %s of %s must be registered first\n",
  1467. name, parent_name);
  1468. return ERR_PTR(-EINVAL);
  1469. }
  1470. pll_params->flags = TEGRA_PLL_HAS_LOCK_ENABLE | TEGRA_PLL_USE_LOCK;
  1471. pll = _tegra_init_pll(clk_base, NULL, pll_params, lock);
  1472. if (IS_ERR(pll))
  1473. return ERR_CAST(pll);
  1474. val = pll_readl_base(pll);
  1475. val &= ~PLLSS_REF_SRC_SEL_MASK;
  1476. pll_writel_base(val, pll);
  1477. parent_rate = __clk_get_rate(parent);
  1478. pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);
  1479. /* initialize PLL to minimum rate */
  1480. cfg.m = _pll_fixed_mdiv(pll_params, parent_rate);
  1481. cfg.n = cfg.m * pll_params->vco_min / parent_rate;
  1482. for (i = 0; pll_params->pdiv_tohw[i].pdiv; i++)
  1483. ;
  1484. if (!i) {
  1485. kfree(pll);
  1486. return ERR_PTR(-EINVAL);
  1487. }
  1488. cfg.p = pll_params->pdiv_tohw[i-1].hw_val;
  1489. _update_pll_mnp(pll, &cfg);
  1490. pll_writel_misc(PLLSS_MISC_DEFAULT, pll);
  1491. pll_writel(PLLSS_CFG_DEFAULT, pll_params->ext_misc_reg[0], pll);
  1492. pll_writel(PLLSS_CTRL1_DEFAULT, pll_params->ext_misc_reg[1], pll);
  1493. pll_writel(PLLSS_CTRL1_DEFAULT, pll_params->ext_misc_reg[2], pll);
  1494. val = pll_readl_base(pll);
  1495. if (val & PLL_BASE_ENABLE) {
  1496. if (val & BIT(pll_params->iddq_bit_idx)) {
  1497. WARN(1, "%s is on but IDDQ set\n", name);
  1498. kfree(pll);
  1499. return ERR_PTR(-EINVAL);
  1500. }
  1501. } else
  1502. val |= BIT(pll_params->iddq_bit_idx);
  1503. val &= ~PLLSS_LOCK_OVERRIDE;
  1504. pll_writel_base(val, pll);
  1505. clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
  1506. &tegra_clk_pllss_ops);
  1507. if (IS_ERR(clk))
  1508. kfree(pll);
  1509. return clk;
  1510. }
  1511. #endif
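
For reference, the sketch below shows how SoC clock-init code might call tegra_clk_register_pll() from this file. It is a minimal, hypothetical example: the register offsets, lock bit, divider values and the "pll_x"/"pll_ref" names are placeholders rather than values from any Tegra data sheet, and it assumes the tegra_clk_pll_params and tegra_clk_pll_freq_table definitions from the accompanying clk.h plus a normal kernel build context.

/*
 * Illustrative sketch only (not part of clk-pll.c): register a generic PLL
 * using the helpers above. All numeric values and names are placeholders.
 */
static DEFINE_SPINLOCK(pll_x_lock);

static struct tegra_clk_pll_freq_table pll_x_freq_table[] = {
	/* 12 MHz reference -> 1 GHz output: VCO = 12 MHz / 12 * 1000 */
	{ .input_rate = 12000000, .output_rate = 1000000000,
	  .n = 1000, .m = 12, .p = 1, .cpcon = 8 },
	{ 0, 0, 0, 0, 0, 0 }, /* sentinel: input_rate == 0 ends the table */
};

static struct tegra_clk_pll_params pll_x_params = {
	.vco_min = 700000000,
	.vco_max = 3000000000UL,
	.base_reg = 0xe0,		/* hypothetical register offsets */
	.misc_reg = 0xe4,
	.lock_mask = BIT(27),		/* hypothetical lock status bit */
	.lock_enable_bit_idx = 18,
	.lock_delay = 300,
	.freq_table = pll_x_freq_table,
	.flags = TEGRA_PLL_USE_LOCK | TEGRA_PLL_HAS_CPCON,
};

static void __init pll_x_example_init(void __iomem *clk_base,
				      void __iomem *pmc)
{
	struct clk *clk;

	/* parent "pll_ref" must already be registered */
	clk = tegra_clk_register_pll("pll_x", "pll_ref", clk_base, pmc,
				     0, &pll_x_params, &pll_x_lock);
	if (IS_ERR(clk))
		pr_warn("pll_x registration failed: %ld\n", PTR_ERR(clk));
}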