/*
 * Copyright (c) 2012, 2013, NVIDIA CORPORATION. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/slab.h>
#include <linux/io.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/clk-provider.h>
#include <linux/clk.h>

#include "clk.h"
#define PLL_BASE_BYPASS BIT(31)
#define PLL_BASE_ENABLE BIT(30)
#define PLL_BASE_REF_ENABLE BIT(29)
#define PLL_BASE_OVERRIDE BIT(28)

#define PLL_BASE_DIVP_SHIFT 20
#define PLL_BASE_DIVP_WIDTH 3
#define PLL_BASE_DIVN_SHIFT 8
#define PLL_BASE_DIVN_WIDTH 10
#define PLL_BASE_DIVM_SHIFT 0
#define PLL_BASE_DIVM_WIDTH 5
#define PLLU_POST_DIVP_MASK 0x1

#define PLL_MISC_DCCON_SHIFT 20
#define PLL_MISC_CPCON_SHIFT 8
#define PLL_MISC_CPCON_WIDTH 4
#define PLL_MISC_CPCON_MASK ((1 << PLL_MISC_CPCON_WIDTH) - 1)
#define PLL_MISC_LFCON_SHIFT 4
#define PLL_MISC_LFCON_WIDTH 4
#define PLL_MISC_LFCON_MASK ((1 << PLL_MISC_LFCON_WIDTH) - 1)
#define PLL_MISC_VCOCON_SHIFT 0
#define PLL_MISC_VCOCON_WIDTH 4
#define PLL_MISC_VCOCON_MASK ((1 << PLL_MISC_VCOCON_WIDTH) - 1)

#define OUT_OF_TABLE_CPCON 8

#define PMC_PLLP_WB0_OVERRIDE 0xf8
#define PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE BIT(12)
#define PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE BIT(11)

#define PLL_POST_LOCK_DELAY 50

#define PLLDU_LFCON_SET_DIVN 600

#define PLLE_BASE_DIVCML_SHIFT 24
#define PLLE_BASE_DIVCML_MASK 0xf
#define PLLE_BASE_DIVP_SHIFT 16
#define PLLE_BASE_DIVP_WIDTH 6
#define PLLE_BASE_DIVN_SHIFT 8
#define PLLE_BASE_DIVN_WIDTH 8
#define PLLE_BASE_DIVM_SHIFT 0
#define PLLE_BASE_DIVM_WIDTH 8

#define PLLE_MISC_SETUP_BASE_SHIFT 16
#define PLLE_MISC_SETUP_BASE_MASK (0xffff << PLLE_MISC_SETUP_BASE_SHIFT)
#define PLLE_MISC_LOCK_ENABLE BIT(9)
#define PLLE_MISC_READY BIT(15)
#define PLLE_MISC_SETUP_EX_SHIFT 2
#define PLLE_MISC_SETUP_EX_MASK (3 << PLLE_MISC_SETUP_EX_SHIFT)
#define PLLE_MISC_SETUP_MASK (PLLE_MISC_SETUP_BASE_MASK | \
			      PLLE_MISC_SETUP_EX_MASK)
#define PLLE_MISC_SETUP_VALUE (7 << PLLE_MISC_SETUP_BASE_SHIFT)

#define PLLE_SS_CTRL 0x68
#define PLLE_SS_CNTL_BYPASS_SS BIT(10)
#define PLLE_SS_CNTL_INTERP_RESET BIT(11)
#define PLLE_SS_CNTL_SSC_BYP BIT(12)
#define PLLE_SS_CNTL_CENTER BIT(14)
#define PLLE_SS_CNTL_INVERT BIT(15)
#define PLLE_SS_DISABLE (PLLE_SS_CNTL_BYPASS_SS | PLLE_SS_CNTL_INTERP_RESET | \
			 PLLE_SS_CNTL_SSC_BYP)
#define PLLE_SS_MAX_MASK 0x1ff
#define PLLE_SS_MAX_VAL 0x25
#define PLLE_SS_INC_MASK (0xff << 16)
#define PLLE_SS_INC_VAL (0x1 << 16)
#define PLLE_SS_INCINTRV_MASK (0x3f << 24)
#define PLLE_SS_INCINTRV_VAL (0x20 << 24)
#define PLLE_SS_COEFFICIENTS_MASK \
	(PLLE_SS_MAX_MASK | PLLE_SS_INC_MASK | PLLE_SS_INCINTRV_MASK)
#define PLLE_SS_COEFFICIENTS_VAL \
	(PLLE_SS_MAX_VAL | PLLE_SS_INC_VAL | PLLE_SS_INCINTRV_VAL)

#define PLLE_AUX_PLLP_SEL BIT(2)
#define PLLE_AUX_USE_LOCKDET BIT(3)
#define PLLE_AUX_ENABLE_SWCTL BIT(4)
#define PLLE_AUX_SS_SWCTL BIT(6)
#define PLLE_AUX_SEQ_ENABLE BIT(24)
#define PLLE_AUX_SEQ_START_STATE BIT(25)
#define PLLE_AUX_PLLRE_SEL BIT(28)

#define XUSBIO_PLL_CFG0 0x51c
#define XUSBIO_PLL_CFG0_PADPLL_RESET_SWCTL BIT(0)
#define XUSBIO_PLL_CFG0_CLK_ENABLE_SWCTL BIT(2)
#define XUSBIO_PLL_CFG0_PADPLL_USE_LOCKDET BIT(6)
#define XUSBIO_PLL_CFG0_SEQ_ENABLE BIT(24)
#define XUSBIO_PLL_CFG0_SEQ_START_STATE BIT(25)

#define SATA_PLL_CFG0 0x490
#define SATA_PLL_CFG0_PADPLL_RESET_SWCTL BIT(0)
#define SATA_PLL_CFG0_PADPLL_USE_LOCKDET BIT(2)
#define SATA_PLL_CFG0_SEQ_ENABLE BIT(24)
#define SATA_PLL_CFG0_SEQ_START_STATE BIT(25)

#define PLLE_MISC_PLLE_PTS BIT(8)
#define PLLE_MISC_IDDQ_SW_VALUE BIT(13)
#define PLLE_MISC_IDDQ_SW_CTRL BIT(14)
#define PLLE_MISC_VREG_BG_CTRL_SHIFT 4
#define PLLE_MISC_VREG_BG_CTRL_MASK (3 << PLLE_MISC_VREG_BG_CTRL_SHIFT)
#define PLLE_MISC_VREG_CTRL_SHIFT 2
#define PLLE_MISC_VREG_CTRL_MASK (2 << PLLE_MISC_VREG_CTRL_SHIFT)

#define PLLCX_MISC_STROBE BIT(31)
#define PLLCX_MISC_RESET BIT(30)
#define PLLCX_MISC_SDM_DIV_SHIFT 28
#define PLLCX_MISC_SDM_DIV_MASK (0x3 << PLLCX_MISC_SDM_DIV_SHIFT)
#define PLLCX_MISC_FILT_DIV_SHIFT 26
#define PLLCX_MISC_FILT_DIV_MASK (0x3 << PLLCX_MISC_FILT_DIV_SHIFT)
#define PLLCX_MISC_ALPHA_SHIFT 18
#define PLLCX_MISC_DIV_LOW_RANGE \
	((0x1 << PLLCX_MISC_SDM_DIV_SHIFT) | \
	 (0x1 << PLLCX_MISC_FILT_DIV_SHIFT))
#define PLLCX_MISC_DIV_HIGH_RANGE \
	((0x2 << PLLCX_MISC_SDM_DIV_SHIFT) | \
	 (0x2 << PLLCX_MISC_FILT_DIV_SHIFT))
#define PLLCX_MISC_COEF_LOW_RANGE \
	((0x14 << PLLCX_MISC_KA_SHIFT) | (0x38 << PLLCX_MISC_KB_SHIFT))
#define PLLCX_MISC_KA_SHIFT 2
#define PLLCX_MISC_KB_SHIFT 9
#define PLLCX_MISC_DEFAULT (PLLCX_MISC_COEF_LOW_RANGE | \
			    (0x19 << PLLCX_MISC_ALPHA_SHIFT) | \
			    PLLCX_MISC_DIV_LOW_RANGE | \
			    PLLCX_MISC_RESET)
#define PLLCX_MISC1_DEFAULT 0x000d2308
#define PLLCX_MISC2_DEFAULT 0x30211200
#define PLLCX_MISC3_DEFAULT 0x200

#define PMC_SATA_PWRGT 0x1ac
#define PMC_SATA_PWRGT_PLLE_IDDQ_VALUE BIT(5)
#define PMC_SATA_PWRGT_PLLE_IDDQ_SWCTL BIT(4)

#define PLLSS_MISC_KCP 0
#define PLLSS_MISC_KVCO 0
#define PLLSS_MISC_SETUP 0
#define PLLSS_EN_SDM 0
#define PLLSS_EN_SSC 0
#define PLLSS_EN_DITHER2 0
#define PLLSS_EN_DITHER 1
#define PLLSS_SDM_RESET 0
#define PLLSS_CLAMP 0
#define PLLSS_SDM_SSC_MAX 0
#define PLLSS_SDM_SSC_MIN 0
#define PLLSS_SDM_SSC_STEP 0
#define PLLSS_SDM_DIN 0
#define PLLSS_MISC_DEFAULT ((PLLSS_MISC_KCP << 25) | \
			    (PLLSS_MISC_KVCO << 24) | \
			    PLLSS_MISC_SETUP)
#define PLLSS_CFG_DEFAULT ((PLLSS_EN_SDM << 31) | \
			   (PLLSS_EN_SSC << 30) | \
			   (PLLSS_EN_DITHER2 << 29) | \
			   (PLLSS_EN_DITHER << 28) | \
			   (PLLSS_SDM_RESET << 27) | \
			   (PLLSS_CLAMP << 22))
#define PLLSS_CTRL1_DEFAULT \
	((PLLSS_SDM_SSC_MAX << 16) | PLLSS_SDM_SSC_MIN)
#define PLLSS_CTRL2_DEFAULT \
	((PLLSS_SDM_SSC_STEP << 16) | PLLSS_SDM_DIN)
#define PLLSS_LOCK_OVERRIDE BIT(24)
#define PLLSS_REF_SRC_SEL_SHIFT 25
#define PLLSS_REF_SRC_SEL_MASK (3 << PLLSS_REF_SRC_SEL_SHIFT)

#define pll_readl(offset, p) readl_relaxed(p->clk_base + offset)
#define pll_readl_base(p) pll_readl(p->params->base_reg, p)
#define pll_readl_misc(p) pll_readl(p->params->misc_reg, p)
#define pll_override_readl(offset, p) readl_relaxed(p->pmc + offset)

#define pll_writel(val, offset, p) writel_relaxed(val, p->clk_base + offset)
#define pll_writel_base(val, p) pll_writel(val, p->params->base_reg, p)
#define pll_writel_misc(val, p) pll_writel(val, p->params->misc_reg, p)
#define pll_override_writel(val, offset, p) writel(val, p->pmc + offset)

#define mask(w) ((1 << (w)) - 1)
#define divm_mask(p) mask(p->params->div_nmp->divm_width)
#define divn_mask(p) mask(p->params->div_nmp->divn_width)
#define divp_mask(p) (p->params->flags & TEGRA_PLLU ? PLLU_POST_DIVP_MASK : \
		      mask(p->params->div_nmp->divp_width))

#define divm_shift(p) (p)->params->div_nmp->divm_shift
#define divn_shift(p) (p)->params->div_nmp->divn_shift
#define divp_shift(p) (p)->params->div_nmp->divp_shift

#define divm_mask_shifted(p) (divm_mask(p) << divm_shift(p))
#define divn_mask_shifted(p) (divn_mask(p) << divn_shift(p))
#define divp_mask_shifted(p) (divp_mask(p) << divp_shift(p))

#define divm_max(p) (divm_mask(p))
#define divn_max(p) (divn_mask(p))
#define divp_max(p) (1 << (divp_mask(p)))
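
/*
 * Divider field helpers: the divm/divn/divp _mask, _shift and _max macros
 * describe where the M, N and P fields live in the base register for a
 * given PLL (PLLU uses a single-bit post divider, hence the
 * PLLU_POST_DIVP_MASK special case).  The *_mask_shifted variants are used
 * to clear the fields in place before reprogramming them.
 */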
static struct div_nmp default_nmp = {
	.divn_shift = PLL_BASE_DIVN_SHIFT,
	.divn_width = PLL_BASE_DIVN_WIDTH,
	.divm_shift = PLL_BASE_DIVM_SHIFT,
	.divm_width = PLL_BASE_DIVM_WIDTH,
	.divp_shift = PLL_BASE_DIVP_SHIFT,
	.divp_width = PLL_BASE_DIVP_WIDTH,
};

static void clk_pll_enable_lock(struct tegra_clk_pll *pll)
{
	u32 val;

	if (!(pll->params->flags & TEGRA_PLL_USE_LOCK))
		return;

	if (!(pll->params->flags & TEGRA_PLL_HAS_LOCK_ENABLE))
		return;

	val = pll_readl_misc(pll);
	val |= BIT(pll->params->lock_enable_bit_idx);
	pll_writel_misc(val, pll);
}
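
/*
 * Poll the lock bit (in the base or misc register, depending on
 * TEGRA_PLL_LOCK_MISC) until it is set, or fall back to a fixed delay when
 * the PLL has no usable lock detection (TEGRA_PLL_USE_LOCK not set).  The
 * polling loop bounds the wait to roughly twice the specified lock delay.
 */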
static int clk_pll_wait_for_lock(struct tegra_clk_pll *pll)
{
	int i;
	u32 val, lock_mask;
	void __iomem *lock_addr;

	if (!(pll->params->flags & TEGRA_PLL_USE_LOCK)) {
		udelay(pll->params->lock_delay);
		return 0;
	}

	lock_addr = pll->clk_base;
	if (pll->params->flags & TEGRA_PLL_LOCK_MISC)
		lock_addr += pll->params->misc_reg;
	else
		lock_addr += pll->params->base_reg;
	lock_mask = pll->params->lock_mask;

	for (i = 0; i < pll->params->lock_delay; i++) {
		val = readl_relaxed(lock_addr);
		if ((val & lock_mask) == lock_mask) {
			udelay(PLL_POST_LOCK_DELAY);
			return 0;
		}
		udelay(2); /* timeout = 2 * lock time */
	}

	pr_err("%s: Timed out waiting for pll %s lock\n", __func__,
	       __clk_get_name(pll->hw.clk));

	return -1;
}
static int clk_pll_is_enabled(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	if (pll->params->flags & TEGRA_PLLM) {
		val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
		if (val & PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)
			return val & PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE ? 1 : 0;
	}

	val = pll_readl_base(pll);

	return val & PLL_BASE_ENABLE ? 1 : 0;
}

static void _clk_pll_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	clk_pll_enable_lock(pll);

	val = pll_readl_base(pll);
	if (pll->params->flags & TEGRA_PLL_BYPASS)
		val &= ~PLL_BASE_BYPASS;
	val |= PLL_BASE_ENABLE;
	pll_writel_base(val, pll);

	if (pll->params->flags & TEGRA_PLLM) {
		val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
		val |= PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE;
		writel_relaxed(val, pll->pmc + PMC_PLLP_WB0_OVERRIDE);
	}
}

static void _clk_pll_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	val = pll_readl_base(pll);
	if (pll->params->flags & TEGRA_PLL_BYPASS)
		val &= ~PLL_BASE_BYPASS;
	val &= ~PLL_BASE_ENABLE;
	pll_writel_base(val, pll);

	if (pll->params->flags & TEGRA_PLLM) {
		val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
		val &= ~PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE;
		writel_relaxed(val, pll->pmc + PMC_PLLP_WB0_OVERRIDE);
	}
}

static int clk_pll_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int ret;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_enable(hw);

	ret = clk_pll_wait_for_lock(pll);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void clk_pll_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_disable(hw);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}
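
/*
 * Post-divider handling: some PLLs encode the post divider directly as a
 * power-of-two exponent in the P field, others use a per-PLL pdiv_tohw
 * table that maps a divider value to its register encoding.  The two
 * helpers below translate between the requested divider and the value
 * that is actually programmed into (or read back from) the P field.
 */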
static int _p_div_to_hw(struct clk_hw *hw, u8 p_div)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct pdiv_map *p_tohw = pll->params->pdiv_tohw;

	if (p_tohw) {
		while (p_tohw->pdiv) {
			if (p_div <= p_tohw->pdiv)
				return p_tohw->hw_val;
			p_tohw++;
		}
		return -EINVAL;
	}
	return -EINVAL;
}

static int _hw_to_p_div(struct clk_hw *hw, u8 p_div_hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct pdiv_map *p_tohw = pll->params->pdiv_tohw;

	if (p_tohw) {
		while (p_tohw->pdiv) {
			if (p_div_hw == p_tohw->hw_val)
				return p_tohw->pdiv;
			p_tohw++;
		}
		return -EINVAL;
	}

	return 1 << p_div_hw;
}

static int _get_table_rate(struct clk_hw *hw,
			   struct tegra_clk_pll_freq_table *cfg,
			   unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table *sel;

	for (sel = pll->params->freq_table; sel->input_rate != 0; sel++)
		if (sel->input_rate == parent_rate &&
		    sel->output_rate == rate)
			break;

	if (sel->input_rate == 0)
		return -EINVAL;

	cfg->input_rate = sel->input_rate;
	cfg->output_rate = sel->output_rate;
	cfg->m = sel->m;
	cfg->n = sel->n;
	cfg->p = sel->p;
	cfg->cpcon = sel->cpcon;

	return 0;
}
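
/*
 * Compute M/N/P for a rate that is not in the frequency table.  The
 * comparison frequency cfreq (the phase detector input, parent_rate / M)
 * is chosen per reference rate, the VCO is doubled until it reaches at
 * least 200 * cfreq to keep the error within 0.5%, and the remaining
 * factors fall out as M = parent_rate / cfreq and N = VCO / cfreq.
 *
 * Illustrative walk-through (assuming no pdiv_tohw table): parent_rate =
 * 12 MHz, rate = 100 MHz -> cfreq = 1 MHz, the VCO is raised once to
 * 200 MHz (p_div = 1), so M = 12, N = 200, and the post divider of 2
 * brings the output back to 100 MHz.
 */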
static int _calc_rate(struct clk_hw *hw, struct tegra_clk_pll_freq_table *cfg,
		      unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long cfreq;
	u32 p_div = 0;
	int ret;

	switch (parent_rate) {
	case 12000000:
	case 26000000:
		cfreq = (rate <= 1000000 * 1000) ? 1000000 : 2000000;
		break;
	case 13000000:
		cfreq = (rate <= 1000000 * 1000) ? 1000000 : 2600000;
		break;
	case 16800000:
	case 19200000:
		cfreq = (rate <= 1200000 * 1000) ? 1200000 : 2400000;
		break;
	case 9600000:
	case 28800000:
		/*
		 * PLL_P_OUT1 rate is not listed in PLLA table
		 */
		cfreq = parent_rate / (parent_rate / 1000000);
		break;
	default:
		pr_err("%s Unexpected reference rate %lu\n",
		       __func__, parent_rate);
		BUG();
	}

	/* Raise VCO to guarantee 0.5% accuracy */
	for (cfg->output_rate = rate; cfg->output_rate < 200 * cfreq;
	     cfg->output_rate <<= 1)
		p_div++;

	cfg->m = parent_rate / cfreq;
	cfg->n = cfg->output_rate / cfreq;
	cfg->cpcon = OUT_OF_TABLE_CPCON;

	if (cfg->m > divm_max(pll) || cfg->n > divn_max(pll) ||
	    (1 << p_div) > divp_max(pll) ||
	    cfg->output_rate > pll->params->vco_max) {
		return -EINVAL;
	}

	cfg->output_rate >>= p_div;

	if (pll->params->pdiv_tohw) {
		ret = _p_div_to_hw(hw, 1 << p_div);
		if (ret < 0)
			return ret;
		else
			cfg->p = ret;
	} else
		cfg->p = p_div;

	return 0;
}
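
/*
 * The M/N/P fields normally live in the PLL base register, but when the
 * PMC override for PLLM is active (PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE set
 * in PMC_PLLP_WB0_OVERRIDE) the dividers are programmed into, and read
 * back from, the PMC divider override registers instead.
 */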
static void _update_pll_mnp(struct tegra_clk_pll *pll,
			    struct tegra_clk_pll_freq_table *cfg)
{
	u32 val;
	struct tegra_clk_pll_params *params = pll->params;
	struct div_nmp *div_nmp = params->div_nmp;

	if ((params->flags & TEGRA_PLLM) &&
	    (pll_override_readl(PMC_PLLP_WB0_OVERRIDE, pll) &
	     PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)) {
		val = pll_override_readl(params->pmc_divp_reg, pll);
		val &= ~(divp_mask(pll) << div_nmp->override_divp_shift);
		val |= cfg->p << div_nmp->override_divp_shift;
		pll_override_writel(val, params->pmc_divp_reg, pll);

		val = pll_override_readl(params->pmc_divnm_reg, pll);
		/* clear both the M and N fields before programming them */
		val &= ~((divm_mask(pll) << div_nmp->override_divm_shift) |
			 (divn_mask(pll) << div_nmp->override_divn_shift));
		val |= (cfg->m << div_nmp->override_divm_shift) |
		       (cfg->n << div_nmp->override_divn_shift);
		pll_override_writel(val, params->pmc_divnm_reg, pll);
	} else {
		val = pll_readl_base(pll);

		val &= ~(divm_mask_shifted(pll) | divn_mask_shifted(pll) |
			 divp_mask_shifted(pll));

		val |= (cfg->m << divm_shift(pll)) |
		       (cfg->n << divn_shift(pll)) |
		       (cfg->p << divp_shift(pll));

		pll_writel_base(val, pll);
	}
}
static void _get_pll_mnp(struct tegra_clk_pll *pll,
			 struct tegra_clk_pll_freq_table *cfg)
{
	u32 val;
	struct tegra_clk_pll_params *params = pll->params;
	struct div_nmp *div_nmp = params->div_nmp;

	if ((params->flags & TEGRA_PLLM) &&
	    (pll_override_readl(PMC_PLLP_WB0_OVERRIDE, pll) &
	     PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)) {
		val = pll_override_readl(params->pmc_divp_reg, pll);
		cfg->p = (val >> div_nmp->override_divp_shift) & divp_mask(pll);

		val = pll_override_readl(params->pmc_divnm_reg, pll);
		cfg->m = (val >> div_nmp->override_divm_shift) & divm_mask(pll);
		cfg->n = (val >> div_nmp->override_divn_shift) & divn_mask(pll);
	} else {
		val = pll_readl_base(pll);

		cfg->m = (val >> div_nmp->divm_shift) & divm_mask(pll);
		cfg->n = (val >> div_nmp->divn_shift) & divn_mask(pll);
		cfg->p = (val >> div_nmp->divp_shift) & divp_mask(pll);
	}
}

static void _update_pll_cpcon(struct tegra_clk_pll *pll,
			      struct tegra_clk_pll_freq_table *cfg,
			      unsigned long rate)
{
	u32 val;

	val = pll_readl_misc(pll);

	val &= ~(PLL_MISC_CPCON_MASK << PLL_MISC_CPCON_SHIFT);
	val |= cfg->cpcon << PLL_MISC_CPCON_SHIFT;

	if (pll->params->flags & TEGRA_PLL_SET_LFCON) {
		val &= ~(PLL_MISC_LFCON_MASK << PLL_MISC_LFCON_SHIFT);
		if (cfg->n >= PLLDU_LFCON_SET_DIVN)
			val |= 1 << PLL_MISC_LFCON_SHIFT;
	} else if (pll->params->flags & TEGRA_PLL_SET_DCCON) {
		val &= ~(1 << PLL_MISC_DCCON_SHIFT);
		if (rate >= (pll->params->vco_max >> 1))
			val |= 1 << PLL_MISC_DCCON_SHIFT;
	}

	pll_writel_misc(val, pll);
}

static int _program_pll(struct clk_hw *hw, struct tegra_clk_pll_freq_table *cfg,
			unsigned long rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	int state, ret = 0;

	state = clk_pll_is_enabled(hw);

	if (state)
		_clk_pll_disable(hw);

	_update_pll_mnp(pll, cfg);

	if (pll->params->flags & TEGRA_PLL_HAS_CPCON)
		_update_pll_cpcon(pll, cfg, rate);

	if (state) {
		_clk_pll_enable(hw);
		ret = clk_pll_wait_for_lock(pll);
	}

	return ret;
}

static int clk_pll_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	unsigned long flags = 0;
	int ret = 0;

	if (pll->params->flags & TEGRA_PLL_FIXED) {
		if (rate != pll->params->fixed_rate) {
			pr_err("%s: Can not change %s fixed rate %lu to %lu\n",
			       __func__, __clk_get_name(hw->clk),
			       pll->params->fixed_rate, rate);
			return -EINVAL;
		}
		return 0;
	}

	if (_get_table_rate(hw, &cfg, rate, parent_rate) &&
	    _calc_rate(hw, &cfg, rate, parent_rate)) {
		pr_err("%s: Failed to set %s rate %lu\n", __func__,
		       __clk_get_name(hw->clk), rate);
		WARN_ON(1);
		return -EINVAL;
	}

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_get_pll_mnp(pll, &old_cfg);

	if (old_cfg.m != cfg.m || old_cfg.n != cfg.n || old_cfg.p != cfg.p)
		ret = _program_pll(hw, &cfg, rate);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static long clk_pll_round_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long *prate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg;

	if (pll->params->flags & TEGRA_PLL_FIXED)
		return pll->params->fixed_rate;

	/* PLLM is used for memory; we do not change rate */
	if (pll->params->flags & TEGRA_PLLM)
		return __clk_get_rate(hw->clk);

	if (_get_table_rate(hw, &cfg, rate, *prate) &&
	    _calc_rate(hw, &cfg, rate, *prate))
		return -EINVAL;

	return cfg.output_rate;
}
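
/*
 * Recalculate the output rate from the programmed dividers:
 * rate = parent_rate * N / (M * pdiv), where pdiv is the decoded post
 * divider (1 << P, or a pdiv_tohw lookup).  Bypassed PLLs report the
 * parent rate, and fixed-rate PLLs without the override bit report their
 * nominal fixed rate.
 */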
static unsigned long clk_pll_recalc_rate(struct clk_hw *hw,
					 unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg;
	u32 val;
	u64 rate = parent_rate;
	int pdiv;

	val = pll_readl_base(pll);

	if ((pll->params->flags & TEGRA_PLL_BYPASS) && (val & PLL_BASE_BYPASS))
		return parent_rate;

	if ((pll->params->flags & TEGRA_PLL_FIXED) &&
	    !(val & PLL_BASE_OVERRIDE)) {
		struct tegra_clk_pll_freq_table sel;

		if (_get_table_rate(hw, &sel, pll->params->fixed_rate,
				    parent_rate)) {
			pr_err("Clock %s has unknown fixed frequency\n",
			       __clk_get_name(hw->clk));
			BUG();
		}
		return pll->params->fixed_rate;
	}

	_get_pll_mnp(pll, &cfg);

	pdiv = _hw_to_p_div(hw, cfg.p);
	if (pdiv < 0) {
		WARN_ON(1);
		pdiv = 1;
	}

	cfg.m *= pdiv;

	rate *= cfg.n;
	do_div(rate, cfg.m);

	return rate;
}

static int clk_plle_training(struct tegra_clk_pll *pll)
{
	u32 val;
	unsigned long timeout;

	if (!pll->pmc)
		return -ENOSYS;

	/*
	 * PLLE is already disabled, and setup cleared;
	 * create falling edge on PLLE IDDQ input.
	 */
	val = readl(pll->pmc + PMC_SATA_PWRGT);
	val |= PMC_SATA_PWRGT_PLLE_IDDQ_VALUE;
	writel(val, pll->pmc + PMC_SATA_PWRGT);

	val = readl(pll->pmc + PMC_SATA_PWRGT);
	val |= PMC_SATA_PWRGT_PLLE_IDDQ_SWCTL;
	writel(val, pll->pmc + PMC_SATA_PWRGT);

	val = readl(pll->pmc + PMC_SATA_PWRGT);
	val &= ~PMC_SATA_PWRGT_PLLE_IDDQ_VALUE;
	writel(val, pll->pmc + PMC_SATA_PWRGT);

	val = pll_readl_misc(pll);

	timeout = jiffies + msecs_to_jiffies(100);
	while (1) {
		val = pll_readl_misc(pll);

		if (val & PLLE_MISC_READY)
			break;

		if (time_after(jiffies, timeout)) {
			pr_err("%s: timeout waiting for PLLE\n", __func__);
			return -EBUSY;
		}
		udelay(300);
	}

	return 0;
}
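
/*
 * PLLE enable sequence: look up the fixed rate in the frequency table,
 * clear the setup and lock-enable bits, run the IDDQ training sequence
 * above if the PLL does not report ready, program the dividers (the table
 * cpcon field is reused as the CML divider), disable spread spectrum,
 * then enable the PLL and wait for lock.
 */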
static int clk_plle_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long input_rate = clk_get_rate(clk_get_parent(hw->clk));
	struct tegra_clk_pll_freq_table sel;
	u32 val;
	int err;

	if (_get_table_rate(hw, &sel, pll->params->fixed_rate, input_rate))
		return -EINVAL;

	clk_pll_disable(hw);

	val = pll_readl_misc(pll);
	val &= ~(PLLE_MISC_LOCK_ENABLE | PLLE_MISC_SETUP_MASK);
	pll_writel_misc(val, pll);

	val = pll_readl_misc(pll);
	if (!(val & PLLE_MISC_READY)) {
		err = clk_plle_training(pll);
		if (err)
			return err;
	}

	if (pll->params->flags & TEGRA_PLLE_CONFIGURE) {
		/* configure dividers */
		val = pll_readl_base(pll);
		val &= ~(divp_mask_shifted(pll) | divn_mask_shifted(pll) |
			 divm_mask_shifted(pll));
		val &= ~(PLLE_BASE_DIVCML_MASK << PLLE_BASE_DIVCML_SHIFT);
		val |= sel.m << divm_shift(pll);
		val |= sel.n << divn_shift(pll);
		val |= sel.p << divp_shift(pll);
		val |= sel.cpcon << PLLE_BASE_DIVCML_SHIFT;
		pll_writel_base(val, pll);
	}

	val = pll_readl_misc(pll);
	val |= PLLE_MISC_SETUP_VALUE;
	val |= PLLE_MISC_LOCK_ENABLE;
	pll_writel_misc(val, pll);

	val = readl(pll->clk_base + PLLE_SS_CTRL);
	val &= ~PLLE_SS_COEFFICIENTS_MASK;
	val |= PLLE_SS_DISABLE;
	writel(val, pll->clk_base + PLLE_SS_CTRL);

	val = pll_readl_base(pll);
	val |= (PLL_BASE_BYPASS | PLL_BASE_ENABLE);
	pll_writel_base(val, pll);

	clk_pll_wait_for_lock(pll);

	return 0;
}

static unsigned long clk_plle_recalc_rate(struct clk_hw *hw,
					  unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val = pll_readl_base(pll);
	u32 divn = 0, divm = 0, divp = 0;
	u64 rate = parent_rate;

	divp = (val >> pll->params->div_nmp->divp_shift) & (divp_mask(pll));
	divn = (val >> pll->params->div_nmp->divn_shift) & (divn_mask(pll));
	divm = (val >> pll->params->div_nmp->divm_shift) & (divm_mask(pll));

	divm *= divp;

	rate *= divn;
	do_div(rate, divm);
	return rate;
}

const struct clk_ops tegra_clk_pll_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_enable,
	.disable = clk_pll_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_round_rate,
	.set_rate = clk_pll_set_rate,
};

const struct clk_ops tegra_clk_plle_ops = {
	.recalc_rate = clk_plle_recalc_rate,
	.is_enabled = clk_pll_is_enabled,
	.disable = clk_pll_disable,
	.enable = clk_plle_enable,
};
#if defined(CONFIG_ARCH_TEGRA_114_SOC) || defined(CONFIG_ARCH_TEGRA_124_SOC)

static int _pll_fixed_mdiv(struct tegra_clk_pll_params *pll_params,
			   unsigned long parent_rate)
{
	if (parent_rate > pll_params->cf_max)
		return 2;
	else
		return 1;
}

static unsigned long _clip_vco_min(unsigned long vco_min,
				   unsigned long parent_rate)
{
	return DIV_ROUND_UP(vco_min, parent_rate) * parent_rate;
}

static int _setup_dynamic_ramp(struct tegra_clk_pll_params *pll_params,
			       void __iomem *clk_base,
			       unsigned long parent_rate)
{
	u32 val;
	u32 step_a, step_b;

	switch (parent_rate) {
	case 12000000:
	case 13000000:
	case 26000000:
		step_a = 0x2B;
		step_b = 0x0B;
		break;
	case 16800000:
		step_a = 0x1A;
		step_b = 0x09;
		break;
	case 19200000:
		step_a = 0x12;
		step_b = 0x08;
		break;
	default:
		pr_err("%s: Unexpected reference rate %lu\n",
		       __func__, parent_rate);
		WARN_ON(1);
		return -EINVAL;
	}

	val = step_a << pll_params->stepa_shift;
	val |= step_b << pll_params->stepb_shift;
	writel_relaxed(val, clk_base + pll_params->dyn_ramp_reg);

	return 0;
}
static int clk_pll_iddq_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	u32 val;
	int ret;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	/* take the PLL out of IDDQ (power down) before enabling it */
	val = pll_readl(pll->params->iddq_reg, pll);
	val &= ~BIT(pll->params->iddq_bit_idx);
	pll_writel(val, pll->params->iddq_reg, pll);
	udelay(2);

	_clk_pll_enable(hw);

	ret = clk_pll_wait_for_lock(pll);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}
static void clk_pll_iddq_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	u32 val;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_disable(hw);

	val = pll_readl(pll->params->iddq_reg, pll);
	val |= BIT(pll->params->iddq_bit_idx);
	pll_writel(val, pll->params->iddq_reg, pll);
	udelay(2);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}
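
/*
 * Rate calculation for PLLs that support dynamic ramping: M is fixed by
 * the reference rate (see _pll_fixed_mdiv()), the post divider P is chosen
 * so that the VCO stays at or above vco_min, and N follows from
 * N = (rate * P) * M / parent_rate.
 */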
static int _calc_dynamic_ramp_rate(struct clk_hw *hw,
				   struct tegra_clk_pll_freq_table *cfg,
				   unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned int p;
	int p_div;

	if (!rate)
		return -EINVAL;

	p = DIV_ROUND_UP(pll->params->vco_min, rate);
	cfg->m = _pll_fixed_mdiv(pll->params, parent_rate);
	cfg->output_rate = rate * p;
	cfg->n = cfg->output_rate * cfg->m / parent_rate;

	p_div = _p_div_to_hw(hw, p);
	if (p_div < 0)
		return p_div;
	else
		cfg->p = p_div;

	if (cfg->n > divn_max(pll) || cfg->output_rate > pll->params->vco_max)
		return -EINVAL;

	return 0;
}

static int _pll_ramp_calc_pll(struct clk_hw *hw,
			      struct tegra_clk_pll_freq_table *cfg,
			      unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	int err = 0, p_div;

	err = _get_table_rate(hw, cfg, rate, parent_rate);
	if (err < 0)
		err = _calc_dynamic_ramp_rate(hw, cfg, rate, parent_rate);
	else {
		if (cfg->m != _pll_fixed_mdiv(pll->params, parent_rate)) {
			WARN_ON(1);
			err = -EINVAL;
			goto out;
		}

		p_div = _p_div_to_hw(hw, cfg->p);
		if (p_div < 0)
			return p_div;
		else
			cfg->p = p_div;
	}

	if (cfg->p > pll->params->max_p)
		err = -EINVAL;

out:
	return err;
}
static int clk_pllxc_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	unsigned long flags = 0;
	int ret = 0;

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
	if (ret < 0)
		return ret;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_get_pll_mnp(pll, &old_cfg);

	if (old_cfg.m != cfg.m || old_cfg.n != cfg.n || old_cfg.p != cfg.p)
		ret = _program_pll(hw, &cfg, rate);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static long clk_pll_ramp_round_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	struct tegra_clk_pll_freq_table cfg;
	int ret = 0, p_div;
	u64 output_rate = *prate;

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, *prate);
	if (ret < 0)
		return ret;

	p_div = _hw_to_p_div(hw, cfg.p);
	if (p_div < 0)
		return p_div;

	output_rate *= cfg.n;
	do_div(output_rate, cfg.m * p_div);

	return output_rate;
}

static int clk_pllm_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int state, ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	state = clk_pll_is_enabled(hw);
	if (state) {
		if (rate != clk_get_rate(hw->clk)) {
			pr_err("%s: Cannot change active PLLM\n", __func__);
			ret = -EINVAL;
			goto out;
		}
		goto out;
	}

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
	if (ret < 0)
		goto out;

	_update_pll_mnp(pll, &cfg);

out:
	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}
static void _pllcx_strobe(struct tegra_clk_pll *pll)
{
	u32 val;

	val = pll_readl_misc(pll);
	val |= PLLCX_MISC_STROBE;
	pll_writel_misc(val, pll);
	udelay(2);

	val &= ~PLLCX_MISC_STROBE;
	pll_writel_misc(val, pll);
}

static int clk_pllc_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;
	int ret = 0;
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_enable(hw);
	udelay(2);

	val = pll_readl_misc(pll);
	val &= ~PLLCX_MISC_RESET;
	pll_writel_misc(val, pll);
	udelay(2);

	_pllcx_strobe(pll);

	ret = clk_pll_wait_for_lock(pll);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void _clk_pllc_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	_clk_pll_disable(hw);

	val = pll_readl_misc(pll);
	val |= PLLCX_MISC_RESET;
	pll_writel_misc(val, pll);
	udelay(2);
}

static void clk_pllc_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pllc_disable(hw);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}
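
/*
 * PLLC (PLLCX) needs its SDM/filter divider range matched to the feedback
 * divider: below a reference-rate-dependent N threshold the low-range
 * coefficients are used, above it the high-range ones.
 */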
static int _pllcx_update_dynamic_coef(struct tegra_clk_pll *pll,
				      unsigned long input_rate, u32 n)
{
	u32 val, n_threshold;

	switch (input_rate) {
	case 12000000:
		n_threshold = 70;
		break;
	case 13000000:
	case 26000000:
		n_threshold = 71;
		break;
	case 16800000:
		n_threshold = 55;
		break;
	case 19200000:
		n_threshold = 48;
		break;
	default:
		pr_err("%s: Unexpected reference rate %lu\n",
		       __func__, input_rate);
		return -EINVAL;
	}

	val = pll_readl_misc(pll);
	val &= ~(PLLCX_MISC_SDM_DIV_MASK | PLLCX_MISC_FILT_DIV_MASK);
	val |= n <= n_threshold ?
		PLLCX_MISC_DIV_LOW_RANGE : PLLCX_MISC_DIV_HIGH_RANGE;
	pll_writel_misc(val, pll);

	return 0;
}

static int clk_pllc_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int state, ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
	if (ret < 0)
		goto out;

	_get_pll_mnp(pll, &old_cfg);

	if (cfg.m != old_cfg.m) {
		WARN_ON(1);
		goto out;
	}

	if (old_cfg.n == cfg.n && old_cfg.p == cfg.p)
		goto out;

	state = clk_pll_is_enabled(hw);
	if (state)
		_clk_pllc_disable(hw);

	ret = _pllcx_update_dynamic_coef(pll, parent_rate, cfg.n);
	if (ret < 0)
		goto out;

	_update_pll_mnp(pll, &cfg);

	if (state)
		ret = clk_pllc_enable(hw);

out:
	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}
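
/*
 * PLLRE rate handling only deals with the VCO: M is fixed, N is derived
 * from the requested rate, and the post divider P is left untouched
 * (clk_pllre_set_rate() preserves whatever P is already programmed).
 */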
static long _pllre_calc_rate(struct tegra_clk_pll *pll,
			     struct tegra_clk_pll_freq_table *cfg,
			     unsigned long rate, unsigned long parent_rate)
{
	u16 m, n;
	u64 output_rate = parent_rate;

	m = _pll_fixed_mdiv(pll->params, parent_rate);
	n = rate * m / parent_rate;

	output_rate *= n;
	do_div(output_rate, m);

	if (cfg) {
		cfg->m = m;
		cfg->n = n;
	}

	return output_rate;
}

static int clk_pllre_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int state, ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_pllre_calc_rate(pll, &cfg, rate, parent_rate);
	_get_pll_mnp(pll, &old_cfg);
	cfg.p = old_cfg.p;

	if (cfg.m != old_cfg.m || cfg.n != old_cfg.n) {
		state = clk_pll_is_enabled(hw);
		if (state)
			_clk_pll_disable(hw);

		_update_pll_mnp(pll, &cfg);

		if (state) {
			_clk_pll_enable(hw);
			ret = clk_pll_wait_for_lock(pll);
		}
	}

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static unsigned long clk_pllre_recalc_rate(struct clk_hw *hw,
					   unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u64 rate = parent_rate;

	_get_pll_mnp(pll, &cfg);

	rate *= cfg.n;
	do_div(rate, cfg.m);

	return rate;
}

static long clk_pllre_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);

	return _pllre_calc_rate(pll, NULL, rate, *prate);
}

static int clk_plle_tegra114_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table sel;
	u32 val;
	int ret;
	unsigned long flags = 0;
	unsigned long input_rate = clk_get_rate(clk_get_parent(hw->clk));

	if (_get_table_rate(hw, &sel, pll->params->fixed_rate, input_rate))
		return -EINVAL;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	val = pll_readl_base(pll);
	val &= ~BIT(29); /* Disable lock override */
	pll_writel_base(val, pll);

	val = pll_readl(pll->params->aux_reg, pll);
	val |= PLLE_AUX_ENABLE_SWCTL;
	val &= ~PLLE_AUX_SEQ_ENABLE;
	pll_writel(val, pll->params->aux_reg, pll);
	udelay(1);

	val = pll_readl_misc(pll);
	val |= PLLE_MISC_LOCK_ENABLE;
	val |= PLLE_MISC_IDDQ_SW_CTRL;
	val &= ~PLLE_MISC_IDDQ_SW_VALUE;
	val |= PLLE_MISC_PLLE_PTS;
	val |= PLLE_MISC_VREG_BG_CTRL_MASK | PLLE_MISC_VREG_CTRL_MASK;
	pll_writel_misc(val, pll);
	udelay(5);

	val = pll_readl(PLLE_SS_CTRL, pll);
	val |= PLLE_SS_DISABLE;
	pll_writel(val, PLLE_SS_CTRL, pll);

	val = pll_readl_base(pll);
	val &= ~(divp_mask_shifted(pll) | divn_mask_shifted(pll) |
		 divm_mask_shifted(pll));
	val &= ~(PLLE_BASE_DIVCML_MASK << PLLE_BASE_DIVCML_SHIFT);
	val |= sel.m << divm_shift(pll);
	val |= sel.n << divn_shift(pll);
	val |= sel.cpcon << PLLE_BASE_DIVCML_SHIFT;
	pll_writel_base(val, pll);
	udelay(1);

	_clk_pll_enable(hw);
	ret = clk_pll_wait_for_lock(pll);

	if (ret < 0)
		goto out;

	val = pll_readl(PLLE_SS_CTRL, pll);
	val &= ~(PLLE_SS_CNTL_CENTER | PLLE_SS_CNTL_INVERT);
	val &= ~PLLE_SS_COEFFICIENTS_MASK;
	val |= PLLE_SS_COEFFICIENTS_VAL;
	pll_writel(val, PLLE_SS_CTRL, pll);
	val &= ~(PLLE_SS_CNTL_SSC_BYP | PLLE_SS_CNTL_BYPASS_SS);
	pll_writel(val, PLLE_SS_CTRL, pll);
	udelay(1);
	val &= ~PLLE_SS_CNTL_INTERP_RESET;
	pll_writel(val, PLLE_SS_CTRL, pll);
	udelay(1);

	/* Enable hw control of xusb brick pll */
	val = pll_readl_misc(pll);
	val &= ~PLLE_MISC_IDDQ_SW_CTRL;
	pll_writel_misc(val, pll);

	val = pll_readl(pll->params->aux_reg, pll);
	val |= (PLLE_AUX_USE_LOCKDET | PLLE_AUX_SEQ_START_STATE);
	val &= ~(PLLE_AUX_ENABLE_SWCTL | PLLE_AUX_SS_SWCTL);
	pll_writel(val, pll->params->aux_reg, pll);
	udelay(1);
	val |= PLLE_AUX_SEQ_ENABLE;
	pll_writel(val, pll->params->aux_reg, pll);

	val = pll_readl(XUSBIO_PLL_CFG0, pll);
	val |= (XUSBIO_PLL_CFG0_PADPLL_USE_LOCKDET |
		XUSBIO_PLL_CFG0_SEQ_START_STATE);
	val &= ~(XUSBIO_PLL_CFG0_CLK_ENABLE_SWCTL |
		 XUSBIO_PLL_CFG0_PADPLL_RESET_SWCTL);
	pll_writel(val, XUSBIO_PLL_CFG0, pll);
	udelay(1);
	val |= XUSBIO_PLL_CFG0_SEQ_ENABLE;
	pll_writel(val, XUSBIO_PLL_CFG0, pll);

	/* Enable hw control of SATA pll */
	val = pll_readl(SATA_PLL_CFG0, pll);
	val &= ~SATA_PLL_CFG0_PADPLL_RESET_SWCTL;
	val |= SATA_PLL_CFG0_PADPLL_USE_LOCKDET;
	val |= SATA_PLL_CFG0_SEQ_START_STATE;
	pll_writel(val, SATA_PLL_CFG0, pll);
	udelay(1);

	val = pll_readl(SATA_PLL_CFG0, pll);
	val |= SATA_PLL_CFG0_SEQ_ENABLE;
	pll_writel(val, SATA_PLL_CFG0, pll);

out:
	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void clk_plle_tegra114_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	u32 val;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_disable(hw);

	val = pll_readl_misc(pll);
	val |= PLLE_MISC_IDDQ_SW_CTRL | PLLE_MISC_IDDQ_SW_VALUE;
	pll_writel_misc(val, pll);
	udelay(1);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}
#endif
static struct tegra_clk_pll *_tegra_init_pll(void __iomem *clk_base,
		void __iomem *pmc, struct tegra_clk_pll_params *pll_params,
		spinlock_t *lock)
{
	struct tegra_clk_pll *pll;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	pll->clk_base = clk_base;
	pll->pmc = pmc;

	pll->params = pll_params;
	pll->lock = lock;

	if (!pll_params->div_nmp)
		pll_params->div_nmp = &default_nmp;

	return pll;
}

static struct clk *_tegra_clk_register_pll(struct tegra_clk_pll *pll,
		const char *name, const char *parent_name, unsigned long flags,
		const struct clk_ops *ops)
{
	struct clk_init_data init;

	init.name = name;
	init.ops = ops;
	init.flags = flags;
	init.parent_names = (parent_name ? &parent_name : NULL);
	init.num_parents = (parent_name ? 1 : 0);

	/* Data in .init is copied by clk_register(), so stack variable OK */
	pll->hw.init = &init;

	return clk_register(NULL, &pll->hw);
}
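
/*
 * Registration helpers.  A SoC clock driver typically fills in a
 * tegra_clk_pll_params structure (register offsets, VCO limits, frequency
 * table, flags) and calls one of the tegra_clk_register_pll*() variants
 * from its init code, for example (illustrative only, the surrounding
 * names are made up):
 *
 *	clks[pll_c] = tegra_clk_register_pll("pll_c", "pll_ref", clk_base,
 *					     pmc_base, 0, &pll_c_params,
 *					     &pll_c_lock);
 *
 * On success the returned struct clk is owned by the common clock
 * framework; on failure the helpers free the tegra_clk_pll they allocated.
 */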
struct clk *tegra_clk_register_pll(const char *name, const char *parent_name,
		void __iomem *clk_base, void __iomem *pmc,
		unsigned long flags, struct tegra_clk_pll_params *pll_params,
		spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk;

	pll_params->flags |= TEGRA_PLL_BYPASS;
	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pll_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

static struct div_nmp pll_e_nmp = {
	.divn_shift = PLLE_BASE_DIVN_SHIFT,
	.divn_width = PLLE_BASE_DIVN_WIDTH,
	.divm_shift = PLLE_BASE_DIVM_SHIFT,
	.divm_width = PLLE_BASE_DIVM_WIDTH,
	.divp_shift = PLLE_BASE_DIVP_SHIFT,
	.divp_width = PLLE_BASE_DIVP_WIDTH,
};

struct clk *tegra_clk_register_plle(const char *name, const char *parent_name,
		void __iomem *clk_base, void __iomem *pmc,
		unsigned long flags, struct tegra_clk_pll_params *pll_params,
		spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk;

	pll_params->flags |= TEGRA_PLL_LOCK_MISC | TEGRA_PLL_BYPASS;
	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;

	if (!pll_params->div_nmp)
		pll_params->div_nmp = &pll_e_nmp;

	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_plle_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
#if defined(CONFIG_ARCH_TEGRA_114_SOC) || defined(CONFIG_ARCH_TEGRA_124_SOC)
static const struct clk_ops tegra_clk_pllxc_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllxc_set_rate,
};

static const struct clk_ops tegra_clk_pllm_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllm_set_rate,
};

static const struct clk_ops tegra_clk_pllc_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pllc_enable,
	.disable = clk_pllc_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllc_set_rate,
};

static const struct clk_ops tegra_clk_pllre_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pllre_recalc_rate,
	.round_rate = clk_pllre_round_rate,
	.set_rate = clk_pllre_set_rate,
};

static const struct clk_ops tegra_clk_plle_tegra114_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_plle_tegra114_enable,
	.disable = clk_plle_tegra114_disable,
	.recalc_rate = clk_pll_recalc_rate,
};

struct clk *tegra_clk_register_pllxc(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk, *parent;
	unsigned long parent_rate;
	int err;
	u32 val, val_iddq;

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	if (!pll_params->pdiv_tohw)
		return ERR_PTR(-EINVAL);

	parent_rate = __clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	err = _setup_dynamic_ramp(pll_params, clk_base, parent_rate);
	if (err)
		return ERR_PTR(err);

	val = readl_relaxed(clk_base + pll_params->base_reg);
	val_iddq = readl_relaxed(clk_base + pll_params->iddq_reg);

	if (val & PLL_BASE_ENABLE)
		WARN_ON(val_iddq & BIT(pll_params->iddq_bit_idx));
	else {
		val_iddq |= BIT(pll_params->iddq_bit_idx);
		writel_relaxed(val_iddq, clk_base + pll_params->iddq_reg);
	}

	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;

	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllxc_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
struct clk *tegra_clk_register_pllre(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock, unsigned long parent_rate)
{
	u32 val;
	struct tegra_clk_pll *pll;
	struct clk *clk;

	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE | TEGRA_PLL_LOCK_MISC;

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	/* program minimum rate by default */

	val = pll_readl_base(pll);
	if (val & PLL_BASE_ENABLE)
		WARN_ON(val & BIT(pll_params->iddq_bit_idx));
	else {
		int m;

		m = _pll_fixed_mdiv(pll_params, parent_rate);
		val = m << divm_shift(pll);
		val |= (pll_params->vco_min / parent_rate) << divn_shift(pll);
		pll_writel_base(val, pll);
	}

	/* disable lock override */

	val = pll_readl_misc(pll);
	val &= ~BIT(29);
	pll_writel_misc(val, pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllre_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
struct clk *tegra_clk_register_pllm(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk, *parent;
	unsigned long parent_rate;

	if (!pll_params->pdiv_tohw)
		return ERR_PTR(-EINVAL);

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	parent_rate = __clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	pll_params->flags |= TEGRA_PLL_BYPASS;
	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll_params->flags |= TEGRA_PLLM;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllm_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

struct clk *tegra_clk_register_pllc(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock)
{
	struct clk *parent, *clk;
	struct pdiv_map *p_tohw = pll_params->pdiv_tohw;
	struct tegra_clk_pll *pll;
	struct tegra_clk_pll_freq_table cfg;
	unsigned long parent_rate;

	if (!p_tohw)
		return ERR_PTR(-EINVAL);

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	parent_rate = __clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	pll_params->flags |= TEGRA_PLL_BYPASS;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	/*
	 * Most of the PLLC register fields are shadowed and can not be read
	 * directly from the PLL h/w, so the actual PLLC boot state is
	 * unknown.  Initialize the PLL to its default state: disabled, in
	 * reset, shadow registers loaded with default parameters, and
	 * dividers preset for half of the minimum VCO rate (the latter
	 * assures that the shadowed divider settings are within the
	 * supported range).
	 */
	cfg.m = _pll_fixed_mdiv(pll_params, parent_rate);
	cfg.n = cfg.m * pll_params->vco_min / parent_rate;

	while (p_tohw->pdiv) {
		if (p_tohw->pdiv == 2) {
			cfg.p = p_tohw->hw_val;
			break;
		}
		p_tohw++;
	}

	if (!p_tohw->pdiv) {
		WARN_ON(1);
		kfree(pll);
		return ERR_PTR(-EINVAL);
	}

	pll_writel_base(0, pll);
	_update_pll_mnp(pll, &cfg);

	pll_writel_misc(PLLCX_MISC_DEFAULT, pll);
	pll_writel(PLLCX_MISC1_DEFAULT, pll_params->ext_misc_reg[0], pll);
	pll_writel(PLLCX_MISC2_DEFAULT, pll_params->ext_misc_reg[1], pll);
	pll_writel(PLLCX_MISC3_DEFAULT, pll_params->ext_misc_reg[2], pll);

	_pllcx_update_dynamic_coef(pll, parent_rate, cfg.n);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllc_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
struct clk *tegra_clk_register_plle_tegra114(const char *name,
				const char *parent_name,
				void __iomem *clk_base, unsigned long flags,
				struct tegra_clk_pll_params *pll_params,
				spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk;
	u32 val, val_aux;

	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll = _tegra_init_pll(clk_base, NULL, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	/* ensure parent is set to pll_re_vco */

	val = pll_readl_base(pll);
	val_aux = pll_readl(pll_params->aux_reg, pll);

	if (val & PLL_BASE_ENABLE) {
		if ((val_aux & PLLE_AUX_PLLRE_SEL) ||
		    (val_aux & PLLE_AUX_PLLP_SEL))
			WARN(1, "pll_e enabled with unsupported parent %s\n",
			     (val_aux & PLLE_AUX_PLLP_SEL) ? "pllp_out0" :
			     "pll_re_vco");
	} else {
		val_aux &= ~(PLLE_AUX_PLLRE_SEL | PLLE_AUX_PLLP_SEL);
		pll_writel(val_aux, pll_params->aux_reg, pll);
	}

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_plle_tegra114_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
#endif

#ifdef CONFIG_ARCH_TEGRA_124_SOC
static const struct clk_ops tegra_clk_pllss_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllxc_set_rate,
};
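
/*
 * PLLSS registration: clear the reference source select, program a safe
 * minimum-rate M/N configuration with the last post divider entry from the
 * pdiv table, write the default misc/config/control values, and assert
 * IDDQ unless the PLL is already running (in which case IDDQ being set is
 * treated as an error).
 */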
struct clk *tegra_clk_register_pllss(const char *name, const char *parent_name,
				void __iomem *clk_base, unsigned long flags,
				struct tegra_clk_pll_params *pll_params,
				spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk, *parent;
	struct tegra_clk_pll_freq_table cfg;
	unsigned long parent_rate;
	u32 val;
	int i;

	if (!pll_params->div_nmp)
		return ERR_PTR(-EINVAL);

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	pll_params->flags = TEGRA_PLL_HAS_LOCK_ENABLE | TEGRA_PLL_USE_LOCK;
	pll = _tegra_init_pll(clk_base, NULL, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	val = pll_readl_base(pll);
	val &= ~PLLSS_REF_SRC_SEL_MASK;
	pll_writel_base(val, pll);

	parent_rate = __clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	/* initialize PLL to minimum rate */

	cfg.m = _pll_fixed_mdiv(pll_params, parent_rate);
	cfg.n = cfg.m * pll_params->vco_min / parent_rate;

	for (i = 0; pll_params->pdiv_tohw[i].pdiv; i++)
		;
	if (!i) {
		kfree(pll);
		return ERR_PTR(-EINVAL);
	}

	cfg.p = pll_params->pdiv_tohw[i - 1].hw_val;

	_update_pll_mnp(pll, &cfg);

	pll_writel_misc(PLLSS_MISC_DEFAULT, pll);
	pll_writel(PLLSS_CFG_DEFAULT, pll_params->ext_misc_reg[0], pll);
	pll_writel(PLLSS_CTRL1_DEFAULT, pll_params->ext_misc_reg[1], pll);
	pll_writel(PLLSS_CTRL2_DEFAULT, pll_params->ext_misc_reg[2], pll);

	val = pll_readl_base(pll);
	if (val & PLL_BASE_ENABLE) {
		if (val & BIT(pll_params->iddq_bit_idx)) {
			WARN(1, "%s is on but IDDQ set\n", name);
			kfree(pll);
			return ERR_PTR(-EINVAL);
		}
	} else
		val |= BIT(pll_params->iddq_bit_idx);

	val &= ~PLLSS_LOCK_OVERRIDE;
	pll_writel_base(val, pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllss_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
#endif