clk-rcg2.c 21 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870
  1. /*
  2. * Copyright (c) 2013, The Linux Foundation. All rights reserved.
  3. *
  4. * This software is licensed under the terms of the GNU General Public
  5. * License version 2, as published by the Free Software Foundation, and
  6. * may be copied, distributed, and modified under those terms.
  7. *
  8. * This program is distributed in the hope that it will be useful,
  9. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  10. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  11. * GNU General Public License for more details.
  12. */
  13. #include <linux/kernel.h>
  14. #include <linux/bitops.h>
  15. #include <linux/err.h>
  16. #include <linux/bug.h>
  17. #include <linux/export.h>
  18. #include <linux/clk-provider.h>
  19. #include <linux/delay.h>
  20. #include <linux/regmap.h>
  21. #include <linux/math64.h>
  22. #include <asm/div64.h>
  23. #include "clk-rcg.h"
  24. #include "common.h"
/* RCG register offsets, relative to rcg->cmd_rcgr */
#define CMD_REG 0x0
#define CMD_UPDATE BIT(0)	/* self-clearing: latch the new configuration */
#define CMD_ROOT_EN BIT(1)	/* force the root clock on */
#define CMD_DIRTY_CFG BIT(4)	/* pending, un-latched CFG write */
#define CMD_DIRTY_N BIT(5)
#define CMD_DIRTY_M BIT(6)
#define CMD_DIRTY_D BIT(7)
#define CMD_ROOT_OFF BIT(31)	/* status: root clock is off */

#define CFG_REG 0x4
#define CFG_SRC_DIV_SHIFT 0	/* pre-divider ("hid" divider) field */
#define CFG_SRC_SEL_SHIFT 8	/* parent source mux select field */
#define CFG_SRC_SEL_MASK (0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT 12
#define CFG_MODE_MASK (0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE (0x2 << CFG_MODE_SHIFT)	/* M/N counter mode */

/* M/N/D counter registers; only present when rcg->mnd_width != 0 */
#define M_REG 0x8
#define N_REG 0xc
#define D_REG 0x10

/* Rounding policy when matching a requested rate against the freq table */
enum freq_policy {
	FLOOR,	/* highest table entry <= requested rate */
	CEIL,	/* lowest table entry >= requested rate */
};
  47. static int clk_rcg2_is_enabled(struct clk_hw *hw)
  48. {
  49. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  50. u32 cmd;
  51. int ret;
  52. ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
  53. if (ret)
  54. return ret;
  55. return (cmd & CMD_ROOT_OFF) == 0;
  56. }
  57. static u8 clk_rcg2_get_parent(struct clk_hw *hw)
  58. {
  59. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  60. int num_parents = clk_hw_get_num_parents(hw);
  61. u32 cfg;
  62. int i, ret;
  63. ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
  64. if (ret)
  65. goto err;
  66. cfg &= CFG_SRC_SEL_MASK;
  67. cfg >>= CFG_SRC_SEL_SHIFT;
  68. for (i = 0; i < num_parents; i++)
  69. if (cfg == rcg->parent_map[i].cfg)
  70. return i;
  71. err:
  72. pr_debug("%s: Clock %s has invalid parent, using default.\n",
  73. __func__, clk_hw_get_name(hw));
  74. return 0;
  75. }
/*
 * Latch a new RCG configuration into the hardware.
 *
 * Writing CMD_UPDATE tells the RCG to switch to the settings currently in
 * the CFG/M/N/D registers; the bit self-clears once the switch completes.
 * Polls for up to ~500us.  Returns a regmap error on register access
 * failure; on timeout it WARNs but still returns 0.
 */
static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		/* CMD_UPDATE clears when the hardware has latched the config */
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}
  98. static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
  99. {
  100. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  101. int ret;
  102. u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
  103. ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
  104. CFG_SRC_SEL_MASK, cfg);
  105. if (ret)
  106. return ret;
  107. return update_config(rcg);
  108. }
  109. /*
  110. * Calculate m/n:d rate
  111. *
  112. * parent_rate m
  113. * rate = ----------- x ---
  114. * hid_div n
  115. */
  116. static unsigned long
  117. calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
  118. {
  119. if (hid_div) {
  120. rate *= 2;
  121. rate /= hid_div + 1;
  122. }
  123. if (mode) {
  124. u64 tmp = rate;
  125. tmp *= m;
  126. do_div(tmp, n);
  127. rate = tmp;
  128. }
  129. return rate;
  130. }
/*
 * Compute the current output rate from the hardware state: read the
 * pre-divider (and, for MND clocks, the M/N counters and mode) and feed
 * them through calc_rate().
 */
static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		/* N register holds ~(n - m); invert, mask, add m to recover n */
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}
  153. static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
  154. struct clk_rate_request *req,
  155. enum freq_policy policy)
  156. {
  157. unsigned long clk_flags, rate = req->rate;
  158. struct clk_hw *p;
  159. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  160. int index;
  161. switch (policy) {
  162. case FLOOR:
  163. f = qcom_find_freq_floor(f, rate);
  164. break;
  165. case CEIL:
  166. f = qcom_find_freq(f, rate);
  167. break;
  168. default:
  169. return -EINVAL;
  170. };
  171. if (!f)
  172. return -EINVAL;
  173. index = qcom_find_src_index(hw, rcg->parent_map, f->src);
  174. if (index < 0)
  175. return index;
  176. clk_flags = clk_hw_get_flags(hw);
  177. p = clk_hw_get_parent_by_index(hw, index);
  178. if (clk_flags & CLK_SET_RATE_PARENT) {
  179. if (f->pre_div) {
  180. rate /= 2;
  181. rate *= f->pre_div + 1;
  182. }
  183. if (f->n) {
  184. u64 tmp = rate;
  185. tmp = tmp * f->n;
  186. do_div(tmp, f->m);
  187. rate = tmp;
  188. }
  189. } else {
  190. rate = clk_hw_get_rate(p);
  191. }
  192. req->best_parent_hw = p;
  193. req->best_parent_rate = rate;
  194. req->rate = f->freq;
  195. return 0;
  196. }
  197. static int clk_rcg2_determine_rate(struct clk_hw *hw,
  198. struct clk_rate_request *req)
  199. {
  200. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  201. return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
  202. }
  203. static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
  204. struct clk_rate_request *req)
  205. {
  206. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  207. return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
  208. }
/*
 * Program the M/N/D counters (when present and in use) and the CFG
 * register for frequency table entry @f, then latch the configuration.
 *
 * Hardware encoding: M holds m, N holds ~(n - m), D holds ~n, and
 * dual-edge mode is selected whenever m != n.
 */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	/* Update divider, source select and mode in a single CFG write */
	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;

	ret = regmap_update_bits(rcg->clkr.regmap,
				 rcg->cmd_rcgr + CFG_REG, mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}
  243. static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
  244. enum freq_policy policy)
  245. {
  246. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  247. const struct freq_tbl *f;
  248. switch (policy) {
  249. case FLOOR:
  250. f = qcom_find_freq_floor(rcg->freq_tbl, rate);
  251. break;
  252. case CEIL:
  253. f = qcom_find_freq(rcg->freq_tbl, rate);
  254. break;
  255. default:
  256. return -EINVAL;
  257. };
  258. if (!f)
  259. return -EINVAL;
  260. return clk_rcg2_configure(rcg, f);
  261. }
  262. static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
  263. unsigned long parent_rate)
  264. {
  265. return __clk_rcg2_set_rate(hw, rate, CEIL);
  266. }
  267. static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
  268. unsigned long parent_rate)
  269. {
  270. return __clk_rcg2_set_rate(hw, rate, FLOOR);
  271. }
  272. static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
  273. unsigned long rate, unsigned long parent_rate, u8 index)
  274. {
  275. return __clk_rcg2_set_rate(hw, rate, CEIL);
  276. }
  277. static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
  278. unsigned long rate, unsigned long parent_rate, u8 index)
  279. {
  280. return __clk_rcg2_set_rate(hw, rate, FLOOR);
  281. }
/* Standard RCG: table-driven rate selection, rounding requests up */
const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
/* As clk_rcg2_ops, but rounds rate requests down instead of up */
const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
/*
 * Force the RCG root on, program @rate, then release the force-enable.
 * Used by the "shared" RCG variants, whose configuration can only be
 * latched while the root clock is actually running.
 */
static int clk_rcg2_shared_force_enable(struct clk_hw *hw, unsigned long rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	/* force enable RCG */
	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

	/* wait for RCG to turn ON (poll for up to ~500us) */
	for (count = 500; count > 0; count--) {
		ret = clk_rcg2_is_enabled(hw);
		if (ret)
			break;
		udelay(1);
	}
	/* Timeout is only logged; we proceed and attempt the rate change anyway */
	if (!count)
		pr_err("%s: RCG did not turn on\n", name);

	/* set clock rate */
	ret = __clk_rcg2_set_rate(hw, rate, CEIL);
	if (ret)
		return ret;

	/* clear force enable RCG */
	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}
  329. static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
  330. unsigned long parent_rate)
  331. {
  332. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  333. /* cache the rate */
  334. rcg->current_freq = rate;
  335. if (!__clk_is_enabled(hw->clk))
  336. return 0;
  337. return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
  338. }
  339. static unsigned long
  340. clk_rcg2_shared_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  341. {
  342. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  343. return rcg->current_freq = clk_rcg2_recalc_rate(hw, parent_rate);
  344. }
  345. static int clk_rcg2_shared_enable(struct clk_hw *hw)
  346. {
  347. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  348. return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
  349. }
  350. static void clk_rcg2_shared_disable(struct clk_hw *hw)
  351. {
  352. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  353. /* switch to XO, which is the lowest entry in the freq table */
  354. clk_rcg2_shared_set_rate(hw, rcg->freq_tbl[0].freq, 0);
  355. }
/* RCGs shared with other masters: rate is cached and applied on enable */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
/* One num/den fraction for deriving display rates from a fixed source */
struct frac_entry {
	int num;	/* M value */
	int den;	/* N value */
};
/* M/N fractions for eDP pixel rates from a 675 MHz source */
static const struct frac_entry frac_table_675m[] = { /* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },		/* sentinel: num == 0 terminates the table */
};
  379. static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
  380. { 31, 211 }, /* 119 M */
  381. { 32, 199 }, /* 130.25 M */
  382. { 63, 307 }, /* 138.50 M */
  383. { 11, 60 }, /* 148.50 M */
  384. { 50, 263 }, /* 154 M */
  385. { 31, 120 }, /* 205.25 M */
  386. { 119, 359 }, /* 268.50 M */
  387. { },
  388. };
/*
 * Set an eDP pixel rate by scanning the fraction table for an entry whose
 * implied source rate (rate * den / num) matches the parent to within
 * +/-100 kHz.  The current pre-divider is preserved; only M/N change.
 * Returns -EINVAL if no fraction fits.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* acceptable mismatch, in Hz */
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	/* Pick the fraction table matching the link-clock source rate */
	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		/* Keep the currently-programmed pre-divider */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}
  422. static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
  423. unsigned long rate, unsigned long parent_rate, u8 index)
  424. {
  425. /* Parent index is set statically in frequency table */
  426. return clk_edp_pixel_set_rate(hw, rate, parent_rate);
  427. }
/*
 * determine_rate for eDP pixel clocks: force the parent named by the
 * first freq table entry, then find a fraction whose implied source rate
 * matches that parent to within +/-100 kHz and report the achievable rate.
 * Returns -EINVAL if no fraction fits.
 */
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* acceptable mismatch, in Hz */
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	/* NOTE(review): index is not checked for < 0 before use below —
	 * presumably the table source is always present in the parent map;
	 * confirm against the freq tables of users of clk_edp_pixel_ops. */
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		/* Report the rate reachable with the current pre-divider */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}
/* eDP pixel clock: fractional M/N rates derived from the link clock */
const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
/*
 * determine_rate for DSI byte clocks: force the parent from the freq
 * table, round the parent, then compute the best half-integer divider
 * (capped at the hid field width) and report the resulting rate.
 */
static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	/* Half-integer divider: div = ceil(2 * parent / rate) - 1 */
	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}
  492. static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
  493. unsigned long parent_rate)
  494. {
  495. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  496. struct freq_tbl f = *rcg->freq_tbl;
  497. unsigned long div;
  498. u32 mask = BIT(rcg->hid_width) - 1;
  499. div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
  500. div = min_t(u32, div, mask);
  501. f.pre_div = div;
  502. return clk_rcg2_configure(rcg, &f);
  503. }
  504. static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
  505. unsigned long rate, unsigned long parent_rate, u8 index)
  506. {
  507. /* Parent index is set statically in frequency table */
  508. return clk_byte_set_rate(hw, rate, parent_rate);
  509. }
/* DSI byte clock: divider-only, parent fixed by the frequency table */
const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
/*
 * determine_rate for byte2 clocks: unlike clk_byte, the parent is taken
 * from the request (not the freq table); compute the best half-integer
 * divider for the rounded parent rate.
 */
static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	/* Half-integer divider: div = ceil(2 * parent / rate) - 1 */
	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}
/*
 * Program a byte2 rate: compute the half-integer divider, then read the
 * current source mux from hardware and reuse it as the table-entry
 * source, so the parent is left unchanged.  Returns -EINVAL if the mux
 * value is not in the parent map.
 */
static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	/* div = ceil(2 * parent / rate) - 1, clamped to the hid field */
	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	/* Keep the currently-selected parent */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}
  560. static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
  561. unsigned long rate, unsigned long parent_rate, u8 index)
  562. {
  563. /* Read the hardware to determine parent during set_rate */
  564. return clk_byte2_set_rate(hw, rate, parent_rate);
  565. }
/* DSI byte clock (v2): divider-only, parent taken from the rate request */
const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
/* Candidate M/N fractions tried in order for display pixel clocks */
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },	/* identity: no fractional scaling */
	{ }		/* sentinel: num == 0 terminates the table */
};
/*
 * determine_rate for pixel clocks: for each candidate fraction, ask the
 * parent to round rate * den / num and accept the first fraction whose
 * rounded source rate lands within +/-100 kHz of the request.
 */
static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;	/* acceptable mismatch, in Hz */
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}
/*
 * Program a pixel rate: keep the currently-selected parent (read from
 * the CFG mux) and the current pre-divider, and pick the first fraction
 * whose implied source rate matches parent_rate within +/-100 kHz.
 * Returns -EINVAL if no fraction fits.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;	/* acceptable mismatch, in Hz */
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	/* Translate the current mux setting back to a table source */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		/* Keep the currently-programmed pre-divider */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}
  636. static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
  637. unsigned long parent_rate, u8 index)
  638. {
  639. return clk_pixel_set_rate(hw, rate, parent_rate);
  640. }
/* Display pixel clock: fractional M/N scaling from a table of candidates */
const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
/*
 * determine_rate for the GFX3D RCG, which muxes between XO, a fixed-rate
 * PLL (index 2, "p9") and two reprogrammable PLLs (indices 3/4, "p2"/"p8").
 * The request is clamped to the fixed PLL's rate; otherwise the code
 * ping-pongs between the two reprogrammable PLLs so that the one not
 * currently in use can be retuned glitch-free before switching to it.
 *
 * NOTE(review): the parent index assignments (2/3/4) are hard-coded —
 * presumably they match the parent ordering of all clk_gfx3d_ops users;
 * confirm against the provider's parent tables.
 */
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		req->best_parent_hw = p2;
	} else {
		req->best_parent_hw = p8;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}
  691. static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
  692. unsigned long parent_rate, u8 index)
  693. {
  694. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  695. u32 cfg;
  696. int ret;
  697. /* Just mux it, we don't use the division or m/n hardware */
  698. cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
  699. ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
  700. if (ret)
  701. return ret;
  702. return update_config(rcg);
  703. }
/* set_rate stub: rate changes always arrive via set_rate_and_parent. */
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
/* GPU core clock: mux-only RCG, rate set by retuning the idle parent PLL */
const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);