mt76x2_phy.c 20 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763
  1. /*
  2. * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
  3. *
  4. * Permission to use, copy, modify, and/or distribute this software for any
  5. * purpose with or without fee is hereby granted, provided that the above
  6. * copyright notice and this permission notice appear in all copies.
  7. *
  8. * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  9. * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10. * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11. * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12. * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13. * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14. * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15. */
  16. #include <linux/delay.h>
  17. #include "mt76x2.h"
  18. #include "mt76x2_mcu.h"
  19. #include "mt76x2_eeprom.h"
  20. static void
  21. mt76x2_adjust_high_lna_gain(struct mt76x2_dev *dev, int reg, s8 offset)
  22. {
  23. s8 gain;
  24. gain = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, mt76_rr(dev, MT_BBP(AGC, reg)));
  25. gain -= offset / 2;
  26. mt76_rmw_field(dev, MT_BBP(AGC, reg), MT_BBP_AGC_LNA_HIGH_GAIN, gain);
  27. }
  28. static void
  29. mt76x2_adjust_agc_gain(struct mt76x2_dev *dev, int reg, s8 offset)
  30. {
  31. s8 gain;
  32. gain = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, reg)));
  33. gain += offset;
  34. mt76_rmw_field(dev, MT_BBP(AGC, reg), MT_BBP_AGC_GAIN, gain);
  35. }
  36. static void
  37. mt76x2_apply_gain_adj(struct mt76x2_dev *dev)
  38. {
  39. s8 *gain_adj = dev->cal.rx.high_gain;
  40. mt76x2_adjust_high_lna_gain(dev, 4, gain_adj[0]);
  41. mt76x2_adjust_high_lna_gain(dev, 5, gain_adj[1]);
  42. mt76x2_adjust_agc_gain(dev, 8, gain_adj[0]);
  43. mt76x2_adjust_agc_gain(dev, 9, gain_adj[1]);
  44. }
  45. static u32
  46. mt76x2_tx_power_mask(u8 v1, u8 v2, u8 v3, u8 v4)
  47. {
  48. u32 val = 0;
  49. val |= (v1 & (BIT(6) - 1)) << 0;
  50. val |= (v2 & (BIT(6) - 1)) << 8;
  51. val |= (v3 & (BIT(6) - 1)) << 16;
  52. val |= (v4 & (BIT(6) - 1)) << 24;
  53. return val;
  54. }
  55. int mt76x2_phy_get_rssi(struct mt76x2_dev *dev, s8 rssi, int chain)
  56. {
  57. struct mt76x2_rx_freq_cal *cal = &dev->cal.rx;
  58. rssi += cal->rssi_offset[chain];
  59. rssi -= cal->lna_gain;
  60. return rssi;
  61. }
  62. static u8
  63. mt76x2_txpower_check(int value)
  64. {
  65. if (value < 0)
  66. return 0;
  67. if (value > 0x2f)
  68. return 0x2f;
  69. return value;
  70. }
  71. static void
  72. mt76x2_add_rate_power_offset(struct mt76_rate_power *r, int offset)
  73. {
  74. int i;
  75. for (i = 0; i < sizeof(r->all); i++)
  76. r->all[i] += offset;
  77. }
  78. static void
  79. mt76x2_limit_rate_power(struct mt76_rate_power *r, int limit)
  80. {
  81. int i;
  82. for (i = 0; i < sizeof(r->all); i++)
  83. if (r->all[i] > limit)
  84. r->all[i] = limit;
  85. }
/* Program the per-rate TX power configuration for the current channel.
 *
 * Combines the EEPROM per-channel target power with the per-rate power
 * table and the bandwidth-dependent delta, applies the user-configured
 * power limit, then writes the ALC channel-init values and the packed
 * per-rate power words.
 */
void mt76x2_phy_set_txpower(struct mt76x2_dev *dev)
{
	enum nl80211_chan_width width = dev->mt76.chandef.width;
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	struct mt76x2_tx_power_info txp;
	int txp_0, txp_1, delta = 0;
	struct mt76_rate_power t = {};

	mt76x2_get_power_info(dev, &txp, chan);

	/* Bandwidth-dependent power delta from EEPROM */
	if (width == NL80211_CHAN_WIDTH_40)
		delta = txp.delta_bw40;
	else if (width == NL80211_CHAN_WIDTH_80)
		delta = txp.delta_bw80;

	/* Reduce by however much the EEPROM target exceeds the user limit */
	if (txp.target_power > dev->txpower_conf)
		delta -= txp.target_power - dev->txpower_conf;

	mt76x2_get_rate_power(dev, &t, chan);
	/* Convert the rate table to absolute power to apply the limit... */
	mt76x2_add_rate_power_offset(&t, txp.chain[0].target_power +
				   txp.chain[0].delta);
	mt76x2_limit_rate_power(&t, dev->txpower_conf);
	dev->txpower_cur = mt76x2_get_max_rate_power(&t);
	/* ...then convert back to offsets relative to the ALC base power */
	mt76x2_add_rate_power_offset(&t, -(txp.chain[0].target_power +
					 txp.chain[0].delta + delta));
	dev->target_power = txp.chain[0].target_power;
	dev->target_power_delta[0] = txp.chain[0].delta + delta;
	dev->target_power_delta[1] = txp.chain[1].delta + delta;
	dev->rate_power = t;

	/* Per-chain ALC init power, clamped to the valid register range */
	txp_0 = mt76x2_txpower_check(txp.chain[0].target_power +
				     txp.chain[0].delta + delta);
	txp_1 = mt76x2_txpower_check(txp.chain[1].target_power +
				     txp.chain[1].delta + delta);

	mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_0, txp_0);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_1, txp_1);

	/* Pack the per-rate offsets into the hardware power words.
	 * NOTE(review): the rate-group-to-register mapping below follows
	 * the vendor register layout; confirm against the reference
	 * before changing any index.
	 */
	mt76_wr(dev, MT_TX_PWR_CFG_0,
		mt76x2_tx_power_mask(t.cck[0], t.cck[2], t.ofdm[0], t.ofdm[2]));
	mt76_wr(dev, MT_TX_PWR_CFG_1,
		mt76x2_tx_power_mask(t.ofdm[4], t.ofdm[6], t.ht[0], t.ht[2]));
	mt76_wr(dev, MT_TX_PWR_CFG_2,
		mt76x2_tx_power_mask(t.ht[4], t.ht[6], t.ht[8], t.ht[10]));
	mt76_wr(dev, MT_TX_PWR_CFG_3,
		mt76x2_tx_power_mask(t.ht[12], t.ht[14], t.ht[0], t.ht[2]));
	mt76_wr(dev, MT_TX_PWR_CFG_4,
		mt76x2_tx_power_mask(t.ht[4], t.ht[6], 0, 0));
	mt76_wr(dev, MT_TX_PWR_CFG_7,
		mt76x2_tx_power_mask(t.ofdm[6], t.vht[8], t.ht[6], t.vht[8]));
	mt76_wr(dev, MT_TX_PWR_CFG_8,
		mt76x2_tx_power_mask(t.ht[14], t.vht[8], t.vht[8], 0));
	mt76_wr(dev, MT_TX_PWR_CFG_9,
		mt76x2_tx_power_mask(t.ht[6], t.vht[8], t.vht[8], 0));
}
  134. static bool
  135. mt76x2_channel_silent(struct mt76x2_dev *dev)
  136. {
  137. struct ieee80211_channel *chan = dev->mt76.chandef.chan;
  138. return ((chan->flags & IEEE80211_CHAN_RADAR) &&
  139. chan->dfs_state != NL80211_DFS_AVAILABLE);
  140. }
  141. static bool
  142. mt76x2_phy_tssi_init_cal(struct mt76x2_dev *dev)
  143. {
  144. struct ieee80211_channel *chan = dev->mt76.chandef.chan;
  145. u32 flag = 0;
  146. if (!mt76x2_tssi_enabled(dev))
  147. return false;
  148. if (mt76x2_channel_silent(dev))
  149. return false;
  150. if (chan->band == NL80211_BAND_2GHZ)
  151. flag |= BIT(0);
  152. if (mt76x2_ext_pa_enabled(dev, chan->band))
  153. flag |= BIT(8);
  154. mt76x2_mcu_calibrate(dev, MCU_CAL_TSSI, flag);
  155. dev->cal.tssi_cal_done = true;
  156. return true;
  157. }
/* Run the per-channel MCU calibration sequence, once per channel.
 *
 * Skipped entirely on silent (radar, pre-CAC) channels. The MAC is
 * stopped around the calibration unless the caller already stopped it
 * (@mac_stopped).
 */
static void
mt76x2_phy_channel_calibrate(struct mt76x2_dev *dev, bool mac_stopped)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	bool is_5ghz = chan->band == NL80211_BAND_5GHZ;

	if (dev->cal.channel_cal_done)
		return;

	if (mt76x2_channel_silent(dev))
		return;

	/* TSSI init calibration runs before the other calibrations */
	if (!dev->cal.tssi_cal_done)
		mt76x2_phy_tssi_init_cal(dev);

	if (!mac_stopped)
		mt76x2_mac_stop(dev, false);

	/* LC calibration is only needed on 5 GHz */
	if (is_5ghz)
		mt76x2_mcu_calibrate(dev, MCU_CAL_LC, 0);

	/* Several calibrations take the band (is_5ghz) as parameter */
	mt76x2_mcu_calibrate(dev, MCU_CAL_TX_LOFT, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_RXIQC_FI, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TEMP_SENSOR, 0);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TX_SHAPING, 0);

	if (!mac_stopped)
		mt76x2_mac_resume(dev);

	mt76x2_apply_gain_adj(dev);

	dev->cal.channel_cal_done = true;
}
/* Program the band-dependent PA mode, TX ALC and RF gain-correction
 * registers.
 *
 * NOTE(review): the magic register values below are vendor-provided
 * settings for 2.4/5 GHz with internal vs. external PA, taken as-is;
 * confirm against the vendor reference before modifying.
 */
static void
mt76x2_phy_set_txpower_regs(struct mt76x2_dev *dev, enum nl80211_band band)
{
	u32 pa_mode[2];
	u32 pa_mode_adj;

	if (band == NL80211_BAND_2GHZ) {
		pa_mode[0] = 0x010055ff;
		pa_mode[1] = 0x00550055;

		mt76_wr(dev, MT_TX_ALC_CFG_2, 0x35160a00);
		mt76_wr(dev, MT_TX_ALC_CFG_3, 0x35160a06);

		/* PA mode adjustment differs with external PA on 2.4 GHz */
		if (mt76x2_ext_pa_enabled(dev, band)) {
			mt76_wr(dev, MT_RF_PA_MODE_ADJ0, 0x0000ec00);
			mt76_wr(dev, MT_RF_PA_MODE_ADJ1, 0x0000ec00);
		} else {
			mt76_wr(dev, MT_RF_PA_MODE_ADJ0, 0xf4000200);
			mt76_wr(dev, MT_RF_PA_MODE_ADJ1, 0xfa000200);
		}
	} else {
		pa_mode[0] = 0x0000ffff;
		pa_mode[1] = 0x00ff00ff;

		if (mt76x2_ext_pa_enabled(dev, band)) {
			mt76_wr(dev, MT_TX_ALC_CFG_2, 0x2f0f0400);
			mt76_wr(dev, MT_TX_ALC_CFG_3, 0x2f0f0476);
		} else {
			mt76_wr(dev, MT_TX_ALC_CFG_2, 0x1b0f0400);
			mt76_wr(dev, MT_TX_ALC_CFG_3, 0x1b0f0476);
		}
		mt76_wr(dev, MT_TX_ALC_CFG_4, 0);

		if (mt76x2_ext_pa_enabled(dev, band))
			pa_mode_adj = 0x04000000;
		else
			pa_mode_adj = 0;

		mt76_wr(dev, MT_RF_PA_MODE_ADJ0, pa_mode_adj);
		mt76_wr(dev, MT_RF_PA_MODE_ADJ1, pa_mode_adj);
	}

	/* Same PA mode word for both the baseband and RF copies */
	mt76_wr(dev, MT_BB_PA_MODE_CFG0, pa_mode[0]);
	mt76_wr(dev, MT_BB_PA_MODE_CFG1, pa_mode[1]);
	mt76_wr(dev, MT_RF_PA_MODE_CFG0, pa_mode[0]);
	mt76_wr(dev, MT_RF_PA_MODE_CFG1, pa_mode[1]);

	if (mt76x2_ext_pa_enabled(dev, band)) {
		u32 val;

		if (band == NL80211_BAND_2GHZ)
			val = 0x3c3c023c;
		else
			val = 0x363c023c;

		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, val);
		mt76_wr(dev, MT_TX1_RF_GAIN_CORR, val);
		mt76_wr(dev, MT_TX_ALC_CFG_4, 0x00001818);
	} else {
		if (band == NL80211_BAND_2GHZ) {
			u32 val = 0x0f3c3c3c;

			mt76_wr(dev, MT_TX0_RF_GAIN_CORR, val);
			mt76_wr(dev, MT_TX1_RF_GAIN_CORR, val);
			mt76_wr(dev, MT_TX_ALC_CFG_4, 0x00000606);
		} else {
			/* 5 GHz internal PA uses per-chain gain correction */
			mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x383c023c);
			mt76_wr(dev, MT_TX1_RF_GAIN_CORR, 0x24282e28);
			mt76_wr(dev, MT_TX_ALC_CFG_4, 0);
		}
	}
}
  244. static void
  245. mt76x2_configure_tx_delay(struct mt76x2_dev *dev, enum nl80211_band band, u8 bw)
  246. {
  247. u32 cfg0, cfg1;
  248. if (mt76x2_ext_pa_enabled(dev, band)) {
  249. cfg0 = bw ? 0x000b0c01 : 0x00101101;
  250. cfg1 = 0x00011414;
  251. } else {
  252. cfg0 = bw ? 0x000b0b01 : 0x00101001;
  253. cfg1 = 0x00021414;
  254. }
  255. mt76_wr(dev, MT_TX_SW_CFG0, cfg0);
  256. mt76_wr(dev, MT_TX_SW_CFG1, cfg1);
  257. mt76_rmw_field(dev, MT_XIFS_TIME_CFG, MT_XIFS_TIME_CFG_OFDM_SIFS, 15);
  258. }
  259. static void
  260. mt76x2_phy_set_bw(struct mt76x2_dev *dev, int width, u8 ctrl)
  261. {
  262. int core_val, agc_val;
  263. switch (width) {
  264. case NL80211_CHAN_WIDTH_80:
  265. core_val = 3;
  266. agc_val = 7;
  267. break;
  268. case NL80211_CHAN_WIDTH_40:
  269. core_val = 2;
  270. agc_val = 3;
  271. break;
  272. default:
  273. core_val = 0;
  274. agc_val = 1;
  275. break;
  276. }
  277. mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
  278. mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
  279. mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
  280. mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
  281. }
  282. static void
  283. mt76x2_phy_set_band(struct mt76x2_dev *dev, int band, bool primary_upper)
  284. {
  285. switch (band) {
  286. case NL80211_BAND_2GHZ:
  287. mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
  288. mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
  289. break;
  290. case NL80211_BAND_5GHZ:
  291. mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
  292. mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
  293. break;
  294. }
  295. mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
  296. primary_upper);
  297. }
/* Apply dev->mt76.antenna_mask to the RF/BBP chain configuration.
 *
 * Mask 1 keeps only chain 0, mask 2 only chain 1, mask 3 (and any
 * other value) enables both chains. NOTE(review): the AGC register 0
 * bit meanings (0/1/3/4) are inferred from the mask pattern; confirm
 * against vendor documentation before changing.
 */
void mt76x2_phy_set_antenna(struct mt76x2_dev *dev)
{
	u32 val;

	val = mt76_rr(dev, MT_BBP(AGC, 0));
	val &= ~(BIT(4) | BIT(1));
	switch (dev->mt76.antenna_mask) {
	case 1:
		/* disable mac DAC control */
		mt76_clear(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_clear(dev, MT_BBP(TXBE, 5), 3);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0x3);
		mt76_rmw_field(dev, MT_BBP(CORE, 32), GENMASK(21, 20), 2);
		/* disable DAC 1 */
		mt76_rmw_field(dev, MT_BBP(CORE, 33), GENMASK(12, 9), 4);
		val &= ~(BIT(3) | BIT(0));
		break;
	case 2:
		/* disable mac DAC control */
		mt76_clear(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_rmw_field(dev, MT_BBP(TXBE, 5), 3, 1);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0xc);
		mt76_rmw_field(dev, MT_BBP(CORE, 32), GENMASK(21, 20), 1);
		/* disable DAC 0 */
		mt76_rmw_field(dev, MT_BBP(CORE, 33), GENMASK(12, 9), 1);
		val &= ~BIT(3);
		val |= BIT(0);
		break;
	case 3:
	default:
		/* enable mac DAC control */
		mt76_set(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_set(dev, MT_BBP(TXBE, 5), 3);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0xf);
		mt76_clear(dev, MT_BBP(CORE, 32), GENMASK(21, 20));
		mt76_clear(dev, MT_BBP(CORE, 33), GENMASK(12, 9));
		val &= ~BIT(0);
		val |= BIT(3);
		break;
	}
	mt76_wr(dev, MT_BBP(AGC, 0), val);
}
  339. static void
  340. mt76x2_get_agc_gain(struct mt76x2_dev *dev, u8 *dest)
  341. {
  342. dest[0] = mt76_get_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN);
  343. dest[1] = mt76_get_field(dev, MT_BBP(AGC, 9), MT_BBP_AGC_GAIN);
  344. }
  345. static int
  346. mt76x2_get_rssi_gain_thresh(struct mt76x2_dev *dev)
  347. {
  348. switch (dev->mt76.chandef.width) {
  349. case NL80211_CHAN_WIDTH_80:
  350. return -62;
  351. case NL80211_CHAN_WIDTH_40:
  352. return -65;
  353. default:
  354. return -68;
  355. }
  356. }
  357. static int
  358. mt76x2_get_low_rssi_gain_thresh(struct mt76x2_dev *dev)
  359. {
  360. switch (dev->mt76.chandef.width) {
  361. case NL80211_CHAN_WIDTH_80:
  362. return -76;
  363. case NL80211_CHAN_WIDTH_40:
  364. return -79;
  365. default:
  366. return -82;
  367. }
  368. }
  369. static void
  370. mt76x2_phy_set_gain_val(struct mt76x2_dev *dev)
  371. {
  372. u32 val;
  373. u8 gain_val[2];
  374. gain_val[0] = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;
  375. gain_val[1] = dev->cal.agc_gain_cur[1] - dev->cal.agc_gain_adjust;
  376. if (dev->mt76.chandef.width >= NL80211_CHAN_WIDTH_40)
  377. val = 0x1e42 << 16;
  378. else
  379. val = 0x1836 << 16;
  380. val |= 0xf8;
  381. mt76_wr(dev, MT_BBP(AGC, 8),
  382. val | FIELD_PREP(MT_BBP_AGC_GAIN, gain_val[0]));
  383. mt76_wr(dev, MT_BBP(AGC, 9),
  384. val | FIELD_PREP(MT_BBP_AGC_GAIN, gain_val[1]));
  385. if (dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR)
  386. mt76x2_dfs_adjust_agc(dev);
  387. }
  388. static void
  389. mt76x2_phy_adjust_vga_gain(struct mt76x2_dev *dev)
  390. {
  391. u32 false_cca;
  392. u8 limit = dev->cal.low_gain > 1 ? 4 : 16;
  393. false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS, mt76_rr(dev, MT_RX_STAT_1));
  394. if (false_cca > 800 && dev->cal.agc_gain_adjust < limit)
  395. dev->cal.agc_gain_adjust += 2;
  396. else if (false_cca < 10 && dev->cal.agc_gain_adjust > 0)
  397. dev->cal.agc_gain_adjust -= 2;
  398. else
  399. return;
  400. mt76x2_phy_set_gain_val(dev);
  401. }
/* Track the average RSSI and adapt the AGC/VGA gain for the channel.
 *
 * Maintains an EWMA of the per-chain RSSI (kept in 1/256 dB units),
 * derives a gain level (0..2) from it and reprograms the AGC whenever
 * that level changes; otherwise only the fine VGA adjustment runs.
 */
static void
mt76x2_phy_update_channel_gain(struct mt76x2_dev *dev)
{
	u32 val = mt76_rr(dev, MT_BBP(AGC, 20));
	int rssi0 = (s8) FIELD_GET(MT_BBP_AGC20_RSSI0, val);
	int rssi1 = (s8) FIELD_GET(MT_BBP_AGC20_RSSI1, val);
	u8 *gain = dev->cal.agc_gain_init;
	u8 gain_delta;
	int low_gain;

	/* EWMA: 15/16 weight on history; new samples are scaled by 256 */
	dev->cal.avg_rssi[0] = (dev->cal.avg_rssi[0] * 15) / 16 + (rssi0 << 8);
	dev->cal.avg_rssi[1] = (dev->cal.avg_rssi[1] * 15) / 16 + (rssi1 << 8);
	/* Mean of both chains, converted back from the scaled units */
	dev->cal.avg_rssi_all = (dev->cal.avg_rssi[0] +
				 dev->cal.avg_rssi[1]) / 512;

	/* 0, 1 or 2 depending on how far RSSI is above the thresholds */
	low_gain = (dev->cal.avg_rssi_all > mt76x2_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x2_get_low_rssi_gain_thresh(dev));

	if (dev->cal.low_gain == low_gain) {
		/* Gain level unchanged: only fine-tune the VGA */
		mt76x2_phy_adjust_vga_gain(dev);
		return;
	}

	dev->cal.low_gain = low_gain;

	if (dev->mt76.chandef.width == NL80211_CHAN_WIDTH_80)
		mt76_wr(dev, MT_BBP(RXO, 14), 0x00560211);
	else
		mt76_wr(dev, MT_BBP(RXO, 14), 0x00560423);

	if (low_gain) {
		/* Strong signal: reduce front-end gain */
		mt76_wr(dev, MT_BBP(RXO, 18), 0xf000a991);
		mt76_wr(dev, MT_BBP(AGC, 35), 0x08080808);
		mt76_wr(dev, MT_BBP(AGC, 37), 0x08080808);
		if (mt76x2_has_ext_lna(dev))
			gain_delta = 10;
		else
			gain_delta = 14;
	} else {
		/* Weak signal: run at full gain */
		mt76_wr(dev, MT_BBP(RXO, 18), 0xf000a990);
		if (dev->mt76.chandef.width == NL80211_CHAN_WIDTH_80)
			mt76_wr(dev, MT_BBP(AGC, 35), 0x10101014);
		else
			mt76_wr(dev, MT_BBP(AGC, 35), 0x11111116);
		mt76_wr(dev, MT_BBP(AGC, 37), 0x2121262C);
		gain_delta = 0;
	}

	dev->cal.agc_gain_cur[0] = gain[0] - gain_delta;
	dev->cal.agc_gain_cur[1] = gain[1] - gain_delta;
	dev->cal.agc_gain_adjust = 0;
	mt76x2_phy_set_gain_val(dev);
}
/* Switch the PHY to the channel described by @chandef.
 *
 * Derives the hardware channel number, bandwidth index and 20 MHz
 * sub-channel group from the chandef, reprograms power/band/bandwidth
 * and CCA routing, asks the MCU to tune, and kicks off the one-time
 * and periodic calibrations (calibration is skipped while scanning).
 *
 * Returns 0 on success or the MCU tune error code.
 */
int mt76x2_phy_set_channel(struct mt76x2_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	struct ieee80211_channel *chan = chandef->chan;
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	enum nl80211_band band = chan->band;
	u8 channel;
	/* Per-group CCA engine routing: entry N remaps the CCA engines
	 * so that sub-channel group N is tracked as primary.
	 */
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	int ch_group_index;
	u8 bw, bw_index;
	int freq, freq1;
	int ret;

	dev->cal.channel_cal_done = false;
	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chan->hw_value;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		bw = 1;
		/* Group 0: primary below center, group 1: primary above */
		if (freq1 > freq) {
			bw_index = 1;
			ch_group_index = 0;
		} else {
			bw_index = 3;
			ch_group_index = 1;
		}
		/* Tune to the center of the 40 MHz pair */
		channel += 2 - ch_group_index * 4;
		break;
	case NL80211_CHAN_WIDTH_80:
		/* Position of the primary 20 MHz within the 80 MHz block */
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		bw = 2;
		bw_index = ch_group_index;
		/* Tune to the center of the 80 MHz block */
		channel += 6 - ch_group_index * 4;
		break;
	default:
		bw = 0;
		bw_index = 0;
		ch_group_index = 0;
		break;
	}

	mt76x2_read_rx_gain(dev);
	mt76x2_phy_set_txpower_regs(dev, band);
	mt76x2_configure_tx_delay(dev, band, bw);
	mt76x2_phy_set_txpower(dev);

	mt76x2_phy_set_band(dev, chan->band, ch_group_index & 1);
	mt76x2_phy_set_bw(dev, chandef->width, ch_group_index);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	ret = mt76x2_mcu_set_channel(dev, channel, bw, bw_index, scan);
	if (ret)
		return ret;

	mt76x2_mcu_init_gain(dev, channel, dev->cal.rx.mcu_gain, true);

	mt76x2_phy_set_antenna(dev);

	/* Enable LDPC Rx */
	if (mt76xx_rev(dev) >= MT76XX_REV_E3)
		mt76_set(dev, MT_BBP(RXO, 13), BIT(10));

	if (!dev->cal.init_cal_done) {
		u8 val = mt76x2_eeprom_get(dev, MT_EE_BT_RCAL_RESULT);

		/* R calibration only when the EEPROM result is valid */
		if (val != 0xff)
			mt76x2_mcu_calibrate(dev, MCU_CAL_R, 0);
	}

	mt76x2_mcu_calibrate(dev, MCU_CAL_RXDCOC, channel);

	/* Rx LPF calibration */
	if (!dev->cal.init_cal_done)
		mt76x2_mcu_calibrate(dev, MCU_CAL_RC, 0);

	dev->cal.init_cal_done = true;

	/* NOTE(review): vendor-provided AGC/TXOP defaults, taken as-is */
	mt76_wr(dev, MT_BBP(AGC, 61), 0xFF64A4E2);
	mt76_wr(dev, MT_BBP(AGC, 7), 0x08081010);
	mt76_wr(dev, MT_BBP(AGC, 11), 0x00000404);
	mt76_wr(dev, MT_BBP(AGC, 2), 0x00007070);
	mt76_wr(dev, MT_TXOP_CTRL_CFG, 0x04101B3F);

	/* No calibration or periodic work while scanning */
	if (scan)
		return 0;

	dev->cal.low_gain = -1;
	mt76x2_phy_channel_calibrate(dev, true);
	mt76x2_get_agc_gain(dev, dev->cal.agc_gain_init);
	memcpy(dev->cal.agc_gain_cur, dev->cal.agc_gain_init,
	       sizeof(dev->cal.agc_gain_cur));

	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);

	return 0;
}
/* Two-phase TSSI compensation, driven from the calibration worker.
 *
 * First pass triggers a TSSI measurement in the MCU; a subsequent pass,
 * once the hardware signals completion (CORE 34 BIT(4) cleared),
 * programs the slope/offset compensation values and runs the one-time
 * DPD calibration on internal-PA devices.
 */
static void
mt76x2_phy_tssi_compensate(struct mt76x2_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	struct mt76x2_tx_power_info txp;
	struct mt76x2_tssi_comp t = {};

	if (!dev->cal.tssi_cal_done)
		return;

	if (!dev->cal.tssi_comp_pending) {
		/* TSSI trigger */
		t.cal_mode = BIT(0);
		mt76x2_mcu_tssi_comp(dev, &t);
		dev->cal.tssi_comp_pending = true;
	} else {
		/* Measurement still running: try again next round */
		if (mt76_rr(dev, MT_BBP(CORE, 34)) & BIT(4))
			return;

		dev->cal.tssi_comp_pending = false;
		mt76x2_get_power_info(dev, &txp, chan);

		if (mt76x2_ext_pa_enabled(dev, chan->band))
			t.pa_mode = 1;

		/* Program per-chain slope/offset from EEPROM data */
		t.cal_mode = BIT(1);
		t.slope0 = txp.chain[0].tssi_slope;
		t.offset0 = txp.chain[0].tssi_offset;
		t.slope1 = txp.chain[1].tssi_slope;
		t.offset1 = txp.chain[1].tssi_offset;
		mt76x2_mcu_tssi_comp(dev, &t);

		/* DPD calibration: internal PA only, run once per channel */
		if (t.pa_mode || dev->cal.dpd_cal_done)
			return;

		usleep_range(10000, 20000);
		mt76x2_mcu_calibrate(dev, MCU_CAL_DPD, chan->hw_value);
		dev->cal.dpd_cal_done = true;
	}
}
  591. static void
  592. mt76x2_phy_temp_compensate(struct mt76x2_dev *dev)
  593. {
  594. struct mt76x2_temp_comp t;
  595. int temp, db_diff;
  596. if (mt76x2_get_temp_comp(dev, &t))
  597. return;
  598. temp = mt76_get_field(dev, MT_TEMP_SENSOR, MT_TEMP_SENSOR_VAL);
  599. temp -= t.temp_25_ref;
  600. temp = (temp * 1789) / 1000 + 25;
  601. dev->cal.temp = temp;
  602. if (temp > 25)
  603. db_diff = (temp - 25) / t.high_slope;
  604. else
  605. db_diff = (25 - temp) / t.low_slope;
  606. db_diff = min(db_diff, t.upper_bound);
  607. db_diff = max(db_diff, t.lower_bound);
  608. mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP,
  609. db_diff * 2);
  610. mt76_rmw_field(dev, MT_TX_ALC_CFG_2, MT_TX_ALC_CFG_2_TEMP_COMP,
  611. db_diff * 2);
  612. }
/* Periodic calibration worker, scheduled on dev->cal_work.
 *
 * Runs the per-channel calibration (no-op once done), TSSI and
 * temperature compensation and the AGC gain tracking, then re-arms
 * itself at MT_CALIBRATE_INTERVAL.
 */
void mt76x2_phy_calibrate(struct work_struct *work)
{
	struct mt76x2_dev *dev;

	dev = container_of(work, struct mt76x2_dev, cal_work.work);
	mt76x2_phy_channel_calibrate(dev, false);
	mt76x2_phy_tssi_compensate(dev);
	mt76x2_phy_temp_compensate(dev);
	mt76x2_phy_update_channel_gain(dev);
	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}
  624. int mt76x2_phy_start(struct mt76x2_dev *dev)
  625. {
  626. int ret;
  627. ret = mt76x2_mcu_set_radio_state(dev, true);
  628. if (ret)
  629. return ret;
  630. mt76x2_mcu_load_cr(dev, MT_RF_BBP_CR, 0, 0);
  631. return ret;
  632. }