ar9003_calib.c 46 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683
  1. /*
  2. * Copyright (c) 2010-2011 Atheros Communications Inc.
  3. *
  4. * Permission to use, copy, modify, and/or distribute this software for any
  5. * purpose with or without fee is hereby granted, provided that the above
  6. * copyright notice and this permission notice appear in all copies.
  7. *
  8. * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  9. * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10. * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11. * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12. * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13. * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14. * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15. */
  16. #include "hw.h"
  17. #include "hw-ops.h"
  18. #include "ar9003_phy.h"
  19. #include "ar9003_rtt.h"
  20. #include "ar9003_mci.h"
  21. #define MAX_MEASUREMENT MAX_IQCAL_MEASUREMENT
  22. #define MAX_MAG_DELTA 11
  23. #define MAX_PHS_DELTA 10
  24. #define MAXIQCAL 3
/*
 * Scratch storage for loopback Tx IQ calibration results,
 * indexed as [chain][measurement][calibration iteration].
 */
struct coeff {
	/* Magnitude mismatch per chain/measurement/iteration. */
	int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT][MAXIQCAL];
	/* Phase mismatch, indexed the same way. */
	int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT][MAXIQCAL];
	/* Packed correction words: [0] = Tx, [1] = Rx. */
	int iqc_coeff[2];
};
/*
 * Calibration types supported on AR9003; kept as a bitmask so several
 * calibrations can be tracked in a single CalValid word.
 */
enum ar9003_cal_types {
	IQ_MISMATCH_CAL = BIT(0),
};
/*
 * Program the baseband for the requested calibration and kick it off.
 * Only IQ mismatch calibration is implemented for this chip family.
 */
static void ar9003_hw_setup_calibration(struct ath_hw *ah,
					struct ath9k_cal_list *currCal)
{
	struct ath_common *common = ath9k_hw_common(ah);

	/* Select calibration to run */
	switch (currCal->calData->calType) {
	case IQ_MISMATCH_CAL:
		/*
		 * Start calibration with
		 * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
		 */
		REG_RMW_FIELD(ah, AR_PHY_TIMING4,
			      AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
			      currCal->calData->calCountMax);
		REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);

		ath_dbg(common, CALIBRATE,
			"starting IQ Mismatch Calibration\n");

		/* Kick-off cal */
		REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
		break;
	default:
		ath_err(common, "Invalid calibration type\n");
		break;
	}
}
  58. /*
  59. * Generic calibration routine.
  60. * Recalibrate the lower PHY chips to account for temperature/environment
  61. * changes.
  62. */
  63. static bool ar9003_hw_per_calibration(struct ath_hw *ah,
  64. struct ath9k_channel *ichan,
  65. u8 rxchainmask,
  66. struct ath9k_cal_list *currCal)
  67. {
  68. struct ath9k_hw_cal_data *caldata = ah->caldata;
  69. /* Cal is assumed not done until explicitly set below */
  70. bool iscaldone = false;
  71. /* Calibration in progress. */
  72. if (currCal->calState == CAL_RUNNING) {
  73. /* Check to see if it has finished. */
  74. if (!(REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)) {
  75. /*
  76. * Accumulate cal measures for active chains
  77. */
  78. currCal->calData->calCollect(ah);
  79. ah->cal_samples++;
  80. if (ah->cal_samples >=
  81. currCal->calData->calNumSamples) {
  82. unsigned int i, numChains = 0;
  83. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  84. if (rxchainmask & (1 << i))
  85. numChains++;
  86. }
  87. /*
  88. * Process accumulated data
  89. */
  90. currCal->calData->calPostProc(ah, numChains);
  91. /* Calibration has finished. */
  92. caldata->CalValid |= currCal->calData->calType;
  93. currCal->calState = CAL_DONE;
  94. iscaldone = true;
  95. } else {
  96. /*
  97. * Set-up collection of another sub-sample until we
  98. * get desired number
  99. */
  100. ar9003_hw_setup_calibration(ah, currCal);
  101. }
  102. }
  103. } else if (!(caldata->CalValid & currCal->calData->calType)) {
  104. /* If current cal is marked invalid in channel, kick it off */
  105. ath9k_hw_reset_calibration(ah, currCal);
  106. }
  107. return iscaldone;
  108. }
/*
 * Periodic calibration entry point: advance the per-chip calibration
 * list and, on long-cal intervals, refresh the noise floor history.
 *
 * Returns true only when no calibration remains pending.
 */
static bool ar9003_hw_calibrate(struct ath_hw *ah,
				struct ath9k_channel *chan,
				u8 rxchainmask,
				bool longcal)
{
	bool iscaldone = true;
	struct ath9k_cal_list *currCal = ah->cal_list_curr;

	/*
	 * For given calibration:
	 * 1. Call generic cal routine
	 * 2. When this cal is done (isCalDone) if we have more cals waiting
	 *    (eg after reset), mask this to upper layers by not propagating
	 *    isCalDone if it is set to TRUE.
	 *    Instead, change isCalDone to FALSE and setup the waiting cal(s)
	 *    to be run.
	 */
	if (currCal &&
	    (currCal->calState == CAL_RUNNING ||
	     currCal->calState == CAL_WAITING)) {
		iscaldone = ar9003_hw_per_calibration(ah, chan,
						      rxchainmask, currCal);
		if (iscaldone) {
			/* Advance to the next cal in the (circular) list. */
			ah->cal_list_curr = currCal = currCal->calNext;

			if (currCal->calState == CAL_WAITING) {
				/* More cals pending: hide completion and
				 * start the next one. */
				iscaldone = false;
				ath9k_hw_reset_calibration(ah, currCal);
			}
		}
	}

	/*
	 * Do NF cal only at longer intervals. Get the value from
	 * the previous NF cal and update history buffer.
	 */
	if (longcal && ath9k_hw_getnf(ah, chan)) {
		/*
		 * Load the NF from history buffer of the current channel.
		 * NF is slow time-variant, so it is OK to use a historical
		 * value.
		 */
		ath9k_hw_loadnf(ah, ah->curchan);

		/* start NF calibration, without updating BB NF register */
		ath9k_hw_start_nfcal(ah, false);
	}

	return iscaldone;
}
/*
 * Accumulate one IQ-mismatch sub-sample per active Tx chain.
 * The hardware measurement registers hold power(I), power(Q) and the
 * I/Q correlation; each is summed into the per-chain running totals.
 */
static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
{
	int i;

	/* Accumulate IQ cal measures for active chains */
	for (i = 0; i < AR5416_MAX_CHAINS; i++) {
		/* NOTE(review): bound is AR5416_MAX_CHAINS while the rest
		 * of this file iterates AR9300_MAX_CHAINS — presumably the
		 * two constants are equal; confirm. */
		if (ah->txchainmask & BIT(i)) {
			ah->totalPowerMeasI[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
			ah->totalPowerMeasQ[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
			ah->totalIqCorrMeas[i] +=
				(int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
			ath_dbg(ath9k_hw_common(ah), CALIBRATE,
				"%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
				ah->cal_samples, i, ah->totalPowerMeasI[i],
				ah->totalPowerMeasQ[i],
				ah->totalIqCorrMeas[i]);
		}
	}
}
/*
 * Post-process the accumulated IQ measurements and program the Rx IQ
 * correction coefficients for each of the first @numChains chains.
 */
static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 powerMeasQ, powerMeasI, iqCorrMeas;
	u32 qCoffDenom, iCoffDenom;
	int32_t qCoff, iCoff;
	int iqCorrNeg, i;
	/* Per-chain Rx IQ correction registers. */
	static const u_int32_t offset_array[3] = {
		AR_PHY_RX_IQCAL_CORR_B0,
		AR_PHY_RX_IQCAL_CORR_B1,
		AR_PHY_RX_IQCAL_CORR_B2,
	};

	for (i = 0; i < numChains; i++) {
		powerMeasI = ah->totalPowerMeasI[i];
		powerMeasQ = ah->totalPowerMeasQ[i];
		iqCorrMeas = ah->totalIqCorrMeas[i];

		ath_dbg(common, CALIBRATE,
			"Starting IQ Cal and Correction for Chain %d\n", i);
		ath_dbg(common, CALIBRATE,
			"Original: Chn %d iq_corr_meas = 0x%08x\n",
			i, ah->totalIqCorrMeas[i]);

		/* Fold the unsigned accumulator into sign + magnitude. */
		iqCorrNeg = 0;
		if (iqCorrMeas > 0x80000000) {
			iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
			iqCorrNeg = 1;
		}

		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_i = 0x%08x\n",
			i, powerMeasI);
		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_q = 0x%08x\n",
			i, powerMeasQ);
		ath_dbg(common, CALIBRATE, "iqCorrNeg is 0x%08x\n", iqCorrNeg);

		/* Scale denominators; zero means not enough signal. */
		iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
		qCoffDenom = powerMeasQ / 64;

		if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
			iCoff = iqCorrMeas / iCoffDenom;
			qCoff = powerMeasI / qCoffDenom - 64;
			ath_dbg(common, CALIBRATE, "Chn %d iCoff = 0x%08x\n",
				i, iCoff);
			ath_dbg(common, CALIBRATE, "Chn %d qCoff = 0x%08x\n",
				i, qCoff);

			/* Force bounds on iCoff */
			if (iCoff >= 63)
				iCoff = 63;
			else if (iCoff <= -63)
				iCoff = -63;

			/* Negate iCoff if iqCorrNeg == 0 */
			if (iqCorrNeg == 0x0)
				iCoff = -iCoff;

			/* Force bounds on qCoff */
			if (qCoff >= 63)
				qCoff = 63;
			else if (qCoff <= -63)
				qCoff = -63;

			/* Truncate to the 7-bit two's-complement fields. */
			iCoff = iCoff & 0x7f;
			qCoff = qCoff & 0x7f;

			ath_dbg(common, CALIBRATE,
				"Chn %d : iCoff = 0x%x qCoff = 0x%x\n",
				i, iCoff, qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) before update = 0x%x\n",
				offset_array[i],
				REG_READ(ah, offset_array[i]));

			/* AR9565: a saturated coefficient means the cal is
			 * unreliable; bail out without programming anything
			 * (this also skips the enable below). */
			if (AR_SREV_9565(ah) &&
			    (iCoff == 63 || qCoff == 63 ||
			     iCoff == -63 || qCoff == -63))
				return;

			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				      iCoff);
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				      qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"IQ Cal and Correction done for Chain %d\n", i);
		}
	}

	/* Enable the correction hardware once all chains are programmed. */
	REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
		    AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
	ath_dbg(common, CALIBRATE,
		"IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
		(unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
		AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
		REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
}
  268. static const struct ath9k_percal_data iq_cal_single_sample = {
  269. IQ_MISMATCH_CAL,
  270. MIN_CAL_SAMPLES,
  271. PER_MAX_LOG_COUNT,
  272. ar9003_hw_iqcal_collect,
  273. ar9003_hw_iqcalibrate
  274. };
/*
 * Register the chip's calibration capabilities.  IQ mismatch cal is
 * always supported; Tx IQ cal (and running it from the AGC engine) is
 * enabled per chip revision.
 */
static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
{
	ah->iq_caldata.calData = &iq_cal_single_sample;

	if (AR_SREV_9300_20_OR_LATER(ah)) {
		ah->enabled_cals |= TX_IQ_CAL;
		/* AR9485+ (except AR9340) can piggyback Tx IQ cal on AGC cal. */
		if (AR_SREV_9485_OR_LATER(ah) && !AR_SREV_9340(ah))
			ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
	}

	ah->supp_cals = IQ_MISMATCH_CAL;
}
  285. #define OFF_UPPER_LT 24
  286. #define OFF_LOWER_LT 7
  287. static bool ar9003_hw_dynamic_osdac_selection(struct ath_hw *ah,
  288. bool txiqcal_done)
  289. {
  290. struct ath_common *common = ath9k_hw_common(ah);
  291. int ch0_done, osdac_ch0, dc_off_ch0_i1, dc_off_ch0_q1, dc_off_ch0_i2,
  292. dc_off_ch0_q2, dc_off_ch0_i3, dc_off_ch0_q3;
  293. int ch1_done, osdac_ch1, dc_off_ch1_i1, dc_off_ch1_q1, dc_off_ch1_i2,
  294. dc_off_ch1_q2, dc_off_ch1_i3, dc_off_ch1_q3;
  295. int ch2_done, osdac_ch2, dc_off_ch2_i1, dc_off_ch2_q1, dc_off_ch2_i2,
  296. dc_off_ch2_q2, dc_off_ch2_i3, dc_off_ch2_q3;
  297. bool status;
  298. u32 temp, val;
  299. /*
  300. * Clear offset and IQ calibration, run AGC cal.
  301. */
  302. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  303. AR_PHY_AGC_CONTROL_OFFSET_CAL);
  304. REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  305. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
  306. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  307. REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);
  308. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  309. AR_PHY_AGC_CONTROL_CAL,
  310. 0, AH_WAIT_TIMEOUT);
  311. if (!status) {
  312. ath_dbg(common, CALIBRATE,
  313. "AGC cal without offset cal failed to complete in 1ms");
  314. return false;
  315. }
  316. /*
  317. * Allow only offset calibration and disable the others
  318. * (Carrier Leak calibration, TX Filter calibration and
  319. * Peak Detector offset calibration).
  320. */
  321. REG_SET_BIT(ah, AR_PHY_AGC_CONTROL,
  322. AR_PHY_AGC_CONTROL_OFFSET_CAL);
  323. REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
  324. AR_PHY_CL_CAL_ENABLE);
  325. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  326. AR_PHY_AGC_CONTROL_FLTR_CAL);
  327. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  328. AR_PHY_AGC_CONTROL_PKDET_CAL);
  329. ch0_done = 0;
  330. ch1_done = 0;
  331. ch2_done = 0;
  332. while ((ch0_done == 0) || (ch1_done == 0) || (ch2_done == 0)) {
  333. osdac_ch0 = (REG_READ(ah, AR_PHY_65NM_CH0_BB1) >> 30) & 0x3;
  334. osdac_ch1 = (REG_READ(ah, AR_PHY_65NM_CH1_BB1) >> 30) & 0x3;
  335. osdac_ch2 = (REG_READ(ah, AR_PHY_65NM_CH2_BB1) >> 30) & 0x3;
  336. REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  337. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  338. REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);
  339. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  340. AR_PHY_AGC_CONTROL_CAL,
  341. 0, AH_WAIT_TIMEOUT);
  342. if (!status) {
  343. ath_dbg(common, CALIBRATE,
  344. "DC offset cal failed to complete in 1ms");
  345. return false;
  346. }
  347. REG_CLR_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  348. /*
  349. * High gain.
  350. */
  351. REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
  352. ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (1 << 8)));
  353. REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
  354. ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (1 << 8)));
  355. REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
  356. ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (1 << 8)));
  357. temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
  358. dc_off_ch0_i1 = (temp >> 26) & 0x1f;
  359. dc_off_ch0_q1 = (temp >> 21) & 0x1f;
  360. temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
  361. dc_off_ch1_i1 = (temp >> 26) & 0x1f;
  362. dc_off_ch1_q1 = (temp >> 21) & 0x1f;
  363. temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
  364. dc_off_ch2_i1 = (temp >> 26) & 0x1f;
  365. dc_off_ch2_q1 = (temp >> 21) & 0x1f;
  366. /*
  367. * Low gain.
  368. */
  369. REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
  370. ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (2 << 8)));
  371. REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
  372. ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (2 << 8)));
  373. REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
  374. ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (2 << 8)));
  375. temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
  376. dc_off_ch0_i2 = (temp >> 26) & 0x1f;
  377. dc_off_ch0_q2 = (temp >> 21) & 0x1f;
  378. temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
  379. dc_off_ch1_i2 = (temp >> 26) & 0x1f;
  380. dc_off_ch1_q2 = (temp >> 21) & 0x1f;
  381. temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
  382. dc_off_ch2_i2 = (temp >> 26) & 0x1f;
  383. dc_off_ch2_q2 = (temp >> 21) & 0x1f;
  384. /*
  385. * Loopback.
  386. */
  387. REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
  388. ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (3 << 8)));
  389. REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
  390. ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (3 << 8)));
  391. REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
  392. ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (3 << 8)));
  393. temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
  394. dc_off_ch0_i3 = (temp >> 26) & 0x1f;
  395. dc_off_ch0_q3 = (temp >> 21) & 0x1f;
  396. temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
  397. dc_off_ch1_i3 = (temp >> 26) & 0x1f;
  398. dc_off_ch1_q3 = (temp >> 21) & 0x1f;
  399. temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
  400. dc_off_ch2_i3 = (temp >> 26) & 0x1f;
  401. dc_off_ch2_q3 = (temp >> 21) & 0x1f;
  402. if ((dc_off_ch0_i1 > OFF_UPPER_LT) || (dc_off_ch0_i1 < OFF_LOWER_LT) ||
  403. (dc_off_ch0_i2 > OFF_UPPER_LT) || (dc_off_ch0_i2 < OFF_LOWER_LT) ||
  404. (dc_off_ch0_i3 > OFF_UPPER_LT) || (dc_off_ch0_i3 < OFF_LOWER_LT) ||
  405. (dc_off_ch0_q1 > OFF_UPPER_LT) || (dc_off_ch0_q1 < OFF_LOWER_LT) ||
  406. (dc_off_ch0_q2 > OFF_UPPER_LT) || (dc_off_ch0_q2 < OFF_LOWER_LT) ||
  407. (dc_off_ch0_q3 > OFF_UPPER_LT) || (dc_off_ch0_q3 < OFF_LOWER_LT)) {
  408. if (osdac_ch0 == 3) {
  409. ch0_done = 1;
  410. } else {
  411. osdac_ch0++;
  412. val = REG_READ(ah, AR_PHY_65NM_CH0_BB1) & 0x3fffffff;
  413. val |= (osdac_ch0 << 30);
  414. REG_WRITE(ah, AR_PHY_65NM_CH0_BB1, val);
  415. ch0_done = 0;
  416. }
  417. } else {
  418. ch0_done = 1;
  419. }
  420. if ((dc_off_ch1_i1 > OFF_UPPER_LT) || (dc_off_ch1_i1 < OFF_LOWER_LT) ||
  421. (dc_off_ch1_i2 > OFF_UPPER_LT) || (dc_off_ch1_i2 < OFF_LOWER_LT) ||
  422. (dc_off_ch1_i3 > OFF_UPPER_LT) || (dc_off_ch1_i3 < OFF_LOWER_LT) ||
  423. (dc_off_ch1_q1 > OFF_UPPER_LT) || (dc_off_ch1_q1 < OFF_LOWER_LT) ||
  424. (dc_off_ch1_q2 > OFF_UPPER_LT) || (dc_off_ch1_q2 < OFF_LOWER_LT) ||
  425. (dc_off_ch1_q3 > OFF_UPPER_LT) || (dc_off_ch1_q3 < OFF_LOWER_LT)) {
  426. if (osdac_ch1 == 3) {
  427. ch1_done = 1;
  428. } else {
  429. osdac_ch1++;
  430. val = REG_READ(ah, AR_PHY_65NM_CH1_BB1) & 0x3fffffff;
  431. val |= (osdac_ch1 << 30);
  432. REG_WRITE(ah, AR_PHY_65NM_CH1_BB1, val);
  433. ch1_done = 0;
  434. }
  435. } else {
  436. ch1_done = 1;
  437. }
  438. if ((dc_off_ch2_i1 > OFF_UPPER_LT) || (dc_off_ch2_i1 < OFF_LOWER_LT) ||
  439. (dc_off_ch2_i2 > OFF_UPPER_LT) || (dc_off_ch2_i2 < OFF_LOWER_LT) ||
  440. (dc_off_ch2_i3 > OFF_UPPER_LT) || (dc_off_ch2_i3 < OFF_LOWER_LT) ||
  441. (dc_off_ch2_q1 > OFF_UPPER_LT) || (dc_off_ch2_q1 < OFF_LOWER_LT) ||
  442. (dc_off_ch2_q2 > OFF_UPPER_LT) || (dc_off_ch2_q2 < OFF_LOWER_LT) ||
  443. (dc_off_ch2_q3 > OFF_UPPER_LT) || (dc_off_ch2_q3 < OFF_LOWER_LT)) {
  444. if (osdac_ch2 == 3) {
  445. ch2_done = 1;
  446. } else {
  447. osdac_ch2++;
  448. val = REG_READ(ah, AR_PHY_65NM_CH2_BB1) & 0x3fffffff;
  449. val |= (osdac_ch2 << 30);
  450. REG_WRITE(ah, AR_PHY_65NM_CH2_BB1, val);
  451. ch2_done = 0;
  452. }
  453. } else {
  454. ch2_done = 1;
  455. }
  456. }
  457. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  458. AR_PHY_AGC_CONTROL_OFFSET_CAL);
  459. REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  460. /*
  461. * We don't need to check txiqcal_done here since it is always
  462. * set for AR9550.
  463. */
  464. REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  465. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
  466. return true;
  467. }
  468. /*
  469. * solve 4x4 linear equation used in loopback iq cal.
  470. */
  471. static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
  472. s32 sin_2phi_1,
  473. s32 cos_2phi_1,
  474. s32 sin_2phi_2,
  475. s32 cos_2phi_2,
  476. s32 mag_a0_d0,
  477. s32 phs_a0_d0,
  478. s32 mag_a1_d0,
  479. s32 phs_a1_d0,
  480. s32 solved_eq[])
  481. {
  482. s32 f1 = cos_2phi_1 - cos_2phi_2,
  483. f3 = sin_2phi_1 - sin_2phi_2,
  484. f2;
  485. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  486. const s32 result_shift = 1 << 15;
  487. struct ath_common *common = ath9k_hw_common(ah);
  488. f2 = ((f1 >> 3) * (f1 >> 3) + (f3 >> 3) * (f3 >> 3)) >> 9;
  489. if (!f2) {
  490. ath_dbg(common, CALIBRATE, "Divide by 0\n");
  491. return false;
  492. }
  493. /* mag mismatch, tx */
  494. mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
  495. /* phs mismatch, tx */
  496. phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);
  497. mag_tx = (mag_tx / f2);
  498. phs_tx = (phs_tx / f2);
  499. /* mag mismatch, rx */
  500. mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
  501. result_shift;
  502. /* phs mismatch, rx */
  503. phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
  504. result_shift;
  505. solved_eq[0] = mag_tx;
  506. solved_eq[1] = phs_tx;
  507. solved_eq[2] = mag_rx;
  508. solved_eq[3] = phs_rx;
  509. return true;
  510. }
  511. static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
  512. {
  513. s32 abs_i = abs(in_re),
  514. abs_q = abs(in_im),
  515. max_abs, min_abs;
  516. if (abs_i > abs_q) {
  517. max_abs = abs_i;
  518. min_abs = abs_q;
  519. } else {
  520. max_abs = abs_q;
  521. min_abs = abs_i;
  522. }
  523. return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
  524. }
  525. #define DELPT 32
  526. static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
  527. s32 chain_idx,
  528. const s32 iq_res[],
  529. s32 iqc_coeff[])
  530. {
  531. s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
  532. i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
  533. i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
  534. i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
  535. s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
  536. phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
  537. sin_2phi_1, cos_2phi_1,
  538. sin_2phi_2, cos_2phi_2;
  539. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  540. s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
  541. q_q_coff, q_i_coff;
  542. const s32 res_scale = 1 << 15;
  543. const s32 delpt_shift = 1 << 8;
  544. s32 mag1, mag2;
  545. struct ath_common *common = ath9k_hw_common(ah);
  546. i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
  547. i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
  548. iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);
  549. if (i2_m_q2_a0_d0 > 0x800)
  550. i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);
  551. if (i2_p_q2_a0_d0 > 0x800)
  552. i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);
  553. if (iq_corr_a0_d0 > 0x800)
  554. iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);
  555. i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
  556. i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
  557. iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;
  558. if (i2_m_q2_a0_d1 > 0x800)
  559. i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);
  560. if (iq_corr_a0_d1 > 0x800)
  561. iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);
  562. i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
  563. i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
  564. iq_corr_a1_d0 = iq_res[4] & 0xfff;
  565. if (i2_m_q2_a1_d0 > 0x800)
  566. i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);
  567. if (i2_p_q2_a1_d0 > 0x800)
  568. i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);
  569. if (iq_corr_a1_d0 > 0x800)
  570. iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);
  571. i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
  572. i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
  573. iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;
  574. if (i2_m_q2_a1_d1 > 0x800)
  575. i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);
  576. if (i2_p_q2_a1_d1 > 0x800)
  577. i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);
  578. if (iq_corr_a1_d1 > 0x800)
  579. iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);
  580. if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
  581. (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
  582. ath_dbg(common, CALIBRATE,
  583. "Divide by 0:\n"
  584. "a0_d0=%d\n"
  585. "a0_d1=%d\n"
  586. "a2_d0=%d\n"
  587. "a1_d1=%d\n",
  588. i2_p_q2_a0_d0, i2_p_q2_a0_d1,
  589. i2_p_q2_a1_d0, i2_p_q2_a1_d1);
  590. return false;
  591. }
  592. if ((i2_p_q2_a0_d0 < 1024) || (i2_p_q2_a0_d0 > 2047) ||
  593. (i2_p_q2_a1_d0 < 0) || (i2_p_q2_a1_d1 < 0) ||
  594. (i2_p_q2_a0_d0 <= i2_m_q2_a0_d0) ||
  595. (i2_p_q2_a0_d0 <= iq_corr_a0_d0) ||
  596. (i2_p_q2_a0_d1 <= i2_m_q2_a0_d1) ||
  597. (i2_p_q2_a0_d1 <= iq_corr_a0_d1) ||
  598. (i2_p_q2_a1_d0 <= i2_m_q2_a1_d0) ||
  599. (i2_p_q2_a1_d0 <= iq_corr_a1_d0) ||
  600. (i2_p_q2_a1_d1 <= i2_m_q2_a1_d1) ||
  601. (i2_p_q2_a1_d1 <= iq_corr_a1_d1)) {
  602. return false;
  603. }
  604. mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
  605. phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;
  606. mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
  607. phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;
  608. mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
  609. phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;
  610. mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
  611. phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;
  612. /* w/o analog phase shift */
  613. sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
  614. /* w/o analog phase shift */
  615. cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
  616. /* w/ analog phase shift */
  617. sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
  618. /* w/ analog phase shift */
  619. cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);
  620. /*
  621. * force sin^2 + cos^2 = 1;
  622. * find magnitude by approximation
  623. */
  624. mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
  625. mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);
  626. if ((mag1 == 0) || (mag2 == 0)) {
  627. ath_dbg(common, CALIBRATE, "Divide by 0: mag1=%d, mag2=%d\n",
  628. mag1, mag2);
  629. return false;
  630. }
  631. /* normalization sin and cos by mag */
  632. sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
  633. cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
  634. sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
  635. cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);
  636. /* calculate IQ mismatch */
  637. if (!ar9003_hw_solve_iq_cal(ah,
  638. sin_2phi_1, cos_2phi_1,
  639. sin_2phi_2, cos_2phi_2,
  640. mag_a0_d0, phs_a0_d0,
  641. mag_a1_d0,
  642. phs_a1_d0, solved_eq)) {
  643. ath_dbg(common, CALIBRATE,
  644. "Call to ar9003_hw_solve_iq_cal() failed\n");
  645. return false;
  646. }
  647. mag_tx = solved_eq[0];
  648. phs_tx = solved_eq[1];
  649. mag_rx = solved_eq[2];
  650. phs_rx = solved_eq[3];
  651. ath_dbg(common, CALIBRATE,
  652. "chain %d: mag mismatch=%d phase mismatch=%d\n",
  653. chain_idx, mag_tx/res_scale, phs_tx/res_scale);
  654. if (res_scale == mag_tx) {
  655. ath_dbg(common, CALIBRATE,
  656. "Divide by 0: mag_tx=%d, res_scale=%d\n",
  657. mag_tx, res_scale);
  658. return false;
  659. }
  660. /* calculate and quantize Tx IQ correction factor */
  661. mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
  662. phs_corr_tx = -phs_tx;
  663. q_q_coff = (mag_corr_tx * 128 / res_scale);
  664. q_i_coff = (phs_corr_tx * 256 / res_scale);
  665. ath_dbg(common, CALIBRATE, "tx chain %d: mag corr=%d phase corr=%d\n",
  666. chain_idx, q_q_coff, q_i_coff);
  667. if (q_i_coff < -63)
  668. q_i_coff = -63;
  669. if (q_i_coff > 63)
  670. q_i_coff = 63;
  671. if (q_q_coff < -63)
  672. q_q_coff = -63;
  673. if (q_q_coff > 63)
  674. q_q_coff = 63;
  675. iqc_coeff[0] = (q_q_coff * 128) + (0x7f & q_i_coff);
  676. ath_dbg(common, CALIBRATE, "tx chain %d: iq corr coeff=%x\n",
  677. chain_idx, iqc_coeff[0]);
  678. if (-mag_rx == res_scale) {
  679. ath_dbg(common, CALIBRATE,
  680. "Divide by 0: mag_rx=%d, res_scale=%d\n",
  681. mag_rx, res_scale);
  682. return false;
  683. }
  684. /* calculate and quantize Rx IQ correction factors */
  685. mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
  686. phs_corr_rx = -phs_rx;
  687. q_q_coff = (mag_corr_rx * 128 / res_scale);
  688. q_i_coff = (phs_corr_rx * 256 / res_scale);
  689. ath_dbg(common, CALIBRATE, "rx chain %d: mag corr=%d phase corr=%d\n",
  690. chain_idx, q_q_coff, q_i_coff);
  691. if (q_i_coff < -63)
  692. q_i_coff = -63;
  693. if (q_i_coff > 63)
  694. q_i_coff = 63;
  695. if (q_q_coff < -63)
  696. q_q_coff = -63;
  697. if (q_q_coff > 63)
  698. q_q_coff = 63;
  699. iqc_coeff[1] = (q_q_coff * 128) + (0x7f & q_i_coff);
  700. ath_dbg(common, CALIBRATE, "rx chain %d: iq corr coeff=%x\n",
  701. chain_idx, iqc_coeff[1]);
  702. return true;
  703. }
  704. static void ar9003_hw_detect_outlier(int mp_coeff[][MAXIQCAL],
  705. int nmeasurement,
  706. int max_delta)
  707. {
  708. int mp_max = -64, max_idx = 0;
  709. int mp_min = 63, min_idx = 0;
  710. int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;
  711. /* find min/max mismatch across all calibrated gains */
  712. for (i = 0; i < nmeasurement; i++) {
  713. if (mp_coeff[i][0] > mp_max) {
  714. mp_max = mp_coeff[i][0];
  715. max_idx = i;
  716. } else if (mp_coeff[i][0] < mp_min) {
  717. mp_min = mp_coeff[i][0];
  718. min_idx = i;
  719. }
  720. }
  721. /* find average (exclude max abs value) */
  722. for (i = 0; i < nmeasurement; i++) {
  723. if ((abs(mp_coeff[i][0]) < abs(mp_max)) ||
  724. (abs(mp_coeff[i][0]) < abs(mp_min))) {
  725. mp_avg += mp_coeff[i][0];
  726. mp_count++;
  727. }
  728. }
  729. /*
  730. * finding mean magnitude/phase if possible, otherwise
  731. * just use the last value as the mean
  732. */
  733. if (mp_count)
  734. mp_avg /= mp_count;
  735. else
  736. mp_avg = mp_coeff[nmeasurement - 1][0];
  737. /* detect outlier */
  738. if (abs(mp_max - mp_min) > max_delta) {
  739. if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
  740. outlier_idx = max_idx;
  741. else
  742. outlier_idx = min_idx;
  743. mp_coeff[outlier_idx][0] = mp_avg;
  744. }
  745. }
/*
 * ar9003_hw_tx_iq_cal_outlier_detection() - filter and program Tx IQ coeffs
 * @ah:          hardware state
 * @coeff:       accumulated per-chain/per-gain IQ correction coefficients
 * @is_reusable: whether the results may be cached for later reload
 *
 * Optionally runs outlier filtering on the magnitude/phase coefficients
 * (skipped for AR9550, which uses the median path instead), packs each
 * (magnitude, phase) pair into a 14-bit register value, programs the
 * correction coefficient registers, caches the values in ah->caldata and
 * finally enables Tx and Rx-loopback IQ correction.
 */
static void ar9003_hw_tx_iq_cal_outlier_detection(struct ath_hw *ah,
						  struct coeff *coeff,
						  bool is_reusable)
{
	int i, im, nmeasurement;
	int magnitude, phase;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	struct ath9k_hw_cal_data *caldata = ah->caldata;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	/*
	 * Build the register address table: each correction register holds
	 * two measurements, so consecutive measurement indices map to the
	 * same register.  AR9485 has a single Tx chain, so only B0 applies.
	 */
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	/* Load the average of 2 passes */
	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		/*
		 * NOTE(review): the calibrated-gain count is read from the
		 * chain 0 status register for every chain - presumably all
		 * chains report the same count; confirm against the HW spec.
		 */
		nmeasurement = REG_READ_FIELD(ah,
					      AR_PHY_TX_IQCAL_STATUS_B0,
					      AR_PHY_CALIBRATED_GAINS_0);

		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		/*
		 * Skip normal outlier detection for AR9550.
		 */
		if (!AR_SREV_9550(ah)) {
			/* detect outlier only if nmeasurement > 1 */
			if (nmeasurement > 1) {
				/* Detect magnitude outlier */
				ar9003_hw_detect_outlier(coeff->mag_coeff[i],
							 nmeasurement,
							 MAX_MAG_DELTA);

				/* Detect phase outlier */
				ar9003_hw_detect_outlier(coeff->phs_coeff[i],
							 nmeasurement,
							 MAX_PHS_DELTA);
			}
		}

		for (im = 0; im < nmeasurement; im++) {
			magnitude = coeff->mag_coeff[i][im][0];
			phase = coeff->phs_coeff[i][im][0];

			/* pack: phase in bits [6:0], magnitude in bits [13:7] */
			coeff->iqc_coeff[0] =
				(phase & 0x7f) | ((magnitude & 0x7f) << 7);

			/* even measurements use the _00 field, odd the _01 field */
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					      AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					      coeff->iqc_coeff[0]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					      AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					      coeff->iqc_coeff[0]);

			if (caldata)
				caldata->tx_corr_coeff[im][i] =
					coeff->iqc_coeff[0];
		}
		if (caldata)
			caldata->num_measures[i] = nmeasurement;
	}

	/* enable Tx IQ correction and Rx loopback IQ correction */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);

	if (caldata) {
		if (is_reusable)
			set_bit(TXIQCAL_DONE, &caldata->cal_flags);
		else
			clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
	}

	return;
}
  824. static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
  825. {
  826. struct ath_common *common = ath9k_hw_common(ah);
  827. u8 tx_gain_forced;
  828. tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  829. AR_PHY_TXGAIN_FORCE);
  830. if (tx_gain_forced)
  831. REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  832. AR_PHY_TXGAIN_FORCE, 0);
  833. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
  834. AR_PHY_TX_IQCAL_START_DO_CAL, 1);
  835. if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
  836. AR_PHY_TX_IQCAL_START_DO_CAL, 0,
  837. AH_WAIT_TIMEOUT)) {
  838. ath_dbg(common, CALIBRATE, "Tx IQ Cal is not completed\n");
  839. return false;
  840. }
  841. return true;
  842. }
  843. static void __ar955x_tx_iq_cal_sort(struct ath_hw *ah,
  844. struct coeff *coeff,
  845. int i, int nmeasurement)
  846. {
  847. struct ath_common *common = ath9k_hw_common(ah);
  848. int im, ix, iy, temp;
  849. for (im = 0; im < nmeasurement; im++) {
  850. for (ix = 0; ix < MAXIQCAL - 1; ix++) {
  851. for (iy = ix + 1; iy <= MAXIQCAL - 1; iy++) {
  852. if (coeff->mag_coeff[i][im][iy] <
  853. coeff->mag_coeff[i][im][ix]) {
  854. temp = coeff->mag_coeff[i][im][ix];
  855. coeff->mag_coeff[i][im][ix] =
  856. coeff->mag_coeff[i][im][iy];
  857. coeff->mag_coeff[i][im][iy] = temp;
  858. }
  859. if (coeff->phs_coeff[i][im][iy] <
  860. coeff->phs_coeff[i][im][ix]) {
  861. temp = coeff->phs_coeff[i][im][ix];
  862. coeff->phs_coeff[i][im][ix] =
  863. coeff->phs_coeff[i][im][iy];
  864. coeff->phs_coeff[i][im][iy] = temp;
  865. }
  866. }
  867. }
  868. coeff->mag_coeff[i][im][0] = coeff->mag_coeff[i][im][MAXIQCAL / 2];
  869. coeff->phs_coeff[i][im][0] = coeff->phs_coeff[i][im][MAXIQCAL / 2];
  870. ath_dbg(common, CALIBRATE,
  871. "IQCAL: Median [ch%d][gain%d]: mag = %d phase = %d\n",
  872. i, im,
  873. coeff->mag_coeff[i][im][0],
  874. coeff->phs_coeff[i][im][0]);
  875. }
  876. }
  877. static bool ar955x_tx_iq_cal_median(struct ath_hw *ah,
  878. struct coeff *coeff,
  879. int iqcal_idx,
  880. int nmeasurement)
  881. {
  882. int i;
  883. if ((iqcal_idx + 1) != MAXIQCAL)
  884. return false;
  885. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  886. __ar955x_tx_iq_cal_sort(ah, coeff, i, nmeasurement);
  887. }
  888. return true;
  889. }
/*
 * ar9003_hw_tx_iq_cal_post_proc() - read back and process Tx IQ cal results
 * @ah:          hardware state
 * @iqcal_idx:   index of the current calibration pass (AR955x runs several)
 * @is_reusable: whether the resulting coefficients may be cached
 *
 * For every active Tx chain, reads the raw IQ measurement words from the
 * channel-info table, converts them into correction coefficients via
 * ar9003_hw_calc_iq_corr(), and stores the signed 7-bit magnitude/phase
 * values into the pass slot @iqcal_idx.  AR9550 then reduces the passes
 * to a median; all other chips go straight to outlier detection and
 * register programming.
 */
static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah,
					  int iqcal_idx,
					  bool is_reusable)
{
	struct ath_common *common = ath9k_hw_common(ah);
	const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
		AR_PHY_TX_IQCAL_STATUS_B0,
		AR_PHY_TX_IQCAL_STATUS_B1,
		AR_PHY_TX_IQCAL_STATUS_B2,
	};
	const u_int32_t chan_info_tab[] = {
		AR_PHY_CHAN_INFO_TAB_0,
		AR_PHY_CHAN_INFO_TAB_1,
		AR_PHY_CHAN_INFO_TAB_2,
	};
	/* static: accumulates results across the repeated AR955x passes */
	static struct coeff coeff;
	s32 iq_res[6];
	int i, im, j;
	int nmeasurement = 0;
	bool outlier_detect = true;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		/*
		 * NOTE(review): gain count is always read from chain 0's
		 * status register, even for chains 1/2 - verify all chains
		 * calibrate the same number of gains.
		 */
		nmeasurement = REG_READ_FIELD(ah,
					      AR_PHY_TX_IQCAL_STATUS_B0,
					      AR_PHY_CALIBRATED_GAINS_0);
		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		for (im = 0; im < nmeasurement; im++) {
			ath_dbg(common, CALIBRATE,
				"Doing Tx IQ Cal for chain %d\n", i);

			/* bail out completely if HW flagged a failure */
			if (REG_READ(ah, txiqcal_status[i]) &
			    AR_PHY_TX_IQCAL_STATUS_FAILED) {
				ath_dbg(common, CALIBRATE,
					"Tx IQ Cal failed for chain %d\n", i);
				goto tx_iqcal_fail;
			}

			/*
			 * Each measurement yields three 48-bit results,
			 * fetched as a 32-bit word (S2_READ=0) plus a
			 * 16-bit word (S2_READ=1) from the same offset.
			 */
			for (j = 0; j < 3; j++) {
				u32 idx = 2 * j, offset = 4 * (3 * im + j);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      0);

				/* 32 bits */
				iq_res[idx] = REG_READ(ah,
						       chan_info_tab[i] +
						       offset);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      1);

				/* 16 bits */
				iq_res[idx + 1] = 0xffff & REG_READ(ah,
								    chan_info_tab[i] + offset);

				ath_dbg(common, CALIBRATE,
					"IQ_RES[%d]=0x%x IQ_RES[%d]=0x%x\n",
					idx, iq_res[idx], idx + 1,
					iq_res[idx + 1]);
			}

			if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
						    coeff.iqc_coeff)) {
				ath_dbg(common, CALIBRATE,
					"Failed in calculation of IQ correction\n");
				goto tx_iqcal_fail;
			}

			/*
			 * Unpack the 14-bit coefficient: phase in [6:0],
			 * magnitude in [13:7]; sign-extend both 7-bit
			 * fields into the range [-64, 63].
			 */
			coeff.phs_coeff[i][im][iqcal_idx] =
				coeff.iqc_coeff[0] & 0x7f;
			coeff.mag_coeff[i][im][iqcal_idx] =
				(coeff.iqc_coeff[0] >> 7) & 0x7f;

			if (coeff.mag_coeff[i][im][iqcal_idx] > 63)
				coeff.mag_coeff[i][im][iqcal_idx] -= 128;
			if (coeff.phs_coeff[i][im][iqcal_idx] > 63)
				coeff.phs_coeff[i][im][iqcal_idx] -= 128;
		}
	}

	/*
	 * AR955x: only program the registers once the median over all
	 * passes is available; other chips program on every call.
	 */
	if (AR_SREV_9550(ah))
		outlier_detect = ar955x_tx_iq_cal_median(ah, &coeff,
							 iqcal_idx, nmeasurement);
	if (outlier_detect)
		ar9003_hw_tx_iq_cal_outlier_detection(ah, &coeff, is_reusable);

	return;

tx_iqcal_fail:
	ath_dbg(common, CALIBRATE, "Tx IQ Cal failed\n");
	return;
}
/*
 * ar9003_hw_tx_iq_cal_reload() - restore cached Tx IQ correction values
 * @ah: hardware state
 *
 * Re-programs the per-chain/per-measurement Tx IQ correction registers
 * from the values previously saved in ah->caldata and re-enables Tx and
 * Rx-loopback IQ correction, instead of re-running the calibration.
 *
 * NOTE(review): ah->caldata is dereferenced without a NULL check - the
 * caller must guarantee it is valid (the visible caller does).
 */
static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	int i, im;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	/*
	 * Build the register address table: each correction register holds
	 * two measurements.  AR9485 has a single Tx chain, so only B0 applies.
	 */
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		for (im = 0; im < caldata->num_measures[i]; im++) {
			/* even measurements use the _00 field, odd the _01 field */
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					      AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					      caldata->tx_corr_coeff[im][i]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					      AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					      caldata->tx_corr_coeff[im][i]);
		}
	}

	/* enable Tx IQ correction and Rx loopback IQ correction */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
}
/*
 * ar9003_hw_manual_peak_cal() - software peak-detector calibration
 * @ah:    hardware state
 * @chain: Rx chain to calibrate
 * @is_2g: true for a 2 GHz channel, false for 5 GHz
 *
 * Takes manual control of the Rx RF gain stages and AGC, then performs a
 * successive-approximation search over the 6-bit CALDAC value: for each
 * bit from MSB to LSB a trial value is written, the AGC output is sampled
 * after a 100us settle, and the bit is kept only when agc_out reads back
 * zero.  The final value is programmed and the overrides are released.
 */
static void ar9003_hw_manual_peak_cal(struct ath_hw *ah, u8 chain, bool is_2g)
{
	int offset[8] = {0}, total = 0, test;
	int agc_out, i;

	/* take manual control of the Rx gain stages */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_LNAON_CALDC, 0x0);
	/* release the band-specific LNA gain override */
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA2G_GAIN_OVR, 0x0);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA5G_GAIN_OVR, 0x0);

	/* force the receiver off while searching */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON, 0x0);

	/* override the AGC so the CALDAC can be driven directly */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_ON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0x1);

	/* chip-specific DAC override release */
	if (AR_SREV_9330_11(ah)) {
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, 0x0);
	} else {
		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_DBDAC_OVR, 0x0);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_DBDAC_OVR, 0x0);
	}

	/*
	 * Successive approximation from bit 5 down to bit 0: try the bit,
	 * let the analog settle, then keep the bit iff agc_out == 0.
	 */
	for (i = 6; i > 0; i--) {
		offset[i] = BIT(i - 1);
		test = total + offset[i];

		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR,
				      test);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR,
				      test);
		udelay(100);
		agc_out = REG_READ_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
					 AR_PHY_65NM_RXRF_AGC_AGC_OUT);
		offset[i] = (agc_out) ? 0 : 1;
		total += (offset[i] << (i - 1));
	}

	/* program the final CALDAC value */
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, total);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR, total);

	/* release the overrides taken above */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0);
}
/*
 * ar9003_hw_do_pcoem_manual_peak_cal() - peak cal wrapper for PC-OEM chips
 * @ah:          hardware state
 * @chan:        channel being calibrated
 * @run_rtt_cal: whether a full RTT calibration run was requested
 *
 * Runs the software peak-detector calibration on every active Rx chain
 * for AR9462/AR9565/AR9485 only.  On RTT-capable chips it is skipped
 * unless an RTT calibration run is in progress, and the resulting CALDAC
 * values for chains 0 and 1 are saved so they can be restored later.
 */
static void ar9003_hw_do_pcoem_manual_peak_cal(struct ath_hw *ah,
					       struct ath9k_channel *chan,
					       bool run_rtt_cal)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	int i;

	/* only PC-OEM chips use this path */
	if (!AR_SREV_9462(ah) && !AR_SREV_9565(ah) && !AR_SREV_9485(ah))
		return;

	/* with RTT support, only run as part of an RTT calibration */
	if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && !run_rtt_cal)
		return;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->rxchainmask & (1 << i)))
			continue;
		ar9003_hw_manual_peak_cal(ah, i, IS_CHAN_2GHZ(chan));
	}

	if (caldata)
		set_bit(SW_PKDET_DONE, &caldata->cal_flags);

	/* cache the band-specific CALDAC values for chains 0 and 1 */
	if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && caldata) {
		if (IS_CHAN_2GHZ(chan)) {
			caldata->caldac[0] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(0),
						AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
			caldata->caldac[1] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(1),
						AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
		} else {
			caldata->caldac[0] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(0),
						AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
			caldata->caldac[1] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(1),
						AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
		}
	}
}
/*
 * ar9003_hw_cl_cal_post_proc() - save or restore carrier-leak cal results
 * @ah:          hardware state
 * @is_reusable: whether fresh results may be cached in ah->caldata
 *
 * If carrier-leak results were cached earlier (TXCLCAL_DONE set), write
 * them back into the per-chain CL tables.  Otherwise, if the hardware
 * calibration succeeded and the results are reusable, read the tables
 * back into the cache and mark them valid.
 */
static void ar9003_hw_cl_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
					  AR_PHY_CL_TAB_1,
					  AR_PHY_CL_TAB_2 };
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txclcal_done = false;
	int i, j;

	/* nothing to do without caldata or with CL cal disabled */
	if (!caldata || !(ah->enabled_cals & TX_CL_CAL))
		return;

	txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
			  AR_PHY_AGC_CONTROL_CLC_SUCCESS);

	if (test_bit(TXCLCAL_DONE, &caldata->cal_flags)) {
		/* restore the previously cached CL table entries */
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
					  caldata->tx_clcal[i][j]);
		}
	} else if (is_reusable && txclcal_done) {
		/* first successful run: cache the HW results */
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				caldata->tx_clcal[i][j] =
					REG_READ(ah, CL_TAB_ENTRY(cl_idx[i]));
		}
		set_bit(TXCLCAL_DONE, &caldata->cal_flags);
	}
}
/*
 * ar9003_hw_init_cal_pcoem() - initial calibration for PC-OEM chips
 * @ah:   hardware state
 * @chan: channel to calibrate for
 *
 * Full initial-calibration sequence for AR9485/AR9462/AR9565: optional
 * RTT restore, carrier-leak and Tx IQ setup, AGC calibration with MCI
 * coordination, post-processing of Tx IQ / CL results, RTT history save
 * and finally (re)initialization of the periodic calibration list.
 *
 * Return: true on success, false if the AGC calibration timed out.
 */
static bool ar9003_hw_init_cal_pcoem(struct ath_hw *ah,
				     struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool is_reusable = true, status = true;
	bool run_rtt_cal = false, run_agc_cal;
	bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
	u32 rx_delay = 0;
	u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
					  AR_PHY_AGC_CONTROL_FLTR_CAL |
					  AR_PHY_AGC_CONTROL_PKDET_CAL;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	/* a failed RTT restore forces a full RTT calibration run */
	if (rtt) {
		if (!ar9003_hw_rtt_restore(ah, chan))
			run_rtt_cal = true;

		if (run_rtt_cal)
			ath_dbg(common, CALIBRATE, "RTT calibration to be done\n");
	}

	run_agc_cal = run_rtt_cal;

	if (run_rtt_cal) {
		ar9003_hw_rtt_enable(ah);
		ar9003_hw_rtt_set_mask(ah, 0x00);
		ar9003_hw_rtt_clear_hist(ah);
	}

	if (rtt) {
		if (!run_rtt_cal) {
			/*
			 * RTT restore succeeded: temporarily mask out the
			 * offset/filter/peak-detect cals (saving which of
			 * them were enabled, to restore later).
			 */
			agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
			agc_supp_cals &= agc_ctrl;
			agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
				      AR_PHY_AGC_CONTROL_FLTR_CAL |
				      AR_PHY_AGC_CONTROL_PKDET_CAL);
			REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
		} else {
			if (ah->ah_flags & AH_FASTCC)
				run_agc_cal = true;
		}
	}

	/* enable CL cal only if no cached results can be restored */
	if (ah->enabled_cals & TX_CL_CAL) {
		if (caldata && test_bit(TXCLCAL_DONE, &caldata->cal_flags))
			REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
		else {
			REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
			run_agc_cal = true;
		}
	}

	/* Tx IQ cal is skipped on half/quarter-rate channels */
	if ((IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan)) ||
	    !(ah->enabled_cals & TX_IQ_CAL))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (caldata && !test_bit(TXIQCAL_DONE, &caldata->cal_flags))
			REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		else
			REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		txiqcal_done = run_agc_cal = true;
	}

skip_tx_iqcal:
	/* coordinate with the MCI (BT coex) engine before calibrating */
	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_req(ah, &is_reusable);

	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		rx_delay = REG_READ(ah, AR_PHY_RX_DELAY);
		/* Disable BB_active */
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_RX_DELAY, AR_PHY_RX_DELAY_DELAY);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);

		ar9003_hw_do_pcoem_manual_peak_cal(ah, chan, run_rtt_cal);
	}

	/* restore the Rx delay saved before the CL calibration */
	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		REG_WRITE(ah, AR_PHY_RX_DELAY, rx_delay);
		udelay(5);
	}

	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_done(ah);

	/* re-enable the AGC cals that were masked out for the RTT restore */
	if (rtt && !run_rtt_cal) {
		agc_ctrl |= agc_supp_cals;
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (!status) {
		if (run_rtt_cal)
			ar9003_hw_rtt_disable(ah);

		ath_dbg(common, CALIBRATE,
			"offset calibration failed to complete in %d ms; noisy environment?\n",
			AH_WAIT_TIMEOUT / 1000);
		return false;
	}

	/* process fresh Tx IQ results, or reload cached ones */
	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, 0, is_reusable);
	else if (caldata && test_bit(TXIQCAL_DONE, &caldata->cal_flags))
		ar9003_hw_tx_iq_cal_reload(ah);

	ar9003_hw_cl_cal_post_proc(ah, is_reusable);

	if (run_rtt_cal && caldata) {
		if (is_reusable) {
			if (!ath9k_hw_rfbus_req(ah)) {
				ath_err(ath9k_hw_common(ah),
					"Could not stop baseband\n");
			} else {
				ar9003_hw_rtt_fill_hist(ah);

				if (test_bit(SW_PKDET_DONE, &caldata->cal_flags))
					ar9003_hw_rtt_load_hist(ah);
			}

			ath9k_hw_rfbus_done(ah);
		}

		ar9003_hw_rtt_disable(ah);
	}

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}
  1287. static bool do_ar9003_agc_cal(struct ath_hw *ah)
  1288. {
  1289. struct ath_common *common = ath9k_hw_common(ah);
  1290. bool status;
  1291. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  1292. REG_READ(ah, AR_PHY_AGC_CONTROL) |
  1293. AR_PHY_AGC_CONTROL_CAL);
  1294. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  1295. AR_PHY_AGC_CONTROL_CAL,
  1296. 0, AH_WAIT_TIMEOUT);
  1297. if (!status) {
  1298. ath_dbg(common, CALIBRATE,
  1299. "offset calibration failed to complete in %d ms,"
  1300. "noisy environment?\n",
  1301. AH_WAIT_TIMEOUT / 1000);
  1302. return false;
  1303. }
  1304. return true;
  1305. }
/*
 * ar9003_hw_init_cal_soc() - initial calibration for SoC chips
 * @ah:   hardware state
 * @chan: channel to calibrate for
 *
 * Initial-calibration sequence for the SoC family (AR9300/AR9331/AR9340/
 * AR9550 etc.): carrier-leak setup, Tx IQ calibration (either standalone
 * or folded into the AGC cal), AGC calibration - repeated MAXIQCAL times
 * on AR955x to build a median - and finally (re)initialization of the
 * periodic calibration list.
 *
 * Return: true on success, false if any calibration step failed.
 */
static bool ar9003_hw_init_cal_soc(struct ath_hw *ah,
				   struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool status = true;
	bool run_agc_cal = false, sep_iq_cal = false;
	int i = 0;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	if (ah->enabled_cals & TX_CL_CAL) {
		REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL, AR_PHY_CL_CAL_ENABLE);
		run_agc_cal = true;
	}

	/* Tx IQ cal is skipped on half/quarter-rate channels */
	if (IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration. Specifically, AR9550 in SoC chips.
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (REG_READ_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				   AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL)) {
			txiqcal_done = true;
		} else {
			txiqcal_done = false;
		}
		run_agc_cal = true;
	} else {
		sep_iq_cal = true;
		run_agc_cal = true;
	}

	/*
	 * In the SoC family, this will run for AR9300, AR9331 and AR9340.
	 */
	if (sep_iq_cal) {
		txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
		/* briefly disable the baseband to latch the results */
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (AR_SREV_9550(ah) && IS_CHAN_2GHZ(chan)) {
		if (!ar9003_hw_dynamic_osdac_selection(ah, txiqcal_done))
			return false;
	}

skip_tx_iqcal:
	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		if (AR_SREV_9330_11(ah))
			ar9003_hw_manual_peak_cal(ah, 0, IS_CHAN_2GHZ(chan));

		/*
		 * For non-AR9550 chips, we just trigger AGC calibration
		 * in the HW, poll for completion and then process
		 * the results.
		 *
		 * For AR955x, we run it multiple times and use
		 * median IQ correction.
		 */
		if (!AR_SREV_9550(ah)) {
			status = do_ar9003_agc_cal(ah);
			if (!status)
				return false;

			if (txiqcal_done)
				ar9003_hw_tx_iq_cal_post_proc(ah, 0, false);
		} else {
			if (!txiqcal_done) {
				status = do_ar9003_agc_cal(ah);
				if (!status)
					return false;
			} else {
				for (i = 0; i < MAXIQCAL; i++) {
					status = do_ar9003_agc_cal(ah);
					if (!status)
						return false;
					ar9003_hw_tx_iq_cal_post_proc(ah, i, false);
				}
			}
		}
	}

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}
  1404. void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
  1405. {
  1406. struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
  1407. struct ath_hw_ops *ops = ath9k_hw_ops(ah);
  1408. if (AR_SREV_9485(ah) || AR_SREV_9462(ah) || AR_SREV_9565(ah))
  1409. priv_ops->init_cal = ar9003_hw_init_cal_pcoem;
  1410. else
  1411. priv_ops->init_cal = ar9003_hw_init_cal_soc;
  1412. priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
  1413. priv_ops->setup_calibration = ar9003_hw_setup_calibration;
  1414. ops->calibrate = ar9003_hw_calibrate;
  1415. }