ar9003_calib.c

/*
 * Copyright (c) 2010-2011 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include "hw.h"
#include "hw-ops.h"
#include "ar9003_phy.h"
#include "ar9003_rtt.h"
#include "ar9003_mci.h"

#define MAX_MEASUREMENT	MAX_IQCAL_MEASUREMENT
#define MAX_MAG_DELTA	11
#define MAX_PHS_DELTA	10

struct coeff {
	int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];
	int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];
	int iqc_coeff[2];
};

enum ar9003_cal_types {
	IQ_MISMATCH_CAL = BIT(0),
};

static void ar9003_hw_setup_calibration(struct ath_hw *ah,
					struct ath9k_cal_list *currCal)
{
	struct ath_common *common = ath9k_hw_common(ah);

	/* Select calibration to run */
	switch (currCal->calData->calType) {
	case IQ_MISMATCH_CAL:
		/*
		 * Start calibration with
		 * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
		 */
		REG_RMW_FIELD(ah, AR_PHY_TIMING4,
			      AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
			      currCal->calData->calCountMax);
		REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);

		ath_dbg(common, CALIBRATE,
			"starting IQ Mismatch Calibration\n");

		/* Kick-off cal */
		REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
		break;
	default:
		ath_err(common, "Invalid calibration type\n");
		break;
	}
}

/*
 * Generic calibration routine.
 * Recalibrate the lower PHY chips to account for temperature/environment
 * changes.
 */
static bool ar9003_hw_per_calibration(struct ath_hw *ah,
				      struct ath9k_channel *ichan,
				      u8 rxchainmask,
				      struct ath9k_cal_list *currCal)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	/* Cal is assumed not done until explicitly set below */
	bool iscaldone = false;

	/* Calibration in progress. */
	if (currCal->calState == CAL_RUNNING) {
		/* Check to see if it has finished. */
		if (!(REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)) {
			/*
			 * Accumulate cal measures for active chains
			 */
			currCal->calData->calCollect(ah);
			ah->cal_samples++;

			if (ah->cal_samples >=
			    currCal->calData->calNumSamples) {
				unsigned int i, numChains = 0;

				for (i = 0; i < AR9300_MAX_CHAINS; i++) {
					if (rxchainmask & (1 << i))
						numChains++;
				}

				/*
				 * Process accumulated data
				 */
				currCal->calData->calPostProc(ah, numChains);

				/* Calibration has finished. */
				caldata->CalValid |= currCal->calData->calType;
				currCal->calState = CAL_DONE;
				iscaldone = true;
			} else {
				/*
				 * Set-up collection of another sub-sample until we
				 * get desired number
				 */
				ar9003_hw_setup_calibration(ah, currCal);
			}
		}
	} else if (!(caldata->CalValid & currCal->calData->calType)) {
		/* If current cal is marked invalid in channel, kick it off */
		ath9k_hw_reset_calibration(ah, currCal);
	}

	return iscaldone;
}

static bool ar9003_hw_calibrate(struct ath_hw *ah,
				struct ath9k_channel *chan,
				u8 rxchainmask,
				bool longcal)
{
	bool iscaldone = true;
	struct ath9k_cal_list *currCal = ah->cal_list_curr;

	/*
	 * For given calibration:
	 * 1. Call generic cal routine
	 * 2. When this cal is done (isCalDone) if we have more cals waiting
	 *    (eg after reset), mask this to upper layers by not propagating
	 *    isCalDone if it is set to TRUE.
	 *    Instead, change isCalDone to FALSE and setup the waiting cal(s)
	 *    to be run.
	 */
	if (currCal &&
	    (currCal->calState == CAL_RUNNING ||
	     currCal->calState == CAL_WAITING)) {
		iscaldone = ar9003_hw_per_calibration(ah, chan,
						      rxchainmask, currCal);
		if (iscaldone) {
			ah->cal_list_curr = currCal = currCal->calNext;

			if (currCal->calState == CAL_WAITING) {
				iscaldone = false;
				ath9k_hw_reset_calibration(ah, currCal);
			}
		}
	}

	/*
	 * Do NF cal only at longer intervals. Get the value from
	 * the previous NF cal and update history buffer.
	 */
	if (longcal && ath9k_hw_getnf(ah, chan)) {
		/*
		 * Load the NF from history buffer of the current channel.
		 * NF is slow time-variant, so it is OK to use a historical
		 * value.
		 */
		ath9k_hw_loadnf(ah, ah->curchan);

		/* start NF calibration, without updating BB NF register */
		ath9k_hw_start_nfcal(ah, false);
	}

	return iscaldone;
}

static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
{
	int i;

	/* Accumulate IQ cal measures for active chains */
	for (i = 0; i < AR5416_MAX_CHAINS; i++) {
		if (ah->txchainmask & BIT(i)) {
			ah->totalPowerMeasI[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
			ah->totalPowerMeasQ[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
			ah->totalIqCorrMeas[i] +=
				(int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
			ath_dbg(ath9k_hw_common(ah), CALIBRATE,
				"%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
				ah->cal_samples, i, ah->totalPowerMeasI[i],
				ah->totalPowerMeasQ[i],
				ah->totalIqCorrMeas[i]);
		}
	}
}

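/*
 * Turn the accumulated power/correlation measurements into per-chain
 * RX I/Q correction coefficients, program them into the
 * AR_PHY_RX_IQCAL_CORR_Bx registers and enable the correction.
 */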
static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 powerMeasQ, powerMeasI, iqCorrMeas;
	u32 qCoffDenom, iCoffDenom;
	int32_t qCoff, iCoff;
	int iqCorrNeg, i;
	static const u_int32_t offset_array[3] = {
		AR_PHY_RX_IQCAL_CORR_B0,
		AR_PHY_RX_IQCAL_CORR_B1,
		AR_PHY_RX_IQCAL_CORR_B2,
	};

	for (i = 0; i < numChains; i++) {
		powerMeasI = ah->totalPowerMeasI[i];
		powerMeasQ = ah->totalPowerMeasQ[i];
		iqCorrMeas = ah->totalIqCorrMeas[i];

		ath_dbg(common, CALIBRATE,
			"Starting IQ Cal and Correction for Chain %d\n", i);

		ath_dbg(common, CALIBRATE,
			"Original: Chn %d iq_corr_meas = 0x%08x\n",
			i, ah->totalIqCorrMeas[i]);

		iqCorrNeg = 0;

		if (iqCorrMeas > 0x80000000) {
			iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
			iqCorrNeg = 1;
		}

		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_i = 0x%08x\n",
			i, powerMeasI);
		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_q = 0x%08x\n",
			i, powerMeasQ);
		ath_dbg(common, CALIBRATE, "iqCorrNeg is 0x%08x\n", iqCorrNeg);

		iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
		qCoffDenom = powerMeasQ / 64;

		if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
			iCoff = iqCorrMeas / iCoffDenom;
			qCoff = powerMeasI / qCoffDenom - 64;
			ath_dbg(common, CALIBRATE, "Chn %d iCoff = 0x%08x\n",
				i, iCoff);
			ath_dbg(common, CALIBRATE, "Chn %d qCoff = 0x%08x\n",
				i, qCoff);

			/* Force bounds on iCoff */
			if (iCoff >= 63)
				iCoff = 63;
			else if (iCoff <= -63)
				iCoff = -63;

			/* Negate iCoff if iqCorrNeg == 0 */
			if (iqCorrNeg == 0x0)
				iCoff = -iCoff;

			/* Force bounds on qCoff */
			if (qCoff >= 63)
				qCoff = 63;
			else if (qCoff <= -63)
				qCoff = -63;

			iCoff = iCoff & 0x7f;
			qCoff = qCoff & 0x7f;

			ath_dbg(common, CALIBRATE,
				"Chn %d : iCoff = 0x%x qCoff = 0x%x\n",
				i, iCoff, qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) before update = 0x%x\n",
				offset_array[i],
				REG_READ(ah, offset_array[i]));

			if (AR_SREV_9565(ah) &&
			    (iCoff == 63 || qCoff == 63 ||
			     iCoff == -63 || qCoff == -63))
				return;

			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				      iCoff);
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				      qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"IQ Cal and Correction done for Chain %d\n", i);
		}
	}

	REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
		    AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
	ath_dbg(common, CALIBRATE,
		"IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
		(unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
		AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
		REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
}

static const struct ath9k_percal_data iq_cal_single_sample = {
	IQ_MISMATCH_CAL,
	MIN_CAL_SAMPLES,
	PER_MAX_LOG_COUNT,
	ar9003_hw_iqcal_collect,
	ar9003_hw_iqcalibrate
};

static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
{
	ah->iq_caldata.calData = &iq_cal_single_sample;

	if (AR_SREV_9300_20_OR_LATER(ah)) {
		ah->enabled_cals |= TX_IQ_CAL;
		if (AR_SREV_9485_OR_LATER(ah) && !AR_SREV_9340(ah))
			ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
	}

	ah->supp_cals = IQ_MISMATCH_CAL;
}

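/*
 * Dynamic OSDAC selection: repeatedly run the DC offset calibration,
 * read back the I/Q DC offsets for the high gain, low gain and
 * loopback settings of each chain, and bump that chain's offset DAC
 * value whenever a readback falls outside [OFF_LOWER_LT, OFF_UPPER_LT].
 */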
#define OFF_UPPER_LT 24
#define OFF_LOWER_LT 7

static bool ar9003_hw_dynamic_osdac_selection(struct ath_hw *ah,
					      bool txiqcal_done)
{
	struct ath_common *common = ath9k_hw_common(ah);
	int ch0_done, osdac_ch0, dc_off_ch0_i1, dc_off_ch0_q1, dc_off_ch0_i2,
	    dc_off_ch0_q2, dc_off_ch0_i3, dc_off_ch0_q3;
	int ch1_done, osdac_ch1, dc_off_ch1_i1, dc_off_ch1_q1, dc_off_ch1_i2,
	    dc_off_ch1_q2, dc_off_ch1_i3, dc_off_ch1_q3;
	int ch2_done, osdac_ch2, dc_off_ch2_i1, dc_off_ch2_q1, dc_off_ch2_i2,
	    dc_off_ch2_q2, dc_off_ch2_i3, dc_off_ch2_q3;
	bool status;
	u32 temp, val;

	/*
	 * Clear offset and IQ calibration, run AGC cal.
	 */
	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_OFFSET_CAL);
	REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
		    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
	REG_WRITE(ah, AR_PHY_AGC_CONTROL,
		  REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);

	status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
			       AR_PHY_AGC_CONTROL_CAL,
			       0, AH_WAIT_TIMEOUT);
	if (!status) {
		ath_dbg(common, CALIBRATE,
			"AGC cal without offset cal failed to complete in 1ms");
		return false;
	}

	/*
	 * Allow only offset calibration and disable the others
	 * (Carrier Leak calibration, TX Filter calibration and
	 * Peak Detector offset calibration).
	 */
	REG_SET_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_OFFSET_CAL);
	REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
		    AR_PHY_CL_CAL_ENABLE);
	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_FLTR_CAL);
	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_PKDET_CAL);

	ch0_done = 0;
	ch1_done = 0;
	ch2_done = 0;

	while ((ch0_done == 0) || (ch1_done == 0) || (ch2_done == 0)) {
		osdac_ch0 = (REG_READ(ah, AR_PHY_65NM_CH0_BB1) >> 30) & 0x3;
		osdac_ch1 = (REG_READ(ah, AR_PHY_65NM_CH1_BB1) >> 30) & 0x3;
		osdac_ch2 = (REG_READ(ah, AR_PHY_65NM_CH2_BB1) >> 30) & 0x3;

		REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);

		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);
		if (!status) {
			ath_dbg(common, CALIBRATE,
				"DC offset cal failed to complete in 1ms");
			return false;
		}

		REG_CLR_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

		/*
		 * High gain.
		 */
		REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (1 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (1 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (1 << 8)));

		temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
		dc_off_ch0_i1 = (temp >> 26) & 0x1f;
		dc_off_ch0_q1 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
		dc_off_ch1_i1 = (temp >> 26) & 0x1f;
		dc_off_ch1_q1 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
		dc_off_ch2_i1 = (temp >> 26) & 0x1f;
		dc_off_ch2_q1 = (temp >> 21) & 0x1f;

		/*
		 * Low gain.
		 */
		REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (2 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (2 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (2 << 8)));

		temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
		dc_off_ch0_i2 = (temp >> 26) & 0x1f;
		dc_off_ch0_q2 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
		dc_off_ch1_i2 = (temp >> 26) & 0x1f;
		dc_off_ch1_q2 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
		dc_off_ch2_i2 = (temp >> 26) & 0x1f;
		dc_off_ch2_q2 = (temp >> 21) & 0x1f;

		/*
		 * Loopback.
		 */
		REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (3 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (3 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (3 << 8)));

		temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
		dc_off_ch0_i3 = (temp >> 26) & 0x1f;
		dc_off_ch0_q3 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
		dc_off_ch1_i3 = (temp >> 26) & 0x1f;
		dc_off_ch1_q3 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
		dc_off_ch2_i3 = (temp >> 26) & 0x1f;
		dc_off_ch2_q3 = (temp >> 21) & 0x1f;

		if ((dc_off_ch0_i1 > OFF_UPPER_LT) || (dc_off_ch0_i1 < OFF_LOWER_LT) ||
		    (dc_off_ch0_i2 > OFF_UPPER_LT) || (dc_off_ch0_i2 < OFF_LOWER_LT) ||
		    (dc_off_ch0_i3 > OFF_UPPER_LT) || (dc_off_ch0_i3 < OFF_LOWER_LT) ||
		    (dc_off_ch0_q1 > OFF_UPPER_LT) || (dc_off_ch0_q1 < OFF_LOWER_LT) ||
		    (dc_off_ch0_q2 > OFF_UPPER_LT) || (dc_off_ch0_q2 < OFF_LOWER_LT) ||
		    (dc_off_ch0_q3 > OFF_UPPER_LT) || (dc_off_ch0_q3 < OFF_LOWER_LT)) {
			if (osdac_ch0 == 3) {
				ch0_done = 1;
			} else {
				osdac_ch0++;
				val = REG_READ(ah, AR_PHY_65NM_CH0_BB1) & 0x3fffffff;
				val |= (osdac_ch0 << 30);
				REG_WRITE(ah, AR_PHY_65NM_CH0_BB1, val);
				ch0_done = 0;
			}
		} else {
			ch0_done = 1;
		}

		if ((dc_off_ch1_i1 > OFF_UPPER_LT) || (dc_off_ch1_i1 < OFF_LOWER_LT) ||
		    (dc_off_ch1_i2 > OFF_UPPER_LT) || (dc_off_ch1_i2 < OFF_LOWER_LT) ||
		    (dc_off_ch1_i3 > OFF_UPPER_LT) || (dc_off_ch1_i3 < OFF_LOWER_LT) ||
		    (dc_off_ch1_q1 > OFF_UPPER_LT) || (dc_off_ch1_q1 < OFF_LOWER_LT) ||
		    (dc_off_ch1_q2 > OFF_UPPER_LT) || (dc_off_ch1_q2 < OFF_LOWER_LT) ||
		    (dc_off_ch1_q3 > OFF_UPPER_LT) || (dc_off_ch1_q3 < OFF_LOWER_LT)) {
			if (osdac_ch1 == 3) {
				ch1_done = 1;
			} else {
				osdac_ch1++;
				val = REG_READ(ah, AR_PHY_65NM_CH1_BB1) & 0x3fffffff;
				val |= (osdac_ch1 << 30);
				REG_WRITE(ah, AR_PHY_65NM_CH1_BB1, val);
				ch1_done = 0;
			}
		} else {
			ch1_done = 1;
		}

		if ((dc_off_ch2_i1 > OFF_UPPER_LT) || (dc_off_ch2_i1 < OFF_LOWER_LT) ||
		    (dc_off_ch2_i2 > OFF_UPPER_LT) || (dc_off_ch2_i2 < OFF_LOWER_LT) ||
		    (dc_off_ch2_i3 > OFF_UPPER_LT) || (dc_off_ch2_i3 < OFF_LOWER_LT) ||
		    (dc_off_ch2_q1 > OFF_UPPER_LT) || (dc_off_ch2_q1 < OFF_LOWER_LT) ||
		    (dc_off_ch2_q2 > OFF_UPPER_LT) || (dc_off_ch2_q2 < OFF_LOWER_LT) ||
		    (dc_off_ch2_q3 > OFF_UPPER_LT) || (dc_off_ch2_q3 < OFF_LOWER_LT)) {
			if (osdac_ch2 == 3) {
				ch2_done = 1;
			} else {
				osdac_ch2++;
				val = REG_READ(ah, AR_PHY_65NM_CH2_BB1) & 0x3fffffff;
				val |= (osdac_ch2 << 30);
				REG_WRITE(ah, AR_PHY_65NM_CH2_BB1, val);
				ch2_done = 0;
			}
		} else {
			ch2_done = 1;
		}
	}

	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_OFFSET_CAL);
	REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

	/*
	 * We don't need to check txiqcal_done here since it is always
	 * set for AR9550.
	 */
	REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
		    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);

	return true;
}

/*
 * solve 4x4 linear equation used in loopback iq cal.
 */
static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
				   s32 sin_2phi_1,
				   s32 cos_2phi_1,
				   s32 sin_2phi_2,
				   s32 cos_2phi_2,
				   s32 mag_a0_d0,
				   s32 phs_a0_d0,
				   s32 mag_a1_d0,
				   s32 phs_a1_d0,
				   s32 solved_eq[])
{
	s32 f1 = cos_2phi_1 - cos_2phi_2,
	    f3 = sin_2phi_1 - sin_2phi_2,
	    f2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	const s32 result_shift = 1 << 15;
	struct ath_common *common = ath9k_hw_common(ah);

	f2 = ((f1 >> 3) * (f1 >> 3) + (f3 >> 3) * (f3 >> 3)) >> 9;

	if (!f2) {
		ath_dbg(common, CALIBRATE, "Divide by 0\n");
		return false;
	}

	/* mag mismatch, tx */
	mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
	/* phs mismatch, tx */
	phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);

	mag_tx = (mag_tx / f2);
	phs_tx = (phs_tx / f2);

	/* mag mismatch, rx */
	mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
		 result_shift;
	/* phs mismatch, rx */
	phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
		 result_shift;

	solved_eq[0] = mag_tx;
	solved_eq[1] = phs_tx;
	solved_eq[2] = mag_rx;
	solved_eq[3] = phs_rx;

	return true;
}

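/*
 * Approximate the magnitude sqrt(in_re^2 + in_im^2) with a
 * max-plus-scaled-min estimate, using only shifts by powers of two.
 */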
static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
{
	s32 abs_i = abs(in_re),
	    abs_q = abs(in_im),
	    max_abs, min_abs;

	if (abs_i > abs_q) {
		max_abs = abs_i;
		min_abs = abs_q;
	} else {
		max_abs = abs_q;
		min_abs = abs_i;
	}

	return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
}

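/*
 * Derive the TX and RX I/Q mismatch correction for one chain from the
 * six raw loopback measurement words and pack the quantized results
 * into iqc_coeff[0] (TX) and iqc_coeff[1] (RX).
 */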
#define DELPT 32

static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
				   s32 chain_idx,
				   const s32 iq_res[],
				   s32 iqc_coeff[])
{
	s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
	    i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
	    i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
	    i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
	s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
	    phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
	    sin_2phi_1, cos_2phi_1,
	    sin_2phi_2, cos_2phi_2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
	    q_q_coff, q_i_coff;
	const s32 res_scale = 1 << 15;
	const s32 delpt_shift = 1 << 8;
	s32 mag1, mag2;
	struct ath_common *common = ath9k_hw_common(ah);

	i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
	i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
	iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);

	if (i2_m_q2_a0_d0 > 0x800)
		i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);

	if (i2_p_q2_a0_d0 > 0x800)
		i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);

	if (iq_corr_a0_d0 > 0x800)
		iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);

	i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
	i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
	iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;

	if (i2_m_q2_a0_d1 > 0x800)
		i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);

	if (i2_p_q2_a0_d1 > 0x1000)
		i2_p_q2_a0_d1 = -((0x1fff - i2_p_q2_a0_d1) + 1);

	if (iq_corr_a0_d1 > 0x800)
		iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);

	i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
	i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
	iq_corr_a1_d0 = iq_res[4] & 0xfff;

	if (i2_m_q2_a1_d0 > 0x800)
		i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);

	if (i2_p_q2_a1_d0 > 0x800)
		i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);

	if (iq_corr_a1_d0 > 0x800)
		iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);

	i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
	i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
	iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;

	if (i2_m_q2_a1_d1 > 0x800)
		i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);

	if (i2_p_q2_a1_d1 > 0x800)
		i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);

	if (iq_corr_a1_d1 > 0x800)
		iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);

	if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
	    (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0:\n"
			"a0_d0=%d\n"
			"a0_d1=%d\n"
			"a2_d0=%d\n"
			"a1_d1=%d\n",
			i2_p_q2_a0_d0, i2_p_q2_a0_d1,
			i2_p_q2_a1_d0, i2_p_q2_a1_d1);
		return false;
	}

	if ((i2_p_q2_a0_d0 < 1024) || (i2_p_q2_a0_d0 > 2047) ||
	    (i2_p_q2_a1_d0 < 0) || (i2_p_q2_a1_d1 < 0) ||
	    (i2_p_q2_a0_d0 <= i2_m_q2_a0_d0) ||
	    (i2_p_q2_a0_d0 <= iq_corr_a0_d0) ||
	    (i2_p_q2_a0_d1 <= i2_m_q2_a0_d1) ||
	    (i2_p_q2_a0_d1 <= iq_corr_a0_d1) ||
	    (i2_p_q2_a1_d0 <= i2_m_q2_a1_d0) ||
	    (i2_p_q2_a1_d0 <= iq_corr_a1_d0) ||
	    (i2_p_q2_a1_d1 <= i2_m_q2_a1_d1) ||
	    (i2_p_q2_a1_d1 <= iq_corr_a1_d1)) {
		return false;
	}

	mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
	phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;

	mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
	phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;

	mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
	phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;

	mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
	phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;

	/* w/o analog phase shift */
	sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
	/* w/o analog phase shift */
	cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);

	/*
	 * force sin^2 + cos^2 = 1;
	 * find magnitude by approximation
	 */
	mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
	mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);

	if ((mag1 == 0) || (mag2 == 0)) {
		ath_dbg(common, CALIBRATE, "Divide by 0: mag1=%d, mag2=%d\n",
			mag1, mag2);
		return false;
	}

	/* normalization sin and cos by mag */
	sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
	cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
	sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
	cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);

	/* calculate IQ mismatch */
	if (!ar9003_hw_solve_iq_cal(ah,
				    sin_2phi_1, cos_2phi_1,
				    sin_2phi_2, cos_2phi_2,
				    mag_a0_d0, phs_a0_d0,
				    mag_a1_d0,
				    phs_a1_d0, solved_eq)) {
		ath_dbg(common, CALIBRATE,
			"Call to ar9003_hw_solve_iq_cal() failed\n");
		return false;
	}

	mag_tx = solved_eq[0];
	phs_tx = solved_eq[1];
	mag_rx = solved_eq[2];
	phs_rx = solved_eq[3];

	ath_dbg(common, CALIBRATE,
		"chain %d: mag mismatch=%d phase mismatch=%d\n",
		chain_idx, mag_tx/res_scale, phs_tx/res_scale);

	if (res_scale == mag_tx) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0: mag_tx=%d, res_scale=%d\n",
			mag_tx, res_scale);
		return false;
	}

	/* calculate and quantize Tx IQ correction factor */
	mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
	phs_corr_tx = -phs_tx;

	q_q_coff = (mag_corr_tx * 128 / res_scale);
	q_i_coff = (phs_corr_tx * 256 / res_scale);

	ath_dbg(common, CALIBRATE, "tx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	iqc_coeff[0] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, CALIBRATE, "tx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[0]);

	if (-mag_rx == res_scale) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0: mag_rx=%d, res_scale=%d\n",
			mag_rx, res_scale);
		return false;
	}

	/* calculate and quantize Rx IQ correction factors */
	mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
	phs_corr_rx = -phs_rx;

	q_q_coff = (mag_corr_rx * 128 / res_scale);
	q_i_coff = (phs_corr_rx * 256 / res_scale);

	ath_dbg(common, CALIBRATE, "rx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	iqc_coeff[1] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, CALIBRATE, "rx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[1]);

	return true;
}

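/*
 * If the spread between the largest and smallest coefficient exceeds
 * max_delta, treat the value furthest from the average as an outlier
 * and replace it with the average of the remaining measurements.
 */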
static void ar9003_hw_detect_outlier(int *mp_coeff, int nmeasurement,
				     int max_delta)
{
	int mp_max = -64, max_idx = 0;
	int mp_min = 63, min_idx = 0;
	int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;

	/* find min/max mismatch across all calibrated gains */
	for (i = 0; i < nmeasurement; i++) {
		if (mp_coeff[i] > mp_max) {
			mp_max = mp_coeff[i];
			max_idx = i;
		} else if (mp_coeff[i] < mp_min) {
			mp_min = mp_coeff[i];
			min_idx = i;
		}
	}

	/* find average (exclude max abs value) */
	for (i = 0; i < nmeasurement; i++) {
		if ((abs(mp_coeff[i]) < abs(mp_max)) ||
		    (abs(mp_coeff[i]) < abs(mp_min))) {
			mp_avg += mp_coeff[i];
			mp_count++;
		}
	}

	/*
	 * finding mean magnitude/phase if possible, otherwise
	 * just use the last value as the mean
	 */
	if (mp_count)
		mp_avg /= mp_count;
	else
		mp_avg = mp_coeff[nmeasurement - 1];

	/* detect outlier */
	if (abs(mp_max - mp_min) > max_delta) {
		if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
			outlier_idx = max_idx;
		else
			outlier_idx = min_idx;

		mp_coeff[outlier_idx] = mp_avg;
	}
}

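/*
 * Write the per-gain TX IQ correction coefficients (outliers already
 * filtered) into the per-chain coefficient tables, enable TX and
 * loopback IQ correction, and cache the values in caldata for reuse.
 */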
static void ar9003_hw_tx_iqcal_load_avg_2_passes(struct ath_hw *ah,
						 struct coeff *coeff,
						 bool is_reusable)
{
	int i, im, nmeasurement;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	struct ath9k_hw_cal_data *caldata = ah->caldata;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
			tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);

			tx_corr_coeff[i * 2][2] =
			tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	/* Load the average of 2 passes */
	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		nmeasurement = REG_READ_FIELD(ah,
				AR_PHY_TX_IQCAL_STATUS_B0,
				AR_PHY_CALIBRATED_GAINS_0);

		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		/* detect outlier only if nmeasurement > 1 */
		if (nmeasurement > 1) {
			/* Detect magnitude outlier */
			ar9003_hw_detect_outlier(coeff->mag_coeff[i],
						 nmeasurement, MAX_MAG_DELTA);

			/* Detect phase outlier */
			ar9003_hw_detect_outlier(coeff->phs_coeff[i],
						 nmeasurement, MAX_PHS_DELTA);
		}

		for (im = 0; im < nmeasurement; im++) {
			coeff->iqc_coeff[0] = (coeff->mag_coeff[i][im] & 0x7f) |
					((coeff->phs_coeff[i][im] & 0x7f) << 7);

			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					coeff->iqc_coeff[0]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					coeff->iqc_coeff[0]);

			if (caldata)
				caldata->tx_corr_coeff[im][i] =
					coeff->iqc_coeff[0];
		}

		if (caldata)
			caldata->num_measures[i] = nmeasurement;
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);

	if (caldata) {
		if (is_reusable)
			set_bit(TXIQCAL_DONE, &caldata->cal_flags);
		else
			clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
	}

	return;
}

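/*
 * Kick off a standalone TX IQ calibration (clearing any forced TX
 * gain first) and wait for the hardware to finish.
 */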
static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u8 tx_gain_forced;

	tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
					AR_PHY_TXGAIN_FORCE);
	if (tx_gain_forced)
		REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
			      AR_PHY_TXGAIN_FORCE, 0);

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
		      AR_PHY_TX_IQCAL_START_DO_CAL, 1);

	if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
			   AR_PHY_TX_IQCAL_START_DO_CAL, 0,
			   AH_WAIT_TIMEOUT)) {
		ath_dbg(common, CALIBRATE, "Tx IQ Cal is not completed\n");
		return false;
	}
	return true;
}

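/*
 * Read the raw TX IQ calibration results for every chain and gain
 * setting from the channel info table, convert them into correction
 * coefficients and load the result into hardware.
 */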
static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	struct ath_common *common = ath9k_hw_common(ah);
	const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
		AR_PHY_TX_IQCAL_STATUS_B0,
		AR_PHY_TX_IQCAL_STATUS_B1,
		AR_PHY_TX_IQCAL_STATUS_B2,
	};
	const u_int32_t chan_info_tab[] = {
		AR_PHY_CHAN_INFO_TAB_0,
		AR_PHY_CHAN_INFO_TAB_1,
		AR_PHY_CHAN_INFO_TAB_2,
	};
	struct coeff coeff;
	s32 iq_res[6];
	int i, im, j;
	int nmeasurement;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		nmeasurement = REG_READ_FIELD(ah,
				AR_PHY_TX_IQCAL_STATUS_B0,
				AR_PHY_CALIBRATED_GAINS_0);
		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		for (im = 0; im < nmeasurement; im++) {
			ath_dbg(common, CALIBRATE,
				"Doing Tx IQ Cal for chain %d\n", i);

			if (REG_READ(ah, txiqcal_status[i]) &
			    AR_PHY_TX_IQCAL_STATUS_FAILED) {
				ath_dbg(common, CALIBRATE,
					"Tx IQ Cal failed for chain %d\n", i);
				goto tx_iqcal_fail;
			}

			for (j = 0; j < 3; j++) {
				u32 idx = 2 * j, offset = 4 * (3 * im + j);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      0);

				/* 32 bits */
				iq_res[idx] = REG_READ(ah,
						chan_info_tab[i] +
						offset);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      1);

				/* 16 bits */
				iq_res[idx + 1] = 0xffff & REG_READ(ah,
						chan_info_tab[i] + offset);

				ath_dbg(common, CALIBRATE,
					"IQ_RES[%d]=0x%x IQ_RES[%d]=0x%x\n",
					idx, iq_res[idx], idx + 1,
					iq_res[idx + 1]);
			}

			if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
						    coeff.iqc_coeff)) {
				ath_dbg(common, CALIBRATE,
					"Failed in calculation of IQ correction\n");
				goto tx_iqcal_fail;
			}

			coeff.mag_coeff[i][im] = coeff.iqc_coeff[0] & 0x7f;
			coeff.phs_coeff[i][im] =
				(coeff.iqc_coeff[0] >> 7) & 0x7f;

			if (coeff.mag_coeff[i][im] > 63)
				coeff.mag_coeff[i][im] -= 128;
			if (coeff.phs_coeff[i][im] > 63)
				coeff.phs_coeff[i][im] -= 128;
		}
	}
	ar9003_hw_tx_iqcal_load_avg_2_passes(ah, &coeff, is_reusable);

	return;

tx_iqcal_fail:
	ath_dbg(common, CALIBRATE, "Tx IQ Cal failed\n");
	return;
}

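/*
 * Reload previously computed TX IQ correction coefficients from
 * caldata without re-running the calibration.
 */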
static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	int i, im;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
			tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);

			tx_corr_coeff[i * 2][2] =
			tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		for (im = 0; im < caldata->num_measures[i]; im++) {
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
		}
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
}

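/*
 * Software peak detector calibration for one chain: with the receiver
 * overrides in place, binary-search the CALDAC value bit by bit,
 * using AGC_OUT as the comparator output.
 */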
static void ar9003_hw_manual_peak_cal(struct ath_hw *ah, u8 chain, bool is_2g)
{
	int offset[8] = {0}, total = 0, test;
	int agc_out, i;

	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_LNAON_CALDC, 0x0);
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA2G_GAIN_OVR, 0x0);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA5G_GAIN_OVR, 0x0);

	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON, 0x0);

	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_ON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0x1);

	if (AR_SREV_9330_11(ah)) {
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, 0x0);
	} else {
		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_DBDAC_OVR, 0x0);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_DBDAC_OVR, 0x0);
	}

	for (i = 6; i > 0; i--) {
		offset[i] = BIT(i - 1);
		test = total + offset[i];

		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR,
				      test);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR,
				      test);
		udelay(100);

		agc_out = REG_READ_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
					 AR_PHY_65NM_RXRF_AGC_AGC_OUT);
		offset[i] = (agc_out) ? 0 : 1;
		total += (offset[i] << (i - 1));
	}

	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, total);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR, total);

	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0);
}

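/*
 * Run the software peak detector calibration on the PC-OEM chips
 * (AR9485/AR9462/AR9565) and, on RTT-capable parts, remember the
 * resulting CALDAC values in caldata.
 */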
static void ar9003_hw_do_pcoem_manual_peak_cal(struct ath_hw *ah,
					       struct ath9k_channel *chan,
					       bool run_rtt_cal)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	int i;

	if (!AR_SREV_9462(ah) && !AR_SREV_9565(ah) && !AR_SREV_9485(ah))
		return;

	if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && !run_rtt_cal)
		return;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->rxchainmask & (1 << i)))
			continue;
		ar9003_hw_manual_peak_cal(ah, i, IS_CHAN_2GHZ(chan));
	}

	if (caldata)
		set_bit(SW_PKDET_DONE, &caldata->cal_flags);

	if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && caldata) {
		if (IS_CHAN_2GHZ(chan)) {
			caldata->caldac[0] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(0),
						AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
			caldata->caldac[1] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(1),
						AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
		} else {
			caldata->caldac[0] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(0),
						AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
			caldata->caldac[1] = REG_READ_FIELD(ah,
						AR_PHY_65NM_RXRF_AGC(1),
						AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
		}
	}
}

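/*
 * Carrier leak calibration post-processing: restore a previously
 * cached CL table if one exists, otherwise cache the freshly
 * calibrated table when the result is reusable.
 */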
static void ar9003_hw_cl_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
					  AR_PHY_CL_TAB_1,
					  AR_PHY_CL_TAB_2 };
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txclcal_done = false;
	int i, j;

	if (!caldata || !(ah->enabled_cals & TX_CL_CAL))
		return;

	txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
			  AR_PHY_AGC_CONTROL_CLC_SUCCESS);

	if (test_bit(TXCLCAL_DONE, &caldata->cal_flags)) {
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
					  caldata->tx_clcal[i][j]);
		}
	} else if (is_reusable && txclcal_done) {
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				caldata->tx_clcal[i][j] =
					REG_READ(ah, CL_TAB_ENTRY(cl_idx[i]));
		}
		set_bit(TXCLCAL_DONE, &caldata->cal_flags);
	}
}

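/*
 * Initial calibration for the PC-OEM chips: optionally restore RTT
 * results, set up carrier leak and TX IQ calibration, run the AGC
 * calibration (with manual peak cal), post-process the results and
 * initialize the periodic IQ mismatch calibration list.
 */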
static bool ar9003_hw_init_cal_pcoem(struct ath_hw *ah,
				     struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool is_reusable = true, status = true;
	bool run_rtt_cal = false, run_agc_cal;
	bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
	u32 rx_delay = 0;
	u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
					  AR_PHY_AGC_CONTROL_FLTR_CAL |
					  AR_PHY_AGC_CONTROL_PKDET_CAL;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	if (rtt) {
		if (!ar9003_hw_rtt_restore(ah, chan))
			run_rtt_cal = true;

		if (run_rtt_cal)
			ath_dbg(common, CALIBRATE, "RTT calibration to be done\n");
	}

	run_agc_cal = run_rtt_cal;

	if (run_rtt_cal) {
		ar9003_hw_rtt_enable(ah);
		ar9003_hw_rtt_set_mask(ah, 0x00);
		ar9003_hw_rtt_clear_hist(ah);
	}

	if (rtt) {
		if (!run_rtt_cal) {
			agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
			agc_supp_cals &= agc_ctrl;
			agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
				      AR_PHY_AGC_CONTROL_FLTR_CAL |
				      AR_PHY_AGC_CONTROL_PKDET_CAL);
			REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
		} else {
			if (ah->ah_flags & AH_FASTCC)
				run_agc_cal = true;
		}
	}

	if (ah->enabled_cals & TX_CL_CAL) {
		if (caldata && test_bit(TXCLCAL_DONE, &caldata->cal_flags))
			REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
		else {
			REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
			run_agc_cal = true;
		}
	}

	if ((IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan)) ||
	    !(ah->enabled_cals & TX_IQ_CAL))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (caldata && !test_bit(TXIQCAL_DONE, &caldata->cal_flags))
			REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		else
			REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		txiqcal_done = run_agc_cal = true;
	}

skip_tx_iqcal:
	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_req(ah, &is_reusable);

	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		rx_delay = REG_READ(ah, AR_PHY_RX_DELAY);
		/* Disable BB_active */
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_RX_DELAY, AR_PHY_RX_DELAY_DELAY);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);

		ar9003_hw_do_pcoem_manual_peak_cal(ah, chan, run_rtt_cal);
	}

	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		REG_WRITE(ah, AR_PHY_RX_DELAY, rx_delay);
		udelay(5);
	}

	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_done(ah);

	if (rtt && !run_rtt_cal) {
		agc_ctrl |= agc_supp_cals;
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (!status) {
		if (run_rtt_cal)
			ar9003_hw_rtt_disable(ah);

		ath_dbg(common, CALIBRATE,
			"offset calibration failed to complete in %d ms; noisy environment?\n",
			AH_WAIT_TIMEOUT / 1000);
		return false;
	}

	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, is_reusable);
	else if (caldata && test_bit(TXIQCAL_DONE, &caldata->cal_flags))
		ar9003_hw_tx_iq_cal_reload(ah);

	ar9003_hw_cl_cal_post_proc(ah, is_reusable);

	if (run_rtt_cal && caldata) {
		if (is_reusable) {
			if (!ath9k_hw_rfbus_req(ah)) {
				ath_err(ath9k_hw_common(ah),
					"Could not stop baseband\n");
			} else {
				ar9003_hw_rtt_fill_hist(ah);

				if (test_bit(SW_PKDET_DONE, &caldata->cal_flags))
					ar9003_hw_rtt_load_hist(ah);
			}

			ath9k_hw_rfbus_done(ah);
		}

		ar9003_hw_rtt_disable(ah);
	}

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}

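/*
 * Initial calibration for the SoC chips (AR9300/AR9331/AR9340/AR9550):
 * TX IQ calibration either standalone or as part of the AGC cal,
 * followed by the AGC calibration and initialization of the periodic
 * IQ mismatch calibration list.
 */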
static bool ar9003_hw_init_cal_soc(struct ath_hw *ah,
				   struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool is_reusable = true, status = true;
	bool run_agc_cal = false, sep_iq_cal = false;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	if (ah->enabled_cals & TX_CL_CAL) {
		REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL, AR_PHY_CL_CAL_ENABLE);
		run_agc_cal = true;
	}

	if (IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration. Specifically, AR9550 in SoC chips.
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		txiqcal_done = true;
		run_agc_cal = true;
	} else {
		sep_iq_cal = true;
		run_agc_cal = true;
	}

	/*
	 * In the SoC family, this will run for AR9300, AR9331 and AR9340.
	 */
	if (sep_iq_cal) {
		txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (AR_SREV_9550(ah) && IS_CHAN_2GHZ(chan)) {
		if (!ar9003_hw_dynamic_osdac_selection(ah, txiqcal_done))
			return false;
	}

skip_tx_iqcal:
	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		if (AR_SREV_9330_11(ah))
			ar9003_hw_manual_peak_cal(ah, 0, IS_CHAN_2GHZ(chan));

		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);
	}

	if (!status) {
		ath_dbg(common, CALIBRATE,
			"offset calibration failed to complete in %d ms; noisy environment?\n",
			AH_WAIT_TIMEOUT / 1000);
		return false;
	}

	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, is_reusable);

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}

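/* Hook up the AR9003 family calibration callbacks. */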
void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
{
	struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
	struct ath_hw_ops *ops = ath9k_hw_ops(ah);

	if (AR_SREV_9485(ah) || AR_SREV_9462(ah) || AR_SREV_9565(ah))
		priv_ops->init_cal = ar9003_hw_init_cal_pcoem;
	else
		priv_ops->init_cal = ar9003_hw_init_cal_soc;

	priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
	priv_ops->setup_calibration = ar9003_hw_setup_calibration;

	ops->calibrate = ar9003_hw_calibrate;
}