/* ar9003_calib.c */
  1. /*
  2. * Copyright (c) 2010-2011 Atheros Communications Inc.
  3. *
  4. * Permission to use, copy, modify, and/or distribute this software for any
  5. * purpose with or without fee is hereby granted, provided that the above
  6. * copyright notice and this permission notice appear in all copies.
  7. *
  8. * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  9. * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10. * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11. * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12. * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13. * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14. * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15. */
  16. #include "hw.h"
  17. #include "hw-ops.h"
  18. #include "ar9003_phy.h"
  19. #include "ar9003_rtt.h"
  20. #include "ar9003_mci.h"
  21. #define MAX_MEASUREMENT MAX_IQCAL_MEASUREMENT
  22. #define MAX_MAG_DELTA 11
  23. #define MAX_PHS_DELTA 10
  24. #define MAXIQCAL 3
/*
 * Per-chain Tx/Rx IQ mismatch calibration state.
 *
 * mag_coeff/phs_coeff: magnitude and phase mismatch values, indexed by
 * chain, by gain measurement, and by calibration iteration (up to
 * MAXIQCAL repeats).
 * iqc_coeff: packed correction pair produced by ar9003_hw_calc_iq_corr()
 * ([0] = Tx, [1] = Rx).
 */
struct coeff {
	int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT][MAXIQCAL];
	int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT][MAXIQCAL];
	int iqc_coeff[2];
};
/* Calibration types supported on AR9003; values are bit flags so they
 * can be OR-ed into supp_cals / CalValid masks. */
enum ar9003_cal_types {
	IQ_MISMATCH_CAL = BIT(0),
};
/*
 * Program the baseband for the requested calibration and kick it off.
 * Only IQ mismatch calibration is handled; any other type logs an error.
 */
static void ar9003_hw_setup_calibration(struct ath_hw *ah,
					struct ath9k_cal_list *currCal)
{
	struct ath_common *common = ath9k_hw_common(ah);

	/* Select calibration to run */
	switch (currCal->calData->calType) {
	case IQ_MISMATCH_CAL:
		/*
		 * Start calibration with
		 * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
		 */
		REG_RMW_FIELD(ah, AR_PHY_TIMING4,
			      AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
			      currCal->calData->calCountMax);
		REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);

		ath_dbg(common, CALIBRATE,
			"starting IQ Mismatch Calibration\n");

		/* Kick-off cal: hardware clears DO_CAL when it completes */
		REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
		break;
	default:
		ath_err(common, "Invalid calibration type\n");
		break;
	}
}
  58. /*
  59. * Generic calibration routine.
  60. * Recalibrate the lower PHY chips to account for temperature/environment
  61. * changes.
  62. */
  63. static bool ar9003_hw_per_calibration(struct ath_hw *ah,
  64. struct ath9k_channel *ichan,
  65. u8 rxchainmask,
  66. struct ath9k_cal_list *currCal)
  67. {
  68. struct ath9k_hw_cal_data *caldata = ah->caldata;
  69. /* Cal is assumed not done until explicitly set below */
  70. bool iscaldone = false;
  71. /* Calibration in progress. */
  72. if (currCal->calState == CAL_RUNNING) {
  73. /* Check to see if it has finished. */
  74. if (!(REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)) {
  75. /*
  76. * Accumulate cal measures for active chains
  77. */
  78. currCal->calData->calCollect(ah);
  79. ah->cal_samples++;
  80. if (ah->cal_samples >=
  81. currCal->calData->calNumSamples) {
  82. unsigned int i, numChains = 0;
  83. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  84. if (rxchainmask & (1 << i))
  85. numChains++;
  86. }
  87. /*
  88. * Process accumulated data
  89. */
  90. currCal->calData->calPostProc(ah, numChains);
  91. /* Calibration has finished. */
  92. caldata->CalValid |= currCal->calData->calType;
  93. currCal->calState = CAL_DONE;
  94. iscaldone = true;
  95. } else {
  96. /*
  97. * Set-up collection of another sub-sample until we
  98. * get desired number
  99. */
  100. ar9003_hw_setup_calibration(ah, currCal);
  101. }
  102. }
  103. } else if (!(caldata->CalValid & currCal->calData->calType)) {
  104. /* If current cal is marked invalid in channel, kick it off */
  105. ath9k_hw_reset_calibration(ah, currCal);
  106. }
  107. return iscaldone;
  108. }
/*
 * Top-level periodic calibration entry point.
 *
 * Steps the currently scheduled per-chain calibration (if any) and, on
 * longcal intervals, harvests the previous noise-floor measurement and
 * starts a new NF cal without touching the BB NF register.
 *
 * Returns 1 when no calibration work remains pending, 0 while a cal is
 * still running, or a negative error code from ath9k_hw_loadnf().
 */
static int ar9003_hw_calibrate(struct ath_hw *ah, struct ath9k_channel *chan,
			       u8 rxchainmask, bool longcal)
{
	bool iscaldone = true;
	struct ath9k_cal_list *currCal = ah->cal_list_curr;
	int ret;

	/*
	 * For given calibration:
	 * 1. Call generic cal routine
	 * 2. When this cal is done (isCalDone) if we have more cals waiting
	 *    (eg after reset), mask this to upper layers by not propagating
	 *    isCalDone if it is set to TRUE.
	 *    Instead, change isCalDone to FALSE and setup the waiting cal(s)
	 *    to be run.
	 */
	if (currCal &&
	    (currCal->calState == CAL_RUNNING ||
	     currCal->calState == CAL_WAITING)) {
		iscaldone = ar9003_hw_per_calibration(ah, chan,
						      rxchainmask, currCal);
		if (iscaldone) {
			/*
			 * Advance to the next cal. NOTE(review): calNext is
			 * dereferenced without a NULL check — presumably the
			 * cal list is circular; confirm against the list
			 * construction code.
			 */
			ah->cal_list_curr = currCal = currCal->calNext;

			if (currCal->calState == CAL_WAITING) {
				iscaldone = false;
				ath9k_hw_reset_calibration(ah, currCal);
			}
		}
	}

	/*
	 * Do NF cal only at longer intervals. Get the value from
	 * the previous NF cal and update history buffer.
	 */
	if (longcal && ath9k_hw_getnf(ah, chan)) {
		/*
		 * Load the NF from history buffer of the current channel.
		 * NF is slow time-variant, so it is OK to use a historical
		 * value.
		 */
		ret = ath9k_hw_loadnf(ah, ah->curchan);
		if (ret < 0)
			return ret;

		/* start NF calibration, without updating BB NF register */
		ath9k_hw_start_nfcal(ah, false);
	}

	return iscaldone;
}
  155. static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
  156. {
  157. int i;
  158. /* Accumulate IQ cal measures for active chains */
  159. for (i = 0; i < AR5416_MAX_CHAINS; i++) {
  160. if (ah->txchainmask & BIT(i)) {
  161. ah->totalPowerMeasI[i] +=
  162. REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
  163. ah->totalPowerMeasQ[i] +=
  164. REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
  165. ah->totalIqCorrMeas[i] +=
  166. (int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
  167. ath_dbg(ath9k_hw_common(ah), CALIBRATE,
  168. "%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
  169. ah->cal_samples, i, ah->totalPowerMeasI[i],
  170. ah->totalPowerMeasQ[i],
  171. ah->totalIqCorrMeas[i]);
  172. }
  173. }
  174. }
/*
 * Post-process the accumulated IQ mismatch measurements and write the
 * per-chain I/Q correction coefficients into the Rx IQCAL correction
 * registers, then enable IQ correction.
 *
 * @numChains: number of Rx chains that collected measurements (derived
 *             from the chainmask by the caller).
 */
static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 powerMeasQ, powerMeasI, iqCorrMeas;
	u32 qCoffDenom, iCoffDenom;
	int32_t qCoff, iCoff;
	int iqCorrNeg, i;
	/* Per-chain correction register addresses, indexed by chain */
	static const u_int32_t offset_array[3] = {
		AR_PHY_RX_IQCAL_CORR_B0,
		AR_PHY_RX_IQCAL_CORR_B1,
		AR_PHY_RX_IQCAL_CORR_B2,
	};

	for (i = 0; i < numChains; i++) {
		powerMeasI = ah->totalPowerMeasI[i];
		powerMeasQ = ah->totalPowerMeasQ[i];
		iqCorrMeas = ah->totalIqCorrMeas[i];

		ath_dbg(common, CALIBRATE,
			"Starting IQ Cal and Correction for Chain %d\n", i);

		ath_dbg(common, CALIBRATE,
			"Original: Chn %d iq_corr_meas = 0x%08x\n",
			i, ah->totalIqCorrMeas[i]);

		/* Convert the accumulated correlation from two's complement
		 * into magnitude + sign flag */
		iqCorrNeg = 0;
		if (iqCorrMeas > 0x80000000) {
			iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
			iqCorrNeg = 1;
		}

		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_i = 0x%08x\n",
			i, powerMeasI);
		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_q = 0x%08x\n",
			i, powerMeasQ);
		ath_dbg(common, CALIBRATE, "iqCorrNeg is 0x%08x\n", iqCorrNeg);

		/* Fixed-point denominators; zero means the measurement is
		 * unusable and the chain is skipped */
		iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
		qCoffDenom = powerMeasQ / 64;

		if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
			iCoff = iqCorrMeas / iCoffDenom;
			qCoff = powerMeasI / qCoffDenom - 64;
			ath_dbg(common, CALIBRATE, "Chn %d iCoff = 0x%08x\n",
				i, iCoff);
			ath_dbg(common, CALIBRATE, "Chn %d qCoff = 0x%08x\n",
				i, qCoff);

			/* Force bounds on iCoff */
			if (iCoff >= 63)
				iCoff = 63;
			else if (iCoff <= -63)
				iCoff = -63;

			/* Negate iCoff if iqCorrNeg == 0 */
			if (iqCorrNeg == 0x0)
				iCoff = -iCoff;

			/* Force bounds on qCoff */
			if (qCoff >= 63)
				qCoff = 63;
			else if (qCoff <= -63)
				qCoff = -63;

			/* Pack into 7-bit two's-complement register fields */
			iCoff = iCoff & 0x7f;
			qCoff = qCoff & 0x7f;

			ath_dbg(common, CALIBRATE,
				"Chn %d : iCoff = 0x%x qCoff = 0x%x\n",
				i, iCoff, qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) before update = 0x%x\n",
				offset_array[i],
				REG_READ(ah, offset_array[i]));

			/*
			 * Saturated coefficients on AR9565 abort the whole
			 * update — note this early return also skips the
			 * IQCORR_ENABLE write below for ALL chains.
			 */
			if (AR_SREV_9565(ah) &&
			    (iCoff == 63 || qCoff == 63 ||
			     iCoff == -63 || qCoff == -63))
				return;

			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				      iCoff);
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				      qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"IQ Cal and Correction done for Chain %d\n", i);
		}
	}

	/* Turn on IQ correction now that the coefficients are programmed */
	REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
		    AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
	ath_dbg(common, CALIBRATE,
		"IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
		(unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
		AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
		REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
}
/*
 * Descriptor for the single-sample IQ mismatch calibration: type,
 * sample count, log-count maximum, then the collect and post-process
 * hooks (positional initializers — see struct ath9k_percal_data).
 */
static const struct ath9k_percal_data iq_cal_single_sample = {
	IQ_MISMATCH_CAL,
	MIN_CAL_SAMPLES,
	PER_MAX_LOG_COUNT,
	ar9003_hw_iqcal_collect,
	ar9003_hw_iqcalibrate
};
  276. static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
  277. {
  278. ah->iq_caldata.calData = &iq_cal_single_sample;
  279. if (AR_SREV_9300_20_OR_LATER(ah)) {
  280. ah->enabled_cals |= TX_IQ_CAL;
  281. if (AR_SREV_9485_OR_LATER(ah) && !AR_SREV_9340(ah))
  282. ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
  283. }
  284. ah->supp_cals = IQ_MISMATCH_CAL;
  285. }
/* Acceptable DC-offset window; values outside trigger an OSDAC bump */
#define OFF_UPPER_LT 24
#define OFF_LOWER_LT 7

/*
 * Iteratively select the offset-DAC (OSDAC, 2-bit field in bits 31:30 of
 * the per-chain BB1 register) for all three chains so that the measured
 * DC offsets for high-gain, low-gain and loopback modes fall within
 * [OFF_LOWER_LT, OFF_UPPER_LT].
 *
 * Each loop iteration: run an offset-only AGC cal, then read back the
 * 5-bit I/Q DC offsets (bits 30:26 and 25:21 of BB3) for each of the
 * three gain modes, and bump a chain's OSDAC if any offset is out of
 * range (a chain is also considered done once its OSDAC saturates at 3).
 *
 * Returns false if any AGC cal fails to complete, true otherwise.
 */
static bool ar9003_hw_dynamic_osdac_selection(struct ath_hw *ah,
					      bool txiqcal_done)
{
	struct ath_common *common = ath9k_hw_common(ah);
	int ch0_done, osdac_ch0, dc_off_ch0_i1, dc_off_ch0_q1, dc_off_ch0_i2,
		dc_off_ch0_q2, dc_off_ch0_i3, dc_off_ch0_q3;
	int ch1_done, osdac_ch1, dc_off_ch1_i1, dc_off_ch1_q1, dc_off_ch1_i2,
		dc_off_ch1_q2, dc_off_ch1_i3, dc_off_ch1_q3;
	int ch2_done, osdac_ch2, dc_off_ch2_i1, dc_off_ch2_q1, dc_off_ch2_i2,
		dc_off_ch2_q2, dc_off_ch2_i3, dc_off_ch2_q3;
	bool status;
	u32 temp, val;

	/*
	 * Clear offset and IQ calibration, run AGC cal.
	 */
	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_OFFSET_CAL);
	REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
		    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
	REG_WRITE(ah, AR_PHY_AGC_CONTROL,
		  REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);

	status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
			       AR_PHY_AGC_CONTROL_CAL,
			       0, AH_WAIT_TIMEOUT);
	if (!status) {
		ath_dbg(common, CALIBRATE,
			"AGC cal without offset cal failed to complete in 1ms");
		return false;
	}

	/*
	 * Allow only offset calibration and disable the others
	 * (Carrier Leak calibration, TX Filter calibration and
	 *  Peak Detector offset calibration).
	 */
	REG_SET_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_OFFSET_CAL);
	REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
		    AR_PHY_CL_CAL_ENABLE);
	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_FLTR_CAL);
	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_PKDET_CAL);

	ch0_done = 0;
	ch1_done = 0;
	ch2_done = 0;

	/* NOTE(review): no iteration cap — relies on each chain either
	 * converging or saturating its OSDAC at 3 to terminate. */
	while ((ch0_done == 0) || (ch1_done == 0) || (ch2_done == 0)) {
		/* Current OSDAC selection per chain (bits 31:30 of BB1) */
		osdac_ch0 = (REG_READ(ah, AR_PHY_65NM_CH0_BB1) >> 30) & 0x3;
		osdac_ch1 = (REG_READ(ah, AR_PHY_65NM_CH1_BB1) >> 30) & 0x3;
		osdac_ch2 = (REG_READ(ah, AR_PHY_65NM_CH2_BB1) >> 30) & 0x3;

		REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

		/* Run the offset-only cal and wait for completion */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);

		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);
		if (!status) {
			ath_dbg(common, CALIBRATE,
				"DC offset cal failed to complete in 1ms");
			return false;
		}

		REG_CLR_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

		/*
		 * High gain: select readback mode 1 in BB3[9:8], then read
		 * the I (bits 30:26) and Q (bits 25:21) DC offsets.
		 */
		REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (1 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (1 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (1 << 8)));

		temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
		dc_off_ch0_i1 = (temp >> 26) & 0x1f;
		dc_off_ch0_q1 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
		dc_off_ch1_i1 = (temp >> 26) & 0x1f;
		dc_off_ch1_q1 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
		dc_off_ch2_i1 = (temp >> 26) & 0x1f;
		dc_off_ch2_q1 = (temp >> 21) & 0x1f;

		/*
		 * Low gain: readback mode 2.
		 */
		REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (2 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (2 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (2 << 8)));

		temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
		dc_off_ch0_i2 = (temp >> 26) & 0x1f;
		dc_off_ch0_q2 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
		dc_off_ch1_i2 = (temp >> 26) & 0x1f;
		dc_off_ch1_q2 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
		dc_off_ch2_i2 = (temp >> 26) & 0x1f;
		dc_off_ch2_q2 = (temp >> 21) & 0x1f;

		/*
		 * Loopback: readback mode 3.
		 */
		REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (3 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (3 << 8)));
		REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
			  ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (3 << 8)));

		temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
		dc_off_ch0_i3 = (temp >> 26) & 0x1f;
		dc_off_ch0_q3 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
		dc_off_ch1_i3 = (temp >> 26) & 0x1f;
		dc_off_ch1_q3 = (temp >> 21) & 0x1f;

		temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
		dc_off_ch2_i3 = (temp >> 26) & 0x1f;
		dc_off_ch2_q3 = (temp >> 21) & 0x1f;

		/* Chain 0: out-of-range offset in any mode bumps OSDAC,
		 * unless already at its maximum of 3 */
		if ((dc_off_ch0_i1 > OFF_UPPER_LT) || (dc_off_ch0_i1 < OFF_LOWER_LT) ||
		    (dc_off_ch0_i2 > OFF_UPPER_LT) || (dc_off_ch0_i2 < OFF_LOWER_LT) ||
		    (dc_off_ch0_i3 > OFF_UPPER_LT) || (dc_off_ch0_i3 < OFF_LOWER_LT) ||
		    (dc_off_ch0_q1 > OFF_UPPER_LT) || (dc_off_ch0_q1 < OFF_LOWER_LT) ||
		    (dc_off_ch0_q2 > OFF_UPPER_LT) || (dc_off_ch0_q2 < OFF_LOWER_LT) ||
		    (dc_off_ch0_q3 > OFF_UPPER_LT) || (dc_off_ch0_q3 < OFF_LOWER_LT)) {
			if (osdac_ch0 == 3) {
				ch0_done = 1;
			} else {
				osdac_ch0++;
				val = REG_READ(ah, AR_PHY_65NM_CH0_BB1) & 0x3fffffff;
				val |= (osdac_ch0 << 30);
				REG_WRITE(ah, AR_PHY_65NM_CH0_BB1, val);
				ch0_done = 0;
			}
		} else {
			ch0_done = 1;
		}

		/* Chain 1: same policy */
		if ((dc_off_ch1_i1 > OFF_UPPER_LT) || (dc_off_ch1_i1 < OFF_LOWER_LT) ||
		    (dc_off_ch1_i2 > OFF_UPPER_LT) || (dc_off_ch1_i2 < OFF_LOWER_LT) ||
		    (dc_off_ch1_i3 > OFF_UPPER_LT) || (dc_off_ch1_i3 < OFF_LOWER_LT) ||
		    (dc_off_ch1_q1 > OFF_UPPER_LT) || (dc_off_ch1_q1 < OFF_LOWER_LT) ||
		    (dc_off_ch1_q2 > OFF_UPPER_LT) || (dc_off_ch1_q2 < OFF_LOWER_LT) ||
		    (dc_off_ch1_q3 > OFF_UPPER_LT) || (dc_off_ch1_q3 < OFF_LOWER_LT)) {
			if (osdac_ch1 == 3) {
				ch1_done = 1;
			} else {
				osdac_ch1++;
				val = REG_READ(ah, AR_PHY_65NM_CH1_BB1) & 0x3fffffff;
				val |= (osdac_ch1 << 30);
				REG_WRITE(ah, AR_PHY_65NM_CH1_BB1, val);
				ch1_done = 0;
			}
		} else {
			ch1_done = 1;
		}

		/* Chain 2: same policy */
		if ((dc_off_ch2_i1 > OFF_UPPER_LT) || (dc_off_ch2_i1 < OFF_LOWER_LT) ||
		    (dc_off_ch2_i2 > OFF_UPPER_LT) || (dc_off_ch2_i2 < OFF_LOWER_LT) ||
		    (dc_off_ch2_i3 > OFF_UPPER_LT) || (dc_off_ch2_i3 < OFF_LOWER_LT) ||
		    (dc_off_ch2_q1 > OFF_UPPER_LT) || (dc_off_ch2_q1 < OFF_LOWER_LT) ||
		    (dc_off_ch2_q2 > OFF_UPPER_LT) || (dc_off_ch2_q2 < OFF_LOWER_LT) ||
		    (dc_off_ch2_q3 > OFF_UPPER_LT) || (dc_off_ch2_q3 < OFF_LOWER_LT)) {
			if (osdac_ch2 == 3) {
				ch2_done = 1;
			} else {
				osdac_ch2++;
				val = REG_READ(ah, AR_PHY_65NM_CH2_BB1) & 0x3fffffff;
				val |= (osdac_ch2 << 30);
				REG_WRITE(ah, AR_PHY_65NM_CH2_BB1, val);
				ch2_done = 0;
			}
		} else {
			ch2_done = 1;
		}
	}

	REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
		    AR_PHY_AGC_CONTROL_OFFSET_CAL);
	REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

	/*
	 * We don't need to check txiqcal_done here since it is always
	 * set for AR9550.
	 */
	REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
		    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);

	return true;
}
  469. /*
  470. * solve 4x4 linear equation used in loopback iq cal.
  471. */
  472. static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
  473. s32 sin_2phi_1,
  474. s32 cos_2phi_1,
  475. s32 sin_2phi_2,
  476. s32 cos_2phi_2,
  477. s32 mag_a0_d0,
  478. s32 phs_a0_d0,
  479. s32 mag_a1_d0,
  480. s32 phs_a1_d0,
  481. s32 solved_eq[])
  482. {
  483. s32 f1 = cos_2phi_1 - cos_2phi_2,
  484. f3 = sin_2phi_1 - sin_2phi_2,
  485. f2;
  486. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  487. const s32 result_shift = 1 << 15;
  488. struct ath_common *common = ath9k_hw_common(ah);
  489. f2 = ((f1 >> 3) * (f1 >> 3) + (f3 >> 3) * (f3 >> 3)) >> 9;
  490. if (!f2) {
  491. ath_dbg(common, CALIBRATE, "Divide by 0\n");
  492. return false;
  493. }
  494. /* mag mismatch, tx */
  495. mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
  496. /* phs mismatch, tx */
  497. phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);
  498. mag_tx = (mag_tx / f2);
  499. phs_tx = (phs_tx / f2);
  500. /* mag mismatch, rx */
  501. mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
  502. result_shift;
  503. /* phs mismatch, rx */
  504. phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
  505. result_shift;
  506. solved_eq[0] = mag_tx;
  507. solved_eq[1] = phs_tx;
  508. solved_eq[2] = mag_rx;
  509. solved_eq[3] = phs_rx;
  510. return true;
  511. }
  512. static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
  513. {
  514. s32 abs_i = abs(in_re),
  515. abs_q = abs(in_im),
  516. max_abs, min_abs;
  517. if (abs_i > abs_q) {
  518. max_abs = abs_i;
  519. min_abs = abs_q;
  520. } else {
  521. max_abs = abs_q;
  522. min_abs = abs_i;
  523. }
  524. return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
  525. }
#define DELPT 32

/*
 * Derive the packed Tx/Rx IQ correction coefficients for one chain from
 * the six raw loopback-cal result words in iq_res[].
 *
 * The raw words are unpacked into 12-bit signed quantities (i^2-q^2,
 * i^2+q^2 and the i*q correlation) for each of the four measurement
 * combinations (analog setting a0/a1 x digital setting d0/d1), validated,
 * normalized to Q15, and fed through ar9003_hw_solve_iq_cal().  The
 * resulting Tx and Rx corrections are quantized to 7-bit fields and
 * packed into iqc_coeff[0] (Tx) and iqc_coeff[1] (Rx).
 *
 * Returns false on any degenerate/implausible measurement.
 */
static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
				   s32 chain_idx,
				   const s32 iq_res[],
				   s32 iqc_coeff[])
{
	s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
	    i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
	    i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
	    i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
	s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
	    phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
	    sin_2phi_1, cos_2phi_1,
	    sin_2phi_2, cos_2phi_2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
	    q_q_coff, q_i_coff;
	const s32 res_scale = 1 << 15;
	const s32 delpt_shift = 1 << 8;
	s32 mag1, mag2;
	struct ath_common *common = ath9k_hw_common(ah);

	/* Unpack a0/d0 fields (12 bits each, spread across words 0-1)
	 * and sign-extend from 12-bit two's complement */
	i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
	i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
	iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);

	if (i2_m_q2_a0_d0 > 0x800)
		i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);

	if (i2_p_q2_a0_d0 > 0x800)
		i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);

	if (iq_corr_a0_d0 > 0x800)
		iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);

	/* a0/d1 fields (words 1-2).
	 * NOTE(review): i2_p_q2_a0_d1 is the only i^2+q^2 field that is
	 * never sign-extended here — confirm this is intentional (it is
	 * range-checked as non-negative implicitly by the <= comparisons
	 * below only if it stays positive). */
	i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
	i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
	iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;

	if (i2_m_q2_a0_d1 > 0x800)
		i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);

	if (iq_corr_a0_d1 > 0x800)
		iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);

	/* a1/d0 fields (words 2-4) */
	i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
	i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
	iq_corr_a1_d0 = iq_res[4] & 0xfff;

	if (i2_m_q2_a1_d0 > 0x800)
		i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);

	if (i2_p_q2_a1_d0 > 0x800)
		i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);

	if (iq_corr_a1_d0 > 0x800)
		iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);

	/* a1/d1 fields (words 4-5) */
	i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
	i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
	iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;

	if (i2_m_q2_a1_d1 > 0x800)
		i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);

	if (i2_p_q2_a1_d1 > 0x800)
		i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);

	if (iq_corr_a1_d1 > 0x800)
		iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);

	/* All i^2+q^2 terms are used as divisors below */
	if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
	    (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0:\n"
			"a0_d0=%d\n"
			"a0_d1=%d\n"
			"a2_d0=%d\n"
			"a1_d1=%d\n",
			i2_p_q2_a0_d0, i2_p_q2_a0_d1,
			i2_p_q2_a1_d0, i2_p_q2_a1_d1);
		return false;
	}

	/* Plausibility checks: total power must dominate the difference
	 * and correlation terms, and sit in the expected range */
	if ((i2_p_q2_a0_d0 < 1024) || (i2_p_q2_a0_d0 > 2047) ||
	    (i2_p_q2_a1_d0 < 0) || (i2_p_q2_a1_d1 < 0) ||
	    (i2_p_q2_a0_d0 <= i2_m_q2_a0_d0) ||
	    (i2_p_q2_a0_d0 <= iq_corr_a0_d0) ||
	    (i2_p_q2_a0_d1 <= i2_m_q2_a0_d1) ||
	    (i2_p_q2_a0_d1 <= iq_corr_a0_d1) ||
	    (i2_p_q2_a1_d0 <= i2_m_q2_a1_d0) ||
	    (i2_p_q2_a1_d0 <= iq_corr_a1_d0) ||
	    (i2_p_q2_a1_d1 <= i2_m_q2_a1_d1) ||
	    (i2_p_q2_a1_d1 <= iq_corr_a1_d1)) {
		return false;
	}

	/* Normalize mismatch/correlation by total power, Q15 */
	mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
	phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;

	mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
	phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;

	mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
	phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;

	mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
	phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;

	/* w/o analog phase shift */
	sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
	/* w/o analog phase shift */
	cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);

	/*
	 * force sin^2 + cos^2 = 1;
	 * find magnitude by approximation
	 */
	mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
	mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);

	if ((mag1 == 0) || (mag2 == 0)) {
		ath_dbg(common, CALIBRATE, "Divide by 0: mag1=%d, mag2=%d\n",
			mag1, mag2);
		return false;
	}

	/* normalization sin and cos by mag */
	sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
	cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
	sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
	cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);

	/* calculate IQ mismatch */
	if (!ar9003_hw_solve_iq_cal(ah,
				    sin_2phi_1, cos_2phi_1,
				    sin_2phi_2, cos_2phi_2,
				    mag_a0_d0, phs_a0_d0,
				    mag_a1_d0,
				    phs_a1_d0, solved_eq)) {
		ath_dbg(common, CALIBRATE,
			"Call to ar9003_hw_solve_iq_cal() failed\n");
		return false;
	}

	mag_tx = solved_eq[0];
	phs_tx = solved_eq[1];
	mag_rx = solved_eq[2];
	phs_rx = solved_eq[3];

	ath_dbg(common, CALIBRATE,
		"chain %d: mag mismatch=%d phase mismatch=%d\n",
		chain_idx, mag_tx/res_scale, phs_tx/res_scale);

	/* (res_scale - mag_tx) is the divisor below */
	if (res_scale == mag_tx) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0: mag_tx=%d, res_scale=%d\n",
			mag_tx, res_scale);
		return false;
	}

	/* calculate and quantize Tx IQ correction factor */
	mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
	phs_corr_tx = -phs_tx;

	q_q_coff = (mag_corr_tx * 128 / res_scale);
	q_i_coff = (phs_corr_tx * 256 / res_scale);

	ath_dbg(common, CALIBRATE, "tx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	/* Clamp both to the 7-bit signed register range */
	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	/* Pack: q_q in the upper 7 bits, q_i (masked) in the lower 7 */
	iqc_coeff[0] = (q_q_coff * 128) + (0x7f & q_i_coff);

	ath_dbg(common, CALIBRATE, "tx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[0]);

	/* (res_scale + mag_rx) is the divisor below */
	if (-mag_rx == res_scale) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0: mag_rx=%d, res_scale=%d\n",
			mag_rx, res_scale);
		return false;
	}

	/* calculate and quantize Rx IQ correction factors */
	mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
	phs_corr_rx = -phs_rx;

	q_q_coff = (mag_corr_rx * 128 / res_scale);
	q_i_coff = (phs_corr_rx * 256 / res_scale);

	ath_dbg(common, CALIBRATE, "rx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	iqc_coeff[1] = (q_q_coff * 128) + (0x7f & q_i_coff);

	ath_dbg(common, CALIBRATE, "rx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[1]);

	return true;
}
  705. static void ar9003_hw_detect_outlier(int mp_coeff[][MAXIQCAL],
  706. int nmeasurement,
  707. int max_delta)
  708. {
  709. int mp_max = -64, max_idx = 0;
  710. int mp_min = 63, min_idx = 0;
  711. int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;
  712. /* find min/max mismatch across all calibrated gains */
  713. for (i = 0; i < nmeasurement; i++) {
  714. if (mp_coeff[i][0] > mp_max) {
  715. mp_max = mp_coeff[i][0];
  716. max_idx = i;
  717. } else if (mp_coeff[i][0] < mp_min) {
  718. mp_min = mp_coeff[i][0];
  719. min_idx = i;
  720. }
  721. }
  722. /* find average (exclude max abs value) */
  723. for (i = 0; i < nmeasurement; i++) {
  724. if ((abs(mp_coeff[i][0]) < abs(mp_max)) ||
  725. (abs(mp_coeff[i][0]) < abs(mp_min))) {
  726. mp_avg += mp_coeff[i][0];
  727. mp_count++;
  728. }
  729. }
  730. /*
  731. * finding mean magnitude/phase if possible, otherwise
  732. * just use the last value as the mean
  733. */
  734. if (mp_count)
  735. mp_avg /= mp_count;
  736. else
  737. mp_avg = mp_coeff[nmeasurement - 1][0];
  738. /* detect outlier */
  739. if (abs(mp_max - mp_min) > max_delta) {
  740. if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
  741. outlier_idx = max_idx;
  742. else
  743. outlier_idx = min_idx;
  744. mp_coeff[outlier_idx][0] = mp_avg;
  745. }
  746. }
/*
 * Post-process the per-gain Tx IQ mismatch coefficients and program the
 * hardware correction registers.
 *
 * Unless the chip is an AR9550 (which median-filters instead), a single
 * outlier per chain is smoothed out of the magnitude and phase tables
 * before the packed 14-bit coefficients are written.  Results are
 * mirrored into @caldata (when present) so they can be reloaded later
 * via ar9003_hw_tx_iq_cal_reload().
 */
static void ar9003_hw_tx_iq_cal_outlier_detection(struct ath_hw *ah,
						  struct coeff *coeff,
						  bool is_reusable)
{
	int i, im, nmeasurement;
	int magnitude, phase;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	struct ath9k_hw_cal_data *caldata = ah->caldata;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));

	/*
	 * Each correction register holds two measurements, so measurement
	 * pairs (2k, 2k+1) map to the same register address.  Chains 1/2
	 * are skipped on AR9485.
	 */
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	/* Load the average of 2 passes */
	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		/*
		 * Number of calibrated gains, read from the chain-0 status
		 * register for every chain and clamped to the table size.
		 */
		nmeasurement = REG_READ_FIELD(ah,
				AR_PHY_TX_IQCAL_STATUS_B0,
				AR_PHY_CALIBRATED_GAINS_0);
		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		/*
		 * Skip normal outlier detection for AR9550.
		 */
		if (!AR_SREV_9550(ah)) {
			/* detect outlier only if nmeasurement > 1 */
			if (nmeasurement > 1) {
				/* Detect magnitude outlier */
				ar9003_hw_detect_outlier(coeff->mag_coeff[i],
							 nmeasurement,
							 MAX_MAG_DELTA);

				/* Detect phase outlier */
				ar9003_hw_detect_outlier(coeff->phs_coeff[i],
							 nmeasurement,
							 MAX_PHS_DELTA);
			}
		}

		for (im = 0; im < nmeasurement; im++) {
			magnitude = coeff->mag_coeff[i][im][0];
			phase = coeff->phs_coeff[i][im][0];

			/* pack 7-bit phase (low) and 7-bit magnitude (high) */
			coeff->iqc_coeff[0] =
				(phase & 0x7f) | ((magnitude & 0x7f) << 7);

			/* even/odd measurements occupy different fields of
			 * the shared register */
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					coeff->iqc_coeff[0]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					coeff->iqc_coeff[0]);

			if (caldata)
				caldata->tx_corr_coeff[im][i] =
					coeff->iqc_coeff[0];
		}
		if (caldata)
			caldata->num_measures[i] = nmeasurement;
	}

	/* enable Tx IQ correction and loopback Rx IQ correction */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);

	if (caldata) {
		if (is_reusable)
			set_bit(TXIQCAL_DONE, &caldata->cal_flags);
		else
			clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
	}

	return;
}
/*
 * Kick off a stand-alone Tx IQ calibration and wait for completion.
 *
 * Forced Tx gain is cleared first so the calibration is not constrained
 * to a single gain; NOTE(review): the forced-gain setting is not
 * restored afterwards.  Returns false if the DO_CAL bit does not
 * self-clear within AH_WAIT_TIMEOUT.
 */
static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u8 tx_gain_forced;

	tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
					AR_PHY_TXGAIN_FORCE);
	if (tx_gain_forced)
		REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
			      AR_PHY_TXGAIN_FORCE, 0);

	/* start calibration; hardware clears DO_CAL when finished */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
		      AR_PHY_TX_IQCAL_START_DO_CAL, 1);

	if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
			   AR_PHY_TX_IQCAL_START_DO_CAL, 0,
			   AH_WAIT_TIMEOUT)) {
		ath_dbg(common, CALIBRATE, "Tx IQ Cal is not completed\n");
		return false;
	}
	return true;
}
  844. static void __ar955x_tx_iq_cal_sort(struct ath_hw *ah,
  845. struct coeff *coeff,
  846. int i, int nmeasurement)
  847. {
  848. struct ath_common *common = ath9k_hw_common(ah);
  849. int im, ix, iy, temp;
  850. for (im = 0; im < nmeasurement; im++) {
  851. for (ix = 0; ix < MAXIQCAL - 1; ix++) {
  852. for (iy = ix + 1; iy <= MAXIQCAL - 1; iy++) {
  853. if (coeff->mag_coeff[i][im][iy] <
  854. coeff->mag_coeff[i][im][ix]) {
  855. temp = coeff->mag_coeff[i][im][ix];
  856. coeff->mag_coeff[i][im][ix] =
  857. coeff->mag_coeff[i][im][iy];
  858. coeff->mag_coeff[i][im][iy] = temp;
  859. }
  860. if (coeff->phs_coeff[i][im][iy] <
  861. coeff->phs_coeff[i][im][ix]) {
  862. temp = coeff->phs_coeff[i][im][ix];
  863. coeff->phs_coeff[i][im][ix] =
  864. coeff->phs_coeff[i][im][iy];
  865. coeff->phs_coeff[i][im][iy] = temp;
  866. }
  867. }
  868. }
  869. coeff->mag_coeff[i][im][0] = coeff->mag_coeff[i][im][MAXIQCAL / 2];
  870. coeff->phs_coeff[i][im][0] = coeff->phs_coeff[i][im][MAXIQCAL / 2];
  871. ath_dbg(common, CALIBRATE,
  872. "IQCAL: Median [ch%d][gain%d]: mag = %d phase = %d\n",
  873. i, im,
  874. coeff->mag_coeff[i][im][0],
  875. coeff->phs_coeff[i][im][0]);
  876. }
  877. }
  878. static bool ar955x_tx_iq_cal_median(struct ath_hw *ah,
  879. struct coeff *coeff,
  880. int iqcal_idx,
  881. int nmeasurement)
  882. {
  883. int i;
  884. if ((iqcal_idx + 1) != MAXIQCAL)
  885. return false;
  886. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  887. __ar955x_tx_iq_cal_sort(ah, coeff, i, nmeasurement);
  888. }
  889. return true;
  890. }
/*
 * Read back the raw Tx IQ calibration results for every active chain,
 * convert them into correction coefficients and store calibration pass
 * @iqcal_idx in the magnitude/phase tables.
 *
 * For AR9550 the coefficients from MAXIQCAL passes are median-filtered
 * (ar955x_tx_iq_cal_median) before the outlier-detection / register
 * programming step runs; other chips process each pass directly.
 */
static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah,
					  int iqcal_idx,
					  bool is_reusable)
{
	struct ath_common *common = ath9k_hw_common(ah);
	const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
		AR_PHY_TX_IQCAL_STATUS_B0,
		AR_PHY_TX_IQCAL_STATUS_B1,
		AR_PHY_TX_IQCAL_STATUS_B2,
	};
	const u_int32_t chan_info_tab[] = {
		AR_PHY_CHAN_INFO_TAB_0,
		AR_PHY_CHAN_INFO_TAB_1,
		AR_PHY_CHAN_INFO_TAB_2,
	};
	/* static: accumulates results across multiple calibration passes */
	static struct coeff coeff;
	s32 iq_res[6];
	int i, im, j;
	int nmeasurement = 0;
	bool outlier_detect = true;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		/* number of calibrated gains, clamped to the table size */
		nmeasurement = REG_READ_FIELD(ah,
				AR_PHY_TX_IQCAL_STATUS_B0,
				AR_PHY_CALIBRATED_GAINS_0);
		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		for (im = 0; im < nmeasurement; im++) {
			ath_dbg(common, CALIBRATE,
				"Doing Tx IQ Cal for chain %d\n", i);

			if (REG_READ(ah, txiqcal_status[i]) &
			    AR_PHY_TX_IQCAL_STATUS_FAILED) {
				ath_dbg(common, CALIBRATE,
					"Tx IQ Cal failed for chain %d\n", i);
				goto tx_iqcal_fail;
			}

			/*
			 * Six raw values per measurement: three table
			 * entries, each read as a 32-bit word followed by
			 * a 16-bit word (selected via the S2_READ bit).
			 */
			for (j = 0; j < 3; j++) {
				u32 idx = 2 * j, offset = 4 * (3 * im + j);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      0);

				/* 32 bits */
				iq_res[idx] = REG_READ(ah,
						chan_info_tab[i] +
						offset);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      1);

				/* 16 bits */
				iq_res[idx + 1] = 0xffff & REG_READ(ah,
						chan_info_tab[i] + offset);

				ath_dbg(common, CALIBRATE,
					"IQ_RES[%d]=0x%x IQ_RES[%d]=0x%x\n",
					idx, iq_res[idx], idx + 1,
					iq_res[idx + 1]);
			}

			if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
						    coeff.iqc_coeff)) {
				ath_dbg(common, CALIBRATE,
					"Failed in calculation of IQ correction\n");
				goto tx_iqcal_fail;
			}

			/* unpack and sign-extend the two 7-bit fields */
			coeff.phs_coeff[i][im][iqcal_idx] =
				coeff.iqc_coeff[0] & 0x7f;
			coeff.mag_coeff[i][im][iqcal_idx] =
				(coeff.iqc_coeff[0] >> 7) & 0x7f;

			if (coeff.mag_coeff[i][im][iqcal_idx] > 63)
				coeff.mag_coeff[i][im][iqcal_idx] -= 128;
			if (coeff.phs_coeff[i][im][iqcal_idx] > 63)
				coeff.phs_coeff[i][im][iqcal_idx] -= 128;
		}
	}

	if (AR_SREV_9550(ah))
		outlier_detect = ar955x_tx_iq_cal_median(ah, &coeff,
							 iqcal_idx,
							 nmeasurement);
	if (outlier_detect)
		ar9003_hw_tx_iq_cal_outlier_detection(ah, &coeff, is_reusable);

	return;

tx_iqcal_fail:
	ath_dbg(common, CALIBRATE, "Tx IQ Cal failed\n");
	return;
}
/*
 * Reload previously saved Tx IQ correction coefficients from
 * ah->caldata into the hardware and re-enable IQ correction.
 * Callers must guarantee ah->caldata is valid (see the caldata check
 * in ar9003_hw_init_cal_pcoem()).
 */
static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	int i, im;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));

	/* one register per measurement pair, per chain; AR9485 is
	 * single-chain, so chains 1/2 are skipped for it */
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		for (im = 0; im < caldata->num_measures[i]; im++) {
			/* even/odd measurements live in different fields */
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
		}
	}

	/* re-enable Tx IQ correction and loopback Rx IQ correction */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
}
/*
 * Software peak-detector calibration for one Rx chain.
 *
 * Takes manual control of the RF gain stages and AGC, then performs a
 * 6-bit successive-approximation search on the CALDAC override value
 * using the AGC output as the comparator.  The final value is written
 * back and the overrides are released.
 */
static void ar9003_hw_manual_peak_cal(struct ath_hw *ah, u8 chain, bool is_2g)
{
	int offset[8] = {0}, total = 0, test;
	int agc_out, i;

	/* override the Rx gain stages (band-specific LNA override) */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_LNAON_CALDC, 0x0);
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA2G_GAIN_OVR, 0x0);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA5G_GAIN_OVR, 0x0);

	/* force RXON off and take over the AGC */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON, 0x0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_ON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0x1);

	/* AR9330 1.1 clears the 2G CALDAC override; other chips clear the
	 * band-specific DBDAC override instead */
	if (AR_SREV_9330_11(ah)) {
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, 0x0);
	} else {
		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_DBDAC_OVR, 0x0);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_DBDAC_OVR, 0x0);
	}

	/*
	 * Successive approximation from bit 5 down to bit 0: write the
	 * candidate value, then keep the bit only when AGC_OUT reads 0.
	 */
	for (i = 6; i > 0; i--) {
		offset[i] = BIT(i - 1);
		test = total + offset[i];

		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR,
				      test);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR,
				      test);
		udelay(100); /* settle time before sampling AGC_OUT */
		agc_out = REG_READ_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
					 AR_PHY_65NM_RXRF_AGC_AGC_OUT);
		offset[i] = (agc_out) ? 0 : 1;
		total += (offset[i] << (i - 1));
	}

	/* program the final CALDAC value */
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, total);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR, total);

	/* release the overrides */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0);
}
  1078. static void ar9003_hw_do_pcoem_manual_peak_cal(struct ath_hw *ah,
  1079. struct ath9k_channel *chan,
  1080. bool run_rtt_cal)
  1081. {
  1082. struct ath9k_hw_cal_data *caldata = ah->caldata;
  1083. int i;
  1084. if (!AR_SREV_9462(ah) && !AR_SREV_9565(ah) && !AR_SREV_9485(ah))
  1085. return;
  1086. if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && !run_rtt_cal)
  1087. return;
  1088. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  1089. if (!(ah->rxchainmask & (1 << i)))
  1090. continue;
  1091. ar9003_hw_manual_peak_cal(ah, i, IS_CHAN_2GHZ(chan));
  1092. }
  1093. if (caldata)
  1094. set_bit(SW_PKDET_DONE, &caldata->cal_flags);
  1095. if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && caldata) {
  1096. if (IS_CHAN_2GHZ(chan)){
  1097. caldata->caldac[0] = REG_READ_FIELD(ah,
  1098. AR_PHY_65NM_RXRF_AGC(0),
  1099. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
  1100. caldata->caldac[1] = REG_READ_FIELD(ah,
  1101. AR_PHY_65NM_RXRF_AGC(1),
  1102. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
  1103. } else {
  1104. caldata->caldac[0] = REG_READ_FIELD(ah,
  1105. AR_PHY_65NM_RXRF_AGC(0),
  1106. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
  1107. caldata->caldac[1] = REG_READ_FIELD(ah,
  1108. AR_PHY_65NM_RXRF_AGC(1),
  1109. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
  1110. }
  1111. }
  1112. }
/*
 * Carrier-leak calibration post-processing: either restore previously
 * saved CL table entries from caldata, or — when the hardware reports
 * CLC success and the results are reusable — capture the fresh table
 * entries into caldata for later restoration.
 */
static void ar9003_hw_cl_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
					  AR_PHY_CL_TAB_1,
					  AR_PHY_CL_TAB_2 };
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txclcal_done = false;
	int i, j;

	if (!caldata || !(ah->enabled_cals & TX_CL_CAL))
		return;

	txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
			  AR_PHY_AGC_CONTROL_CLC_SUCCESS);

	if (test_bit(TXCLCAL_DONE, &caldata->cal_flags)) {
		/* saved results exist: write them back to the CL tables */
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
					  caldata->tx_clcal[i][j]);
		}
	} else if (is_reusable && txclcal_done) {
		/* first successful run: capture the tables for reuse */
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				caldata->tx_clcal[i][j] =
					REG_READ(ah, CL_TAB_ENTRY(cl_idx[i]));
		}
		set_bit(TXCLCAL_DONE, &caldata->cal_flags);
	}
}
/*
 * Full init-time calibration sequence for PC-OEM chips
 * (AR9485/AR9462/AR9565): optional RTT restore, carrier-leak and Tx IQ
 * setup, AGC calibration, post-processing, and RTT history maintenance.
 * Returns false if the AGC offset calibration times out.
 */
static bool ar9003_hw_init_cal_pcoem(struct ath_hw *ah,
				     struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool is_reusable = true, status = true;
	bool run_rtt_cal = false, run_agc_cal;
	bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
	u32 rx_delay = 0;
	u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
					  AR_PHY_AGC_CONTROL_FLTR_CAL |
					  AR_PHY_AGC_CONTROL_PKDET_CAL;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	if (rtt) {
		/* a fresh RTT calibration is needed only if restore fails */
		if (!ar9003_hw_rtt_restore(ah, chan))
			run_rtt_cal = true;

		if (run_rtt_cal)
			ath_dbg(common, CALIBRATE, "RTT calibration to be done\n");
	}

	run_agc_cal = run_rtt_cal;

	if (run_rtt_cal) {
		ar9003_hw_rtt_enable(ah);
		ar9003_hw_rtt_set_mask(ah, 0x00);
		ar9003_hw_rtt_clear_hist(ah);
	}

	if (rtt) {
		if (!run_rtt_cal) {
			/*
			 * RTT restore succeeded: temporarily mask out the
			 * offset/filter/peak-detect cals; the saved bits
			 * are restored after the AGC cal below.
			 */
			agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
			agc_supp_cals &= agc_ctrl;
			agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
				      AR_PHY_AGC_CONTROL_FLTR_CAL |
				      AR_PHY_AGC_CONTROL_PKDET_CAL);
			REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
		} else {
			if (ah->ah_flags & AH_FASTCC)
				run_agc_cal = true;
		}
	}

	if (ah->enabled_cals & TX_CL_CAL) {
		/* skip the CL cal when saved results can be reloaded */
		if (caldata && test_bit(TXCLCAL_DONE, &caldata->cal_flags))
			REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
		else {
			REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
			run_agc_cal = true;
		}
	}

	if ((IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan)) ||
	    !(ah->enabled_cals & TX_IQ_CAL))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (caldata && !test_bit(TXIQCAL_DONE, &caldata->cal_flags))
			REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		else
			REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		txiqcal_done = run_agc_cal = true;
	}

skip_tx_iqcal:
	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_req(ah, &is_reusable);

	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		/* save Rx delay so it can be restored after the CL cal */
		rx_delay = REG_READ(ah, AR_PHY_RX_DELAY);
		/* Disable BB_active */
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_RX_DELAY, AR_PHY_RX_DELAY_DELAY);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);

		ar9003_hw_do_pcoem_manual_peak_cal(ah, chan, run_rtt_cal);
	}

	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		/* restore the saved Rx delay */
		REG_WRITE(ah, AR_PHY_RX_DELAY, rx_delay);
		udelay(5);
	}

	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_done(ah);

	if (rtt && !run_rtt_cal) {
		/* restore the calibration bits masked out above */
		agc_ctrl |= agc_supp_cals;
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (!status) {
		if (run_rtt_cal)
			ar9003_hw_rtt_disable(ah);

		ath_dbg(common, CALIBRATE,
			"offset calibration failed to complete in %d ms; noisy environment?\n",
			AH_WAIT_TIMEOUT / 1000);
		return false;
	}

	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, 0, is_reusable);
	else if (caldata && test_bit(TXIQCAL_DONE, &caldata->cal_flags))
		ar9003_hw_tx_iq_cal_reload(ah);

	ar9003_hw_cl_cal_post_proc(ah, is_reusable);

	if (run_rtt_cal && caldata) {
		if (is_reusable) {
			if (!ath9k_hw_rfbus_req(ah)) {
				ath_err(ath9k_hw_common(ah),
					"Could not stop baseband\n");
			} else {
				ar9003_hw_rtt_fill_hist(ah);

				if (test_bit(SW_PKDET_DONE, &caldata->cal_flags))
					ar9003_hw_rtt_load_hist(ah);
			}

			ath9k_hw_rfbus_done(ah);
		}

		ar9003_hw_rtt_disable(ah);
	}

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}
  1288. static bool do_ar9003_agc_cal(struct ath_hw *ah)
  1289. {
  1290. struct ath_common *common = ath9k_hw_common(ah);
  1291. bool status;
  1292. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  1293. REG_READ(ah, AR_PHY_AGC_CONTROL) |
  1294. AR_PHY_AGC_CONTROL_CAL);
  1295. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  1296. AR_PHY_AGC_CONTROL_CAL,
  1297. 0, AH_WAIT_TIMEOUT);
  1298. if (!status) {
  1299. ath_dbg(common, CALIBRATE,
  1300. "offset calibration failed to complete in %d ms,"
  1301. "noisy environment?\n",
  1302. AH_WAIT_TIMEOUT / 1000);
  1303. return false;
  1304. }
  1305. return true;
  1306. }
/*
 * Init-time calibration for SoC chips (AR9300/AR9330/AR9340/AR955x).
 *
 * Runs Tx IQ calibration either standalone or as part of AGC cal
 * depending on chip capabilities; AR955x repeats the AGC+IQ cycle
 * MAXIQCAL times and median-filters the results.  Returns false on
 * calibration timeout or OSDAC selection failure.
 */
static bool ar9003_hw_init_cal_soc(struct ath_hw *ah,
				   struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool status = true;
	bool run_agc_cal = false, sep_iq_cal = false;
	int i = 0;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	if (ah->enabled_cals & TX_CL_CAL) {
		REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL, AR_PHY_CL_CAL_ENABLE);
		run_agc_cal = true;
	}

	if (IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration. Specifically, AR9550 in SoC chips.
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (REG_READ_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				   AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL)) {
			txiqcal_done = true;
		} else {
			txiqcal_done = false;
		}
		run_agc_cal = true;
	} else {
		sep_iq_cal = true;
		run_agc_cal = true;
	}

	/*
	 * In the SoC family, this will run for AR9300, AR9331 and AR9340.
	 */
	if (sep_iq_cal) {
		/* standalone IQ cal, with the baseband briefly disabled */
		txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (AR_SREV_9550(ah) && IS_CHAN_2GHZ(chan)) {
		if (!ar9003_hw_dynamic_osdac_selection(ah, txiqcal_done))
			return false;
	}

skip_tx_iqcal:
	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		if (AR_SREV_9330_11(ah))
			ar9003_hw_manual_peak_cal(ah, 0, IS_CHAN_2GHZ(chan));

		/*
		 * For non-AR9550 chips, we just trigger AGC calibration
		 * in the HW, poll for completion and then process
		 * the results.
		 *
		 * For AR955x, we run it multiple times and use
		 * median IQ correction.
		 */
		if (!AR_SREV_9550(ah)) {
			status = do_ar9003_agc_cal(ah);
			if (!status)
				return false;

			if (txiqcal_done)
				ar9003_hw_tx_iq_cal_post_proc(ah, 0, false);
		} else {
			if (!txiqcal_done) {
				status = do_ar9003_agc_cal(ah);
				if (!status)
					return false;
			} else {
				for (i = 0; i < MAXIQCAL; i++) {
					status = do_ar9003_agc_cal(ah);
					if (!status)
						return false;
					ar9003_hw_tx_iq_cal_post_proc(ah, i, false);
				}
			}
		}
	}

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}
  1405. void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
  1406. {
  1407. struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
  1408. struct ath_hw_ops *ops = ath9k_hw_ops(ah);
  1409. if (AR_SREV_9485(ah) || AR_SREV_9462(ah) || AR_SREV_9565(ah))
  1410. priv_ops->init_cal = ar9003_hw_init_cal_pcoem;
  1411. else
  1412. priv_ops->init_cal = ar9003_hw_init_cal_soc;
  1413. priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
  1414. priv_ops->setup_calibration = ar9003_hw_setup_calibration;
  1415. ops->calibrate = ar9003_hw_calibrate;
  1416. }