ar9003_calib.c

  1. /*
  2. * Copyright (c) 2010-2011 Atheros Communications Inc.
  3. *
  4. * Permission to use, copy, modify, and/or distribute this software for any
  5. * purpose with or without fee is hereby granted, provided that the above
  6. * copyright notice and this permission notice appear in all copies.
  7. *
  8. * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  9. * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10. * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11. * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12. * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13. * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14. * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15. */
  16. #include "hw.h"
  17. #include "hw-ops.h"
  18. #include "ar9003_phy.h"
  19. #include "ar9003_rtt.h"
  20. #include "ar9003_mci.h"
  21. #define MAX_MEASUREMENT MAX_IQCAL_MEASUREMENT
  22. #define MAX_MAG_DELTA 11
  23. #define MAX_PHS_DELTA 10
  24. #define MAXIQCAL 3
  25. struct coeff {
  26. int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT][MAXIQCAL];
  27. int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT][MAXIQCAL];
  28. int iqc_coeff[2];
  29. };
  30. enum ar9003_cal_types {
  31. IQ_MISMATCH_CAL = BIT(0),
  32. TEMP_COMP_CAL = BIT(1),
  33. };
  34. static void ar9003_hw_setup_calibration(struct ath_hw *ah,
  35. struct ath9k_cal_list *currCal)
  36. {
  37. struct ath_common *common = ath9k_hw_common(ah);
  38. /* Select calibration to run */
  39. switch (currCal->calData->calType) {
  40. case IQ_MISMATCH_CAL:
  41. /*
  42. * Start calibration with
  43. * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
  44. */
  45. REG_RMW_FIELD(ah, AR_PHY_TIMING4,
  46. AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
  47. currCal->calData->calCountMax);
  48. REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);
  49. ath_dbg(common, CALIBRATE,
  50. "starting IQ Mismatch Calibration\n");
  51. /* Kick-off cal */
  52. REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
  53. break;
  54. case TEMP_COMP_CAL:
  55. ath_dbg(common, CALIBRATE,
  56. "starting Temperature Compensation Calibration\n");
  57. REG_SET_BIT(ah, AR_CH0_THERM, AR_CH0_THERM_LOCAL);
  58. REG_SET_BIT(ah, AR_CH0_THERM, AR_CH0_THERM_START);
  59. break;
  60. default:
  61. ath_err(common, "Invalid calibration type\n");
  62. break;
  63. }
  64. }
  65. /*
  66. * Generic calibration routine.
  67. * Recalibrate the lower PHY chips to account for temperature/environment
  68. * changes.
  69. */
  70. static bool ar9003_hw_per_calibration(struct ath_hw *ah,
  71. struct ath9k_channel *ichan,
  72. u8 rxchainmask,
  73. struct ath9k_cal_list *currCal)
  74. {
  75. struct ath9k_hw_cal_data *caldata = ah->caldata;
  76. const struct ath9k_percal_data *cur_caldata = currCal->calData;
  77. /* Calibration in progress. */
  78. if (currCal->calState == CAL_RUNNING) {
  79. /* Check to see if it has finished. */
  80. if (REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)
  81. return false;
  82. /*
  83. * Accumulate cal measures for active chains
  84. */
  85. if (cur_caldata->calCollect)
  86. cur_caldata->calCollect(ah);
  87. ah->cal_samples++;
  88. if (ah->cal_samples >= cur_caldata->calNumSamples) {
  89. unsigned int i, numChains = 0;
  90. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  91. if (rxchainmask & (1 << i))
  92. numChains++;
  93. }
  94. /*
  95. * Process accumulated data
  96. */
  97. if (cur_caldata->calPostProc)
  98. cur_caldata->calPostProc(ah, numChains);
  99. /* Calibration has finished. */
  100. caldata->CalValid |= cur_caldata->calType;
  101. currCal->calState = CAL_DONE;
  102. return true;
  103. } else {
  104. /*
  105. * Set-up collection of another sub-sample until we
  106. * get desired number
  107. */
  108. ar9003_hw_setup_calibration(ah, currCal);
  109. }
  110. } else if (!(caldata->CalValid & cur_caldata->calType)) {
  111. /* If current cal is marked invalid in channel, kick it off */
  112. ath9k_hw_reset_calibration(ah, currCal);
  113. }
  114. return false;
  115. }
  116. static int ar9003_hw_calibrate(struct ath_hw *ah, struct ath9k_channel *chan,
  117. u8 rxchainmask, bool longcal)
  118. {
  119. bool iscaldone = true;
  120. struct ath9k_cal_list *currCal = ah->cal_list_curr;
  121. int ret;
  122. /*
  123. * For given calibration:
  124. * 1. Call generic cal routine
  125. * 2. When this cal is done (isCalDone) and more cals are waiting
  126. * (e.g. after reset), mask this from upper layers by not propagating
  127. * isCalDone when it is set to TRUE.
  128. * Instead, change isCalDone to FALSE and set up the waiting cal(s)
  129. * to be run.
  130. */
  131. if (currCal &&
  132. (currCal->calState == CAL_RUNNING ||
  133. currCal->calState == CAL_WAITING)) {
  134. iscaldone = ar9003_hw_per_calibration(ah, chan,
  135. rxchainmask, currCal);
  136. if (iscaldone) {
  137. ah->cal_list_curr = currCal = currCal->calNext;
  138. if (currCal->calState == CAL_WAITING) {
  139. iscaldone = false;
  140. ath9k_hw_reset_calibration(ah, currCal);
  141. }
  142. }
  143. }
  144. /*
  145. * Do NF cal only at longer intervals. Get the value from
  146. * the previous NF cal and update history buffer.
  147. */
  148. if (longcal && ath9k_hw_getnf(ah, chan)) {
  149. /*
  150. * Load the NF from history buffer of the current channel.
  151. * NF is slow time-variant, so it is OK to use a historical
  152. * value.
  153. */
  154. ret = ath9k_hw_loadnf(ah, ah->curchan);
  155. if (ret < 0)
  156. return ret;
  157. /* start NF calibration, without updating BB NF register */
  158. ath9k_hw_start_nfcal(ah, false);
  159. }
  160. return iscaldone;
  161. }
  162. static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
  163. {
  164. int i;
  165. /* Accumulate IQ cal measures for active chains */
  166. for (i = 0; i < AR5416_MAX_CHAINS; i++) {
  167. if (ah->txchainmask & BIT(i)) {
  168. ah->totalPowerMeasI[i] +=
  169. REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
  170. ah->totalPowerMeasQ[i] +=
  171. REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
  172. ah->totalIqCorrMeas[i] +=
  173. (int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
  174. ath_dbg(ath9k_hw_common(ah), CALIBRATE,
  175. "%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
  176. ah->cal_samples, i, ah->totalPowerMeasI[i],
  177. ah->totalPowerMeasQ[i],
  178. ah->totalIqCorrMeas[i]);
  179. }
  180. }
  181. }
  182. static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
  183. {
  184. struct ath_common *common = ath9k_hw_common(ah);
  185. u32 powerMeasQ, powerMeasI, iqCorrMeas;
  186. u32 qCoffDenom, iCoffDenom;
  187. int32_t qCoff, iCoff;
  188. int iqCorrNeg, i;
  189. static const u_int32_t offset_array[3] = {
  190. AR_PHY_RX_IQCAL_CORR_B0,
  191. AR_PHY_RX_IQCAL_CORR_B1,
  192. AR_PHY_RX_IQCAL_CORR_B2,
  193. };
  194. for (i = 0; i < numChains; i++) {
  195. powerMeasI = ah->totalPowerMeasI[i];
  196. powerMeasQ = ah->totalPowerMeasQ[i];
  197. iqCorrMeas = ah->totalIqCorrMeas[i];
  198. ath_dbg(common, CALIBRATE,
  199. "Starting IQ Cal and Correction for Chain %d\n", i);
  200. ath_dbg(common, CALIBRATE,
  201. "Original: Chn %d iq_corr_meas = 0x%08x\n",
  202. i, ah->totalIqCorrMeas[i]);
  203. iqCorrNeg = 0;
  204. if (iqCorrMeas > 0x80000000) {
  205. iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
  206. iqCorrNeg = 1;
  207. }
  208. ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_i = 0x%08x\n",
  209. i, powerMeasI);
  210. ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_q = 0x%08x\n",
  211. i, powerMeasQ);
  212. ath_dbg(common, CALIBRATE, "iqCorrNeg is 0x%08x\n", iqCorrNeg);
  213. iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
  214. qCoffDenom = powerMeasQ / 64;
  215. if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
  216. iCoff = iqCorrMeas / iCoffDenom;
  217. qCoff = powerMeasI / qCoffDenom - 64;
  218. ath_dbg(common, CALIBRATE, "Chn %d iCoff = 0x%08x\n",
  219. i, iCoff);
  220. ath_dbg(common, CALIBRATE, "Chn %d qCoff = 0x%08x\n",
  221. i, qCoff);
  222. /* Force bounds on iCoff */
  223. if (iCoff >= 63)
  224. iCoff = 63;
  225. else if (iCoff <= -63)
  226. iCoff = -63;
  227. /* Negate iCoff if iqCorrNeg == 0 */
  228. if (iqCorrNeg == 0x0)
  229. iCoff = -iCoff;
  230. /* Force bounds on qCoff */
  231. if (qCoff >= 63)
  232. qCoff = 63;
  233. else if (qCoff <= -63)
  234. qCoff = -63;
  235. iCoff = iCoff & 0x7f;
  236. qCoff = qCoff & 0x7f;
  237. ath_dbg(common, CALIBRATE,
  238. "Chn %d : iCoff = 0x%x qCoff = 0x%x\n",
  239. i, iCoff, qCoff);
  240. ath_dbg(common, CALIBRATE,
  241. "Register offset (0x%04x) before update = 0x%x\n",
  242. offset_array[i],
  243. REG_READ(ah, offset_array[i]));
  244. if (AR_SREV_9565(ah) &&
  245. (iCoff == 63 || qCoff == 63 ||
  246. iCoff == -63 || qCoff == -63))
  247. return;
  248. REG_RMW_FIELD(ah, offset_array[i],
  249. AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
  250. iCoff);
  251. REG_RMW_FIELD(ah, offset_array[i],
  252. AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
  253. qCoff);
  254. ath_dbg(common, CALIBRATE,
  255. "Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
  256. offset_array[i],
  257. AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
  258. REG_READ(ah, offset_array[i]));
  259. ath_dbg(common, CALIBRATE,
  260. "Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
  261. offset_array[i],
  262. AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
  263. REG_READ(ah, offset_array[i]));
  264. ath_dbg(common, CALIBRATE,
  265. "IQ Cal and Correction done for Chain %d\n", i);
  266. }
  267. }
  268. REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
  269. AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
  270. ath_dbg(common, CALIBRATE,
  271. "IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
  272. (unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
  273. AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
  274. REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
  275. }
  276. static const struct ath9k_percal_data iq_cal_single_sample = {
  277. IQ_MISMATCH_CAL,
  278. MIN_CAL_SAMPLES,
  279. PER_MAX_LOG_COUNT,
  280. ar9003_hw_iqcal_collect,
  281. ar9003_hw_iqcalibrate
  282. };
  283. static const struct ath9k_percal_data temp_cal_single_sample = {
  284. TEMP_COMP_CAL,
  285. MIN_CAL_SAMPLES,
  286. PER_MAX_LOG_COUNT,
  287. };
  288. static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
  289. {
  290. ah->iq_caldata.calData = &iq_cal_single_sample;
  291. ah->temp_caldata.calData = &temp_cal_single_sample;
  292. if (AR_SREV_9300_20_OR_LATER(ah)) {
  293. ah->enabled_cals |= TX_IQ_CAL;
  294. if (AR_SREV_9485_OR_LATER(ah) && !AR_SREV_9340(ah))
  295. ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
  296. }
  297. ah->supp_cals = IQ_MISMATCH_CAL | TEMP_COMP_CAL;
  298. }
  299. #define OFF_UPPER_LT 24
  300. #define OFF_LOWER_LT 7
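/*
 * Dynamic offset-DAC (osdac) selection, used for AR9550 on 2 GHz channels
 * (see ar9003_hw_init_cal_soc()). After an initial AGC cal without offset
 * cal, the DC offset cal is rerun in a loop; after each pass the residual
 * I/Q DC offsets are read back from CHn_BB3 at the three gain settings
 * programmed below (high gain, low gain, loopback). If any residual falls
 * outside the [OFF_LOWER_LT, OFF_UPPER_LT] window, the 2-bit osdac field in
 * CHn_BB1[31:30] is incremented and the cal repeated, until the offsets are
 * in range or osdac saturates at 3 for that chain.
 */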
  301. static bool ar9003_hw_dynamic_osdac_selection(struct ath_hw *ah,
  302. bool txiqcal_done)
  303. {
  304. struct ath_common *common = ath9k_hw_common(ah);
  305. int ch0_done, osdac_ch0, dc_off_ch0_i1, dc_off_ch0_q1, dc_off_ch0_i2,
  306. dc_off_ch0_q2, dc_off_ch0_i3, dc_off_ch0_q3;
  307. int ch1_done, osdac_ch1, dc_off_ch1_i1, dc_off_ch1_q1, dc_off_ch1_i2,
  308. dc_off_ch1_q2, dc_off_ch1_i3, dc_off_ch1_q3;
  309. int ch2_done, osdac_ch2, dc_off_ch2_i1, dc_off_ch2_q1, dc_off_ch2_i2,
  310. dc_off_ch2_q2, dc_off_ch2_i3, dc_off_ch2_q3;
  311. bool status;
  312. u32 temp, val;
  313. /*
  314. * Clear offset and IQ calibration, run AGC cal.
  315. */
  316. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  317. AR_PHY_AGC_CONTROL_OFFSET_CAL);
  318. REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  319. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
  320. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  321. REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);
  322. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  323. AR_PHY_AGC_CONTROL_CAL,
  324. 0, AH_WAIT_TIMEOUT);
  325. if (!status) {
  326. ath_dbg(common, CALIBRATE,
  327. "AGC cal without offset cal failed to complete in 1ms");
  328. return false;
  329. }
  330. /*
  331. * Allow only offset calibration and disable the others
  332. * (Carrier Leak calibration, TX Filter calibration and
  333. * Peak Detector offset calibration).
  334. */
  335. REG_SET_BIT(ah, AR_PHY_AGC_CONTROL,
  336. AR_PHY_AGC_CONTROL_OFFSET_CAL);
  337. REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
  338. AR_PHY_CL_CAL_ENABLE);
  339. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  340. AR_PHY_AGC_CONTROL_FLTR_CAL);
  341. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  342. AR_PHY_AGC_CONTROL_PKDET_CAL);
  343. ch0_done = 0;
  344. ch1_done = 0;
  345. ch2_done = 0;
  346. while ((ch0_done == 0) || (ch1_done == 0) || (ch2_done == 0)) {
  347. osdac_ch0 = (REG_READ(ah, AR_PHY_65NM_CH0_BB1) >> 30) & 0x3;
  348. osdac_ch1 = (REG_READ(ah, AR_PHY_65NM_CH1_BB1) >> 30) & 0x3;
  349. osdac_ch2 = (REG_READ(ah, AR_PHY_65NM_CH2_BB1) >> 30) & 0x3;
  350. REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  351. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  352. REG_READ(ah, AR_PHY_AGC_CONTROL) | AR_PHY_AGC_CONTROL_CAL);
  353. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  354. AR_PHY_AGC_CONTROL_CAL,
  355. 0, AH_WAIT_TIMEOUT);
  356. if (!status) {
  357. ath_dbg(common, CALIBRATE,
  358. "DC offset cal failed to complete in 1ms");
  359. return false;
  360. }
  361. REG_CLR_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  362. /*
  363. * High gain.
  364. */
  365. REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
  366. ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (1 << 8)));
  367. REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
  368. ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (1 << 8)));
  369. REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
  370. ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (1 << 8)));
  371. temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
  372. dc_off_ch0_i1 = (temp >> 26) & 0x1f;
  373. dc_off_ch0_q1 = (temp >> 21) & 0x1f;
  374. temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
  375. dc_off_ch1_i1 = (temp >> 26) & 0x1f;
  376. dc_off_ch1_q1 = (temp >> 21) & 0x1f;
  377. temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
  378. dc_off_ch2_i1 = (temp >> 26) & 0x1f;
  379. dc_off_ch2_q1 = (temp >> 21) & 0x1f;
  380. /*
  381. * Low gain.
  382. */
  383. REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
  384. ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (2 << 8)));
  385. REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
  386. ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (2 << 8)));
  387. REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
  388. ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (2 << 8)));
  389. temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
  390. dc_off_ch0_i2 = (temp >> 26) & 0x1f;
  391. dc_off_ch0_q2 = (temp >> 21) & 0x1f;
  392. temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
  393. dc_off_ch1_i2 = (temp >> 26) & 0x1f;
  394. dc_off_ch1_q2 = (temp >> 21) & 0x1f;
  395. temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
  396. dc_off_ch2_i2 = (temp >> 26) & 0x1f;
  397. dc_off_ch2_q2 = (temp >> 21) & 0x1f;
  398. /*
  399. * Loopback.
  400. */
  401. REG_WRITE(ah, AR_PHY_65NM_CH0_BB3,
  402. ((REG_READ(ah, AR_PHY_65NM_CH0_BB3) & 0xfffffcff) | (3 << 8)));
  403. REG_WRITE(ah, AR_PHY_65NM_CH1_BB3,
  404. ((REG_READ(ah, AR_PHY_65NM_CH1_BB3) & 0xfffffcff) | (3 << 8)));
  405. REG_WRITE(ah, AR_PHY_65NM_CH2_BB3,
  406. ((REG_READ(ah, AR_PHY_65NM_CH2_BB3) & 0xfffffcff) | (3 << 8)));
  407. temp = REG_READ(ah, AR_PHY_65NM_CH0_BB3);
  408. dc_off_ch0_i3 = (temp >> 26) & 0x1f;
  409. dc_off_ch0_q3 = (temp >> 21) & 0x1f;
  410. temp = REG_READ(ah, AR_PHY_65NM_CH1_BB3);
  411. dc_off_ch1_i3 = (temp >> 26) & 0x1f;
  412. dc_off_ch1_q3 = (temp >> 21) & 0x1f;
  413. temp = REG_READ(ah, AR_PHY_65NM_CH2_BB3);
  414. dc_off_ch2_i3 = (temp >> 26) & 0x1f;
  415. dc_off_ch2_q3 = (temp >> 21) & 0x1f;
  416. if ((dc_off_ch0_i1 > OFF_UPPER_LT) || (dc_off_ch0_i1 < OFF_LOWER_LT) ||
  417. (dc_off_ch0_i2 > OFF_UPPER_LT) || (dc_off_ch0_i2 < OFF_LOWER_LT) ||
  418. (dc_off_ch0_i3 > OFF_UPPER_LT) || (dc_off_ch0_i3 < OFF_LOWER_LT) ||
  419. (dc_off_ch0_q1 > OFF_UPPER_LT) || (dc_off_ch0_q1 < OFF_LOWER_LT) ||
  420. (dc_off_ch0_q2 > OFF_UPPER_LT) || (dc_off_ch0_q2 < OFF_LOWER_LT) ||
  421. (dc_off_ch0_q3 > OFF_UPPER_LT) || (dc_off_ch0_q3 < OFF_LOWER_LT)) {
  422. if (osdac_ch0 == 3) {
  423. ch0_done = 1;
  424. } else {
  425. osdac_ch0++;
  426. val = REG_READ(ah, AR_PHY_65NM_CH0_BB1) & 0x3fffffff;
  427. val |= (osdac_ch0 << 30);
  428. REG_WRITE(ah, AR_PHY_65NM_CH0_BB1, val);
  429. ch0_done = 0;
  430. }
  431. } else {
  432. ch0_done = 1;
  433. }
  434. if ((dc_off_ch1_i1 > OFF_UPPER_LT) || (dc_off_ch1_i1 < OFF_LOWER_LT) ||
  435. (dc_off_ch1_i2 > OFF_UPPER_LT) || (dc_off_ch1_i2 < OFF_LOWER_LT) ||
  436. (dc_off_ch1_i3 > OFF_UPPER_LT) || (dc_off_ch1_i3 < OFF_LOWER_LT) ||
  437. (dc_off_ch1_q1 > OFF_UPPER_LT) || (dc_off_ch1_q1 < OFF_LOWER_LT) ||
  438. (dc_off_ch1_q2 > OFF_UPPER_LT) || (dc_off_ch1_q2 < OFF_LOWER_LT) ||
  439. (dc_off_ch1_q3 > OFF_UPPER_LT) || (dc_off_ch1_q3 < OFF_LOWER_LT)) {
  440. if (osdac_ch1 == 3) {
  441. ch1_done = 1;
  442. } else {
  443. osdac_ch1++;
  444. val = REG_READ(ah, AR_PHY_65NM_CH1_BB1) & 0x3fffffff;
  445. val |= (osdac_ch1 << 30);
  446. REG_WRITE(ah, AR_PHY_65NM_CH1_BB1, val);
  447. ch1_done = 0;
  448. }
  449. } else {
  450. ch1_done = 1;
  451. }
  452. if ((dc_off_ch2_i1 > OFF_UPPER_LT) || (dc_off_ch2_i1 < OFF_LOWER_LT) ||
  453. (dc_off_ch2_i2 > OFF_UPPER_LT) || (dc_off_ch2_i2 < OFF_LOWER_LT) ||
  454. (dc_off_ch2_i3 > OFF_UPPER_LT) || (dc_off_ch2_i3 < OFF_LOWER_LT) ||
  455. (dc_off_ch2_q1 > OFF_UPPER_LT) || (dc_off_ch2_q1 < OFF_LOWER_LT) ||
  456. (dc_off_ch2_q2 > OFF_UPPER_LT) || (dc_off_ch2_q2 < OFF_LOWER_LT) ||
  457. (dc_off_ch2_q3 > OFF_UPPER_LT) || (dc_off_ch2_q3 < OFF_LOWER_LT)) {
  458. if (osdac_ch2 == 3) {
  459. ch2_done = 1;
  460. } else {
  461. osdac_ch2++;
  462. val = REG_READ(ah, AR_PHY_65NM_CH2_BB1) & 0x3fffffff;
  463. val |= (osdac_ch2 << 30);
  464. REG_WRITE(ah, AR_PHY_65NM_CH2_BB1, val);
  465. ch2_done = 0;
  466. }
  467. } else {
  468. ch2_done = 1;
  469. }
  470. }
  471. REG_CLR_BIT(ah, AR_PHY_AGC_CONTROL,
  472. AR_PHY_AGC_CONTROL_OFFSET_CAL);
  473. REG_SET_BIT(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  474. /*
  475. * We don't need to check txiqcal_done here since it is always
  476. * set for AR9550.
  477. */
  478. REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  479. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
  480. return true;
  481. }
  482. /*
  483. * solve 4x4 linear equation used in loopback iq cal.
  484. */
  485. static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
  486. s32 sin_2phi_1,
  487. s32 cos_2phi_1,
  488. s32 sin_2phi_2,
  489. s32 cos_2phi_2,
  490. s32 mag_a0_d0,
  491. s32 phs_a0_d0,
  492. s32 mag_a1_d0,
  493. s32 phs_a1_d0,
  494. s32 solved_eq[])
  495. {
  496. s32 f1 = cos_2phi_1 - cos_2phi_2,
  497. f3 = sin_2phi_1 - sin_2phi_2,
  498. f2;
  499. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  500. const s32 result_shift = 1 << 15;
  501. struct ath_common *common = ath9k_hw_common(ah);
  502. f2 = ((f1 >> 3) * (f1 >> 3) + (f3 >> 3) * (f3 >> 3)) >> 9;
  503. if (!f2) {
  504. ath_dbg(common, CALIBRATE, "Divide by 0\n");
  505. return false;
  506. }
  507. /* mag mismatch, tx */
  508. mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
  509. /* phs mismatch, tx */
  510. phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);
  511. mag_tx = (mag_tx / f2);
  512. phs_tx = (phs_tx / f2);
  513. /* mag mismatch, rx */
  514. mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
  515. result_shift;
  516. /* phs mismatch, rx */
  517. phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
  518. result_shift;
  519. solved_eq[0] = mag_tx;
  520. solved_eq[1] = phs_tx;
  521. solved_eq[2] = mag_rx;
  522. solved_eq[3] = phs_rx;
  523. return true;
  524. }
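/*
 * Approximate sqrt(in_re^2 + in_im^2) without a square root, using an
 * "alpha max plus beta min" style estimate:
 * max - max/32 + min/8 + min/4 ~= 0.97 * max + 0.375 * min.
 */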
  525. static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
  526. {
  527. s32 abs_i = abs(in_re),
  528. abs_q = abs(in_im),
  529. max_abs, min_abs;
  530. if (abs_i > abs_q) {
  531. max_abs = abs_i;
  532. min_abs = abs_q;
  533. } else {
  534. max_abs = abs_q;
  535. min_abs = abs_i;
  536. }
  537. return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
  538. }
  539. #define DELPT 32
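/*
 * Convert one gain entry of the raw loopback IQ-cal results into Tx and Rx
 * correction coefficients. The six chan-info words in iq_res[] are unpacked
 * into signed 12-bit i^2-q^2, i^2+q^2 and i*q correlation measurements for
 * the a0/a1 (without/with analog phase shift, per the comments below) and
 * d0/d1 measurement pairs. Magnitude and phase mismatch ratios scaled by
 * 2^15 are derived from them, the Tx/Rx mismatch is solved via
 * ar9003_hw_solve_iq_cal(), and the result is quantized into iqc_coeff[0]
 * (Tx) and iqc_coeff[1] (Rx), each packing a 7-bit magnitude and a 7-bit
 * phase coefficient.
 */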
  540. static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
  541. s32 chain_idx,
  542. const s32 iq_res[],
  543. s32 iqc_coeff[])
  544. {
  545. s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
  546. i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
  547. i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
  548. i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
  549. s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
  550. phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
  551. sin_2phi_1, cos_2phi_1,
  552. sin_2phi_2, cos_2phi_2;
  553. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  554. s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
  555. q_q_coff, q_i_coff;
  556. const s32 res_scale = 1 << 15;
  557. const s32 delpt_shift = 1 << 8;
  558. s32 mag1, mag2;
  559. struct ath_common *common = ath9k_hw_common(ah);
  560. i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
  561. i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
  562. iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);
  563. if (i2_m_q2_a0_d0 > 0x800)
  564. i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);
  565. if (i2_p_q2_a0_d0 > 0x800)
  566. i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);
  567. if (iq_corr_a0_d0 > 0x800)
  568. iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);
  569. i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
  570. i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
  571. iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;
  572. if (i2_m_q2_a0_d1 > 0x800)
  573. i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);
  574. if (iq_corr_a0_d1 > 0x800)
  575. iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);
  576. i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
  577. i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
  578. iq_corr_a1_d0 = iq_res[4] & 0xfff;
  579. if (i2_m_q2_a1_d0 > 0x800)
  580. i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);
  581. if (i2_p_q2_a1_d0 > 0x800)
  582. i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);
  583. if (iq_corr_a1_d0 > 0x800)
  584. iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);
  585. i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
  586. i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
  587. iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;
  588. if (i2_m_q2_a1_d1 > 0x800)
  589. i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);
  590. if (i2_p_q2_a1_d1 > 0x800)
  591. i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);
  592. if (iq_corr_a1_d1 > 0x800)
  593. iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);
  594. if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
  595. (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
  596. ath_dbg(common, CALIBRATE,
  597. "Divide by 0:\n"
  598. "a0_d0=%d\n"
  599. "a0_d1=%d\n"
  600. "a1_d0=%d\n"
  601. "a1_d1=%d\n",
  602. i2_p_q2_a0_d0, i2_p_q2_a0_d1,
  603. i2_p_q2_a1_d0, i2_p_q2_a1_d1);
  604. return false;
  605. }
  606. if ((i2_p_q2_a0_d0 < 1024) || (i2_p_q2_a0_d0 > 2047) ||
  607. (i2_p_q2_a1_d0 < 0) || (i2_p_q2_a1_d1 < 0) ||
  608. (i2_p_q2_a0_d0 <= i2_m_q2_a0_d0) ||
  609. (i2_p_q2_a0_d0 <= iq_corr_a0_d0) ||
  610. (i2_p_q2_a0_d1 <= i2_m_q2_a0_d1) ||
  611. (i2_p_q2_a0_d1 <= iq_corr_a0_d1) ||
  612. (i2_p_q2_a1_d0 <= i2_m_q2_a1_d0) ||
  613. (i2_p_q2_a1_d0 <= iq_corr_a1_d0) ||
  614. (i2_p_q2_a1_d1 <= i2_m_q2_a1_d1) ||
  615. (i2_p_q2_a1_d1 <= iq_corr_a1_d1)) {
  616. return false;
  617. }
  618. mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
  619. phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;
  620. mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
  621. phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;
  622. mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
  623. phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;
  624. mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
  625. phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;
  626. /* w/o analog phase shift */
  627. sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
  628. /* w/o analog phase shift */
  629. cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
  630. /* w/ analog phase shift */
  631. sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
  632. /* w/ analog phase shift */
  633. cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);
  634. /*
  635. * force sin^2 + cos^2 = 1;
  636. * find magnitude by approximation
  637. */
  638. mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
  639. mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);
  640. if ((mag1 == 0) || (mag2 == 0)) {
  641. ath_dbg(common, CALIBRATE, "Divide by 0: mag1=%d, mag2=%d\n",
  642. mag1, mag2);
  643. return false;
  644. }
  645. /* normalize sin and cos by mag */
  646. sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
  647. cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
  648. sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
  649. cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);
  650. /* calculate IQ mismatch */
  651. if (!ar9003_hw_solve_iq_cal(ah,
  652. sin_2phi_1, cos_2phi_1,
  653. sin_2phi_2, cos_2phi_2,
  654. mag_a0_d0, phs_a0_d0,
  655. mag_a1_d0,
  656. phs_a1_d0, solved_eq)) {
  657. ath_dbg(common, CALIBRATE,
  658. "Call to ar9003_hw_solve_iq_cal() failed\n");
  659. return false;
  660. }
  661. mag_tx = solved_eq[0];
  662. phs_tx = solved_eq[1];
  663. mag_rx = solved_eq[2];
  664. phs_rx = solved_eq[3];
  665. ath_dbg(common, CALIBRATE,
  666. "chain %d: mag mismatch=%d phase mismatch=%d\n",
  667. chain_idx, mag_tx/res_scale, phs_tx/res_scale);
  668. if (res_scale == mag_tx) {
  669. ath_dbg(common, CALIBRATE,
  670. "Divide by 0: mag_tx=%d, res_scale=%d\n",
  671. mag_tx, res_scale);
  672. return false;
  673. }
  674. /* calculate and quantize Tx IQ correction factor */
  675. mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
  676. phs_corr_tx = -phs_tx;
  677. q_q_coff = (mag_corr_tx * 128 / res_scale);
  678. q_i_coff = (phs_corr_tx * 256 / res_scale);
  679. ath_dbg(common, CALIBRATE, "tx chain %d: mag corr=%d phase corr=%d\n",
  680. chain_idx, q_q_coff, q_i_coff);
  681. if (q_i_coff < -63)
  682. q_i_coff = -63;
  683. if (q_i_coff > 63)
  684. q_i_coff = 63;
  685. if (q_q_coff < -63)
  686. q_q_coff = -63;
  687. if (q_q_coff > 63)
  688. q_q_coff = 63;
  689. iqc_coeff[0] = (q_q_coff * 128) + (0x7f & q_i_coff);
  690. ath_dbg(common, CALIBRATE, "tx chain %d: iq corr coeff=%x\n",
  691. chain_idx, iqc_coeff[0]);
  692. if (-mag_rx == res_scale) {
  693. ath_dbg(common, CALIBRATE,
  694. "Divide by 0: mag_rx=%d, res_scale=%d\n",
  695. mag_rx, res_scale);
  696. return false;
  697. }
  698. /* calculate and quantize Rx IQ correction factors */
  699. mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
  700. phs_corr_rx = -phs_rx;
  701. q_q_coff = (mag_corr_rx * 128 / res_scale);
  702. q_i_coff = (phs_corr_rx * 256 / res_scale);
  703. ath_dbg(common, CALIBRATE, "rx chain %d: mag corr=%d phase corr=%d\n",
  704. chain_idx, q_q_coff, q_i_coff);
  705. if (q_i_coff < -63)
  706. q_i_coff = -63;
  707. if (q_i_coff > 63)
  708. q_i_coff = 63;
  709. if (q_q_coff < -63)
  710. q_q_coff = -63;
  711. if (q_q_coff > 63)
  712. q_q_coff = 63;
  713. iqc_coeff[1] = (q_q_coff * 128) + (0x7f & q_i_coff);
  714. ath_dbg(common, CALIBRATE, "rx chain %d: iq corr coeff=%x\n",
  715. chain_idx, iqc_coeff[1]);
  716. return true;
  717. }
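/*
 * Simple outlier rejection across the calibrated gain entries of one chain:
 * find the minimum and maximum coefficient, average the entries excluding
 * the largest-magnitude value, and if the max-min spread exceeds max_delta,
 * replace whichever extreme lies farther from that average with the average
 * itself.
 */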
  718. static void ar9003_hw_detect_outlier(int mp_coeff[][MAXIQCAL],
  719. int nmeasurement,
  720. int max_delta)
  721. {
  722. int mp_max = -64, max_idx = 0;
  723. int mp_min = 63, min_idx = 0;
  724. int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;
  725. /* find min/max mismatch across all calibrated gains */
  726. for (i = 0; i < nmeasurement; i++) {
  727. if (mp_coeff[i][0] > mp_max) {
  728. mp_max = mp_coeff[i][0];
  729. max_idx = i;
  730. } else if (mp_coeff[i][0] < mp_min) {
  731. mp_min = mp_coeff[i][0];
  732. min_idx = i;
  733. }
  734. }
  735. /* find average (exclude max abs value) */
  736. for (i = 0; i < nmeasurement; i++) {
  737. if ((abs(mp_coeff[i][0]) < abs(mp_max)) ||
  738. (abs(mp_coeff[i][0]) < abs(mp_min))) {
  739. mp_avg += mp_coeff[i][0];
  740. mp_count++;
  741. }
  742. }
  743. /*
  744. * find the mean magnitude/phase if possible; otherwise
  745. * just use the last value as the mean
  746. */
  747. if (mp_count)
  748. mp_avg /= mp_count;
  749. else
  750. mp_avg = mp_coeff[nmeasurement - 1][0];
  751. /* detect outlier */
  752. if (abs(mp_max - mp_min) > max_delta) {
  753. if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
  754. outlier_idx = max_idx;
  755. else
  756. outlier_idx = min_idx;
  757. mp_coeff[outlier_idx][0] = mp_avg;
  758. }
  759. }
  760. static void ar9003_hw_tx_iq_cal_outlier_detection(struct ath_hw *ah,
  761. struct coeff *coeff,
  762. bool is_reusable)
  763. {
  764. int i, im, nmeasurement;
  765. int magnitude, phase;
  766. u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
  767. struct ath9k_hw_cal_data *caldata = ah->caldata;
  768. memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
  769. for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
  770. tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
  771. AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
  772. if (!AR_SREV_9485(ah)) {
  773. tx_corr_coeff[i * 2][1] =
  774. tx_corr_coeff[(i * 2) + 1][1] =
  775. AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
  776. tx_corr_coeff[i * 2][2] =
  777. tx_corr_coeff[(i * 2) + 1][2] =
  778. AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
  779. }
  780. }
  781. /* Load the average of 2 passes */
  782. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  783. if (!(ah->txchainmask & (1 << i)))
  784. continue;
  785. nmeasurement = REG_READ_FIELD(ah,
  786. AR_PHY_TX_IQCAL_STATUS_B0,
  787. AR_PHY_CALIBRATED_GAINS_0);
  788. if (nmeasurement > MAX_MEASUREMENT)
  789. nmeasurement = MAX_MEASUREMENT;
  790. /*
  791. * Skip normal outlier detection for AR9550.
  792. */
  793. if (!AR_SREV_9550(ah)) {
  794. /* detect outlier only if nmeasurement > 1 */
  795. if (nmeasurement > 1) {
  796. /* Detect magnitude outlier */
  797. ar9003_hw_detect_outlier(coeff->mag_coeff[i],
  798. nmeasurement,
  799. MAX_MAG_DELTA);
  800. /* Detect phase outlier */
  801. ar9003_hw_detect_outlier(coeff->phs_coeff[i],
  802. nmeasurement,
  803. MAX_PHS_DELTA);
  804. }
  805. }
  806. for (im = 0; im < nmeasurement; im++) {
  807. magnitude = coeff->mag_coeff[i][im][0];
  808. phase = coeff->phs_coeff[i][im][0];
  809. coeff->iqc_coeff[0] =
  810. (phase & 0x7f) | ((magnitude & 0x7f) << 7);
  811. if ((im % 2) == 0)
  812. REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
  813. AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
  814. coeff->iqc_coeff[0]);
  815. else
  816. REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
  817. AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
  818. coeff->iqc_coeff[0]);
  819. if (caldata)
  820. caldata->tx_corr_coeff[im][i] =
  821. coeff->iqc_coeff[0];
  822. }
  823. if (caldata)
  824. caldata->num_measures[i] = nmeasurement;
  825. }
  826. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
  827. AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
  828. REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
  829. AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
  830. if (caldata) {
  831. if (is_reusable)
  832. set_bit(TXIQCAL_DONE, &caldata->cal_flags);
  833. else
  834. clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
  835. }
  836. return;
  837. }
  838. static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
  839. {
  840. struct ath_common *common = ath9k_hw_common(ah);
  841. u8 tx_gain_forced;
  842. tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  843. AR_PHY_TXGAIN_FORCE);
  844. if (tx_gain_forced)
  845. REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  846. AR_PHY_TXGAIN_FORCE, 0);
  847. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
  848. AR_PHY_TX_IQCAL_START_DO_CAL, 1);
  849. if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
  850. AR_PHY_TX_IQCAL_START_DO_CAL, 0,
  851. AH_WAIT_TIMEOUT)) {
  852. ath_dbg(common, CALIBRATE, "Tx IQ Cal is not completed\n");
  853. return false;
  854. }
  855. return true;
  856. }
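/*
 * For one chain, sort the MAXIQCAL repeated measurements of each calibrated
 * gain (magnitude and phase independently, ascending) and place the median
 * sample in slot 0, which is what the outlier detection and the register
 * update code consume.
 */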
  857. static void __ar955x_tx_iq_cal_sort(struct ath_hw *ah,
  858. struct coeff *coeff,
  859. int i, int nmeasurement)
  860. {
  861. struct ath_common *common = ath9k_hw_common(ah);
  862. int im, ix, iy, temp;
  863. for (im = 0; im < nmeasurement; im++) {
  864. for (ix = 0; ix < MAXIQCAL - 1; ix++) {
  865. for (iy = ix + 1; iy <= MAXIQCAL - 1; iy++) {
  866. if (coeff->mag_coeff[i][im][iy] <
  867. coeff->mag_coeff[i][im][ix]) {
  868. temp = coeff->mag_coeff[i][im][ix];
  869. coeff->mag_coeff[i][im][ix] =
  870. coeff->mag_coeff[i][im][iy];
  871. coeff->mag_coeff[i][im][iy] = temp;
  872. }
  873. if (coeff->phs_coeff[i][im][iy] <
  874. coeff->phs_coeff[i][im][ix]) {
  875. temp = coeff->phs_coeff[i][im][ix];
  876. coeff->phs_coeff[i][im][ix] =
  877. coeff->phs_coeff[i][im][iy];
  878. coeff->phs_coeff[i][im][iy] = temp;
  879. }
  880. }
  881. }
  882. coeff->mag_coeff[i][im][0] = coeff->mag_coeff[i][im][MAXIQCAL / 2];
  883. coeff->phs_coeff[i][im][0] = coeff->phs_coeff[i][im][MAXIQCAL / 2];
  884. ath_dbg(common, CALIBRATE,
  885. "IQCAL: Median [ch%d][gain%d]: mag = %d phase = %d\n",
  886. i, im,
  887. coeff->mag_coeff[i][im][0],
  888. coeff->phs_coeff[i][im][0]);
  889. }
  890. }
  891. static bool ar955x_tx_iq_cal_median(struct ath_hw *ah,
  892. struct coeff *coeff,
  893. int iqcal_idx,
  894. int nmeasurement)
  895. {
  896. int i;
  897. if ((iqcal_idx + 1) != MAXIQCAL)
  898. return false;
  899. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  900. __ar955x_tx_iq_cal_sort(ah, coeff, i, nmeasurement);
  901. }
  902. return true;
  903. }
  904. static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah,
  905. int iqcal_idx,
  906. bool is_reusable)
  907. {
  908. struct ath_common *common = ath9k_hw_common(ah);
  909. const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
  910. AR_PHY_TX_IQCAL_STATUS_B0,
  911. AR_PHY_TX_IQCAL_STATUS_B1,
  912. AR_PHY_TX_IQCAL_STATUS_B2,
  913. };
  914. const u_int32_t chan_info_tab[] = {
  915. AR_PHY_CHAN_INFO_TAB_0,
  916. AR_PHY_CHAN_INFO_TAB_1,
  917. AR_PHY_CHAN_INFO_TAB_2,
  918. };
  919. static struct coeff coeff;
  920. s32 iq_res[6];
  921. int i, im, j;
  922. int nmeasurement = 0;
  923. bool outlier_detect = true;
  924. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  925. if (!(ah->txchainmask & (1 << i)))
  926. continue;
  927. nmeasurement = REG_READ_FIELD(ah,
  928. AR_PHY_TX_IQCAL_STATUS_B0,
  929. AR_PHY_CALIBRATED_GAINS_0);
  930. if (nmeasurement > MAX_MEASUREMENT)
  931. nmeasurement = MAX_MEASUREMENT;
  932. for (im = 0; im < nmeasurement; im++) {
  933. ath_dbg(common, CALIBRATE,
  934. "Doing Tx IQ Cal for chain %d\n", i);
  935. if (REG_READ(ah, txiqcal_status[i]) &
  936. AR_PHY_TX_IQCAL_STATUS_FAILED) {
  937. ath_dbg(common, CALIBRATE,
  938. "Tx IQ Cal failed for chain %d\n", i);
  939. goto tx_iqcal_fail;
  940. }
  941. for (j = 0; j < 3; j++) {
  942. u32 idx = 2 * j, offset = 4 * (3 * im + j);
  943. REG_RMW_FIELD(ah,
  944. AR_PHY_CHAN_INFO_MEMORY,
  945. AR_PHY_CHAN_INFO_TAB_S2_READ,
  946. 0);
  947. /* 32 bits */
  948. iq_res[idx] = REG_READ(ah,
  949. chan_info_tab[i] +
  950. offset);
  951. REG_RMW_FIELD(ah,
  952. AR_PHY_CHAN_INFO_MEMORY,
  953. AR_PHY_CHAN_INFO_TAB_S2_READ,
  954. 1);
  955. /* 16 bits */
  956. iq_res[idx + 1] = 0xffff & REG_READ(ah,
  957. chan_info_tab[i] + offset);
  958. ath_dbg(common, CALIBRATE,
  959. "IQ_RES[%d]=0x%x IQ_RES[%d]=0x%x\n",
  960. idx, iq_res[idx], idx + 1,
  961. iq_res[idx + 1]);
  962. }
  963. if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
  964. coeff.iqc_coeff)) {
  965. ath_dbg(common, CALIBRATE,
  966. "Failed in calculation of IQ correction\n");
  967. goto tx_iqcal_fail;
  968. }
  969. coeff.phs_coeff[i][im][iqcal_idx] =
  970. coeff.iqc_coeff[0] & 0x7f;
  971. coeff.mag_coeff[i][im][iqcal_idx] =
  972. (coeff.iqc_coeff[0] >> 7) & 0x7f;
  973. if (coeff.mag_coeff[i][im][iqcal_idx] > 63)
  974. coeff.mag_coeff[i][im][iqcal_idx] -= 128;
  975. if (coeff.phs_coeff[i][im][iqcal_idx] > 63)
  976. coeff.phs_coeff[i][im][iqcal_idx] -= 128;
  977. }
  978. }
  979. if (AR_SREV_9550(ah))
  980. outlier_detect = ar955x_tx_iq_cal_median(ah, &coeff,
  981. iqcal_idx, nmeasurement);
  982. if (outlier_detect)
  983. ar9003_hw_tx_iq_cal_outlier_detection(ah, &coeff, is_reusable);
  984. return;
  985. tx_iqcal_fail:
  986. ath_dbg(common, CALIBRATE, "Tx IQ Cal failed\n");
  987. return;
  988. }
  989. static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
  990. {
  991. struct ath9k_hw_cal_data *caldata = ah->caldata;
  992. u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
  993. int i, im;
  994. memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
  995. for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
  996. tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
  997. AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
  998. if (!AR_SREV_9485(ah)) {
  999. tx_corr_coeff[i * 2][1] =
  1000. tx_corr_coeff[(i * 2) + 1][1] =
  1001. AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
  1002. tx_corr_coeff[i * 2][2] =
  1003. tx_corr_coeff[(i * 2) + 1][2] =
  1004. AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
  1005. }
  1006. }
  1007. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  1008. if (!(ah->txchainmask & (1 << i)))
  1009. continue;
  1010. for (im = 0; im < caldata->num_measures[i]; im++) {
  1011. if ((im % 2) == 0)
  1012. REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
  1013. AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
  1014. caldata->tx_corr_coeff[im][i]);
  1015. else
  1016. REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
  1017. AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
  1018. caldata->tx_corr_coeff[im][i]);
  1019. }
  1020. }
  1021. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
  1022. AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
  1023. REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
  1024. AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
  1025. }
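/*
 * Manual peak-detector calibration for one chain. With the LNA, RXON and
 * AGC overridden as set up below, the loop performs a 6-bit successive
 * approximation of the CALDAC value: each bit is tried from MSB to LSB and
 * kept only if AGC_OUT still reads zero after 100us. Assuming a monotonic
 * detector response, this converges on the largest CALDAC setting that does
 * not trip the peak detector; the result is then written back as the 2G/5G
 * CALDAC override.
 */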
  1026. static void ar9003_hw_manual_peak_cal(struct ath_hw *ah, u8 chain, bool is_2g)
  1027. {
  1028. int offset[8] = {0}, total = 0, test;
  1029. int agc_out, i, peak_detect_threshold = 0;
  1030. if (AR_SREV_9550(ah) || AR_SREV_9531(ah))
  1031. peak_detect_threshold = 8;
  1032. else if (AR_SREV_9561(ah))
  1033. peak_detect_threshold = 11;
  1034. /*
  1035. * Turn off LNA/SW.
  1036. */
  1037. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
  1038. AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0x1);
  1039. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
  1040. AR_PHY_65NM_RXRF_GAINSTAGES_LNAON_CALDC, 0x0);
  1041. if (AR_SREV_9003_PCOEM(ah) || AR_SREV_9330_11(ah)) {
  1042. if (is_2g)
  1043. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
  1044. AR_PHY_65NM_RXRF_GAINSTAGES_LNA2G_GAIN_OVR, 0x0);
  1045. else
  1046. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
  1047. AR_PHY_65NM_RXRF_GAINSTAGES_LNA5G_GAIN_OVR, 0x0);
  1048. }
  1049. /*
  1050. * Turn off RXON.
  1051. */
  1052. REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
  1053. AR_PHY_65NM_RXTX2_RXON_OVR, 0x1);
  1054. REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
  1055. AR_PHY_65NM_RXTX2_RXON, 0x0);
  1056. /*
  1057. * Turn on AGC for cal.
  1058. */
  1059. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1060. AR_PHY_65NM_RXRF_AGC_AGC_OVERRIDE, 0x1);
  1061. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1062. AR_PHY_65NM_RXRF_AGC_AGC_ON_OVR, 0x1);
  1063. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1064. AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0x1);
  1065. if (AR_SREV_9330_11(ah))
  1066. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1067. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, 0x0);
  1068. if (is_2g)
  1069. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1070. AR_PHY_65NM_RXRF_AGC_AGC2G_DBDAC_OVR,
  1071. peak_detect_threshold);
  1072. else
  1073. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1074. AR_PHY_65NM_RXRF_AGC_AGC5G_DBDAC_OVR,
  1075. peak_detect_threshold);
  1076. for (i = 6; i > 0; i--) {
  1077. offset[i] = BIT(i - 1);
  1078. test = total + offset[i];
  1079. if (is_2g)
  1080. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1081. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR,
  1082. test);
  1083. else
  1084. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1085. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR,
  1086. test);
  1087. udelay(100);
  1088. agc_out = REG_READ_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1089. AR_PHY_65NM_RXRF_AGC_AGC_OUT);
  1090. offset[i] = (agc_out) ? 0 : 1;
  1091. total += (offset[i] << (i - 1));
  1092. }
  1093. if (is_2g)
  1094. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1095. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, total);
  1096. else
  1097. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1098. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR, total);
  1099. /*
  1100. * Turn on LNA.
  1101. */
  1102. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
  1103. AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0);
  1104. /*
  1105. * Turn off RXON.
  1106. */
  1107. REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
  1108. AR_PHY_65NM_RXTX2_RXON_OVR, 0);
  1109. /*
  1110. * Turn off peak detect calibration.
  1111. */
  1112. REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
  1113. AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0);
  1114. }
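/*
 * Run the manual peak cal on every active Rx chain. On RTT-capable parts
 * this is skipped unless an RTT calibration is actually being run; when it
 * does run there, the resulting 2G/5G CALDAC values of chains 0 and 1 are
 * saved in caldata->caldac[], apparently for the RTT restore path.
 */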
  1115. static void ar9003_hw_do_pcoem_manual_peak_cal(struct ath_hw *ah,
  1116. struct ath9k_channel *chan,
  1117. bool run_rtt_cal)
  1118. {
  1119. struct ath9k_hw_cal_data *caldata = ah->caldata;
  1120. int i;
  1121. if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && !run_rtt_cal)
  1122. return;
  1123. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  1124. if (!(ah->rxchainmask & (1 << i)))
  1125. continue;
  1126. ar9003_hw_manual_peak_cal(ah, i, IS_CHAN_2GHZ(chan));
  1127. }
  1128. if (caldata)
  1129. set_bit(SW_PKDET_DONE, &caldata->cal_flags);
  1130. if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && caldata) {
  1131. if (IS_CHAN_2GHZ(chan)) {
  1132. caldata->caldac[0] = REG_READ_FIELD(ah,
  1133. AR_PHY_65NM_RXRF_AGC(0),
  1134. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
  1135. caldata->caldac[1] = REG_READ_FIELD(ah,
  1136. AR_PHY_65NM_RXRF_AGC(1),
  1137. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
  1138. } else {
  1139. caldata->caldac[0] = REG_READ_FIELD(ah,
  1140. AR_PHY_65NM_RXRF_AGC(0),
  1141. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
  1142. caldata->caldac[1] = REG_READ_FIELD(ah,
  1143. AR_PHY_65NM_RXRF_AGC(1),
  1144. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
  1145. }
  1146. }
  1147. }
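/*
 * Carrier-leak table handling after the AGC cal: if results from a previous
 * run are cached (TXCLCAL_DONE), write them back into the CL table of every
 * active Tx chain; otherwise, when the hardware reports CLC_SUCCESS and the
 * results are reusable, read the tables out into caldata and mark them as
 * done.
 */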
  1148. static void ar9003_hw_cl_cal_post_proc(struct ath_hw *ah, bool is_reusable)
  1149. {
  1150. u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
  1151. AR_PHY_CL_TAB_1,
  1152. AR_PHY_CL_TAB_2 };
  1153. struct ath9k_hw_cal_data *caldata = ah->caldata;
  1154. bool txclcal_done = false;
  1155. int i, j;
  1156. if (!caldata || !(ah->enabled_cals & TX_CL_CAL))
  1157. return;
  1158. txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
  1159. AR_PHY_AGC_CONTROL_CLC_SUCCESS);
  1160. if (test_bit(TXCLCAL_DONE, &caldata->cal_flags)) {
  1161. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  1162. if (!(ah->txchainmask & (1 << i)))
  1163. continue;
  1164. for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
  1165. REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
  1166. caldata->tx_clcal[i][j]);
  1167. }
  1168. } else if (is_reusable && txclcal_done) {
  1169. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  1170. if (!(ah->txchainmask & (1 << i)))
  1171. continue;
  1172. for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
  1173. caldata->tx_clcal[i][j] =
  1174. REG_READ(ah, CL_TAB_ENTRY(cl_idx[i]));
  1175. }
  1176. set_bit(TXCLCAL_DONE, &caldata->cal_flags);
  1177. }
  1178. }
  1179. static void ar9003_hw_init_cal_common(struct ath_hw *ah)
  1180. {
  1181. struct ath9k_hw_cal_data *caldata = ah->caldata;
  1182. /* Initialize list pointers */
  1183. ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;
  1184. INIT_CAL(&ah->iq_caldata);
  1185. INSERT_CAL(ah, &ah->iq_caldata);
  1186. INIT_CAL(&ah->temp_caldata);
  1187. INSERT_CAL(ah, &ah->temp_caldata);
  1188. /* Initialize current pointer to first element in list */
  1189. ah->cal_list_curr = ah->cal_list;
  1190. if (ah->cal_list_curr)
  1191. ath9k_hw_reset_calibration(ah, ah->cal_list_curr);
  1192. if (caldata)
  1193. caldata->CalValid = 0;
  1194. }
  1195. static bool ar9003_hw_init_cal_pcoem(struct ath_hw *ah,
  1196. struct ath9k_channel *chan)
  1197. {
  1198. struct ath_common *common = ath9k_hw_common(ah);
  1199. struct ath9k_hw_cal_data *caldata = ah->caldata;
  1200. bool txiqcal_done = false;
  1201. bool is_reusable = true, status = true;
  1202. bool run_rtt_cal = false, run_agc_cal;
  1203. bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
  1204. u32 rx_delay = 0;
  1205. u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
  1206. AR_PHY_AGC_CONTROL_FLTR_CAL |
  1207. AR_PHY_AGC_CONTROL_PKDET_CAL;
  1208. /* Use chip chainmask only for calibration */
  1209. ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);
  1210. if (rtt) {
  1211. if (!ar9003_hw_rtt_restore(ah, chan))
  1212. run_rtt_cal = true;
  1213. if (run_rtt_cal)
  1214. ath_dbg(common, CALIBRATE, "RTT calibration to be done\n");
  1215. }
  1216. run_agc_cal = run_rtt_cal;
  1217. if (run_rtt_cal) {
  1218. ar9003_hw_rtt_enable(ah);
  1219. ar9003_hw_rtt_set_mask(ah, 0x00);
  1220. ar9003_hw_rtt_clear_hist(ah);
  1221. }
  1222. if (rtt) {
  1223. if (!run_rtt_cal) {
  1224. agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
  1225. agc_supp_cals &= agc_ctrl;
  1226. agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
  1227. AR_PHY_AGC_CONTROL_FLTR_CAL |
  1228. AR_PHY_AGC_CONTROL_PKDET_CAL);
  1229. REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
  1230. } else {
  1231. if (ah->ah_flags & AH_FASTCC)
  1232. run_agc_cal = true;
  1233. }
  1234. }
  1235. if (ah->enabled_cals & TX_CL_CAL) {
  1236. if (caldata && test_bit(TXCLCAL_DONE, &caldata->cal_flags))
  1237. REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
  1238. AR_PHY_CL_CAL_ENABLE);
  1239. else {
  1240. REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
  1241. AR_PHY_CL_CAL_ENABLE);
  1242. run_agc_cal = true;
  1243. }
  1244. }
  1245. if ((IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan)) ||
  1246. !(ah->enabled_cals & TX_IQ_CAL))
  1247. goto skip_tx_iqcal;
  1248. /* Do Tx IQ Calibration */
  1249. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
  1250. AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
  1251. DELPT);
  1252. /*
  1253. * For AR9485 or later chips, TxIQ cal runs as part of
  1254. * AGC calibration
  1255. */
  1256. if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
  1257. if (caldata && !test_bit(TXIQCAL_DONE, &caldata->cal_flags))
  1258. REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  1259. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
  1260. else
  1261. REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  1262. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
  1263. txiqcal_done = run_agc_cal = true;
  1264. }
  1265. skip_tx_iqcal:
  1266. if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
  1267. ar9003_mci_init_cal_req(ah, &is_reusable);
  1268. if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
  1269. rx_delay = REG_READ(ah, AR_PHY_RX_DELAY);
  1270. /* Disable BB_active */
  1271. REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
  1272. udelay(5);
  1273. REG_WRITE(ah, AR_PHY_RX_DELAY, AR_PHY_RX_DELAY_DELAY);
  1274. REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  1275. }
  1276. if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
  1277. /* Calibrate the AGC */
  1278. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  1279. REG_READ(ah, AR_PHY_AGC_CONTROL) |
  1280. AR_PHY_AGC_CONTROL_CAL);
  1281. /* Poll for offset calibration complete */
  1282. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  1283. AR_PHY_AGC_CONTROL_CAL,
  1284. 0, AH_WAIT_TIMEOUT);
  1285. ar9003_hw_do_pcoem_manual_peak_cal(ah, chan, run_rtt_cal);
  1286. }
  1287. if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
  1288. REG_WRITE(ah, AR_PHY_RX_DELAY, rx_delay);
  1289. udelay(5);
  1290. }
  1291. if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
  1292. ar9003_mci_init_cal_done(ah);
  1293. if (rtt && !run_rtt_cal) {
  1294. agc_ctrl |= agc_supp_cals;
  1295. REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
  1296. }
  1297. if (!status) {
  1298. if (run_rtt_cal)
  1299. ar9003_hw_rtt_disable(ah);
  1300. ath_dbg(common, CALIBRATE,
  1301. "offset calibration failed to complete in %d ms; noisy environment?\n",
  1302. AH_WAIT_TIMEOUT / 1000);
  1303. return false;
  1304. }
  1305. if (txiqcal_done)
  1306. ar9003_hw_tx_iq_cal_post_proc(ah, 0, is_reusable);
  1307. else if (caldata && test_bit(TXIQCAL_DONE, &caldata->cal_flags))
  1308. ar9003_hw_tx_iq_cal_reload(ah);
  1309. ar9003_hw_cl_cal_post_proc(ah, is_reusable);
  1310. if (run_rtt_cal && caldata) {
  1311. if (is_reusable) {
  1312. if (!ath9k_hw_rfbus_req(ah)) {
  1313. ath_err(ath9k_hw_common(ah),
  1314. "Could not stop baseband\n");
  1315. } else {
  1316. ar9003_hw_rtt_fill_hist(ah);
  1317. if (test_bit(SW_PKDET_DONE, &caldata->cal_flags))
  1318. ar9003_hw_rtt_load_hist(ah);
  1319. }
  1320. ath9k_hw_rfbus_done(ah);
  1321. }
  1322. ar9003_hw_rtt_disable(ah);
  1323. }
  1324. /* Revert chainmask to runtime parameters */
  1325. ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);
  1326. ar9003_hw_init_cal_common(ah);
  1327. return true;
  1328. }
  1329. static bool do_ar9003_agc_cal(struct ath_hw *ah)
  1330. {
  1331. struct ath_common *common = ath9k_hw_common(ah);
  1332. bool status;
  1333. REG_WRITE(ah, AR_PHY_AGC_CONTROL,
  1334. REG_READ(ah, AR_PHY_AGC_CONTROL) |
  1335. AR_PHY_AGC_CONTROL_CAL);
  1336. status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
  1337. AR_PHY_AGC_CONTROL_CAL,
  1338. 0, AH_WAIT_TIMEOUT);
  1339. if (!status) {
  1340. ath_dbg(common, CALIBRATE,
  1341. "offset calibration failed to complete in %d ms; "
  1342. "noisy environment?\n",
  1343. AH_WAIT_TIMEOUT / 1000);
  1344. return false;
  1345. }
  1346. return true;
  1347. }
  1348. static bool ar9003_hw_init_cal_soc(struct ath_hw *ah,
  1349. struct ath9k_channel *chan)
  1350. {
  1351. bool txiqcal_done = false;
  1352. bool status = true;
  1353. bool run_agc_cal = false, sep_iq_cal = false;
  1354. int i = 0;
  1355. /* Use chip chainmask only for calibration */
  1356. ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);
  1357. if (ah->enabled_cals & TX_CL_CAL) {
  1358. REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL, AR_PHY_CL_CAL_ENABLE);
  1359. run_agc_cal = true;
  1360. }
  1361. if (IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan))
  1362. goto skip_tx_iqcal;
  1363. /* Do Tx IQ Calibration */
  1364. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
  1365. AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
  1366. DELPT);
  1367. /*
  1368. * For AR9485 or later chips, TxIQ cal runs as part of
  1369. * AGC calibration. Specifically, AR9550 in SoC chips.
  1370. */
  1371. if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
  1372. if (REG_READ_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_0,
  1373. AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL)) {
  1374. txiqcal_done = true;
  1375. } else {
  1376. txiqcal_done = false;
  1377. }
  1378. run_agc_cal = true;
  1379. } else {
  1380. sep_iq_cal = true;
  1381. run_agc_cal = true;
  1382. }
  1383. /*
  1384. * In the SoC family, this will run for AR9300, AR9331 and AR9340.
  1385. */
  1386. if (sep_iq_cal) {
  1387. txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
  1388. REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
  1389. udelay(5);
  1390. REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
  1391. }
  1392. if (AR_SREV_9550(ah) && IS_CHAN_2GHZ(chan)) {
  1393. if (!ar9003_hw_dynamic_osdac_selection(ah, txiqcal_done))
  1394. return false;
  1395. }
  1396. skip_tx_iqcal:
  1397. if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
  1398. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  1399. if (!(ah->rxchainmask & (1 << i)))
  1400. continue;
  1401. ar9003_hw_manual_peak_cal(ah, i,
  1402. IS_CHAN_2GHZ(chan));
  1403. }
  1404. /*
  1405. * For non-AR9550 chips, we just trigger AGC calibration
  1406. * in the HW, poll for completion and then process
  1407. * the results.
  1408. *
  1409. * For AR955x, we run it multiple times and use
  1410. * median IQ correction.
  1411. */
  1412. if (!AR_SREV_9550(ah)) {
  1413. status = do_ar9003_agc_cal(ah);
  1414. if (!status)
  1415. return false;
  1416. if (txiqcal_done)
  1417. ar9003_hw_tx_iq_cal_post_proc(ah, 0, false);
  1418. } else {
  1419. if (!txiqcal_done) {
  1420. status = do_ar9003_agc_cal(ah);
  1421. if (!status)
  1422. return false;
  1423. } else {
  1424. for (i = 0; i < MAXIQCAL; i++) {
  1425. status = do_ar9003_agc_cal(ah);
  1426. if (!status)
  1427. return false;
  1428. ar9003_hw_tx_iq_cal_post_proc(ah, i, false);
  1429. }
  1430. }
  1431. }
  1432. }
  1433. /* Revert chainmask to runtime parameters */
  1434. ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);
  1435. ar9003_hw_init_cal_common(ah);
  1436. return true;
  1437. }
  1438. void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
  1439. {
  1440. struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
  1441. struct ath_hw_ops *ops = ath9k_hw_ops(ah);
  1442. if (AR_SREV_9003_PCOEM(ah))
  1443. priv_ops->init_cal = ar9003_hw_init_cal_pcoem;
  1444. else
  1445. priv_ops->init_cal = ar9003_hw_init_cal_soc;
  1446. priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
  1447. priv_ops->setup_calibration = ar9003_hw_setup_calibration;
  1448. ops->calibrate = ar9003_hw_calibrate;
  1449. }