ar9003_calib.c 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127
  1. /*
  2. * Copyright (c) 2010-2011 Atheros Communications Inc.
  3. *
  4. * Permission to use, copy, modify, and/or distribute this software for any
  5. * purpose with or without fee is hereby granted, provided that the above
  6. * copyright notice and this permission notice appear in all copies.
  7. *
  8. * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  9. * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10. * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11. * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12. * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13. * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14. * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15. */
  16. #include "hw.h"
  17. #include "hw-ops.h"
  18. #include "ar9003_phy.h"
  19. #include "ar9003_rtt.h"
/* Number of stored per-gain Tx IQ-cal measurements per chain. */
#define MAX_MEASUREMENT	MAX_IQCAL_MEASUREMENT
/* Max tolerated spread between per-gain magnitude coefficients before
 * one of them is treated as an outlier (see ar9003_hw_detect_outlier). */
#define MAX_MAG_DELTA	11
/* Same bound for the per-gain phase coefficients. */
#define MAX_PHS_DELTA	10

/* Scratch area for Tx IQ calibration results. */
struct coeff {
	/* per-chain, per-gain magnitude mismatch (signed 7-bit range) */
	int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];
	/* per-chain, per-gain phase mismatch (signed 7-bit range) */
	int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];
	/* packed correction words: [0] = tx, [1] = rx (see calc_iq_corr) */
	int iqc_coeff[2];
};
/*
 * Periodic calibrations supported on AR9003 family chips.
 * Values are single bits so they can be OR-ed into caldata->CalValid.
 */
enum ar9003_cal_types {
	IQ_MISMATCH_CAL = BIT(0),
	TEMP_COMP_CAL = BIT(1),
};
  32. static void ar9003_hw_setup_calibration(struct ath_hw *ah,
  33. struct ath9k_cal_list *currCal)
  34. {
  35. struct ath_common *common = ath9k_hw_common(ah);
  36. /* Select calibration to run */
  37. switch (currCal->calData->calType) {
  38. case IQ_MISMATCH_CAL:
  39. /*
  40. * Start calibration with
  41. * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
  42. */
  43. REG_RMW_FIELD(ah, AR_PHY_TIMING4,
  44. AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
  45. currCal->calData->calCountMax);
  46. REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);
  47. ath_dbg(common, ATH_DBG_CALIBRATE,
  48. "starting IQ Mismatch Calibration\n");
  49. /* Kick-off cal */
  50. REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
  51. break;
  52. case TEMP_COMP_CAL:
  53. REG_RMW_FIELD(ah, AR_PHY_65NM_CH0_THERM,
  54. AR_PHY_65NM_CH0_THERM_LOCAL, 1);
  55. REG_RMW_FIELD(ah, AR_PHY_65NM_CH0_THERM,
  56. AR_PHY_65NM_CH0_THERM_START, 1);
  57. ath_dbg(common, ATH_DBG_CALIBRATE,
  58. "starting Temperature Compensation Calibration\n");
  59. break;
  60. }
  61. }
  62. /*
  63. * Generic calibration routine.
  64. * Recalibrate the lower PHY chips to account for temperature/environment
  65. * changes.
  66. */
  67. static bool ar9003_hw_per_calibration(struct ath_hw *ah,
  68. struct ath9k_channel *ichan,
  69. u8 rxchainmask,
  70. struct ath9k_cal_list *currCal)
  71. {
  72. struct ath9k_hw_cal_data *caldata = ah->caldata;
  73. /* Cal is assumed not done until explicitly set below */
  74. bool iscaldone = false;
  75. /* Calibration in progress. */
  76. if (currCal->calState == CAL_RUNNING) {
  77. /* Check to see if it has finished. */
  78. if (!(REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)) {
  79. /*
  80. * Accumulate cal measures for active chains
  81. */
  82. currCal->calData->calCollect(ah);
  83. ah->cal_samples++;
  84. if (ah->cal_samples >=
  85. currCal->calData->calNumSamples) {
  86. unsigned int i, numChains = 0;
  87. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  88. if (rxchainmask & (1 << i))
  89. numChains++;
  90. }
  91. /*
  92. * Process accumulated data
  93. */
  94. currCal->calData->calPostProc(ah, numChains);
  95. /* Calibration has finished. */
  96. caldata->CalValid |= currCal->calData->calType;
  97. currCal->calState = CAL_DONE;
  98. iscaldone = true;
  99. } else {
  100. /*
  101. * Set-up collection of another sub-sample until we
  102. * get desired number
  103. */
  104. ar9003_hw_setup_calibration(ah, currCal);
  105. }
  106. }
  107. } else if (!(caldata->CalValid & currCal->calData->calType)) {
  108. /* If current cal is marked invalid in channel, kick it off */
  109. ath9k_hw_reset_calibration(ah, currCal);
  110. }
  111. return iscaldone;
  112. }
/*
 * Periodic calibration entry point: run the current per-chip cal and,
 * on long-cal intervals, the noise floor (NF) calibration as well.
 * Returns true only when no calibration remains running or waiting.
 */
static bool ar9003_hw_calibrate(struct ath_hw *ah,
				struct ath9k_channel *chan,
				u8 rxchainmask,
				bool longcal)
{
	bool iscaldone = true;
	struct ath9k_cal_list *currCal = ah->cal_list_curr;

	/*
	 * For given calibration:
	 * 1. Call generic cal routine
	 * 2. When this cal is done (isCalDone) if we have more cals waiting
	 *    (eg after reset), mask this to upper layers by not propagating
	 *    isCalDone if it is set to TRUE.
	 *    Instead, change isCalDone to FALSE and setup the waiting cal(s)
	 *    to be run.
	 */
	if (currCal &&
	    (currCal->calState == CAL_RUNNING ||
	     currCal->calState == CAL_WAITING)) {
		iscaldone = ar9003_hw_per_calibration(ah, chan,
						      rxchainmask, currCal);
		if (iscaldone) {
			/*
			 * Advance to the next cal in the list.
			 * NOTE(review): calNext is dereferenced without a
			 * NULL check -- presumably the cal list is circular;
			 * confirm against the list setup code.
			 */
			ah->cal_list_curr = currCal = currCal->calNext;

			if (currCal->calState == CAL_WAITING) {
				iscaldone = false;
				ath9k_hw_reset_calibration(ah, currCal);
			}
		}
	}

	/* Do NF cal only at longer intervals */
	if (longcal) {
		/*
		 * Get the value from the previous NF cal and update
		 * history buffer.
		 */
		ath9k_hw_getnf(ah, chan);

		/*
		 * Load the NF from history buffer of the current channel.
		 * NF is slow time-variant, so it is OK to use a historical
		 * value.
		 */
		ath9k_hw_loadnf(ah, ah->curchan);

		/* start NF calibration, without updating BB NF register */
		ath9k_hw_start_nfcal(ah, false);
	}

	return iscaldone;
}
/*
 * Read one set of IQ mismatch measurements from the hardware and add
 * them to the per-chain running totals in @ah; called once per cal
 * sub-sample (see ar9003_hw_per_calibration).
 */
static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
{
	int i;

	/* Accumulate IQ cal measures for active chains */
	for (i = 0; i < AR5416_MAX_CHAINS; i++) {
		if (ah->txchainmask & BIT(i)) {
			ah->totalPowerMeasI[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
			ah->totalPowerMeasQ[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
			/* correlation term is accumulated as signed */
			ah->totalIqCorrMeas[i] +=
				(int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
			ath_dbg(ath9k_hw_common(ah), ATH_DBG_CALIBRATE,
				"%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
				ah->cal_samples, i, ah->totalPowerMeasI[i],
				ah->totalPowerMeasQ[i],
				ah->totalIqCorrMeas[i]);
		}
	}
}
/*
 * Post-process the accumulated Rx IQ mismatch totals (gathered by
 * ar9003_hw_iqcal_collect) into per-chain I/Q correction coefficients,
 * program them, and enable IQ correction.
 *
 * @numChains: number of active chains; indexes offset_array, so it must
 *             not exceed 3.
 */
static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 powerMeasQ, powerMeasI, iqCorrMeas;
	u32 qCoffDenom, iCoffDenom;
	int32_t qCoff, iCoff;
	int iqCorrNeg, i;
	/* per-chain Rx IQ correction registers */
	static const u_int32_t offset_array[3] = {
		AR_PHY_RX_IQCAL_CORR_B0,
		AR_PHY_RX_IQCAL_CORR_B1,
		AR_PHY_RX_IQCAL_CORR_B2,
	};

	for (i = 0; i < numChains; i++) {
		powerMeasI = ah->totalPowerMeasI[i];
		powerMeasQ = ah->totalPowerMeasQ[i];
		iqCorrMeas = ah->totalIqCorrMeas[i];

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Starting IQ Cal and Correction for Chain %d\n",
			i);

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Orignal: Chn %diq_corr_meas = 0x%08x\n",
			i, ah->totalIqCorrMeas[i]);

		/* Work with the magnitude of the correlation; remember
		 * its sign in iqCorrNeg (two's-complement negate). */
		iqCorrNeg = 0;
		if (iqCorrMeas > 0x80000000) {
			iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
			iqCorrNeg = 1;
		}

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Chn %d pwr_meas_i = 0x%08x\n", i, powerMeasI);
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Chn %d pwr_meas_q = 0x%08x\n", i, powerMeasQ);
		ath_dbg(common, ATH_DBG_CALIBRATE, "iqCorrNeg is 0x%08x\n",
			iqCorrNeg);

		iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
		qCoffDenom = powerMeasQ / 64;

		/* Skip the chain entirely if either denominator is zero. */
		if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
			iCoff = iqCorrMeas / iCoffDenom;
			qCoff = powerMeasI / qCoffDenom - 64;
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Chn %d iCoff = 0x%08x\n", i, iCoff);
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Chn %d qCoff = 0x%08x\n", i, qCoff);

			/* Force bounds on iCoff */
			if (iCoff >= 63)
				iCoff = 63;
			else if (iCoff <= -63)
				iCoff = -63;

			/* Negate iCoff if iqCorrNeg == 0 */
			if (iqCorrNeg == 0x0)
				iCoff = -iCoff;

			/* Force bounds on qCoff */
			if (qCoff >= 63)
				qCoff = 63;
			else if (qCoff <= -63)
				qCoff = -63;

			/* keep low 7 bits: signed values in two's complement */
			iCoff = iCoff & 0x7f;
			qCoff = qCoff & 0x7f;

			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Chn %d : iCoff = 0x%x  qCoff = 0x%x\n",
				i, iCoff, qCoff);
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Register offset (0x%04x) before update = 0x%x\n",
				offset_array[i],
				REG_READ(ah, offset_array[i]));

			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				      iCoff);
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				      qCoff);
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"IQ Cal and Correction done for Chain %d\n", i);
		}
	}

	/* Enable the IQ correction block once all chains are programmed. */
	REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
		    AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
	ath_dbg(common, ATH_DBG_CALIBRATE,
		"IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
		(unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
		AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
		REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
}
/*
 * Per-cal descriptor for the IQ mismatch calibration: samples are
 * collected by ar9003_hw_iqcal_collect() and processed by
 * ar9003_hw_iqcalibrate().
 */
static const struct ath9k_percal_data iq_cal_single_sample = {
	IQ_MISMATCH_CAL,
	MIN_CAL_SAMPLES,
	PER_MAX_LOG_COUNT,
	ar9003_hw_iqcal_collect,
	ar9003_hw_iqcalibrate
};
/* Hook up the IQ mismatch cal descriptor; called once at attach time. */
static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
{
	ah->iq_caldata.calData = &iq_cal_single_sample;
}
  283. /*
  284. * solve 4x4 linear equation used in loopback iq cal.
  285. */
  286. static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
  287. s32 sin_2phi_1,
  288. s32 cos_2phi_1,
  289. s32 sin_2phi_2,
  290. s32 cos_2phi_2,
  291. s32 mag_a0_d0,
  292. s32 phs_a0_d0,
  293. s32 mag_a1_d0,
  294. s32 phs_a1_d0,
  295. s32 solved_eq[])
  296. {
  297. s32 f1 = cos_2phi_1 - cos_2phi_2,
  298. f3 = sin_2phi_1 - sin_2phi_2,
  299. f2;
  300. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  301. const s32 result_shift = 1 << 15;
  302. struct ath_common *common = ath9k_hw_common(ah);
  303. f2 = (f1 * f1 + f3 * f3) / result_shift;
  304. if (!f2) {
  305. ath_dbg(common, ATH_DBG_CALIBRATE, "Divide by 0\n");
  306. return false;
  307. }
  308. /* mag mismatch, tx */
  309. mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
  310. /* phs mismatch, tx */
  311. phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);
  312. mag_tx = (mag_tx / f2);
  313. phs_tx = (phs_tx / f2);
  314. /* mag mismatch, rx */
  315. mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
  316. result_shift;
  317. /* phs mismatch, rx */
  318. phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
  319. result_shift;
  320. solved_eq[0] = mag_tx;
  321. solved_eq[1] = phs_tx;
  322. solved_eq[2] = mag_rx;
  323. solved_eq[3] = phs_rx;
  324. return true;
  325. }
  326. static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
  327. {
  328. s32 abs_i = abs(in_re),
  329. abs_q = abs(in_im),
  330. max_abs, min_abs;
  331. if (abs_i > abs_q) {
  332. max_abs = abs_i;
  333. min_abs = abs_q;
  334. } else {
  335. max_abs = abs_q;
  336. min_abs = abs_i;
  337. }
  338. return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
  339. }
/* Delta used to scale the sin/cos estimates below. */
#define DELPT 32

/*
 * Convert the six raw loopback IQ cal words for one chain into packed
 * tx (iqc_coeff[0]) and rx (iqc_coeff[1]) correction coefficients.
 *
 * Naming: a0/a1 = without/with analog phase shift, d0/d1 = the two
 * measurement deltas.  Each raw field is a 12-bit two's-complement
 * value unpacked from the iq_res[] words.
 *
 * Returns false whenever any intermediate denominator would be zero.
 */
static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
				   s32 chain_idx,
				   const s32 iq_res[],
				   s32 iqc_coeff[])
{
	s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
	    i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
	    i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
	    i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
	s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
	    phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
	    sin_2phi_1, cos_2phi_1,
	    sin_2phi_2, cos_2phi_2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
	    q_q_coff, q_i_coff;
	const s32 res_scale = 1 << 15;		/* fixed-point scale */
	const s32 delpt_shift = 1 << 8;
	s32 mag1, mag2;
	struct ath_common *common = ath9k_hw_common(ah);

	/* Unpack the 12-bit fields; sign-extend anything above 0x800. */
	i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
	i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
	iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);

	if (i2_m_q2_a0_d0 > 0x800)
		i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);

	if (i2_p_q2_a0_d0 > 0x800)
		i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);

	if (iq_corr_a0_d0 > 0x800)
		iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);

	i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
	i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
	iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;

	if (i2_m_q2_a0_d1 > 0x800)
		i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);

	if (i2_p_q2_a0_d1 > 0x800)
		i2_p_q2_a0_d1 = -((0xfff - i2_p_q2_a0_d1) + 1);

	if (iq_corr_a0_d1 > 0x800)
		iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);

	i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
	i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
	iq_corr_a1_d0 = iq_res[4] & 0xfff;

	if (i2_m_q2_a1_d0 > 0x800)
		i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);

	if (i2_p_q2_a1_d0 > 0x800)
		i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);

	if (iq_corr_a1_d0 > 0x800)
		iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);

	i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
	i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
	iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;

	if (i2_m_q2_a1_d1 > 0x800)
		i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);

	if (i2_p_q2_a1_d1 > 0x800)
		i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);

	if (iq_corr_a1_d1 > 0x800)
		iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);

	/* i^2 + q^2 terms are used as divisors below */
	if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
	    (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0:\n"
			"a0_d0=%d\n"
			"a0_d1=%d\n"
			"a2_d0=%d\n"
			"a1_d1=%d\n",
			i2_p_q2_a0_d0, i2_p_q2_a0_d1,
			i2_p_q2_a1_d0, i2_p_q2_a1_d1);
		return false;
	}

	/* Normalized magnitude and phase mismatch per measurement. */
	mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
	phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;

	mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
	phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;

	mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
	phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;

	mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
	phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;

	/* w/o analog phase shift */
	sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
	/* w/o analog phase shift */
	cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);

	/*
	 * force sin^2 + cos^2 = 1;
	 * find magnitude by approximation
	 */
	mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
	mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);

	if ((mag1 == 0) || (mag2 == 0)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0: mag1=%d, mag2=%d\n",
			mag1, mag2);
		return false;
	}

	/* normalization sin and cos by mag */
	sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
	cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
	sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
	cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);

	/* calculate IQ mismatch */
	if (!ar9003_hw_solve_iq_cal(ah,
				    sin_2phi_1, cos_2phi_1,
				    sin_2phi_2, cos_2phi_2,
				    mag_a0_d0, phs_a0_d0,
				    mag_a1_d0,
				    phs_a1_d0, solved_eq)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Call to ar9003_hw_solve_iq_cal() failed.\n");
		return false;
	}

	mag_tx = solved_eq[0];
	phs_tx = solved_eq[1];
	mag_rx = solved_eq[2];
	phs_rx = solved_eq[3];

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"chain %d: mag mismatch=%d phase mismatch=%d\n",
		chain_idx, mag_tx/res_scale, phs_tx/res_scale);

	if (res_scale == mag_tx) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0: mag_tx=%d, res_scale=%d\n",
			mag_tx, res_scale);
		return false;
	}

	/* calculate and quantize Tx IQ correction factor */
	mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
	phs_corr_tx = -phs_tx;

	q_q_coff = (mag_corr_tx * 128 / res_scale);
	q_i_coff = (phs_corr_tx * 256 / res_scale);

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"tx chain %d: mag corr=%d  phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	/* clamp to signed 7-bit range */
	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	/* pack: magnitude in bits 13:7, phase in bits 6:0 */
	iqc_coeff[0] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"tx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[0]);

	if (-mag_rx == res_scale) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0: mag_rx=%d, res_scale=%d\n",
			mag_rx, res_scale);
		return false;
	}

	/* calculate and quantize Rx IQ correction factors */
	mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
	phs_corr_rx = -phs_rx;

	q_q_coff = (mag_corr_rx * 128 / res_scale);
	q_i_coff = (phs_corr_rx * 256 / res_scale);

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"rx chain %d: mag corr=%d  phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	/* clamp to signed 7-bit range */
	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	iqc_coeff[1] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"rx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[1]);

	return true;
}
  514. static void ar9003_hw_detect_outlier(int *mp_coeff, int nmeasurement,
  515. int max_delta)
  516. {
  517. int mp_max = -64, max_idx = 0;
  518. int mp_min = 63, min_idx = 0;
  519. int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;
  520. /* find min/max mismatch across all calibrated gains */
  521. for (i = 0; i < nmeasurement; i++) {
  522. if (mp_coeff[i] > mp_max) {
  523. mp_max = mp_coeff[i];
  524. max_idx = i;
  525. } else if (mp_coeff[i] < mp_min) {
  526. mp_min = mp_coeff[i];
  527. min_idx = i;
  528. }
  529. }
  530. /* find average (exclude max abs value) */
  531. for (i = 0; i < nmeasurement; i++) {
  532. if ((abs(mp_coeff[i]) < abs(mp_max)) ||
  533. (abs(mp_coeff[i]) < abs(mp_min))) {
  534. mp_avg += mp_coeff[i];
  535. mp_count++;
  536. }
  537. }
  538. /*
  539. * finding mean magnitude/phase if possible, otherwise
  540. * just use the last value as the mean
  541. */
  542. if (mp_count)
  543. mp_avg /= mp_count;
  544. else
  545. mp_avg = mp_coeff[nmeasurement - 1];
  546. /* detect outlier */
  547. if (abs(mp_max - mp_min) > max_delta) {
  548. if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
  549. outlier_idx = max_idx;
  550. else
  551. outlier_idx = min_idx;
  552. mp_coeff[outlier_idx] = mp_avg;
  553. }
  554. }
/*
 * Filter the per-gain Tx IQ coefficients (outlier removal), write them
 * into the hardware correction tables, cache them in caldata for later
 * reload, and enable Tx + loopback-Rx IQ correction.
 */
static void ar9003_hw_tx_iqcal_load_avg_2_passes(struct ath_hw *ah,
						 u8 num_chains,
						 struct coeff *coeff,
						 bool is_reusable)
{
	int i, im, nmeasurement;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	struct ath9k_hw_cal_data *caldata = ah->caldata;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	/* Each table register holds two measurements (even/odd im);
	 * AR9485 is single-chain, so skip chains 1 and 2 there. */
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	/* Load the average of 2 passes */
	for (i = 0; i < num_chains; i++) {
		nmeasurement = REG_READ_FIELD(ah,
					      AR_PHY_TX_IQCAL_STATUS_B0,
					      AR_PHY_CALIBRATED_GAINS_0);

		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		/* detect outlier only if nmeasurement > 1 */
		if (nmeasurement > 1) {
			/* Detect magnitude outlier */
			ar9003_hw_detect_outlier(coeff->mag_coeff[i],
						 nmeasurement, MAX_MAG_DELTA);

			/* Detect phase outlier */
			ar9003_hw_detect_outlier(coeff->phs_coeff[i],
						 nmeasurement, MAX_PHS_DELTA);
		}

		for (im = 0; im < nmeasurement; im++) {
			/* pack 7-bit magnitude (low) and phase (high) */
			coeff->iqc_coeff[0] = (coeff->mag_coeff[i][im] & 0x7f) |
				((coeff->phs_coeff[i][im] & 0x7f) << 7);

			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					coeff->iqc_coeff[0]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					coeff->iqc_coeff[0]);

			/* cache for ar9003_hw_tx_iq_cal_reload() */
			if (caldata)
				caldata->tx_corr_coeff[im][i] =
					coeff->iqc_coeff[0];
		}
		if (caldata)
			caldata->num_measures[i] = nmeasurement;
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);

	if (caldata)
		caldata->done_txiqcal_once = is_reusable;

	return;
}
  618. static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
  619. {
  620. struct ath_common *common = ath9k_hw_common(ah);
  621. u8 tx_gain_forced;
  622. tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  623. AR_PHY_TXGAIN_FORCE);
  624. if (tx_gain_forced)
  625. REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  626. AR_PHY_TXGAIN_FORCE, 0);
  627. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
  628. AR_PHY_TX_IQCAL_START_DO_CAL, 1);
  629. if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
  630. AR_PHY_TX_IQCAL_START_DO_CAL, 0,
  631. AH_WAIT_TIMEOUT)) {
  632. ath_dbg(common, ATH_DBG_CALIBRATE,
  633. "Tx IQ Cal is not completed.\n");
  634. return false;
  635. }
  636. return true;
  637. }
  638. static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah, bool is_reusable)
  639. {
  640. struct ath_common *common = ath9k_hw_common(ah);
  641. const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
  642. AR_PHY_TX_IQCAL_STATUS_B0,
  643. AR_PHY_TX_IQCAL_STATUS_B1,
  644. AR_PHY_TX_IQCAL_STATUS_B2,
  645. };
  646. const u_int32_t chan_info_tab[] = {
  647. AR_PHY_CHAN_INFO_TAB_0,
  648. AR_PHY_CHAN_INFO_TAB_1,
  649. AR_PHY_CHAN_INFO_TAB_2,
  650. };
  651. struct coeff coeff;
  652. s32 iq_res[6];
  653. u8 num_chains = 0;
  654. int i, im, j;
  655. int nmeasurement;
  656. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  657. if (ah->txchainmask & (1 << i))
  658. num_chains++;
  659. }
  660. for (i = 0; i < num_chains; i++) {
  661. nmeasurement = REG_READ_FIELD(ah,
  662. AR_PHY_TX_IQCAL_STATUS_B0,
  663. AR_PHY_CALIBRATED_GAINS_0);
  664. if (nmeasurement > MAX_MEASUREMENT)
  665. nmeasurement = MAX_MEASUREMENT;
  666. for (im = 0; im < nmeasurement; im++) {
  667. ath_dbg(common, ATH_DBG_CALIBRATE,
  668. "Doing Tx IQ Cal for chain %d.\n", i);
  669. if (REG_READ(ah, txiqcal_status[i]) &
  670. AR_PHY_TX_IQCAL_STATUS_FAILED) {
  671. ath_dbg(common, ATH_DBG_CALIBRATE,
  672. "Tx IQ Cal failed for chain %d.\n", i);
  673. goto tx_iqcal_fail;
  674. }
  675. for (j = 0; j < 3; j++) {
  676. u32 idx = 2 * j, offset = 4 * (3 * im + j);
  677. REG_RMW_FIELD(ah,
  678. AR_PHY_CHAN_INFO_MEMORY,
  679. AR_PHY_CHAN_INFO_TAB_S2_READ,
  680. 0);
  681. /* 32 bits */
  682. iq_res[idx] = REG_READ(ah,
  683. chan_info_tab[i] +
  684. offset);
  685. REG_RMW_FIELD(ah,
  686. AR_PHY_CHAN_INFO_MEMORY,
  687. AR_PHY_CHAN_INFO_TAB_S2_READ,
  688. 1);
  689. /* 16 bits */
  690. iq_res[idx + 1] = 0xffff & REG_READ(ah,
  691. chan_info_tab[i] + offset);
  692. ath_dbg(common, ATH_DBG_CALIBRATE,
  693. "IQ RES[%d]=0x%x"
  694. "IQ_RES[%d]=0x%x\n",
  695. idx, iq_res[idx], idx + 1,
  696. iq_res[idx + 1]);
  697. }
  698. if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
  699. coeff.iqc_coeff)) {
  700. ath_dbg(common, ATH_DBG_CALIBRATE,
  701. "Failed in calculation of \
  702. IQ correction.\n");
  703. goto tx_iqcal_fail;
  704. }
  705. coeff.mag_coeff[i][im] = coeff.iqc_coeff[0] & 0x7f;
  706. coeff.phs_coeff[i][im] =
  707. (coeff.iqc_coeff[0] >> 7) & 0x7f;
  708. if (coeff.mag_coeff[i][im] > 63)
  709. coeff.mag_coeff[i][im] -= 128;
  710. if (coeff.phs_coeff[i][im] > 63)
  711. coeff.phs_coeff[i][im] -= 128;
  712. }
  713. }
  714. ar9003_hw_tx_iqcal_load_avg_2_passes(ah, num_chains,
  715. &coeff, is_reusable);
  716. return;
  717. tx_iqcal_fail:
  718. ath_dbg(common, ATH_DBG_CALIBRATE, "Tx IQ Cal failed\n");
  719. return;
  720. }
/*
 * Rewrite the hardware Tx IQ correction tables from coefficients cached
 * in caldata by an earlier ar9003_hw_tx_iqcal_load_avg_2_passes() run,
 * then re-enable Tx and loopback-Rx IQ correction.
 *
 * NOTE(review): dereferences ah->caldata unconditionally -- callers are
 * expected to have verified it is non-NULL (confirm at call sites).
 */
static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	int i, im;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	/* Each table register holds two measurements (even/odd im);
	 * AR9485 is single-chain, so skip chains 1 and 2 there. */
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;
		for (im = 0; im < caldata->num_measures[i]; im++) {
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
		}
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
}
/*
 * Try to restore the radio retention table (RTT) state from history
 * instead of running a full calibration.  Returns true when the
 * force-restore succeeded; false when RTT is unsupported, no caldata
 * exists, or the restore failed.
 */
static bool ar9003_hw_rtt_restore(struct ath_hw *ah, struct ath9k_channel *chan)
{
	struct ath9k_rtt_hist *hist;
	u32 *table;
	int i;
	bool restore;

	/* RTT capability and saved cal data are both required */
	if (!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT) || !ah->caldata)
		return false;

	hist = &ah->caldata->rtt_hist;

	ar9003_hw_rtt_enable(ah);
	ar9003_hw_rtt_set_mask(ah, 0x10);
	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->rxchainmask & (1 << i)))
			continue;
		/*
		 * NOTE(review): indexes table[i][num_readings], i.e. one
		 * past the last stored reading if num_readings is a count
		 * -- confirm the intended slot against the RTT history
		 * writer in ar9003_rtt.c.
		 */
		table = &hist->table[i][hist->num_readings][0];
		ar9003_hw_rtt_load_hist(ah, i, table);
	}
	restore = ar9003_hw_rtt_force_restore(ah);
	ar9003_hw_rtt_disable(ah);

	return restore;
}
/*
 * Run the AR9003 initial-calibration sequence for @chan:
 *  - attempt an RTT restore (skips parts of AGC cal on success),
 *  - arm Tx carrier-leak and Tx IQ calibration as configured,
 *  - kick the AGC hardware calibration and poll for completion,
 *  - post-process / reload per-chain IQ and carrier-leak results,
 *  - set up the periodic calibration list (IQ mismatch, temp comp).
 *
 * Returns false only if the AGC offset calibration times out.
 */
static bool ar9003_hw_init_cal(struct ath_hw *ah,
			       struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false, txclcal_done = false;
	bool is_reusable = true, status = true;
	bool run_rtt_cal = false, run_agc_cal;
	bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
	/* Sub-calibrations that may be masked off when RTT was restored. */
	u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
					  AR_PHY_AGC_CONTROL_FLTR_CAL |
					  AR_PHY_AGC_CONTROL_PKDET_CAL;
	int i, j;
	/* Per-chain carrier-leak table base registers. */
	u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
					  AR_PHY_CL_TAB_1,
					  AR_PHY_CL_TAB_2 };

	/* A failed RTT restore forces a full RTT calibration run. */
	if (rtt) {
		if (!ar9003_hw_rtt_restore(ah, chan))
			run_rtt_cal = true;

		ath_dbg(common, ATH_DBG_CALIBRATE, "RTT restore %s\n",
			run_rtt_cal ? "failed" : "succeed");
	}
	run_agc_cal = run_rtt_cal;

	if (run_rtt_cal) {
		ar9003_hw_rtt_enable(ah);
		ar9003_hw_rtt_set_mask(ah, 0x00);
		ar9003_hw_rtt_clear_hist(ah);
	}

	/*
	 * RTT state was restored: temporarily disable the offset/filter/
	 * peak-detect sub-cals; the saved AGC_CONTROL bits are put back
	 * after the cal below.
	 */
	if (rtt && !run_rtt_cal) {
		agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
		agc_supp_cals &= agc_ctrl;
		agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
			      AR_PHY_AGC_CONTROL_FLTR_CAL |
			      AR_PHY_AGC_CONTROL_PKDET_CAL);
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	/* Carrier-leak cal: reuse prior results if available, else run it. */
	if (ah->enabled_cals & TX_CL_CAL) {
		if (caldata && caldata->done_txclcal_once)
			REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
		else {
			REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
			run_agc_cal = true;
		}
	}

	if (!(ah->enabled_cals & TX_IQ_CAL))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (caldata && !caldata->done_txiqcal_once)
			REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		else
			REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		/*
		 * NOTE(review): txiqcal_done is set true even on the
		 * REG_CLR_BIT path above — presumably so the reload/post-proc
		 * logic below still runs; confirm this is intentional.
		 */
		txiqcal_done = run_agc_cal = true;
		goto skip_tx_iqcal;
	} else if (caldata && !caldata->done_txiqcal_once)
		run_agc_cal = true;

	/* Standalone Tx IQ cal (pre-AR9485 path). */
	txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
	udelay(5);
	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

skip_tx_iqcal:
	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);
	}

	/* Restore the sub-cal enable bits saved before the RTT restore. */
	if (rtt && !run_rtt_cal) {
		agc_ctrl |= agc_supp_cals;
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (!status) {
		if (run_rtt_cal)
			ar9003_hw_rtt_disable(ah);
		/*
		 * NOTE(review): the two adjacent literals concatenate with
		 * no space -> "...in 1ms;noisy environment?".
		 */
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"offset calibration failed to complete in 1ms;"
			"noisy environment?\n");
		return false;
	}

	/* Apply freshly measured IQ coefficients, or reload saved ones. */
	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, is_reusable);
	else if (caldata && caldata->done_txiqcal_once)
		ar9003_hw_tx_iq_cal_reload(ah);

#define CL_TAB_ENTRY(reg_base)	(reg_base + (4 * j))
	/*
	 * Carrier-leak results: on the first successful run, cache the
	 * per-chain table in caldata; on later runs, write it back.
	 */
	if (caldata && (ah->enabled_cals & TX_CL_CAL)) {
		txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
				  AR_PHY_AGC_CONTROL_CLC_SUCCESS);
		if (caldata->done_txclcal_once) {
			for (i = 0; i < AR9300_MAX_CHAINS; i++) {
				if (!(ah->txchainmask & (1 << i)))
					continue;
				for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
					REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
						  caldata->tx_clcal[i][j]);
			}
		} else if (is_reusable && txclcal_done) {
			for (i = 0; i < AR9300_MAX_CHAINS; i++) {
				if (!(ah->txchainmask & (1 << i)))
					continue;
				for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
					caldata->tx_clcal[i][j] =
						REG_READ(ah,
						  CL_TAB_ENTRY(cl_idx[i]));
			}
			caldata->done_txclcal_once = true;
		}
	}
#undef CL_TAB_ENTRY

	/* Capture the fresh RTT readings for future restores, then disable. */
	if (run_rtt_cal && caldata) {
		struct ath9k_rtt_hist *hist = &caldata->rtt_hist;
		if (is_reusable && (hist->num_readings < RTT_HIST_MAX)) {
			u32 *table;
			for (i = 0; i < AR9300_MAX_CHAINS; i++) {
				if (!(ah->rxchainmask & (1 << i)))
					continue;
				table = &hist->table[i][hist->num_readings][0];
				ar9003_hw_rtt_fill_hist(ah, i, table);
			}
		}

		ar9003_hw_rtt_disable(ah);
	}

	/* Load the noise floor, then kick off a fresh NF calibration. */
	ath9k_hw_loadnf(ah, chan);
	ath9k_hw_start_nfcal(ah, true);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;
	ah->supp_cals = IQ_MISMATCH_CAL;

	if (ah->supp_cals & IQ_MISMATCH_CAL) {
		INIT_CAL(&ah->iq_caldata);
		INSERT_CAL(ah, &ah->iq_caldata);
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"enabling IQ Calibration.\n");
	}

	if (ah->supp_cals & TEMP_COMP_CAL) {
		INIT_CAL(&ah->tempCompCalData);
		INSERT_CAL(ah, &ah->tempCompCalData);
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"enabling Temperature Compensation Calibration.\n");
	}

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	/* Invalidate cached results so periodic cal starts fresh. */
	if (caldata)
		caldata->CalValid = 0;

	return true;
}
  940. void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
  941. {
  942. struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
  943. struct ath_hw_ops *ops = ath9k_hw_ops(ah);
  944. priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
  945. priv_ops->init_cal = ar9003_hw_init_cal;
  946. priv_ops->setup_calibration = ar9003_hw_setup_calibration;
  947. ops->calibrate = ar9003_hw_calibrate;
  948. }