ar9003_calib.c

/*
 * Copyright (c) 2010-2011 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include "hw.h"
#include "hw-ops.h"
#include "ar9003_phy.h"
#include "ar9003_rtt.h"
#include "ar9003_mci.h"

#define MAX_MEASUREMENT	MAX_IQCAL_MEASUREMENT
#define MAX_MAG_DELTA	11
#define MAX_PHS_DELTA	10

struct coeff {
	int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];
	int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];
	int iqc_coeff[2];
};

enum ar9003_cal_types {
	IQ_MISMATCH_CAL = BIT(0),
	TEMP_COMP_CAL = BIT(1),
};
static void ar9003_hw_setup_calibration(struct ath_hw *ah,
					struct ath9k_cal_list *currCal)
{
	struct ath_common *common = ath9k_hw_common(ah);

	/* Select calibration to run */
	switch (currCal->calData->calType) {
	case IQ_MISMATCH_CAL:
		/*
		 * Start calibration with
		 * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
		 */
		REG_RMW_FIELD(ah, AR_PHY_TIMING4,
			      AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
			      currCal->calData->calCountMax);
		REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"starting IQ Mismatch Calibration\n");

		/* Kick-off cal */
		REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
		break;
	case TEMP_COMP_CAL:
		REG_RMW_FIELD(ah, AR_PHY_65NM_CH0_THERM,
			      AR_PHY_65NM_CH0_THERM_LOCAL, 1);
		REG_RMW_FIELD(ah, AR_PHY_65NM_CH0_THERM,
			      AR_PHY_65NM_CH0_THERM_START, 1);

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"starting Temperature Compensation Calibration\n");
		break;
	}
}
/*
 * Generic calibration routine.
 * Recalibrate the lower PHY chips to account for temperature/environment
 * changes.
 */
static bool ar9003_hw_per_calibration(struct ath_hw *ah,
				      struct ath9k_channel *ichan,
				      u8 rxchainmask,
				      struct ath9k_cal_list *currCal)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	/* Cal is assumed not done until explicitly set below */
	bool iscaldone = false;

	/* Calibration in progress. */
	if (currCal->calState == CAL_RUNNING) {
		/* Check to see if it has finished. */
		if (!(REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)) {
			/*
			 * Accumulate cal measures for active chains
			 */
			currCal->calData->calCollect(ah);
			ah->cal_samples++;

			if (ah->cal_samples >=
			    currCal->calData->calNumSamples) {
				unsigned int i, numChains = 0;

				for (i = 0; i < AR9300_MAX_CHAINS; i++) {
					if (rxchainmask & (1 << i))
						numChains++;
				}

				/*
				 * Process accumulated data
				 */
				currCal->calData->calPostProc(ah, numChains);

				/* Calibration has finished. */
				caldata->CalValid |= currCal->calData->calType;
				currCal->calState = CAL_DONE;
				iscaldone = true;
			} else {
				/*
				 * Set-up collection of another sub-sample until we
				 * get desired number
				 */
				ar9003_hw_setup_calibration(ah, currCal);
			}
		}
	} else if (!(caldata->CalValid & currCal->calData->calType)) {
		/* If current cal is marked invalid in channel, kick it off */
		ath9k_hw_reset_calibration(ah, currCal);
	}

	return iscaldone;
}
static bool ar9003_hw_calibrate(struct ath_hw *ah,
				struct ath9k_channel *chan,
				u8 rxchainmask,
				bool longcal)
{
	bool iscaldone = true;
	struct ath9k_cal_list *currCal = ah->cal_list_curr;

	/*
	 * For given calibration:
	 * 1. Call generic cal routine
	 * 2. When this cal is done (isCalDone) if we have more cals waiting
	 *    (eg after reset), mask this to upper layers by not propagating
	 *    isCalDone if it is set to TRUE.
	 *    Instead, change isCalDone to FALSE and setup the waiting cal(s)
	 *    to be run.
	 */
	if (currCal &&
	    (currCal->calState == CAL_RUNNING ||
	     currCal->calState == CAL_WAITING)) {
		iscaldone = ar9003_hw_per_calibration(ah, chan,
						      rxchainmask, currCal);
		if (iscaldone) {
			ah->cal_list_curr = currCal = currCal->calNext;

			if (currCal->calState == CAL_WAITING) {
				iscaldone = false;
				ath9k_hw_reset_calibration(ah, currCal);
			}
		}
	}

	/* Do NF cal only at longer intervals */
	if (longcal) {
		/*
		 * Get the value from the previous NF cal and update
		 * history buffer.
		 */
		ath9k_hw_getnf(ah, chan);

		/*
		 * Load the NF from history buffer of the current channel.
		 * NF is slow time-variant, so it is OK to use a historical
		 * value.
		 */
		ath9k_hw_loadnf(ah, ah->curchan);

		/* start NF calibration, without updating BB NF register */
		ath9k_hw_start_nfcal(ah, false);
	}

	return iscaldone;
}
static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
{
	int i;

	/* Accumulate IQ cal measures for active chains */
	for (i = 0; i < AR5416_MAX_CHAINS; i++) {
		if (ah->txchainmask & BIT(i)) {
			ah->totalPowerMeasI[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
			ah->totalPowerMeasQ[i] +=
				REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
			ah->totalIqCorrMeas[i] +=
				(int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
			ath_dbg(ath9k_hw_common(ah), ATH_DBG_CALIBRATE,
				"%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
				ah->cal_samples, i, ah->totalPowerMeasI[i],
				ah->totalPowerMeasQ[i],
				ah->totalIqCorrMeas[i]);
		}
	}
}
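
/*
 * Convert the accumulated per-chain I/Q power and correlation measurements
 * into I/Q correction coefficients and program them into the per-chain
 * Rx IQ correction registers.
 */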
static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 powerMeasQ, powerMeasI, iqCorrMeas;
	u32 qCoffDenom, iCoffDenom;
	int32_t qCoff, iCoff;
	int iqCorrNeg, i;
	static const u_int32_t offset_array[3] = {
		AR_PHY_RX_IQCAL_CORR_B0,
		AR_PHY_RX_IQCAL_CORR_B1,
		AR_PHY_RX_IQCAL_CORR_B2,
	};

	for (i = 0; i < numChains; i++) {
		powerMeasI = ah->totalPowerMeasI[i];
		powerMeasQ = ah->totalPowerMeasQ[i];
		iqCorrMeas = ah->totalIqCorrMeas[i];

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Starting IQ Cal and Correction for Chain %d\n",
			i);

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Original: Chn %d iq_corr_meas = 0x%08x\n",
			i, ah->totalIqCorrMeas[i]);

		iqCorrNeg = 0;

		if (iqCorrMeas > 0x80000000) {
			iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
			iqCorrNeg = 1;
		}

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Chn %d pwr_meas_i = 0x%08x\n", i, powerMeasI);
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Chn %d pwr_meas_q = 0x%08x\n", i, powerMeasQ);
		ath_dbg(common, ATH_DBG_CALIBRATE, "iqCorrNeg is 0x%08x\n",
			iqCorrNeg);

		iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
		qCoffDenom = powerMeasQ / 64;

		if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
			iCoff = iqCorrMeas / iCoffDenom;
			qCoff = powerMeasI / qCoffDenom - 64;
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Chn %d iCoff = 0x%08x\n", i, iCoff);
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Chn %d qCoff = 0x%08x\n", i, qCoff);

			/* Force bounds on iCoff */
			if (iCoff >= 63)
				iCoff = 63;
			else if (iCoff <= -63)
				iCoff = -63;

			/* Negate iCoff if iqCorrNeg == 0 */
			if (iqCorrNeg == 0x0)
				iCoff = -iCoff;

			/* Force bounds on qCoff */
			if (qCoff >= 63)
				qCoff = 63;
			else if (qCoff <= -63)
				qCoff = -63;

			iCoff = iCoff & 0x7f;
			qCoff = qCoff & 0x7f;

			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Chn %d : iCoff = 0x%x qCoff = 0x%x\n",
				i, iCoff, qCoff);
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Register offset (0x%04x) before update = 0x%x\n",
				offset_array[i],
				REG_READ(ah, offset_array[i]));
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				      iCoff);
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				      qCoff);
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"IQ Cal and Correction done for Chain %d\n", i);
		}
	}

	REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
		    AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
	ath_dbg(common, ATH_DBG_CALIBRATE,
		"IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
		(unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
		AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
		REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
}
static const struct ath9k_percal_data iq_cal_single_sample = {
	IQ_MISMATCH_CAL,
	MIN_CAL_SAMPLES,
	PER_MAX_LOG_COUNT,
	ar9003_hw_iqcal_collect,
	ar9003_hw_iqcalibrate
};

static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
{
	ah->iq_caldata.calData = &iq_cal_single_sample;
}
/*
 * solve 4x4 linear equation used in loopback iq cal.
 */
static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
				   s32 sin_2phi_1,
				   s32 cos_2phi_1,
				   s32 sin_2phi_2,
				   s32 cos_2phi_2,
				   s32 mag_a0_d0,
				   s32 phs_a0_d0,
				   s32 mag_a1_d0,
				   s32 phs_a1_d0,
				   s32 solved_eq[])
{
	s32 f1 = cos_2phi_1 - cos_2phi_2,
	    f3 = sin_2phi_1 - sin_2phi_2,
	    f2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	const s32 result_shift = 1 << 15;
	struct ath_common *common = ath9k_hw_common(ah);

	f2 = (f1 * f1 + f3 * f3) / result_shift;

	if (!f2) {
		ath_dbg(common, ATH_DBG_CALIBRATE, "Divide by 0\n");
		return false;
	}

	/* mag mismatch, tx */
	mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
	/* phs mismatch, tx */
	phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);

	mag_tx = (mag_tx / f2);
	phs_tx = (phs_tx / f2);

	/* mag mismatch, rx */
	mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
		 result_shift;
	/* phs mismatch, rx */
	phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
		 result_shift;

	solved_eq[0] = mag_tx;
	solved_eq[1] = phs_tx;
	solved_eq[2] = mag_rx;
	solved_eq[3] = phs_rx;

	return true;
}
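
/*
 * Approximate sqrt(re^2 + im^2) without a multiply or square root, using
 * the alpha-max-plus-beta-min form: max - max/32 + min/8 + min/4.
 */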
static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
{
	s32 abs_i = abs(in_re),
	    abs_q = abs(in_im),
	    max_abs, min_abs;

	if (abs_i > abs_q) {
		max_abs = abs_i;
		min_abs = abs_q;
	} else {
		max_abs = abs_q;
		min_abs = abs_i;
	}

	return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
}

#define DELPT 32
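
/*
 * Unpack the six channel-info words for one chain into 12-bit sign-extended
 * measurements (taken with and without the analog phase shift) and solve for
 * the Tx and Rx IQ correction coefficients: iqc_coeff[0] for Tx,
 * iqc_coeff[1] for Rx.
 */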
static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
				   s32 chain_idx,
				   const s32 iq_res[],
				   s32 iqc_coeff[])
{
	s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
	    i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
	    i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
	    i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
	s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
	    phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
	    sin_2phi_1, cos_2phi_1,
	    sin_2phi_2, cos_2phi_2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
	    q_q_coff, q_i_coff;
	const s32 res_scale = 1 << 15;
	const s32 delpt_shift = 1 << 8;
	s32 mag1, mag2;
	struct ath_common *common = ath9k_hw_common(ah);

	i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
	i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
	iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);

	if (i2_m_q2_a0_d0 > 0x800)
		i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);

	if (i2_p_q2_a0_d0 > 0x800)
		i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);

	if (iq_corr_a0_d0 > 0x800)
		iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);

	i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
	i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
	iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;

	if (i2_m_q2_a0_d1 > 0x800)
		i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);

	if (i2_p_q2_a0_d1 > 0x800)
		i2_p_q2_a0_d1 = -((0xfff - i2_p_q2_a0_d1) + 1);

	if (iq_corr_a0_d1 > 0x800)
		iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);

	i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
	i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
	iq_corr_a1_d0 = iq_res[4] & 0xfff;

	if (i2_m_q2_a1_d0 > 0x800)
		i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);

	if (i2_p_q2_a1_d0 > 0x800)
		i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);

	if (iq_corr_a1_d0 > 0x800)
		iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);

	i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
	i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
	iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;

	if (i2_m_q2_a1_d1 > 0x800)
		i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);

	if (i2_p_q2_a1_d1 > 0x800)
		i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);

	if (iq_corr_a1_d1 > 0x800)
		iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);
	if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
	    (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0:\n"
			"a0_d0=%d\n"
			"a0_d1=%d\n"
			"a1_d0=%d\n"
			"a1_d1=%d\n",
			i2_p_q2_a0_d0, i2_p_q2_a0_d1,
			i2_p_q2_a1_d0, i2_p_q2_a1_d1);
		return false;
	}
	mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
	phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;

	mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
	phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;

	mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
	phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;

	mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
	phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;

	/* w/o analog phase shift */
	sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
	/* w/o analog phase shift */
	cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);

	/*
	 * force sin^2 + cos^2 = 1;
	 * find magnitude by approximation
	 */
	mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
	mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);

	if ((mag1 == 0) || (mag2 == 0)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0: mag1=%d, mag2=%d\n",
			mag1, mag2);
		return false;
	}

	/* normalization sin and cos by mag */
	sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
	cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
	sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
	cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);

	/* calculate IQ mismatch */
	if (!ar9003_hw_solve_iq_cal(ah,
				    sin_2phi_1, cos_2phi_1,
				    sin_2phi_2, cos_2phi_2,
				    mag_a0_d0, phs_a0_d0,
				    mag_a1_d0,
				    phs_a1_d0, solved_eq)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Call to ar9003_hw_solve_iq_cal() failed.\n");
		return false;
	}

	mag_tx = solved_eq[0];
	phs_tx = solved_eq[1];
	mag_rx = solved_eq[2];
	phs_rx = solved_eq[3];

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"chain %d: mag mismatch=%d phase mismatch=%d\n",
		chain_idx, mag_tx/res_scale, phs_tx/res_scale);

	if (res_scale == mag_tx) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0: mag_tx=%d, res_scale=%d\n",
			mag_tx, res_scale);
		return false;
	}

	/* calculate and quantize Tx IQ correction factor */
	mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
	phs_corr_tx = -phs_tx;

	q_q_coff = (mag_corr_tx * 128 / res_scale);
	q_i_coff = (phs_corr_tx * 256 / res_scale);

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"tx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	iqc_coeff[0] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"tx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[0]);

	if (-mag_rx == res_scale) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Divide by 0: mag_rx=%d, res_scale=%d\n",
			mag_rx, res_scale);
		return false;
	}

	/* calculate and quantize Rx IQ correction factors */
	mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
	phs_corr_rx = -phs_rx;

	q_q_coff = (mag_corr_rx * 128 / res_scale);
	q_i_coff = (phs_corr_rx * 256 / res_scale);

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"rx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	iqc_coeff[1] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, ATH_DBG_CALIBRATE,
		"rx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[1]);

	return true;
}
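
/*
 * Detect and repair an outlying per-gain coefficient: if the max-min spread
 * exceeds max_delta, the extreme value farther from the average is
 * overwritten with the average of the remaining measurements.
 */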
static void ar9003_hw_detect_outlier(int *mp_coeff, int nmeasurement,
				     int max_delta)
{
	int mp_max = -64, max_idx = 0;
	int mp_min = 63, min_idx = 0;
	int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;

	/* find min/max mismatch across all calibrated gains */
	for (i = 0; i < nmeasurement; i++) {
		if (mp_coeff[i] > mp_max) {
			mp_max = mp_coeff[i];
			max_idx = i;
		} else if (mp_coeff[i] < mp_min) {
			mp_min = mp_coeff[i];
			min_idx = i;
		}
	}

	/* find average (exclude max abs value) */
	for (i = 0; i < nmeasurement; i++) {
		if ((abs(mp_coeff[i]) < abs(mp_max)) ||
		    (abs(mp_coeff[i]) < abs(mp_min))) {
			mp_avg += mp_coeff[i];
			mp_count++;
		}
	}

	/*
	 * finding mean magnitude/phase if possible, otherwise
	 * just use the last value as the mean
	 */
	if (mp_count)
		mp_avg /= mp_count;
	else
		mp_avg = mp_coeff[nmeasurement - 1];

	/* detect outlier */
	if (abs(mp_max - mp_min) > max_delta) {
		if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
			outlier_idx = max_idx;
		else
			outlier_idx = min_idx;

		mp_coeff[outlier_idx] = mp_avg;
	}
}
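
/*
 * Pack each per-gain pair into a 14-bit coefficient (magnitude in the low
 * 7 bits, phase in the upper 7) and write it to the per-chain Tx IQ
 * correction coefficient table, then enable Tx and Rx loopback IQ correction.
 */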
static void ar9003_hw_tx_iqcal_load_avg_2_passes(struct ath_hw *ah,
						 u8 num_chains,
						 struct coeff *coeff,
						 bool is_reusable)
{
	int i, im, nmeasurement;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	struct ath9k_hw_cal_data *caldata = ah->caldata;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
			tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);

			tx_corr_coeff[i * 2][2] =
			tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	/* Load the average of 2 passes */
	for (i = 0; i < num_chains; i++) {
		nmeasurement = REG_READ_FIELD(ah,
					      AR_PHY_TX_IQCAL_STATUS_B0,
					      AR_PHY_CALIBRATED_GAINS_0);

		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		/* detect outlier only if nmeasurement > 1 */
		if (nmeasurement > 1) {
			/* Detect magnitude outlier */
			ar9003_hw_detect_outlier(coeff->mag_coeff[i],
						 nmeasurement, MAX_MAG_DELTA);

			/* Detect phase outlier */
			ar9003_hw_detect_outlier(coeff->phs_coeff[i],
						 nmeasurement, MAX_PHS_DELTA);
		}

		for (im = 0; im < nmeasurement; im++) {
			coeff->iqc_coeff[0] = (coeff->mag_coeff[i][im] & 0x7f) |
				((coeff->phs_coeff[i][im] & 0x7f) << 7);

			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					coeff->iqc_coeff[0]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					coeff->iqc_coeff[0]);

			if (caldata)
				caldata->tx_corr_coeff[im][i] =
					coeff->iqc_coeff[0];
		}
		if (caldata)
			caldata->num_measures[i] = nmeasurement;
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);

	if (caldata)
		caldata->done_txiqcal_once = is_reusable;
}
static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u8 tx_gain_forced;

	tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
					AR_PHY_TXGAIN_FORCE);
	if (tx_gain_forced)
		REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
			      AR_PHY_TXGAIN_FORCE, 0);

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
		      AR_PHY_TX_IQCAL_START_DO_CAL, 1);

	if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
			   AR_PHY_TX_IQCAL_START_DO_CAL, 0,
			   AH_WAIT_TIMEOUT)) {
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"Tx IQ Cal is not completed.\n");
		return false;
	}
	return true;
}
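
/*
 * Read the per-gain Tx IQ cal results for each active chain from the
 * channel-info memory, convert them into correction coefficients and hand
 * the collected set to ar9003_hw_tx_iqcal_load_avg_2_passes().
 */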
static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	struct ath_common *common = ath9k_hw_common(ah);
	const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
		AR_PHY_TX_IQCAL_STATUS_B0,
		AR_PHY_TX_IQCAL_STATUS_B1,
		AR_PHY_TX_IQCAL_STATUS_B2,
	};
	const u_int32_t chan_info_tab[] = {
		AR_PHY_CHAN_INFO_TAB_0,
		AR_PHY_CHAN_INFO_TAB_1,
		AR_PHY_CHAN_INFO_TAB_2,
	};
	struct coeff coeff;
	s32 iq_res[6];
	u8 num_chains = 0;
	int i, im, j;
	int nmeasurement;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (ah->txchainmask & (1 << i))
			num_chains++;
	}

	for (i = 0; i < num_chains; i++) {
		nmeasurement = REG_READ_FIELD(ah,
					      AR_PHY_TX_IQCAL_STATUS_B0,
					      AR_PHY_CALIBRATED_GAINS_0);
		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		for (im = 0; im < nmeasurement; im++) {
			ath_dbg(common, ATH_DBG_CALIBRATE,
				"Doing Tx IQ Cal for chain %d.\n", i);

			if (REG_READ(ah, txiqcal_status[i]) &
			    AR_PHY_TX_IQCAL_STATUS_FAILED) {
				ath_dbg(common, ATH_DBG_CALIBRATE,
					"Tx IQ Cal failed for chain %d.\n", i);
				goto tx_iqcal_fail;
			}

			for (j = 0; j < 3; j++) {
				u32 idx = 2 * j, offset = 4 * (3 * im + j);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      0);

				/* 32 bits */
				iq_res[idx] = REG_READ(ah,
						       chan_info_tab[i] +
						       offset);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      1);

				/* 16 bits */
				iq_res[idx + 1] = 0xffff & REG_READ(ah,
						chan_info_tab[i] + offset);

				ath_dbg(common, ATH_DBG_CALIBRATE,
					"IQ_RES[%d]=0x%x IQ_RES[%d]=0x%x\n",
					idx, iq_res[idx], idx + 1,
					iq_res[idx + 1]);
			}
			if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
						    coeff.iqc_coeff)) {
				ath_dbg(common, ATH_DBG_CALIBRATE,
					"Failed in calculation of IQ correction.\n");
				goto tx_iqcal_fail;
			}
			coeff.mag_coeff[i][im] = coeff.iqc_coeff[0] & 0x7f;
			coeff.phs_coeff[i][im] =
				(coeff.iqc_coeff[0] >> 7) & 0x7f;

			if (coeff.mag_coeff[i][im] > 63)
				coeff.mag_coeff[i][im] -= 128;
			if (coeff.phs_coeff[i][im] > 63)
				coeff.phs_coeff[i][im] -= 128;
		}
	}
	ar9003_hw_tx_iqcal_load_avg_2_passes(ah, num_chains,
					     &coeff, is_reusable);

	return;

tx_iqcal_fail:
	ath_dbg(common, ATH_DBG_CALIBRATE, "Tx IQ Cal failed\n");
	return;
}
static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	int i, im;

	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
			tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);

			tx_corr_coeff[i * 2][2] =
			tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		for (im = 0; im < caldata->num_measures[i]; im++) {
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
		}
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
}
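
/*
 * Reload the saved RTT (radio retention) calibration history for each active
 * Rx chain and ask the hardware to restore it; when the restore succeeds,
 * ar9003_hw_init_cal() can skip re-running the RTT calibration.
 */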
static bool ar9003_hw_rtt_restore(struct ath_hw *ah, struct ath9k_channel *chan)
{
	struct ath9k_rtt_hist *hist;
	u32 *table;
	int i;
	bool restore;

	if (!ah->caldata)
		return false;

	hist = &ah->caldata->rtt_hist;
	if (!hist->num_readings)
		return false;

	ar9003_hw_rtt_enable(ah);
	ar9003_hw_rtt_set_mask(ah, 0x00);
	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->rxchainmask & (1 << i)))
			continue;
		table = &hist->table[i][hist->num_readings][0];
		ar9003_hw_rtt_load_hist(ah, i, table);
	}
	restore = ar9003_hw_rtt_force_restore(ah);
	ar9003_hw_rtt_disable(ah);

	return restore;
}
static bool ar9003_hw_init_cal(struct ath_hw *ah,
			       struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	struct ath9k_hw_mci *mci_hw = &ah->btcoex_hw.mci;
	bool txiqcal_done = false, txclcal_done = false;
	bool is_reusable = true, status = true;
	bool run_rtt_cal = false, run_agc_cal;
	bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
	bool mci = !!(ah->caps.hw_caps & ATH9K_HW_CAP_MCI);
	u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
					  AR_PHY_AGC_CONTROL_FLTR_CAL |
					  AR_PHY_AGC_CONTROL_PKDET_CAL;
	int i, j;
	u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
					  AR_PHY_CL_TAB_1,
					  AR_PHY_CL_TAB_2 };

	if (rtt) {
		if (!ar9003_hw_rtt_restore(ah, chan))
			run_rtt_cal = true;

		ath_dbg(common, ATH_DBG_CALIBRATE, "RTT restore %s\n",
			run_rtt_cal ? "failed" : "succeeded");
	}
	run_agc_cal = run_rtt_cal;

	if (run_rtt_cal) {
		ar9003_hw_rtt_enable(ah);
		ar9003_hw_rtt_set_mask(ah, 0x00);
		ar9003_hw_rtt_clear_hist(ah);
	}

	if (rtt && !run_rtt_cal) {
		agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
		agc_supp_cals &= agc_ctrl;
		agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
			      AR_PHY_AGC_CONTROL_FLTR_CAL |
			      AR_PHY_AGC_CONTROL_PKDET_CAL);
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (ah->enabled_cals & TX_CL_CAL) {
		if (caldata && caldata->done_txclcal_once)
			REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
		else {
			REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
			run_agc_cal = true;
		}
	}

	if (!(ah->enabled_cals & TX_IQ_CAL))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (caldata && !caldata->done_txiqcal_once)
			REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		else
			REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		txiqcal_done = run_agc_cal = true;
		goto skip_tx_iqcal;
	} else if (caldata && !caldata->done_txiqcal_once)
		run_agc_cal = true;

	if (mci && IS_CHAN_2GHZ(chan) &&
	    (mci_hw->bt_state == MCI_BT_AWAKE) &&
	    run_agc_cal &&
	    !(mci_hw->config & ATH_MCI_CONFIG_DISABLE_MCI_CAL)) {

		u32 pld[4] = {0, 0, 0, 0};

		/* send CAL_REQ only when BT is AWAKE. */
		ath_dbg(common, ATH_DBG_MCI, "MCI send WLAN_CAL_REQ 0x%x\n",
			mci_hw->wlan_cal_seq);

		MCI_GPM_SET_CAL_TYPE(pld, MCI_GPM_WLAN_CAL_REQ);
		pld[MCI_GPM_WLAN_CAL_W_SEQUENCE] = mci_hw->wlan_cal_seq++;
		ar9003_mci_send_message(ah, MCI_GPM, 0, pld, 16, true, false);

		/* Wait BT_CAL_GRANT for 50ms */
		ath_dbg(common, ATH_DBG_MCI, "MCI wait for BT_CAL_GRANT");

		if (ar9003_mci_wait_for_gpm(ah, MCI_GPM_BT_CAL_GRANT, 0, 50000))
			ath_dbg(common, ATH_DBG_MCI, "MCI got BT_CAL_GRANT");
		else {
			is_reusable = false;
			ath_dbg(common, ATH_DBG_MCI, "\nMCI BT is not responding");
		}
	}

	txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
	udelay(5);
	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
skip_tx_iqcal:
	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);
	}

	if (mci && IS_CHAN_2GHZ(chan) &&
	    (mci_hw->bt_state == MCI_BT_AWAKE) &&
	    run_agc_cal &&
	    !(mci_hw->config & ATH_MCI_CONFIG_DISABLE_MCI_CAL)) {

		u32 pld[4] = {0, 0, 0, 0};

		ath_dbg(common, ATH_DBG_MCI, "MCI Send WLAN_CAL_DONE 0x%x\n",
			mci_hw->wlan_cal_done);

		MCI_GPM_SET_CAL_TYPE(pld, MCI_GPM_WLAN_CAL_DONE);
		pld[MCI_GPM_WLAN_CAL_W_SEQUENCE] = mci_hw->wlan_cal_done++;
		ar9003_mci_send_message(ah, MCI_GPM, 0, pld, 16, true, false);
	}

	if (rtt && !run_rtt_cal) {
		agc_ctrl |= agc_supp_cals;
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (!status) {
		if (run_rtt_cal)
			ar9003_hw_rtt_disable(ah);

		ath_dbg(common, ATH_DBG_CALIBRATE,
			"offset calibration failed to complete in 1ms; noisy environment?\n");
		return false;
	}
	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, is_reusable);
	else if (caldata && caldata->done_txiqcal_once)
		ar9003_hw_tx_iq_cal_reload(ah);

#define CL_TAB_ENTRY(reg_base)	(reg_base + (4 * j))
	if (caldata && (ah->enabled_cals & TX_CL_CAL)) {
		txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
				  AR_PHY_AGC_CONTROL_CLC_SUCCESS);
		if (caldata->done_txclcal_once) {
			for (i = 0; i < AR9300_MAX_CHAINS; i++) {
				if (!(ah->txchainmask & (1 << i)))
					continue;
				for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
					REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
						  caldata->tx_clcal[i][j]);
			}
		} else if (is_reusable && txclcal_done) {
			for (i = 0; i < AR9300_MAX_CHAINS; i++) {
				if (!(ah->txchainmask & (1 << i)))
					continue;
				for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
					caldata->tx_clcal[i][j] =
						REG_READ(ah,
							 CL_TAB_ENTRY(cl_idx[i]));
			}
			caldata->done_txclcal_once = true;
		}
	}
#undef CL_TAB_ENTRY

	if (run_rtt_cal && caldata) {
		struct ath9k_rtt_hist *hist = &caldata->rtt_hist;

		if (is_reusable && (hist->num_readings < RTT_HIST_MAX)) {
			u32 *table;

			hist->num_readings++;
			for (i = 0; i < AR9300_MAX_CHAINS; i++) {
				if (!(ah->rxchainmask & (1 << i)))
					continue;
				table = &hist->table[i][hist->num_readings][0];
				ar9003_hw_rtt_fill_hist(ah, i, table);
			}
		}

		ar9003_hw_rtt_disable(ah);
	}

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;
	ah->supp_cals = IQ_MISMATCH_CAL;

	if (ah->supp_cals & IQ_MISMATCH_CAL) {
		INIT_CAL(&ah->iq_caldata);
		INSERT_CAL(ah, &ah->iq_caldata);
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"enabling IQ Calibration.\n");
	}

	if (ah->supp_cals & TEMP_COMP_CAL) {
		INIT_CAL(&ah->tempCompCalData);
		INSERT_CAL(ah, &ah->tempCompCalData);
		ath_dbg(common, ATH_DBG_CALIBRATE,
			"enabling Temperature Compensation Calibration.\n");
	}

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}
void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
{
	struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
	struct ath_hw_ops *ops = ath9k_hw_ops(ah);

	priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
	priv_ops->init_cal = ar9003_hw_init_cal;
	priv_ops->setup_calibration = ar9003_hw_setup_calibration;

	ops->calibrate = ar9003_hw_calibrate;
}