ar9003_calib.c 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241
  1. /*
  2. * Copyright (c) 2010-2011 Atheros Communications Inc.
  3. *
  4. * Permission to use, copy, modify, and/or distribute this software for any
  5. * purpose with or without fee is hereby granted, provided that the above
  6. * copyright notice and this permission notice appear in all copies.
  7. *
  8. * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  9. * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10. * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11. * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12. * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13. * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14. * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15. */
  16. #include "hw.h"
  17. #include "hw-ops.h"
  18. #include "ar9003_phy.h"
  19. #include "ar9003_rtt.h"
  20. #include "ar9003_mci.h"
/* Number of calibrated Tx gain settings (measurements) per chain */
#define MAX_MEASUREMENT	MAX_IQCAL_MEASUREMENT
/* Largest tolerated spread between per-gain magnitude coefficients */
#define MAX_MAG_DELTA	11
/* Largest tolerated spread between per-gain phase coefficients */
#define MAX_PHS_DELTA	10

/* Scratch area for Tx IQ mismatch coefficients, per chain and gain */
struct coeff {
	int mag_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];	/* magnitude mismatch */
	int phs_coeff[AR9300_MAX_CHAINS][MAX_MEASUREMENT];	/* phase mismatch */
	int iqc_coeff[2];	/* packed Tx/Rx correction words */
};

/* Periodic calibration types supported on AR9003 (bitmask values) */
enum ar9003_cal_types {
	IQ_MISMATCH_CAL = BIT(0),
};
  32. static void ar9003_hw_setup_calibration(struct ath_hw *ah,
  33. struct ath9k_cal_list *currCal)
  34. {
  35. struct ath_common *common = ath9k_hw_common(ah);
  36. /* Select calibration to run */
  37. switch (currCal->calData->calType) {
  38. case IQ_MISMATCH_CAL:
  39. /*
  40. * Start calibration with
  41. * 2^(INIT_IQCAL_LOG_COUNT_MAX+1) samples
  42. */
  43. REG_RMW_FIELD(ah, AR_PHY_TIMING4,
  44. AR_PHY_TIMING4_IQCAL_LOG_COUNT_MAX,
  45. currCal->calData->calCountMax);
  46. REG_WRITE(ah, AR_PHY_CALMODE, AR_PHY_CALMODE_IQ);
  47. ath_dbg(common, CALIBRATE,
  48. "starting IQ Mismatch Calibration\n");
  49. /* Kick-off cal */
  50. REG_SET_BIT(ah, AR_PHY_TIMING4, AR_PHY_TIMING4_DO_CAL);
  51. break;
  52. default:
  53. ath_err(common, "Invalid calibration type\n");
  54. break;
  55. }
  56. }
  57. /*
  58. * Generic calibration routine.
  59. * Recalibrate the lower PHY chips to account for temperature/environment
  60. * changes.
  61. */
  62. static bool ar9003_hw_per_calibration(struct ath_hw *ah,
  63. struct ath9k_channel *ichan,
  64. u8 rxchainmask,
  65. struct ath9k_cal_list *currCal)
  66. {
  67. struct ath9k_hw_cal_data *caldata = ah->caldata;
  68. /* Cal is assumed not done until explicitly set below */
  69. bool iscaldone = false;
  70. /* Calibration in progress. */
  71. if (currCal->calState == CAL_RUNNING) {
  72. /* Check to see if it has finished. */
  73. if (!(REG_READ(ah, AR_PHY_TIMING4) & AR_PHY_TIMING4_DO_CAL)) {
  74. /*
  75. * Accumulate cal measures for active chains
  76. */
  77. currCal->calData->calCollect(ah);
  78. ah->cal_samples++;
  79. if (ah->cal_samples >=
  80. currCal->calData->calNumSamples) {
  81. unsigned int i, numChains = 0;
  82. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  83. if (rxchainmask & (1 << i))
  84. numChains++;
  85. }
  86. /*
  87. * Process accumulated data
  88. */
  89. currCal->calData->calPostProc(ah, numChains);
  90. /* Calibration has finished. */
  91. caldata->CalValid |= currCal->calData->calType;
  92. currCal->calState = CAL_DONE;
  93. iscaldone = true;
  94. } else {
  95. /*
  96. * Set-up collection of another sub-sample until we
  97. * get desired number
  98. */
  99. ar9003_hw_setup_calibration(ah, currCal);
  100. }
  101. }
  102. } else if (!(caldata->CalValid & currCal->calData->calType)) {
  103. /* If current cal is marked invalid in channel, kick it off */
  104. ath9k_hw_reset_calibration(ah, currCal);
  105. }
  106. return iscaldone;
  107. }
/*
 * Run the current periodic calibration (if any) and, on long-cal
 * intervals, refresh the noise floor history and restart NF cal.
 *
 * Returns true only when no further calibration work is pending; a
 * pending follow-up cal masks completion from the caller.
 */
static bool ar9003_hw_calibrate(struct ath_hw *ah,
				struct ath9k_channel *chan,
				u8 rxchainmask,
				bool longcal)
{
	bool iscaldone = true;
	struct ath9k_cal_list *currCal = ah->cal_list_curr;

	/*
	 * For given calibration:
	 * 1. Call generic cal routine
	 * 2. When this cal is done (isCalDone) if we have more cals waiting
	 *    (eg after reset), mask this to upper layers by not propagating
	 *    isCalDone if it is set to TRUE.
	 *    Instead, change isCalDone to FALSE and setup the waiting cal(s)
	 *    to be run.
	 */
	if (currCal &&
	    (currCal->calState == CAL_RUNNING ||
	     currCal->calState == CAL_WAITING)) {
		iscaldone = ar9003_hw_per_calibration(ah, chan,
						      rxchainmask, currCal);
		if (iscaldone) {
			/*
			 * Advance to the next cal in the list.
			 * NOTE(review): calNext is assumed non-NULL here —
			 * presumably the cal list is circular; confirm
			 * against the list setup code.
			 */
			ah->cal_list_curr = currCal = currCal->calNext;

			if (currCal->calState == CAL_WAITING) {
				iscaldone = false;
				ath9k_hw_reset_calibration(ah, currCal);
			}
		}
	}

	/*
	 * Do NF cal only at longer intervals. Get the value from
	 * the previous NF cal and update history buffer.
	 */
	if (longcal && ath9k_hw_getnf(ah, chan)) {
		/*
		 * Load the NF from history buffer of the current channel.
		 * NF is slow time-variant, so it is OK to use a historical
		 * value.
		 */
		ath9k_hw_loadnf(ah, ah->curchan);

		/* start NF calibration, without updating BB NF register */
		ath9k_hw_start_nfcal(ah, false);
	}

	return iscaldone;
}
  153. static void ar9003_hw_iqcal_collect(struct ath_hw *ah)
  154. {
  155. int i;
  156. /* Accumulate IQ cal measures for active chains */
  157. for (i = 0; i < AR5416_MAX_CHAINS; i++) {
  158. if (ah->txchainmask & BIT(i)) {
  159. ah->totalPowerMeasI[i] +=
  160. REG_READ(ah, AR_PHY_CAL_MEAS_0(i));
  161. ah->totalPowerMeasQ[i] +=
  162. REG_READ(ah, AR_PHY_CAL_MEAS_1(i));
  163. ah->totalIqCorrMeas[i] +=
  164. (int32_t) REG_READ(ah, AR_PHY_CAL_MEAS_2(i));
  165. ath_dbg(ath9k_hw_common(ah), CALIBRATE,
  166. "%d: Chn %d pmi=0x%08x;pmq=0x%08x;iqcm=0x%08x;\n",
  167. ah->cal_samples, i, ah->totalPowerMeasI[i],
  168. ah->totalPowerMeasQ[i],
  169. ah->totalIqCorrMeas[i]);
  170. }
  171. }
  172. }
/*
 * Post-process the accumulated Rx IQ measurements: derive the I/Q
 * correction coefficients for each chain, program them into the
 * per-chain correction registers, then enable IQ correction.
 */
static void ar9003_hw_iqcalibrate(struct ath_hw *ah, u8 numChains)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 powerMeasQ, powerMeasI, iqCorrMeas;
	u32 qCoffDenom, iCoffDenom;
	int32_t qCoff, iCoff;
	int iqCorrNeg, i;
	static const u_int32_t offset_array[3] = {
		AR_PHY_RX_IQCAL_CORR_B0,
		AR_PHY_RX_IQCAL_CORR_B1,
		AR_PHY_RX_IQCAL_CORR_B2,
	};

	for (i = 0; i < numChains; i++) {
		powerMeasI = ah->totalPowerMeasI[i];
		powerMeasQ = ah->totalPowerMeasQ[i];
		iqCorrMeas = ah->totalIqCorrMeas[i];

		ath_dbg(common, CALIBRATE,
			"Starting IQ Cal and Correction for Chain %d\n", i);

		ath_dbg(common, CALIBRATE,
			"Original: Chn %d iq_corr_meas = 0x%08x\n",
			i, ah->totalIqCorrMeas[i]);

		/*
		 * The correlation measurement is effectively signed;
		 * convert two's-complement-style values to magnitude+sign.
		 */
		iqCorrNeg = 0;
		if (iqCorrMeas > 0x80000000) {
			iqCorrMeas = (0xffffffff - iqCorrMeas) + 1;
			iqCorrNeg = 1;
		}

		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_i = 0x%08x\n",
			i, powerMeasI);
		ath_dbg(common, CALIBRATE, "Chn %d pwr_meas_q = 0x%08x\n",
			i, powerMeasQ);
		ath_dbg(common, CALIBRATE, "iqCorrNeg is 0x%08x\n", iqCorrNeg);

		/* Denominators scale the measurements into coefficient range */
		iCoffDenom = (powerMeasI / 2 + powerMeasQ / 2) / 256;
		qCoffDenom = powerMeasQ / 64;

		/* Skip chain if either denominator would divide by zero */
		if ((iCoffDenom != 0) && (qCoffDenom != 0)) {
			iCoff = iqCorrMeas / iCoffDenom;
			qCoff = powerMeasI / qCoffDenom - 64;
			ath_dbg(common, CALIBRATE, "Chn %d iCoff = 0x%08x\n",
				i, iCoff);
			ath_dbg(common, CALIBRATE, "Chn %d qCoff = 0x%08x\n",
				i, qCoff);

			/* Force bounds on iCoff */
			if (iCoff >= 63)
				iCoff = 63;
			else if (iCoff <= -63)
				iCoff = -63;

			/* Negate iCoff if iqCorrNeg == 0 */
			if (iqCorrNeg == 0x0)
				iCoff = -iCoff;

			/* Force bounds on qCoff */
			if (qCoff >= 63)
				qCoff = 63;
			else if (qCoff <= -63)
				qCoff = -63;

			/* Truncate to the 7-bit register field width */
			iCoff = iCoff & 0x7f;
			qCoff = qCoff & 0x7f;

			ath_dbg(common, CALIBRATE,
				"Chn %d : iCoff = 0x%x qCoff = 0x%x\n",
				i, iCoff, qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) before update = 0x%x\n",
				offset_array[i],
				REG_READ(ah, offset_array[i]));

			/*
			 * Saturated coefficients on AR9565: bail out without
			 * programming anything (leaves previous correction
			 * in place).
			 */
			if (AR_SREV_9565(ah) &&
			    (iCoff == 63 || qCoff == 63 ||
			     iCoff == -63 || qCoff == -63))
				return;

			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				      iCoff);
			REG_RMW_FIELD(ah, offset_array[i],
				      AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				      qCoff);
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QI COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_I_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"Register offset (0x%04x) QQ COFF (bitfields 0x%08x) after update = 0x%x\n",
				offset_array[i],
				AR_PHY_RX_IQCAL_CORR_IQCORR_Q_Q_COFF,
				REG_READ(ah, offset_array[i]));
			ath_dbg(common, CALIBRATE,
				"IQ Cal and Correction done for Chain %d\n", i);
		}
	}

	/* All chains programmed: turn the correction machinery on */
	REG_SET_BIT(ah, AR_PHY_RX_IQCAL_CORR_B0,
		    AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE);
	ath_dbg(common, CALIBRATE,
		"IQ Cal and Correction (offset 0x%04x) enabled (bit position 0x%08x). New Value 0x%08x\n",
		(unsigned) (AR_PHY_RX_IQCAL_CORR_B0),
		AR_PHY_RX_IQCAL_CORR_IQCORR_ENABLE,
		REG_READ(ah, AR_PHY_RX_IQCAL_CORR_B0));
}
/* Descriptor for the single-sample IQ mismatch periodic calibration */
static const struct ath9k_percal_data iq_cal_single_sample = {
	IQ_MISMATCH_CAL,
	MIN_CAL_SAMPLES,
	PER_MAX_LOG_COUNT,
	ar9003_hw_iqcal_collect,
	ar9003_hw_iqcalibrate
};

/*
 * Select which calibrations this chip supports and hook up the
 * IQ-mismatch percal data.
 */
static void ar9003_hw_init_cal_settings(struct ath_hw *ah)
{
	ah->iq_caldata.calData = &iq_cal_single_sample;

	if (AR_SREV_9300_20_OR_LATER(ah)) {
		ah->enabled_cals |= TX_IQ_CAL;
		/* AR9485+ (except AR9340) can run Tx IQ cal alongside AGC cal */
		if (AR_SREV_9485_OR_LATER(ah) && !AR_SREV_9340(ah))
			ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
	}

	ah->supp_cals = IQ_MISMATCH_CAL;
}
  284. /*
  285. * solve 4x4 linear equation used in loopback iq cal.
  286. */
  287. static bool ar9003_hw_solve_iq_cal(struct ath_hw *ah,
  288. s32 sin_2phi_1,
  289. s32 cos_2phi_1,
  290. s32 sin_2phi_2,
  291. s32 cos_2phi_2,
  292. s32 mag_a0_d0,
  293. s32 phs_a0_d0,
  294. s32 mag_a1_d0,
  295. s32 phs_a1_d0,
  296. s32 solved_eq[])
  297. {
  298. s32 f1 = cos_2phi_1 - cos_2phi_2,
  299. f3 = sin_2phi_1 - sin_2phi_2,
  300. f2;
  301. s32 mag_tx, phs_tx, mag_rx, phs_rx;
  302. const s32 result_shift = 1 << 15;
  303. struct ath_common *common = ath9k_hw_common(ah);
  304. f2 = (f1 * f1 + f3 * f3) / result_shift;
  305. if (!f2) {
  306. ath_dbg(common, CALIBRATE, "Divide by 0\n");
  307. return false;
  308. }
  309. /* mag mismatch, tx */
  310. mag_tx = f1 * (mag_a0_d0 - mag_a1_d0) + f3 * (phs_a0_d0 - phs_a1_d0);
  311. /* phs mismatch, tx */
  312. phs_tx = f3 * (-mag_a0_d0 + mag_a1_d0) + f1 * (phs_a0_d0 - phs_a1_d0);
  313. mag_tx = (mag_tx / f2);
  314. phs_tx = (phs_tx / f2);
  315. /* mag mismatch, rx */
  316. mag_rx = mag_a0_d0 - (cos_2phi_1 * mag_tx + sin_2phi_1 * phs_tx) /
  317. result_shift;
  318. /* phs mismatch, rx */
  319. phs_rx = phs_a0_d0 + (sin_2phi_1 * mag_tx - cos_2phi_1 * phs_tx) /
  320. result_shift;
  321. solved_eq[0] = mag_tx;
  322. solved_eq[1] = phs_tx;
  323. solved_eq[2] = mag_rx;
  324. solved_eq[3] = phs_rx;
  325. return true;
  326. }
  327. static s32 ar9003_hw_find_mag_approx(struct ath_hw *ah, s32 in_re, s32 in_im)
  328. {
  329. s32 abs_i = abs(in_re),
  330. abs_q = abs(in_im),
  331. max_abs, min_abs;
  332. if (abs_i > abs_q) {
  333. max_abs = abs_i;
  334. min_abs = abs_q;
  335. } else {
  336. max_abs = abs_q;
  337. min_abs = abs_i;
  338. }
  339. return max_abs - (max_abs / 32) + (min_abs / 8) + (min_abs / 4);
  340. }
  341. #define DELPT 32
/*
 * Convert the six raw IQ-cal result words for one chain into packed
 * Tx (iqc_coeff[0]) and Rx (iqc_coeff[1]) correction coefficients.
 *
 * The hardware reports, for two gain settings (a0/a1) and two data
 * points (d0/d1), the quantities i^2-q^2, i^2+q^2 and the i*q
 * correlation, packed as 12-bit two's-complement fields spread across
 * iq_res[0..5]. Returns false on any divide-by-zero or solver failure.
 */
static bool ar9003_hw_calc_iq_corr(struct ath_hw *ah,
				   s32 chain_idx,
				   const s32 iq_res[],
				   s32 iqc_coeff[])
{
	s32 i2_m_q2_a0_d0, i2_p_q2_a0_d0, iq_corr_a0_d0,
	    i2_m_q2_a0_d1, i2_p_q2_a0_d1, iq_corr_a0_d1,
	    i2_m_q2_a1_d0, i2_p_q2_a1_d0, iq_corr_a1_d0,
	    i2_m_q2_a1_d1, i2_p_q2_a1_d1, iq_corr_a1_d1;
	s32 mag_a0_d0, mag_a1_d0, mag_a0_d1, mag_a1_d1,
	    phs_a0_d0, phs_a1_d0, phs_a0_d1, phs_a1_d1,
	    sin_2phi_1, cos_2phi_1,
	    sin_2phi_2, cos_2phi_2;
	s32 mag_tx, phs_tx, mag_rx, phs_rx;
	s32 solved_eq[4], mag_corr_tx, phs_corr_tx, mag_corr_rx, phs_corr_rx,
	    q_q_coff, q_i_coff;
	const s32 res_scale = 1 << 15;
	const s32 delpt_shift = 1 << 8;
	s32 mag1, mag2;
	struct ath_common *common = ath9k_hw_common(ah);

	/* Unpack gain 0 / data 0 fields from iq_res[0..1] */
	i2_m_q2_a0_d0 = iq_res[0] & 0xfff;
	i2_p_q2_a0_d0 = (iq_res[0] >> 12) & 0xfff;
	iq_corr_a0_d0 = ((iq_res[0] >> 24) & 0xff) + ((iq_res[1] & 0xf) << 8);

	/* Sign-extend 12-bit two's-complement values */
	if (i2_m_q2_a0_d0 > 0x800)
		i2_m_q2_a0_d0 = -((0xfff - i2_m_q2_a0_d0) + 1);

	if (i2_p_q2_a0_d0 > 0x800)
		i2_p_q2_a0_d0 = -((0xfff - i2_p_q2_a0_d0) + 1);

	if (iq_corr_a0_d0 > 0x800)
		iq_corr_a0_d0 = -((0xfff - iq_corr_a0_d0) + 1);

	/* Unpack gain 0 / data 1 fields from iq_res[1..2] */
	i2_m_q2_a0_d1 = (iq_res[1] >> 4) & 0xfff;
	i2_p_q2_a0_d1 = (iq_res[2] & 0xfff);
	iq_corr_a0_d1 = (iq_res[2] >> 12) & 0xfff;

	if (i2_m_q2_a0_d1 > 0x800)
		i2_m_q2_a0_d1 = -((0xfff - i2_m_q2_a0_d1) + 1);

	if (i2_p_q2_a0_d1 > 0x800)
		i2_p_q2_a0_d1 = -((0xfff - i2_p_q2_a0_d1) + 1);

	if (iq_corr_a0_d1 > 0x800)
		iq_corr_a0_d1 = -((0xfff - iq_corr_a0_d1) + 1);

	/* Unpack gain 1 / data 0 fields from iq_res[2..4] */
	i2_m_q2_a1_d0 = ((iq_res[2] >> 24) & 0xff) + ((iq_res[3] & 0xf) << 8);
	i2_p_q2_a1_d0 = (iq_res[3] >> 4) & 0xfff;
	iq_corr_a1_d0 = iq_res[4] & 0xfff;

	if (i2_m_q2_a1_d0 > 0x800)
		i2_m_q2_a1_d0 = -((0xfff - i2_m_q2_a1_d0) + 1);

	if (i2_p_q2_a1_d0 > 0x800)
		i2_p_q2_a1_d0 = -((0xfff - i2_p_q2_a1_d0) + 1);

	if (iq_corr_a1_d0 > 0x800)
		iq_corr_a1_d0 = -((0xfff - iq_corr_a1_d0) + 1);

	/* Unpack gain 1 / data 1 fields from iq_res[4..5] */
	i2_m_q2_a1_d1 = (iq_res[4] >> 12) & 0xfff;
	i2_p_q2_a1_d1 = ((iq_res[4] >> 24) & 0xff) + ((iq_res[5] & 0xf) << 8);
	iq_corr_a1_d1 = (iq_res[5] >> 4) & 0xfff;

	if (i2_m_q2_a1_d1 > 0x800)
		i2_m_q2_a1_d1 = -((0xfff - i2_m_q2_a1_d1) + 1);

	if (i2_p_q2_a1_d1 > 0x800)
		i2_p_q2_a1_d1 = -((0xfff - i2_p_q2_a1_d1) + 1);

	if (iq_corr_a1_d1 > 0x800)
		iq_corr_a1_d1 = -((0xfff - iq_corr_a1_d1) + 1);

	/* Total power terms are divisors below; reject zeros up front */
	if ((i2_p_q2_a0_d0 == 0) || (i2_p_q2_a0_d1 == 0) ||
	    (i2_p_q2_a1_d0 == 0) || (i2_p_q2_a1_d1 == 0)) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0:\n"
			"a0_d0=%d\n"
			"a0_d1=%d\n"
			"a2_d0=%d\n"
			"a1_d1=%d\n",
			i2_p_q2_a0_d0, i2_p_q2_a0_d1,
			i2_p_q2_a1_d0, i2_p_q2_a1_d1);
		return false;
	}

	/* Normalize mismatch terms by total power (scaled by res_scale) */
	mag_a0_d0 = (i2_m_q2_a0_d0 * res_scale) / i2_p_q2_a0_d0;
	phs_a0_d0 = (iq_corr_a0_d0 * res_scale) / i2_p_q2_a0_d0;

	mag_a0_d1 = (i2_m_q2_a0_d1 * res_scale) / i2_p_q2_a0_d1;
	phs_a0_d1 = (iq_corr_a0_d1 * res_scale) / i2_p_q2_a0_d1;

	mag_a1_d0 = (i2_m_q2_a1_d0 * res_scale) / i2_p_q2_a1_d0;
	phs_a1_d0 = (iq_corr_a1_d0 * res_scale) / i2_p_q2_a1_d0;

	mag_a1_d1 = (i2_m_q2_a1_d1 * res_scale) / i2_p_q2_a1_d1;
	phs_a1_d1 = (iq_corr_a1_d1 * res_scale) / i2_p_q2_a1_d1;

	/* w/o analog phase shift */
	sin_2phi_1 = (((mag_a0_d0 - mag_a0_d1) * delpt_shift) / DELPT);
	/* w/o analog phase shift */
	cos_2phi_1 = (((phs_a0_d1 - phs_a0_d0) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	sin_2phi_2 = (((mag_a1_d0 - mag_a1_d1) * delpt_shift) / DELPT);
	/* w/ analog phase shift */
	cos_2phi_2 = (((phs_a1_d1 - phs_a1_d0) * delpt_shift) / DELPT);

	/*
	 * force sin^2 + cos^2 = 1;
	 * find magnitude by approximation
	 */
	mag1 = ar9003_hw_find_mag_approx(ah, cos_2phi_1, sin_2phi_1);
	mag2 = ar9003_hw_find_mag_approx(ah, cos_2phi_2, sin_2phi_2);

	if ((mag1 == 0) || (mag2 == 0)) {
		ath_dbg(common, CALIBRATE, "Divide by 0: mag1=%d, mag2=%d\n",
			mag1, mag2);
		return false;
	}

	/* normalization sin and cos by mag */
	sin_2phi_1 = (sin_2phi_1 * res_scale / mag1);
	cos_2phi_1 = (cos_2phi_1 * res_scale / mag1);
	sin_2phi_2 = (sin_2phi_2 * res_scale / mag2);
	cos_2phi_2 = (cos_2phi_2 * res_scale / mag2);

	/* calculate IQ mismatch */
	if (!ar9003_hw_solve_iq_cal(ah,
				    sin_2phi_1, cos_2phi_1,
				    sin_2phi_2, cos_2phi_2,
				    mag_a0_d0, phs_a0_d0,
				    mag_a1_d0,
				    phs_a1_d0, solved_eq)) {
		ath_dbg(common, CALIBRATE,
			"Call to ar9003_hw_solve_iq_cal() failed\n");
		return false;
	}

	mag_tx = solved_eq[0];
	phs_tx = solved_eq[1];
	mag_rx = solved_eq[2];
	phs_rx = solved_eq[3];

	ath_dbg(common, CALIBRATE,
		"chain %d: mag mismatch=%d phase mismatch=%d\n",
		chain_idx, mag_tx/res_scale, phs_tx/res_scale);

	if (res_scale == mag_tx) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0: mag_tx=%d, res_scale=%d\n",
			mag_tx, res_scale);
		return false;
	}

	/* calculate and quantize Tx IQ correction factor */
	mag_corr_tx = (mag_tx * res_scale) / (res_scale - mag_tx);
	phs_corr_tx = -phs_tx;

	q_q_coff = (mag_corr_tx * 128 / res_scale);
	q_i_coff = (phs_corr_tx * 256 / res_scale);

	ath_dbg(common, CALIBRATE, "tx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	/* Clamp to the signed 7-bit range of the register fields */
	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	/* Pack Tx coefficients: magnitude in the high 7 bits */
	iqc_coeff[0] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, CALIBRATE, "tx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[0]);

	if (-mag_rx == res_scale) {
		ath_dbg(common, CALIBRATE,
			"Divide by 0: mag_rx=%d, res_scale=%d\n",
			mag_rx, res_scale);
		return false;
	}

	/* calculate and quantize Rx IQ correction factors */
	mag_corr_rx = (-mag_rx * res_scale) / (res_scale + mag_rx);
	phs_corr_rx = -phs_rx;

	q_q_coff = (mag_corr_rx * 128 / res_scale);
	q_i_coff = (phs_corr_rx * 256 / res_scale);

	ath_dbg(common, CALIBRATE, "rx chain %d: mag corr=%d phase corr=%d\n",
		chain_idx, q_q_coff, q_i_coff);

	if (q_i_coff < -63)
		q_i_coff = -63;
	if (q_i_coff > 63)
		q_i_coff = 63;
	if (q_q_coff < -63)
		q_q_coff = -63;
	if (q_q_coff > 63)
		q_q_coff = 63;

	/* Pack Rx coefficients the same way */
	iqc_coeff[1] = (q_q_coff * 128) + q_i_coff;

	ath_dbg(common, CALIBRATE, "rx chain %d: iq corr coeff=%x\n",
		chain_idx, iqc_coeff[1]);

	return true;
}
  510. static void ar9003_hw_detect_outlier(int *mp_coeff, int nmeasurement,
  511. int max_delta)
  512. {
  513. int mp_max = -64, max_idx = 0;
  514. int mp_min = 63, min_idx = 0;
  515. int mp_avg = 0, i, outlier_idx = 0, mp_count = 0;
  516. /* find min/max mismatch across all calibrated gains */
  517. for (i = 0; i < nmeasurement; i++) {
  518. if (mp_coeff[i] > mp_max) {
  519. mp_max = mp_coeff[i];
  520. max_idx = i;
  521. } else if (mp_coeff[i] < mp_min) {
  522. mp_min = mp_coeff[i];
  523. min_idx = i;
  524. }
  525. }
  526. /* find average (exclude max abs value) */
  527. for (i = 0; i < nmeasurement; i++) {
  528. if ((abs(mp_coeff[i]) < abs(mp_max)) ||
  529. (abs(mp_coeff[i]) < abs(mp_min))) {
  530. mp_avg += mp_coeff[i];
  531. mp_count++;
  532. }
  533. }
  534. /*
  535. * finding mean magnitude/phase if possible, otherwise
  536. * just use the last value as the mean
  537. */
  538. if (mp_count)
  539. mp_avg /= mp_count;
  540. else
  541. mp_avg = mp_coeff[nmeasurement - 1];
  542. /* detect outlier */
  543. if (abs(mp_max - mp_min) > max_delta) {
  544. if (abs(mp_max - mp_avg) > abs(mp_min - mp_avg))
  545. outlier_idx = max_idx;
  546. else
  547. outlier_idx = min_idx;
  548. mp_coeff[outlier_idx] = mp_avg;
  549. }
  550. }
/*
 * Filter the collected Tx IQ coefficients (outlier rejection) and
 * program them into the per-chain, per-gain correction registers,
 * then enable Tx/Rx loopback IQ correction. Results are cached in
 * caldata (when present) so they can be reloaded without redoing cal.
 */
static void ar9003_hw_tx_iqcal_load_avg_2_passes(struct ath_hw *ah,
						 struct coeff *coeff,
						 bool is_reusable)
{
	int i, im, nmeasurement;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	struct ath9k_hw_cal_data *caldata = ah->caldata;

	/*
	 * Build the register-address table; each register holds two
	 * measurements, so consecutive pairs map to the same address.
	 * AR9485 is single chain: chains 1/2 stay zeroed.
	 */
	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	/* Load the average of 2 passes */
	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;
		nmeasurement = REG_READ_FIELD(ah,
				AR_PHY_TX_IQCAL_STATUS_B0,
				AR_PHY_CALIBRATED_GAINS_0);

		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		/* detect outlier only if nmeasurement > 1 */
		if (nmeasurement > 1) {
			/* Detect magnitude outlier */
			ar9003_hw_detect_outlier(coeff->mag_coeff[i],
					nmeasurement, MAX_MAG_DELTA);

			/* Detect phase outlier */
			ar9003_hw_detect_outlier(coeff->phs_coeff[i],
					nmeasurement, MAX_PHS_DELTA);
		}

		for (im = 0; im < nmeasurement; im++) {
			/* Pack: magnitude in bits 0-6, phase in bits 7-13 */
			coeff->iqc_coeff[0] = (coeff->mag_coeff[i][im] & 0x7f) |
				((coeff->phs_coeff[i][im] & 0x7f) << 7);

			/* Even/odd measurements share a register, different fields */
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					coeff->iqc_coeff[0]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					coeff->iqc_coeff[0]);

			if (caldata)
				caldata->tx_corr_coeff[im][i] =
					coeff->iqc_coeff[0];
		}
		if (caldata)
			caldata->num_measures[i] = nmeasurement;
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);

	if (caldata) {
		/* Mark whether these coefficients may be reloaded later */
		if (is_reusable)
			set_bit(TXIQCAL_DONE, &caldata->cal_flags);
		else
			clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
	}

	return;
}
  619. static bool ar9003_hw_tx_iq_cal_run(struct ath_hw *ah)
  620. {
  621. struct ath_common *common = ath9k_hw_common(ah);
  622. u8 tx_gain_forced;
  623. tx_gain_forced = REG_READ_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  624. AR_PHY_TXGAIN_FORCE);
  625. if (tx_gain_forced)
  626. REG_RMW_FIELD(ah, AR_PHY_TX_FORCED_GAIN,
  627. AR_PHY_TXGAIN_FORCE, 0);
  628. REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_START,
  629. AR_PHY_TX_IQCAL_START_DO_CAL, 1);
  630. if (!ath9k_hw_wait(ah, AR_PHY_TX_IQCAL_START,
  631. AR_PHY_TX_IQCAL_START_DO_CAL, 0,
  632. AH_WAIT_TIMEOUT)) {
  633. ath_dbg(common, CALIBRATE, "Tx IQ Cal is not completed\n");
  634. return false;
  635. }
  636. return true;
  637. }
/*
 * Read back the raw Tx IQ cal results from the channel-info memory for
 * every active chain and gain setting, convert them to correction
 * coefficients, and hand them to the load/average stage. Any hardware
 * failure or math error aborts the whole post-processing pass.
 */
static void ar9003_hw_tx_iq_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	struct ath_common *common = ath9k_hw_common(ah);
	const u32 txiqcal_status[AR9300_MAX_CHAINS] = {
		AR_PHY_TX_IQCAL_STATUS_B0,
		AR_PHY_TX_IQCAL_STATUS_B1,
		AR_PHY_TX_IQCAL_STATUS_B2,
	};
	const u_int32_t chan_info_tab[] = {
		AR_PHY_CHAN_INFO_TAB_0,
		AR_PHY_CHAN_INFO_TAB_1,
		AR_PHY_CHAN_INFO_TAB_2,
	};
	struct coeff coeff;
	s32 iq_res[6];
	int i, im, j;
	int nmeasurement;

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		nmeasurement = REG_READ_FIELD(ah,
				AR_PHY_TX_IQCAL_STATUS_B0,
				AR_PHY_CALIBRATED_GAINS_0);
		if (nmeasurement > MAX_MEASUREMENT)
			nmeasurement = MAX_MEASUREMENT;

		for (im = 0; im < nmeasurement; im++) {
			ath_dbg(common, CALIBRATE,
				"Doing Tx IQ Cal for chain %d\n", i);

			if (REG_READ(ah, txiqcal_status[i]) &
			    AR_PHY_TX_IQCAL_STATUS_FAILED) {
				ath_dbg(common, CALIBRATE,
					"Tx IQ Cal failed for chain %d\n", i);
				goto tx_iqcal_fail;
			}

			/*
			 * Each measurement occupies three 48-bit entries,
			 * read as a 32-bit word (S2_READ=0) followed by a
			 * 16-bit word (S2_READ=1) at the same offset.
			 */
			for (j = 0; j < 3; j++) {
				u32 idx = 2 * j, offset = 4 * (3 * im + j);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      0);

				/* 32 bits */
				iq_res[idx] = REG_READ(ah,
						chan_info_tab[i] +
						offset);

				REG_RMW_FIELD(ah,
					      AR_PHY_CHAN_INFO_MEMORY,
					      AR_PHY_CHAN_INFO_TAB_S2_READ,
					      1);

				/* 16 bits */
				iq_res[idx + 1] = 0xffff & REG_READ(ah,
						  chan_info_tab[i] + offset);

				ath_dbg(common, CALIBRATE,
					"IQ_RES[%d]=0x%x IQ_RES[%d]=0x%x\n",
					idx, iq_res[idx], idx + 1,
					iq_res[idx + 1]);
			}

			if (!ar9003_hw_calc_iq_corr(ah, i, iq_res,
						    coeff.iqc_coeff)) {
				ath_dbg(common, CALIBRATE,
					"Failed in calculation of IQ correction\n");
				goto tx_iqcal_fail;
			}

			/* Unpack and sign-extend the 7-bit coefficients */
			coeff.mag_coeff[i][im] = coeff.iqc_coeff[0] & 0x7f;
			coeff.phs_coeff[i][im] =
				(coeff.iqc_coeff[0] >> 7) & 0x7f;

			if (coeff.mag_coeff[i][im] > 63)
				coeff.mag_coeff[i][im] -= 128;
			if (coeff.phs_coeff[i][im] > 63)
				coeff.phs_coeff[i][im] -= 128;
		}
	}
	ar9003_hw_tx_iqcal_load_avg_2_passes(ah, &coeff, is_reusable);

	return;

tx_iqcal_fail:
	ath_dbg(common, CALIBRATE, "Tx IQ Cal failed\n");
	return;
}
/*
 * Re-program previously cached Tx IQ correction coefficients from
 * caldata instead of re-running the calibration, then re-enable the
 * correction machinery.
 *
 * NOTE(review): caldata is dereferenced without a NULL check —
 * presumably callers only invoke this when TXIQCAL_DONE was set (which
 * implies caldata existed); confirm against the call sites.
 */
static void ar9003_hw_tx_iq_cal_reload(struct ath_hw *ah)
{
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	u32 tx_corr_coeff[MAX_MEASUREMENT][AR9300_MAX_CHAINS];
	int i, im;

	/* Same address table as the load path; pairs share a register */
	memset(tx_corr_coeff, 0, sizeof(tx_corr_coeff));
	for (i = 0; i < MAX_MEASUREMENT / 2; i++) {
		tx_corr_coeff[i * 2][0] = tx_corr_coeff[(i * 2) + 1][0] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B0(i);
		if (!AR_SREV_9485(ah)) {
			tx_corr_coeff[i * 2][1] =
				tx_corr_coeff[(i * 2) + 1][1] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B1(i);
			tx_corr_coeff[i * 2][2] =
				tx_corr_coeff[(i * 2) + 1][2] =
					AR_PHY_TX_IQCAL_CORR_COEFF_B2(i);
		}
	}

	for (i = 0; i < AR9300_MAX_CHAINS; i++) {
		if (!(ah->txchainmask & (1 << i)))
			continue;

		for (im = 0; im < caldata->num_measures[i]; im++) {
			if ((im % 2) == 0)
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_00_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
			else
				REG_RMW_FIELD(ah, tx_corr_coeff[im][i],
					AR_PHY_TX_IQCAL_CORR_COEFF_01_COEFF_TABLE,
					caldata->tx_corr_coeff[im][i]);
		}
	}

	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_3,
		      AR_PHY_TX_IQCAL_CONTROL_3_IQCORR_EN, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_RX_IQCAL_CORR_B0,
		      AR_PHY_RX_IQCAL_CORR_B0_LOOPBACK_IQCORR_EN, 0x1);
}
/*
 * Manual peak-detect calibration for one chain: put the RF front end
 * into an overridden, quiet state, then binary-search the CALDAC
 * setting bit by bit (MSB first) using the AGC comparator output, and
 * finally release the overrides. The register write ordering here is
 * a hardware bring-up sequence — do not reorder.
 */
static void ar9003_hw_manual_peak_cal(struct ath_hw *ah, u8 chain, bool is_2g)
{
	int offset[8], total = 0, test;
	int agc_out, i;

	/* Take manual control of the Rx gain stages, LNA off for cal DC */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_LNAON_CALDC, 0x0);
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA2G_GAIN_OVR, 0x0);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
			      AR_PHY_65NM_RXRF_GAINSTAGES_LNA5G_GAIN_OVR, 0x0);

	/* Force the Rx chain off while calibrating */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON, 0x0);

	/* Override the AGC so we can drive the CALDAC directly */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_OVERRIDE, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_ON_OVR, 0x1);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0x1);
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_DBDAC_OVR, 0x0);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC5G_DBDAC_OVR, 0x0);

	/*
	 * Successive-approximation search over CALDAC bits 5..0:
	 * try each bit, read the comparator (AGC_OUT), and keep the
	 * bit only if the comparator reads low.
	 */
	for (i = 6; i > 0; i--) {
		offset[i] = BIT(i - 1);
		test = total + offset[i];

		if (is_2g)
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR,
				      test);
		else
			REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
				      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR,
				      test);
		/* let the analog path settle before sampling the comparator */
		udelay(100);
		agc_out = REG_READ_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
					 AR_PHY_65NM_RXRF_AGC_AGC_OUT);
		offset[i] = (agc_out) ? 0 : 1;
		total += (offset[i] << (i - 1));
	}

	/* Latch the final CALDAC value */
	if (is_2g)
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR, total);
	else
		REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
			      AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR, total);

	/* Release the overrides taken above */
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_GAINSTAGES(chain),
		      AR_PHY_65NM_RXRF_GAINSTAGES_RX_OVERRIDE, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXTX2(chain),
		      AR_PHY_65NM_RXTX2_RXON_OVR, 0);
	REG_RMW_FIELD(ah, AR_PHY_65NM_RXRF_AGC(chain),
		      AR_PHY_65NM_RXRF_AGC_AGC_CAL_OVR, 0);
}
  812. static void ar9003_hw_do_manual_peak_cal(struct ath_hw *ah,
  813. struct ath9k_channel *chan,
  814. bool run_rtt_cal)
  815. {
  816. struct ath9k_hw_cal_data *caldata = ah->caldata;
  817. int i;
  818. if (!AR_SREV_9462(ah) && !AR_SREV_9565(ah) && !AR_SREV_9485(ah))
  819. return;
  820. if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && !run_rtt_cal)
  821. return;
  822. for (i = 0; i < AR9300_MAX_CHAINS; i++) {
  823. if (!(ah->rxchainmask & (1 << i)))
  824. continue;
  825. ar9003_hw_manual_peak_cal(ah, i, IS_CHAN_2GHZ(chan));
  826. }
  827. if (caldata)
  828. set_bit(SW_PKDET_DONE, &caldata->cal_flags);
  829. if ((ah->caps.hw_caps & ATH9K_HW_CAP_RTT) && caldata) {
  830. if (IS_CHAN_2GHZ(chan)){
  831. caldata->caldac[0] = REG_READ_FIELD(ah,
  832. AR_PHY_65NM_RXRF_AGC(0),
  833. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
  834. caldata->caldac[1] = REG_READ_FIELD(ah,
  835. AR_PHY_65NM_RXRF_AGC(1),
  836. AR_PHY_65NM_RXRF_AGC_AGC2G_CALDAC_OVR);
  837. } else {
  838. caldata->caldac[0] = REG_READ_FIELD(ah,
  839. AR_PHY_65NM_RXRF_AGC(0),
  840. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
  841. caldata->caldac[1] = REG_READ_FIELD(ah,
  842. AR_PHY_65NM_RXRF_AGC(1),
  843. AR_PHY_65NM_RXRF_AGC_AGC5G_CALDAC_OVR);
  844. }
  845. }
  846. }
/*
 * Save or restore the Tx carrier-leak (CL) calibration tables.
 *
 * If a previous CL result is cached in caldata (TXCLCAL_DONE set), the
 * per-chain tables are written back to hardware.  Otherwise, when the
 * just-completed hardware CL cal succeeded (CLC_SUCCESS) and its result
 * is reusable, the tables are read out into caldata and TXCLCAL_DONE is
 * set so subsequent resets can skip the hardware calibration.
 *
 * NOTE(review): CL_TAB_ENTRY(cl_idx[i]) takes no 'j' argument, yet both
 * loops iterate over j — the macro presumably expands to an address
 * expression referencing the local variable 'j'; confirm against its
 * definition before renaming j or restructuring these loops.
 */
static void ar9003_hw_cl_cal_post_proc(struct ath_hw *ah, bool is_reusable)
{
	u32 cl_idx[AR9300_MAX_CHAINS] = { AR_PHY_CL_TAB_0,
					  AR_PHY_CL_TAB_1,
					  AR_PHY_CL_TAB_2 };
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txclcal_done = false;
	int i, j;

	/* Nothing to do without cached cal state or with CL cal disabled. */
	if (!caldata || !(ah->enabled_cals & TX_CL_CAL))
		return;

	txclcal_done = !!(REG_READ(ah, AR_PHY_AGC_CONTROL) &
			  AR_PHY_AGC_CONTROL_CLC_SUCCESS);

	if (test_bit(TXCLCAL_DONE, &caldata->cal_flags)) {
		/* Restore the cached table for every active TX chain. */
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				REG_WRITE(ah, CL_TAB_ENTRY(cl_idx[i]),
					  caldata->tx_clcal[i][j]);
		}
	} else if (is_reusable && txclcal_done) {
		/* Capture the fresh hardware result for future restores. */
		for (i = 0; i < AR9300_MAX_CHAINS; i++) {
			if (!(ah->txchainmask & (1 << i)))
				continue;
			for (j = 0; j < MAX_CL_TAB_ENTRY; j++)
				caldata->tx_clcal[i][j] =
					REG_READ(ah, CL_TAB_ENTRY(cl_idx[i]));
		}
		set_bit(TXCLCAL_DONE, &caldata->cal_flags);
	}
}
/*
 * Run the full initial calibration sequence for AR9003-family hardware.
 *
 * Orchestrates, in hardware-mandated order: RTT restore/enable, carrier
 * leak (CL) cal enablement, Tx IQ calibration (either folded into the
 * AGC cal on chips with TX_IQ_ON_AGC_CAL or run separately), the AGC
 * calibration itself with its completion poll, the manual peak-detector
 * cal, and the post-processing/caching of Tx IQ, CL and RTT results.
 * The chip chainmask is used for the duration of calibration and the
 * runtime chainmask restored afterwards; finally the periodic IQ cal
 * list is (re)initialized and any cached CalValid state is cleared.
 *
 * Returns true on success, false if the AGC offset calibration did not
 * complete within AH_WAIT_TIMEOUT (RTT is disabled again in that case).
 */
static bool ar9003_hw_init_cal(struct ath_hw *ah,
			       struct ath9k_channel *chan)
{
	struct ath_common *common = ath9k_hw_common(ah);
	struct ath9k_hw_cal_data *caldata = ah->caldata;
	bool txiqcal_done = false;
	bool is_reusable = true, status = true;
	bool run_rtt_cal = false, run_agc_cal, sep_iq_cal = false;
	bool rtt = !!(ah->caps.hw_caps & ATH9K_HW_CAP_RTT);
	u32 rx_delay = 0;
	/* Sub-cals that may be temporarily masked out of AGC_CONTROL below. */
	u32 agc_ctrl = 0, agc_supp_cals = AR_PHY_AGC_CONTROL_OFFSET_CAL |
					  AR_PHY_AGC_CONTROL_FLTR_CAL |
					  AR_PHY_AGC_CONTROL_PKDET_CAL;

	/* Use chip chainmask only for calibration */
	ar9003_hw_set_chain_masks(ah, ah->caps.rx_chainmask, ah->caps.tx_chainmask);

	/* A fresh RTT calibration is needed only when no saved RTT
	 * state could be restored for this channel. */
	if (rtt) {
		if (!ar9003_hw_rtt_restore(ah, chan))
			run_rtt_cal = true;

		if (run_rtt_cal)
			ath_dbg(common, CALIBRATE, "RTT calibration to be done\n");
	}

	run_agc_cal = run_rtt_cal;

	if (run_rtt_cal) {
		ar9003_hw_rtt_enable(ah);
		ar9003_hw_rtt_set_mask(ah, 0x00);
		ar9003_hw_rtt_clear_hist(ah);
	}

	if (rtt) {
		if (!run_rtt_cal) {
			/* RTT state was restored: remember which of the
			 * offset/filter/peak-detect sub-cals were enabled,
			 * then mask them off so the AGC cal below skips
			 * them; they are re-enabled near the end. */
			agc_ctrl = REG_READ(ah, AR_PHY_AGC_CONTROL);
			agc_supp_cals &= agc_ctrl;
			agc_ctrl &= ~(AR_PHY_AGC_CONTROL_OFFSET_CAL |
				      AR_PHY_AGC_CONTROL_FLTR_CAL |
				      AR_PHY_AGC_CONTROL_PKDET_CAL);
			REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
		} else {
			if (ah->ah_flags & AH_FASTCC)
				run_agc_cal = true;
		}
	}

	/* Enable hardware CL cal only when no cached result exists;
	 * a cached table is reloaded later by cl_cal_post_proc(). */
	if (ah->enabled_cals & TX_CL_CAL) {
		if (caldata && test_bit(TXCLCAL_DONE, &caldata->cal_flags))
			REG_CLR_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
		else {
			REG_SET_BIT(ah, AR_PHY_CL_CAL_CTL,
				    AR_PHY_CL_CAL_ENABLE);
			run_agc_cal = true;
		}
	}

	/* No Tx IQ cal on half/quarter-rate channels or when disabled. */
	if ((IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan)) ||
	    !(ah->enabled_cals & TX_IQ_CAL))
		goto skip_tx_iqcal;

	/* Do Tx IQ Calibration */
	REG_RMW_FIELD(ah, AR_PHY_TX_IQCAL_CONTROL_1,
		      AR_PHY_TX_IQCAL_CONTROL_1_IQCORR_I_Q_COFF_DELPT,
		      DELPT);

	/*
	 * For AR9485 or later chips, TxIQ cal runs as part of
	 * AGC calibration
	 */
	if (ah->enabled_cals & TX_IQ_ON_AGC_CAL) {
		if (caldata && !test_bit(TXIQCAL_DONE, &caldata->cal_flags))
			REG_SET_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		else
			REG_CLR_BIT(ah, AR_PHY_TX_IQCAL_CONTROL_0,
				    AR_PHY_TX_IQCAL_CONTROL_0_ENABLE_TXIQ_CAL);
		txiqcal_done = run_agc_cal = true;
	} else if (caldata && !test_bit(TXIQCAL_DONE, &caldata->cal_flags)) {
		/* Older chips: run Tx IQ cal as a separate step below. */
		run_agc_cal = true;
		sep_iq_cal = true;
	}

skip_tx_iqcal:
	/* Let the MCI (BT coex) side know a calibration is starting. */
	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_req(ah, &is_reusable);

	if (sep_iq_cal) {
		/* Standalone Tx IQ cal, followed by a BB restart. */
		txiqcal_done = ar9003_hw_tx_iq_cal_run(ah);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	/* CL cal needs a fixed RX delay; stash the current one so it
	 * can be restored after the AGC cal completes. */
	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		rx_delay = REG_READ(ah, AR_PHY_RX_DELAY);
		/* Disable BB_active */
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
		udelay(5);
		REG_WRITE(ah, AR_PHY_RX_DELAY, AR_PHY_RX_DELAY_DELAY);
		REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	}

	if (run_agc_cal || !(ah->ah_flags & AH_FASTCC)) {
		/* Calibrate the AGC */
		REG_WRITE(ah, AR_PHY_AGC_CONTROL,
			  REG_READ(ah, AR_PHY_AGC_CONTROL) |
			  AR_PHY_AGC_CONTROL_CAL);

		/* Poll for offset calibration complete */
		status = ath9k_hw_wait(ah, AR_PHY_AGC_CONTROL,
				       AR_PHY_AGC_CONTROL_CAL,
				       0, AH_WAIT_TIMEOUT);

		ar9003_hw_do_manual_peak_cal(ah, chan, run_rtt_cal);
	}

	/* Restore the RX delay saved before the CL cal. */
	if (REG_READ(ah, AR_PHY_CL_CAL_CTL) & AR_PHY_CL_CAL_ENABLE) {
		REG_WRITE(ah, AR_PHY_RX_DELAY, rx_delay);
		udelay(5);
	}

	if (ath9k_hw_mci_is_enabled(ah) && IS_CHAN_2GHZ(chan) && run_agc_cal)
		ar9003_mci_init_cal_done(ah);

	/* Re-enable the sub-cals that were masked off above. */
	if (rtt && !run_rtt_cal) {
		agc_ctrl |= agc_supp_cals;
		REG_WRITE(ah, AR_PHY_AGC_CONTROL, agc_ctrl);
	}

	if (!status) {
		if (run_rtt_cal)
			ar9003_hw_rtt_disable(ah);

		ath_dbg(common, CALIBRATE,
			"offset calibration failed to complete in %d ms; noisy environment?\n",
			AH_WAIT_TIMEOUT / 1000);
		return false;
	}

	/* Post-process fresh Tx IQ results, or reload cached ones. */
	if (txiqcal_done)
		ar9003_hw_tx_iq_cal_post_proc(ah, is_reusable);
	else if (caldata && test_bit(TXIQCAL_DONE, &caldata->cal_flags))
		ar9003_hw_tx_iq_cal_reload(ah);

	ar9003_hw_cl_cal_post_proc(ah, is_reusable);

	/* Save RTT history (baseband must be stopped while filling it),
	 * then turn RTT back off until the next restore. */
	if (run_rtt_cal && caldata) {
		if (is_reusable) {
			if (!ath9k_hw_rfbus_req(ah)) {
				ath_err(ath9k_hw_common(ah),
					"Could not stop baseband\n");
			} else {
				ar9003_hw_rtt_fill_hist(ah);

				if (test_bit(SW_PKDET_DONE, &caldata->cal_flags))
					ar9003_hw_rtt_load_hist(ah);
			}

			ath9k_hw_rfbus_done(ah);
		}

		ar9003_hw_rtt_disable(ah);
	}

	/* Revert chainmask to runtime parameters */
	ar9003_hw_set_chain_masks(ah, ah->rxchainmask, ah->txchainmask);

	/* Initialize list pointers */
	ah->cal_list = ah->cal_list_last = ah->cal_list_curr = NULL;

	INIT_CAL(&ah->iq_caldata);
	INSERT_CAL(ah, &ah->iq_caldata);
	ath_dbg(common, CALIBRATE, "enabling IQ Calibration\n");

	/* Initialize current pointer to first element in list */
	ah->cal_list_curr = ah->cal_list;

	if (ah->cal_list_curr)
		ath9k_hw_reset_calibration(ah, ah->cal_list_curr);

	if (caldata)
		caldata->CalValid = 0;

	return true;
}
  1031. void ar9003_hw_attach_calib_ops(struct ath_hw *ah)
  1032. {
  1033. struct ath_hw_private_ops *priv_ops = ath9k_hw_private_ops(ah);
  1034. struct ath_hw_ops *ops = ath9k_hw_ops(ah);
  1035. priv_ops->init_cal_settings = ar9003_hw_init_cal_settings;
  1036. priv_ops->init_cal = ar9003_hw_init_cal;
  1037. priv_ops->setup_calibration = ar9003_hw_setup_calibration;
  1038. ops->calibrate = ar9003_hw_calibrate;
  1039. }