44x_spd_ddr2.c

/*
 * cpu/ppc4xx/44x_spd_ddr2.c
 * This SPD SDRAM detection code supports AMCC PPC44x CPUs with a
 * DDR2 controller (non-Denali core). Those currently are:
 *
 * 405:     405EX(r)
 * 440/460: 440SP/440SPe/460EX/460GT
 *
 * Copyright (c) 2008 Nuovation System Designs, LLC
 *   Grant Erickson <gerickson@nuovations.com>
 * (C) Copyright 2007-2008
 * Stefan Roese, DENX Software Engineering, sr@denx.de.
 *
 * COPYRIGHT AMCC CORPORATION 2004
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

/* define DEBUG for debugging output (obviously ;-)) */
#if 0
#define DEBUG
#endif

#include <common.h>
#include <command.h>
#include <ppc4xx.h>
#include <i2c.h>
#include <asm/io.h>
#include <asm/processor.h>
#include <asm/mmu.h>
#include <asm/cache.h>

#include "ecc.h"

#if defined(CONFIG_SDRAM_PPC4xx_IBM_DDR2)

#define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic) \
	do { \
		u32 data; \
		mfsdram(SDRAM_##mnemonic, data); \
		printf("%20s[%02x] = 0x%08X\n", \
		       "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
	} while (0)

#define PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(mnemonic) \
	do { \
		u32 data; \
		data = mfdcr(SDRAM_##mnemonic); \
		printf("%20s[%02x] = 0x%08X\n", \
		       "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
	} while (0)
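
/*
 * Example (illustrative): PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1) reads
 * SDRAM_MCOPT1 via mfsdram() and prints one line with the register name,
 * its mnemonic constant as two hex digits and the current 32-bit value.
 * The _MQ_ variant does the same for memory-queue registers accessed as
 * DCRs via mfdcr().
 */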

#if defined(CONFIG_440)
/*
 * This DDR2 setup code can dynamically set up the TLB entries for the DDR2
 * memory region. Right now the cache should still be disabled in U-Boot
 * because of the EMAC driver, which needs its buffer descriptors to be
 * located in non-cached memory.
 *
 * If at some time this restriction doesn't apply anymore, just define
 * CONFIG_4xx_DCACHE in the board config file and this code should set up
 * everything correctly.
 */
#ifdef CONFIG_4xx_DCACHE
/* enable caching on SDRAM */
#define MY_TLB_WORD2_I_ENABLE	0
#else
/* disable caching on SDRAM */
#define MY_TLB_WORD2_I_ENABLE	TLB_WORD2_I_ENABLE
#endif /* CONFIG_4xx_DCACHE */
#endif /* CONFIG_440 */

#if defined(CONFIG_SPD_EEPROM)

/*-----------------------------------------------------------------------------+
 * Defines
 *-----------------------------------------------------------------------------*/
#ifndef TRUE
#define TRUE		1
#endif
#ifndef FALSE
#define FALSE		0
#endif

#define SDRAM_DDR1	1
#define SDRAM_DDR2	2
#define SDRAM_NONE	0

#define MAXDIMMS	2
#define MAXRANKS	4
#define MAXBXCF		4
#define MAX_SPD_BYTES	256	/* Max number of bytes on the DIMM's SPD EEPROM */

#define ONE_BILLION	1000000000

#define MULDIV64(m1, m2, d)	(u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
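
/*
 * Example (illustrative): MULDIV64() performs the multiplication in 64 bits
 * to avoid 32-bit overflow, e.g. MULDIV64(ONE_BILLION, 100, 266666666)
 * yields 375, i.e. a 266 MHz clock period expressed in units of 10 ps.
 */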

#define CMD_NOP		(7 << 19)
#define CMD_PRECHARGE	(2 << 19)
#define CMD_REFRESH	(1 << 19)
#define CMD_EMR		(0 << 19)
#define CMD_READ	(5 << 19)
#define CMD_WRITE	(4 << 19)

#define SELECT_MR	(0 << 16)
#define SELECT_EMR	(1 << 16)
#define SELECT_EMR2	(2 << 16)
#define SELECT_EMR3	(3 << 16)

/* MR */
#define DLL_RESET	0x00000100

#define WRITE_RECOV_2	(1 << 9)
#define WRITE_RECOV_3	(2 << 9)
#define WRITE_RECOV_4	(3 << 9)
#define WRITE_RECOV_5	(4 << 9)
#define WRITE_RECOV_6	(5 << 9)

#define BURST_LEN_4	0x00000002

/* EMR */
#define ODT_0_OHM	0x00000000
#define ODT_50_OHM	0x00000044
#define ODT_75_OHM	0x00000004
#define ODT_150_OHM	0x00000040

#define ODS_FULL	0x00000000
#define ODS_REDUCED	0x00000002
#define OCD_CALIB_DEF	0x00000380

/* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
#define ODT_EB0R	(0x80000000 >> 8)
#define ODT_EB0W	(0x80000000 >> 7)
#define CALC_ODT_R(n)	(ODT_EB0R << (n << 1))
#define CALC_ODT_W(n)	(ODT_EB0W << (n << 1))
#define CALC_ODT_RW(n)	(CALC_ODT_R(n) | CALC_ODT_W(n))
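
/*
 * Example (illustrative): ODT_EB0R is 0x00800000 and ODT_EB0W is 0x01000000.
 * CALC_ODT_RW(1) shifts both left by two bits and therefore expands to
 * 0x06000000, i.e. the read and write ODT enable bits for rank 1.
 */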

/* Defines for the Read Cycle Delay test */
#define NUMMEMTESTS	8
#define NUMMEMWORDS	8
#define NUMLOOPS	64	/* memory test loops */

/*
 * Newer PPCs like the 440SPe and 460EX/GT can be equipped with more than 2GB
 * of SDRAM. To support such configurations, we "only" map the first 2GB via
 * the TLBs. We need some free virtual address space for the remaining
 * peripherals like SoC devices, FLASH etc.
 *
 * Note that ECC is currently not supported on configurations with more than
 * 2GB SDRAM. This is because we only map the first 2GB on such systems, and
 * therefore the ECC parity byte of the remaining area can't be written.
 */

/*
 * Board-specific platform code can reimplement spd_ddr_init_hang() if needed
 */
void __spd_ddr_init_hang (void)
{
	hang ();
}
void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));

/*
 * To provide an interface for board-specific config values in this common
 * DDR setup code, we implement the "weak" default functions here. They return
 * the default value back to the caller.
 *
 * Please see include/configs/yucca.h for an example of a board-specific
 * implementation.
 */
u32 __ddr_wrdtr(u32 default_val)
{
	return default_val;
}
u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));

u32 __ddr_clktr(u32 default_val)
{
	return default_val;
}
u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
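
/*
 * A minimal sketch (illustrative, not part of this file) of how board code
 * could override one of the weak defaults above, e.g. to use a different
 * write DQS phase:
 *
 *	u32 ddr_wrdtr(u32 default_val)
 *	{
 *		return SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_180_DEG_ADV;
 *	}
 *
 * The SDRAM_WRDTR_WTR_180_DEG_ADV value is an assumption here; a board must
 * use whatever phase matches its layout.
 */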

/* Private Structure Definitions */

/* enum only to ease code for cas latency setting */
typedef enum ddr_cas_id {
	DDR_CAS_2	= 20,
	DDR_CAS_2_5	= 25,
	DDR_CAS_3	= 30,
	DDR_CAS_4	= 40,
	DDR_CAS_5	= 50
} ddr_cas_id_t;

/*-----------------------------------------------------------------------------+
 * Prototypes
 *-----------------------------------------------------------------------------*/
static phys_size_t sdram_memsize(void);
static void get_spd_info(unsigned long *dimm_populated,
			 unsigned char *iic0_dimm_addr,
			 unsigned long num_dimm_banks);
static void check_mem_type(unsigned long *dimm_populated,
			   unsigned char *iic0_dimm_addr,
			   unsigned long num_dimm_banks);
static void check_frequency(unsigned long *dimm_populated,
			    unsigned char *iic0_dimm_addr,
			    unsigned long num_dimm_banks);
static void check_rank_number(unsigned long *dimm_populated,
			      unsigned char *iic0_dimm_addr,
			      unsigned long num_dimm_banks);
static void check_voltage_type(unsigned long *dimm_populated,
			       unsigned char *iic0_dimm_addr,
			       unsigned long num_dimm_banks);
static void program_memory_queue(unsigned long *dimm_populated,
				 unsigned char *iic0_dimm_addr,
				 unsigned long num_dimm_banks);
static void program_codt(unsigned long *dimm_populated,
			 unsigned char *iic0_dimm_addr,
			 unsigned long num_dimm_banks);
static void program_mode(unsigned long *dimm_populated,
			 unsigned char *iic0_dimm_addr,
			 unsigned long num_dimm_banks,
			 ddr_cas_id_t *selected_cas,
			 int *write_recovery);
static void program_tr(unsigned long *dimm_populated,
		       unsigned char *iic0_dimm_addr,
		       unsigned long num_dimm_banks);
static void program_rtr(unsigned long *dimm_populated,
			unsigned char *iic0_dimm_addr,
			unsigned long num_dimm_banks);
static void program_bxcf(unsigned long *dimm_populated,
			 unsigned char *iic0_dimm_addr,
			 unsigned long num_dimm_banks);
static void program_copt1(unsigned long *dimm_populated,
			  unsigned char *iic0_dimm_addr,
			  unsigned long num_dimm_banks);
static void program_initplr(unsigned long *dimm_populated,
			    unsigned char *iic0_dimm_addr,
			    unsigned long num_dimm_banks,
			    ddr_cas_id_t selected_cas,
			    int write_recovery);
static unsigned long is_ecc_enabled(void);

#ifdef CONFIG_DDR_ECC
static void program_ecc(unsigned long *dimm_populated,
			unsigned char *iic0_dimm_addr,
			unsigned long num_dimm_banks,
			unsigned long tlb_word2_i_value);
static void program_ecc_addr(unsigned long start_address,
			     unsigned long num_bytes,
			     unsigned long tlb_word2_i_value);
#endif

#if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
static void program_DQS_calibration(unsigned long *dimm_populated,
				    unsigned char *iic0_dimm_addr,
				    unsigned long num_dimm_banks);
#ifdef HARD_CODED_DQS	/* calibration test with hard values */
static void test(void);
#else
static void DQS_calibration_process(void);
#endif
#endif

int do_reset (cmd_tbl_t *cmdtp, int flag, int argc, char *argv[]);
void dcbz_area(u32 start_address, u32 num_bytes);

static unsigned char spd_read(uchar chip, uint addr)
{
	unsigned char data[2];

	if (i2c_probe(chip) == 0)
		if (i2c_read(chip, addr, 1, data, 1) == 0)
			return data[0];

	return 0;
}
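
/*
 * Example (illustrative): spd_read(iic0_dimm_addr[0], 2) returns SPD byte 2,
 * the fundamental memory type (0x07 = DDR SDRAM, 0x08 = DDR2 SDRAM per the
 * JEDEC SPD definition); 0 is returned if the EEPROM does not answer.
 */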

/*-----------------------------------------------------------------------------+
 * sdram_memsize
 *-----------------------------------------------------------------------------*/
static phys_size_t sdram_memsize(void)
{
	phys_size_t mem_size;
	unsigned long mcopt2;
	unsigned long mcstat;
	unsigned long mb0cf;
	unsigned long sdsz;
	unsigned long i;

	mem_size = 0;

	mfsdram(SDRAM_MCOPT2, mcopt2);
	mfsdram(SDRAM_MCSTAT, mcstat);

	/* DDR controller must be enabled and not in self-refresh. */
	/* Otherwise memsize is zero. */
	if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
	    && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
	    && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
		== (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
		for (i = 0; i < MAXBXCF; i++) {
			mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
			/* Banks enabled */
			if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
				sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
				switch (sdsz) {
				case SDRAM_RXBAS_SDSZ_8:
					mem_size += 8;
					break;
				case SDRAM_RXBAS_SDSZ_16:
					mem_size += 16;
					break;
				case SDRAM_RXBAS_SDSZ_32:
					mem_size += 32;
					break;
				case SDRAM_RXBAS_SDSZ_64:
					mem_size += 64;
					break;
				case SDRAM_RXBAS_SDSZ_128:
					mem_size += 128;
					break;
				case SDRAM_RXBAS_SDSZ_256:
					mem_size += 256;
					break;
				case SDRAM_RXBAS_SDSZ_512:
					mem_size += 512;
					break;
				case SDRAM_RXBAS_SDSZ_1024:
					mem_size += 1024;
					break;
				case SDRAM_RXBAS_SDSZ_2048:
					mem_size += 2048;
					break;
				case SDRAM_RXBAS_SDSZ_4096:
					mem_size += 4096;
					break;
				default:
					printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n",
					       sdsz);
					mem_size = 0;
					break;
				}
			}
		}
	}

	return mem_size << 20;
}

/*-----------------------------------------------------------------------------+
 * initdram.  Initializes the 440SP Memory Queue and DDR SDRAM controller.
 * Note: This routine runs from flash with a stack set up in the chip's
 * sram space. It is important that the routine does not require .sbss, .bss or
 * .data sections. It also cannot call routines that require these sections.
 *-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
 * Function:	initdram
 * Description:	Configures SDRAM memory banks for DDR operation.
 *		Auto Memory Configuration option reads the DDR SDRAM EEPROMs
 *		via the IIC bus and then configures the DDR SDRAM memory
 *		banks appropriately. If Auto Memory Configuration is
 *		not used, it is assumed that no DIMM is plugged in.
 *-----------------------------------------------------------------------------*/
phys_size_t initdram(int board_type)
{
	unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
	unsigned char spd0[MAX_SPD_BYTES];
	unsigned char spd1[MAX_SPD_BYTES];
	unsigned char *dimm_spd[MAXDIMMS];
	unsigned long dimm_populated[MAXDIMMS];
	unsigned long num_dimm_banks;		/* on board dimm banks */
	unsigned long val;
	ddr_cas_id_t selected_cas = DDR_CAS_5;	/* preset to silence compiler */
	int write_recovery;
	phys_size_t dram_size = 0;

	num_dimm_banks = sizeof(iic0_dimm_addr);

	/*------------------------------------------------------------------
	 * Set up an array of SPD matrixes.
	 *-----------------------------------------------------------------*/
	dimm_spd[0] = spd0;
	dimm_spd[1] = spd1;

	/*------------------------------------------------------------------
	 * Reset the DDR-SDRAM controller.
	 *-----------------------------------------------------------------*/
	mtsdr(SDR0_SRST, (0x80000000 >> 10));
	mtsdr(SDR0_SRST, 0x00000000);

	/*
	 * Make sure I2C controller is initialized
	 * before continuing.
	 */

	/* switch to correct I2C bus */
	I2C_SET_BUS(CONFIG_SYS_SPD_BUS_NUM);
	i2c_init(CONFIG_SYS_I2C_SPEED, CONFIG_SYS_I2C_SLAVE);

	/*------------------------------------------------------------------
	 * Clear out the serial presence detect buffers.
	 * Perform IIC reads from the dimm. Fill in the spds.
	 * Check to see if the dimm slots are populated.
	 *-----------------------------------------------------------------*/
	get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the memory type for the dimms plugged.
	 *-----------------------------------------------------------------*/
	check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the frequency supported for the dimms plugged.
	 *-----------------------------------------------------------------*/
	check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the total rank number.
	 *-----------------------------------------------------------------*/
	check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the voltage type for the dimms plugged.
	 *-----------------------------------------------------------------*/
	check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM controller options 2 register,
	 * except enabling of the memory controller.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2,
		(val &
		 ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
		   SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
		   SDRAM_MCOPT2_ISIE_MASK))
		| (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
		   SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
		   SDRAM_MCOPT2_ISIE_ENABLE));

	/*------------------------------------------------------------------
	 * Program SDRAM controller options 1 register.
	 * Note: Does not enable the memory controller.
	 *-----------------------------------------------------------------*/
	program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Set the SDRAM Controller On Die Termination Register
	 *-----------------------------------------------------------------*/
	program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM refresh register.
	 *-----------------------------------------------------------------*/
	program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM mode register.
	 *-----------------------------------------------------------------*/
	program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
		     &selected_cas, &write_recovery);

	/*------------------------------------------------------------------
	 * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_WRDTR, val);
	mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
		ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));

	/*------------------------------------------------------------------
	 * Set the SDRAM Clock Timing Register
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_CLKTR, val);
	mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
		ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));

	/*------------------------------------------------------------------
	 * Program the BxCF registers.
	 *-----------------------------------------------------------------*/
	program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM timing registers.
	 *-----------------------------------------------------------------*/
	program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Set the Extended Mode register
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MEMODE, val);
	mtsdram(SDRAM_MEMODE,
		(val & ~(SDRAM_MEMODE_DIC_MASK | SDRAM_MEMODE_DLL_MASK |
			 SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
		(SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
		 | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));

	/*------------------------------------------------------------------
	 * Program Initialization preload registers.
	 *-----------------------------------------------------------------*/
	program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
			selected_cas, write_recovery);

	/*------------------------------------------------------------------
	 * Delay to ensure 200usec have elapsed since reset.
	 *-----------------------------------------------------------------*/
	udelay(400);

	/*------------------------------------------------------------------
	 * Set the memory queue core base addr.
	 *-----------------------------------------------------------------*/
	program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM controller options 2 register.
	 * Enable the memory controller.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2,
		(val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
			 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
		SDRAM_MCOPT2_IPTR_EXECUTE);

	/*------------------------------------------------------------------
	 * Wait for IPTR_EXECUTE init sequence to complete.
	 *-----------------------------------------------------------------*/
	do {
		mfsdram(SDRAM_MCSTAT, val);
	} while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);

	/* enable the controller only after init sequence completes */
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));

	/* Make sure delay-line calibration is done before proceeding */
	do {
		mfsdram(SDRAM_DLCR, val);
	} while (!(val & SDRAM_DLCR_DLCS_COMPLETE));

	/* get installed memory size */
	dram_size = sdram_memsize();

	/*
	 * Limit size to 2GB
	 */
	if (dram_size > CONFIG_MAX_MEM_MAPPED)
		dram_size = CONFIG_MAX_MEM_MAPPED;

	/* and program tlb entries for this size (dynamic) */

	/*
	 * Program TLB entries with caches enabled, for best performance
	 * while auto-calibration and ECC generation are running
	 */
	program_tlb(0, 0, dram_size, 0);

	/*------------------------------------------------------------------
	 * DQS calibration.
	 *-----------------------------------------------------------------*/
#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	DQS_autocalibration();
#else
	program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
#endif

#ifdef CONFIG_DDR_ECC
	/*------------------------------------------------------------------
	 * If ecc is enabled, initialize the parity bits.
	 *-----------------------------------------------------------------*/
	program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
#endif

	/*
	 * Now after initialization (auto-calibration and ECC generation)
	 * remove the TLB entries with caches enabled and program again with
	 * desired cache functionality
	 */
	remove_tlb(0, dram_size);
	program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);

	ppc4xx_ibm_ddr2_register_dump();

	/*
	 * Clear potential errors resulting from auto-calibration.
	 * If not done, then we could get an interrupt later on when
	 * exceptions are enabled.
	 */
	set_mcsr(get_mcsr());

	return sdram_memsize();
}

static void get_spd_info(unsigned long *dimm_populated,
			 unsigned char *iic0_dimm_addr,
			 unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long dimm_found;
	unsigned char num_of_bytes;
	unsigned char total_size;

	dimm_found = FALSE;
	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		num_of_bytes = 0;
		total_size = 0;

		num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
		debug("\nspd_read(0x%x) returned %d\n",
		      iic0_dimm_addr[dimm_num], num_of_bytes);
		total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
		debug("spd_read(0x%x) returned %d\n",
		      iic0_dimm_addr[dimm_num], total_size);

		if ((num_of_bytes != 0) && (total_size != 0)) {
			dimm_populated[dimm_num] = TRUE;
			dimm_found = TRUE;
			debug("DIMM slot %lu: populated\n", dimm_num);
		} else {
			dimm_populated[dimm_num] = FALSE;
			debug("DIMM slot %lu: Not populated\n", dimm_num);
		}
	}

	if (dimm_found == FALSE) {
		printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
		spd_ddr_init_hang ();
	}
}

void board_add_ram_info(int use_default)
{
	PPC4xx_SYS_INFO board_cfg;
	u32 val;

	if (is_ecc_enabled())
		puts(" (ECC");
	else
		puts(" (ECC not");

	get_sys_info(&board_cfg);

	mfsdr(SDR0_DDR0, val);
	val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
	printf(" enabled, %d MHz", (val * 2) / 1000000);

	mfsdram(SDRAM_MMODE, val);
	val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
	printf(", CL%d)", val);
}

/*------------------------------------------------------------------
 * For the memory DIMMs installed, this routine verifies that they
 * really are DDR specific DIMMs.
 *-----------------------------------------------------------------*/
static void check_mem_type(unsigned long *dimm_populated,
			   unsigned char *iic0_dimm_addr,
			   unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long dimm_type;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] == TRUE) {
			dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
			switch (dimm_type) {
			case 1:
				printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
				       "slot %d.\n", (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 2:
				printf("ERROR: EDO DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 3:
				printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 4:
				printf("ERROR: SDRAM DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 5:
				printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 6:
				printf("ERROR: SGRAM DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 7:
				debug("DIMM slot %lu: DDR1 SDRAM detected\n", dimm_num);
				dimm_populated[dimm_num] = SDRAM_DDR1;
				break;
			case 8:
				debug("DIMM slot %lu: DDR2 SDRAM detected\n", dimm_num);
				dimm_populated[dimm_num] = SDRAM_DDR2;
				break;
			default:
				printf("ERROR: Unknown DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			}
		}
	}

	for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
		if ((dimm_populated[dimm_num - 1] != SDRAM_NONE)
		    && (dimm_populated[dimm_num] != SDRAM_NONE)
		    && (dimm_populated[dimm_num - 1] != dimm_populated[dimm_num])) {
			printf("ERROR: DDR1 and DDR2 DIMMs cannot be mixed.\n");
			spd_ddr_init_hang ();
		}
	}
}

/*------------------------------------------------------------------
 * For the memory DIMMs installed, this routine verifies that the
 * frequency previously calculated is supported.
 *-----------------------------------------------------------------*/
static void check_frequency(unsigned long *dimm_populated,
			    unsigned char *iic0_dimm_addr,
			    unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long tcyc_reg;
	unsigned long cycle_time;
	unsigned long calc_cycle_time;
	unsigned long sdram_freq;
	unsigned long sdr_ddrpll;
	PPC4xx_SYS_INFO board_cfg;

	/*------------------------------------------------------------------
	 * Get the board configuration info.
	 *-----------------------------------------------------------------*/
	get_sys_info(&board_cfg);

	mfsdr(SDR0_DDR0, sdr_ddrpll);
	sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));

	/*
	 * calc_cycle_time is calculated from the DDR frequency set by the
	 * board/chip and is expressed in multiples of 10 picoseconds
	 * to match the way the DIMM cycle time is calculated below.
	 */
	calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);
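
	/*
	 * Worked example (illustrative): with sdram_freq = 266666666 Hz the
	 * line above yields calc_cycle_time = (10^9 * 100) / 266666666 = 375,
	 * i.e. a 3.75 ns controller cycle time in units of 10 ps.
	 */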

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);

			/*
			 * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
			 * the higher order nibble (bits 4-7) designates the cycle time
			 * to a granularity of 1ns;
			 * the value presented by the lower order nibble (bits 0-3)
			 * has a granularity of .1ns and is added to the value designated
			 * by the higher nibble. In addition, four lines of the lower order
			 * nibble are assigned to support +.25, +.33, +.66 and +.75.
			 */

			/* Convert from hex to decimal */
			if ((tcyc_reg & 0x0F) == 0x0D)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
			else if ((tcyc_reg & 0x0F) == 0x0C)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
			else if ((tcyc_reg & 0x0F) == 0x0B)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
			else if ((tcyc_reg & 0x0F) == 0x0A)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
			else
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
					((tcyc_reg & 0x0F) * 10);
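
			/*
			 * Example (illustrative): an SPD byte 9 value of 0x3D
			 * decodes as 3 ns from the high nibble plus 0.75 ns for
			 * the 0xD code, i.e. cycle_time = 375 (3.75 ns, DDR2-533).
			 */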

			debug("cycle_time=%lu [10 picoseconds]\n", cycle_time);

			if (cycle_time > (calc_cycle_time + 10)) {
				/*
				 * The DIMM's minimum cycle time is larger than the
				 * cycle time resulting from the configured SDRAM
				 * frequency, i.e. the SDRAM clock is too fast for
				 * this DIMM. The additional 100 ps allow for a
				 * small uncertainty.
				 */
				printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
				       "slot %d \n while calculated cycle time is %d ps.\n",
				       (unsigned int)(cycle_time * 10),
				       (unsigned int)dimm_num,
				       (unsigned int)(calc_cycle_time * 10));
				printf("Replace the DIMM, or change the DDR frequency via "
				       "strapping bits.\n\n");
				spd_ddr_init_hang ();
			}
		}
	}
}

/*------------------------------------------------------------------
 * For the memory DIMMs installed, this routine verifies that the
 * maximum number of ranks/banks is not exceeded.
 *-----------------------------------------------------------------*/
static void check_rank_number(unsigned long *dimm_populated,
			      unsigned char *iic0_dimm_addr,
			      unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long dimm_rank;
	unsigned long total_rank = 0;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
			if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
				dimm_rank = (dimm_rank & 0x0F) + 1;
			else
				dimm_rank = dimm_rank & 0x0F;

			if (dimm_rank > MAXRANKS) {
				printf("ERROR: DRAM DIMM detected with %lu ranks in "
				       "slot %lu is not supported.\n", dimm_rank, dimm_num);
				printf("Only %d ranks are supported for all DIMMs.\n", MAXRANKS);
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
			} else
				total_rank += dimm_rank;
		}
		if (total_rank > MAXRANKS) {
			printf("ERROR: DRAM DIMM detected with a total of %d ranks "
			       "for all slots.\n", (unsigned int)total_rank);
			printf("Only %d ranks are supported for all DIMMs.\n", MAXRANKS);
			printf("Remove one of the DIMM modules.\n\n");
			spd_ddr_init_hang ();
		}
	}
}

/*------------------------------------------------------------------
 * Only 2.5V (DDR1) and 1.8V (DDR2) modules are supported.
 * This routine verifies this.
 *-----------------------------------------------------------------*/
static void check_voltage_type(unsigned long *dimm_populated,
			       unsigned char *iic0_dimm_addr,
			       unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long voltage_type;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
			switch (voltage_type) {
			case 0x00:
				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
				printf("This DIMM is 5.0 Volt/TTL.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x01:
				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
				printf("This DIMM is LVTTL.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x02:
				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
				printf("This DIMM is 1.5 Volt.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x03:
				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
				printf("This DIMM is 3.3 Volt/TTL.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x04:
				/* 2.5 Volt, allowed for DDR1 only */
				break;
			case 0x05:
				/* 1.8 Volt, allowed for DDR2 only */
				break;
			default:
				printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			}
		}
	}
}

/*-----------------------------------------------------------------------------+
 * program_copt1.
 *-----------------------------------------------------------------------------*/
static void program_copt1(unsigned long *dimm_populated,
			  unsigned char *iic0_dimm_addr,
			  unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long mcopt1;
	unsigned long ecc_enabled;
	unsigned long ecc = 0;
	unsigned long data_width = 0;
	unsigned long dimm_32bit;
	unsigned long dimm_64bit;
	unsigned long registered = 0;
	unsigned long attribute = 0;
	unsigned long buf0, buf1; /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
	unsigned long bankcount;
	unsigned long ddrtype;
	unsigned long val;

#ifdef CONFIG_DDR_ECC
	ecc_enabled = TRUE;
#else
	ecc_enabled = FALSE;
#endif
	dimm_32bit = FALSE;
	dimm_64bit = FALSE;
	buf0 = FALSE;
	buf1 = FALSE;

	/*------------------------------------------------------------------
	 * Set memory controller options reg 1, SDRAM_MCOPT1.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT1, val);
	mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
			 SDRAM_MCOPT1_PMU_MASK | SDRAM_MCOPT1_DMWD_MASK |
			 SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
			 SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
			 SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
			 SDRAM_MCOPT1_DREF_MASK);

	mcopt1 |= SDRAM_MCOPT1_QDEP;
	mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
	mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
	mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
	mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
	mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			/* test ecc support */
			ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
			if (ecc != 0x02)	/* ecc not supported */
				ecc_enabled = FALSE;

			/* test bank count */
			bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
			if (bankcount == 0x04)	/* bank count = 4 */
				mcopt1 |= SDRAM_MCOPT1_4_BANKS;
			else			/* bank count = 8 */
				mcopt1 |= SDRAM_MCOPT1_8_BANKS;

			/* test DDR type */
			ddrtype = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2);
			/* test for buffered/unbuffered, registered, differential clocks */
			registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
			attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);

			/* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
			if (dimm_num == 0) {
				if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
					mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
				if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
					mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
				if (registered == 1) { /* DDR2 always buffered */
					/* TODO: what about above comments ? */
					mcopt1 |= SDRAM_MCOPT1_RDEN;
					buf0 = TRUE;
				} else {
					/* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
					if ((attribute & 0x02) == 0x00) {
						/* buffered not supported */
						buf0 = FALSE;
					} else {
						mcopt1 |= SDRAM_MCOPT1_RDEN;
						buf0 = TRUE;
					}
				}
			} else if (dimm_num == 1) {
				if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
					mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
				if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
					mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
				if (registered == 1) {
					/* DDR2 always buffered */
					mcopt1 |= SDRAM_MCOPT1_RDEN;
					buf1 = TRUE;
				} else {
					if ((attribute & 0x02) == 0x00) {
						/* buffered not supported */
						buf1 = FALSE;
					} else {
						mcopt1 |= SDRAM_MCOPT1_RDEN;
						buf1 = TRUE;
					}
				}
			}

			/* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
			data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
				(((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
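
			/*
			 * Example (illustrative): SPD bytes 6/7 give the module
			 * data width; 64 means a plain 64-bit DIMM, while 72
			 * means 64 data bits plus 8 ECC check bits, which the
			 * switch below still treats as a 64-bit module.
			 */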
			switch (data_width) {
			case 72:
			case 64:
				dimm_64bit = TRUE;
				break;
			case 40:
			case 32:
				dimm_32bit = TRUE;
				break;
			default:
				printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
				       data_width);
				printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
				break;
			}
		}
	}

	/* verify matching properties */
	if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
		if (buf0 != buf1) {
			printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
			spd_ddr_init_hang ();
		}
	}

	if ((dimm_64bit == TRUE) && (dimm_32bit == TRUE)) {
		printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
		spd_ddr_init_hang ();
	} else if ((dimm_64bit == TRUE) && (dimm_32bit == FALSE)) {
		mcopt1 |= SDRAM_MCOPT1_DMWD_64;
	} else if ((dimm_64bit == FALSE) && (dimm_32bit == TRUE)) {
		mcopt1 |= SDRAM_MCOPT1_DMWD_32;
	} else {
		printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
		spd_ddr_init_hang ();
	}

	if (ecc_enabled == TRUE)
		mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
	else
		mcopt1 |= SDRAM_MCOPT1_MCHK_NON;

	mtsdram(SDRAM_MCOPT1, mcopt1);
}

/*-----------------------------------------------------------------------------+
 * program_codt.
 *-----------------------------------------------------------------------------*/
static void program_codt(unsigned long *dimm_populated,
			 unsigned char *iic0_dimm_addr,
			 unsigned long num_dimm_banks)
{
	unsigned long codt;
	unsigned long modt0 = 0;
	unsigned long modt1 = 0;
	unsigned long modt2 = 0;
	unsigned long modt3 = 0;
	unsigned char dimm_num;
	unsigned char dimm_rank;
	unsigned char total_rank = 0;
	unsigned char total_dimm = 0;
	unsigned char dimm_type = 0;
	unsigned char firstSlot = 0;

	/*------------------------------------------------------------------
	 * Set the SDRAM Controller On Die Termination Register
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_CODT, codt);
	codt &= ~(SDRAM_CODT_DQS_SINGLE_END | SDRAM_CODT_CKSE_SINGLE_END);
	codt |= SDRAM_CODT_IO_NMODE;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
			if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
				dimm_rank = (dimm_rank & 0x0F) + 1;
				dimm_type = SDRAM_DDR2;
			} else {
				dimm_rank = dimm_rank & 0x0F;
				dimm_type = SDRAM_DDR1;
			}
			total_rank += dimm_rank;
			total_dimm++;
			if ((dimm_num == 0) && (total_dimm == 1))
				firstSlot = TRUE;
			else
				firstSlot = FALSE;
		}
	}

	if (dimm_type == SDRAM_DDR2) {
		codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
		if ((total_dimm == 1) && (firstSlot == TRUE)) {
			if (total_rank == 1) {	/* PUUU */
				codt |= CALC_ODT_R(0);
				modt0 = CALC_ODT_W(0);
				modt1 = 0x00000000;
				modt2 = 0x00000000;
				modt3 = 0x00000000;
			}
			if (total_rank == 2) {	/* PPUU */
				codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
				modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
				modt1 = 0x00000000;
				modt2 = 0x00000000;
				modt3 = 0x00000000;
			}
		} else if ((total_dimm == 1) && (firstSlot != TRUE)) {
			if (total_rank == 1) {	/* UUPU */
				codt |= CALC_ODT_R(2);
				modt0 = 0x00000000;
				modt1 = 0x00000000;
				modt2 = CALC_ODT_W(2);
				modt3 = 0x00000000;
			}
			if (total_rank == 2) {	/* UUPP */
				codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
				modt0 = 0x00000000;
				modt1 = 0x00000000;
				modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
				modt3 = 0x00000000;
			}
		}
		if (total_dimm == 2) {
			if (total_rank == 2) {	/* PUPU */
				codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
				modt0 = CALC_ODT_RW(2);
				modt1 = 0x00000000;
				modt2 = CALC_ODT_RW(0);
				modt3 = 0x00000000;
			}
			if (total_rank == 4) {	/* PPPP */
				codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
					CALC_ODT_R(2) | CALC_ODT_R(3);
				modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
				modt1 = 0x00000000;
				modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
				modt3 = 0x00000000;
			}
		}
	} else {
		codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
		modt0 = 0x00000000;
		modt1 = 0x00000000;
		modt2 = 0x00000000;
		modt3 = 0x00000000;

		if (total_dimm == 1) {
			if (total_rank == 1)
				codt |= 0x00800000;
			if (total_rank == 2)
				codt |= 0x02800000;
		}
		if (total_dimm == 2) {
			if (total_rank == 2)
				codt |= 0x08800000;
			if (total_rank == 4)
				codt |= 0x2a800000;
		}
	}

	debug("nb of dimm %d\n", total_dimm);
	debug("nb of rank %d\n", total_rank);
	if (total_dimm == 1)
		debug("dimm in slot %d\n", firstSlot);

	mtsdram(SDRAM_CODT, codt);
	mtsdram(SDRAM_MODT0, modt0);
	mtsdram(SDRAM_MODT1, modt1);
	mtsdram(SDRAM_MODT2, modt2);
	mtsdram(SDRAM_MODT3, modt3);
}
  1093. /*-----------------------------------------------------------------------------+
  1094. * program_initplr.
  1095. *-----------------------------------------------------------------------------*/
  1096. static void program_initplr(unsigned long *dimm_populated,
  1097. unsigned char *iic0_dimm_addr,
  1098. unsigned long num_dimm_banks,
  1099. ddr_cas_id_t selected_cas,
  1100. int write_recovery)
  1101. {
  1102. u32 cas = 0;
  1103. u32 odt = 0;
  1104. u32 ods = 0;
  1105. u32 mr;
  1106. u32 wr;
  1107. u32 emr;
  1108. u32 emr2;
  1109. u32 emr3;
  1110. int dimm_num;
  1111. int total_dimm = 0;
  1112. /******************************************************
  1113. ** Assumption: if more than one DIMM, all DIMMs are the same
  1114. ** as already checked in check_memory_type
  1115. ******************************************************/
  1116. if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
  1117. mtsdram(SDRAM_INITPLR0, 0x81B80000);
  1118. mtsdram(SDRAM_INITPLR1, 0x81900400);
  1119. mtsdram(SDRAM_INITPLR2, 0x81810000);
  1120. mtsdram(SDRAM_INITPLR3, 0xff800162);
  1121. mtsdram(SDRAM_INITPLR4, 0x81900400);
  1122. mtsdram(SDRAM_INITPLR5, 0x86080000);
  1123. mtsdram(SDRAM_INITPLR6, 0x86080000);
  1124. mtsdram(SDRAM_INITPLR7, 0x81000062);
  1125. } else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
  1126. switch (selected_cas) {
  1127. case DDR_CAS_3:
  1128. cas = 3 << 4;
  1129. break;
  1130. case DDR_CAS_4:
  1131. cas = 4 << 4;
  1132. break;
  1133. case DDR_CAS_5:
  1134. cas = 5 << 4;
  1135. break;
  1136. default:
  1137. printf("ERROR: ucode error on selected_cas value %d", selected_cas);
  1138. spd_ddr_init_hang ();
  1139. break;
  1140. }
  1141. #if 0
  1142. /*
  1143. * ToDo - Still a problem with the write recovery:
  1144. * On the Corsair CM2X512-5400C4 module, setting write recovery
  1145. * in the INITPLR reg to the value calculated in program_mode()
  1146. * results in not correctly working DDR2 memory (crash after
  1147. * relocation).
  1148. *
  1149. * So for now, set the write recovery to 3. This seems to work
1150. * on the Corsair module too.
  1151. *
  1152. * 2007-03-01, sr
  1153. */
  1154. switch (write_recovery) {
  1155. case 3:
  1156. wr = WRITE_RECOV_3;
  1157. break;
  1158. case 4:
  1159. wr = WRITE_RECOV_4;
  1160. break;
  1161. case 5:
  1162. wr = WRITE_RECOV_5;
  1163. break;
  1164. case 6:
  1165. wr = WRITE_RECOV_6;
  1166. break;
  1167. default:
  1168. printf("ERROR: write recovery not support (%d)", write_recovery);
  1169. spd_ddr_init_hang ();
  1170. break;
  1171. }
  1172. #else
  1173. wr = WRITE_RECOV_3; /* test-only, see description above */
  1174. #endif
  1175. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
  1176. if (dimm_populated[dimm_num] != SDRAM_NONE)
  1177. total_dimm++;
  1178. if (total_dimm == 1) {
  1179. odt = ODT_150_OHM;
  1180. ods = ODS_FULL;
  1181. } else if (total_dimm == 2) {
  1182. odt = ODT_75_OHM;
  1183. ods = ODS_REDUCED;
  1184. } else {
  1185. printf("ERROR: Unsupported number of DIMM's (%d)", total_dimm);
  1186. spd_ddr_init_hang ();
  1187. }
  1188. mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
  1189. emr = CMD_EMR | SELECT_EMR | odt | ods;
  1190. emr2 = CMD_EMR | SELECT_EMR2;
  1191. emr3 = CMD_EMR | SELECT_EMR3;
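/*
 * Illustration of the mode-register words built above: mr programs the
 * DDR2 mode register with burst length 4, the write-recovery field and
 * the CAS latency (e.g. CL=4 gives cas = 4 << 4 = 0x40); emr programs
 * the extended mode register with the ODT value chosen from the DIMM
 * count (150 ohm for one DIMM, 75 ohm for two) and the output drive
 * strength; emr2/emr3 are left at their defaults.
 */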
  1192. /* NOP - Wait 106 MemClk cycles */
  1193. mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
  1194. SDRAM_INITPLR_IMWT_ENCODE(106));
  1195. udelay(1000);
  1196. /* precharge 4 MemClk cycles */
  1197. mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
  1198. SDRAM_INITPLR_IMWT_ENCODE(4));
  1199. /* EMR2 - Wait tMRD (2 MemClk cycles) */
  1200. mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
  1201. SDRAM_INITPLR_IMWT_ENCODE(2));
  1202. /* EMR3 - Wait tMRD (2 MemClk cycles) */
  1203. mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
  1204. SDRAM_INITPLR_IMWT_ENCODE(2));
  1205. /* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
  1206. mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
  1207. SDRAM_INITPLR_IMWT_ENCODE(2));
  1208. /* MR w/ DLL reset - 200 cycle wait for DLL reset */
  1209. mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
  1210. SDRAM_INITPLR_IMWT_ENCODE(200));
  1211. udelay(1000);
  1212. /* precharge 4 MemClk cycles */
  1213. mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
  1214. SDRAM_INITPLR_IMWT_ENCODE(4));
  1215. /* Refresh 25 MemClk cycles */
  1216. mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1217. SDRAM_INITPLR_IMWT_ENCODE(25));
  1218. /* Refresh 25 MemClk cycles */
  1219. mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1220. SDRAM_INITPLR_IMWT_ENCODE(25));
  1221. /* Refresh 25 MemClk cycles */
  1222. mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1223. SDRAM_INITPLR_IMWT_ENCODE(25));
  1224. /* Refresh 25 MemClk cycles */
  1225. mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1226. SDRAM_INITPLR_IMWT_ENCODE(25));
  1227. /* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
  1228. mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
  1229. SDRAM_INITPLR_IMWT_ENCODE(2));
  1230. /* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
  1231. mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
  1232. SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
  1233. /* EMR OCD Exit */
  1234. mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
  1235. SDRAM_INITPLR_IMWT_ENCODE(2));
  1236. } else {
  1237. printf("ERROR: ucode error as unknown DDR type in program_initplr");
  1238. spd_ddr_init_hang ();
  1239. }
  1240. }
  1241. /*------------------------------------------------------------------
  1242. * This routine programs the SDRAM_MMODE register.
1243. * selected_cas is an output parameter that the caller will then
1244. * pass to program_initplr() above.
  1245. *-----------------------------------------------------------------*/
  1246. static void program_mode(unsigned long *dimm_populated,
  1247. unsigned char *iic0_dimm_addr,
  1248. unsigned long num_dimm_banks,
  1249. ddr_cas_id_t *selected_cas,
  1250. int *write_recovery)
  1251. {
  1252. unsigned long dimm_num;
  1253. unsigned long sdram_ddr1;
  1254. unsigned long t_wr_ns;
  1255. unsigned long t_wr_clk;
  1256. unsigned long cas_bit;
  1257. unsigned long cas_index;
  1258. unsigned long sdram_freq;
  1259. unsigned long ddr_check;
  1260. unsigned long mmode;
  1261. unsigned long tcyc_reg;
  1262. unsigned long cycle_2_0_clk;
  1263. unsigned long cycle_2_5_clk;
  1264. unsigned long cycle_3_0_clk;
  1265. unsigned long cycle_4_0_clk;
  1266. unsigned long cycle_5_0_clk;
  1267. unsigned long max_2_0_tcyc_ns_x_100;
  1268. unsigned long max_2_5_tcyc_ns_x_100;
  1269. unsigned long max_3_0_tcyc_ns_x_100;
  1270. unsigned long max_4_0_tcyc_ns_x_100;
  1271. unsigned long max_5_0_tcyc_ns_x_100;
  1272. unsigned long cycle_time_ns_x_100[3];
  1273. PPC4xx_SYS_INFO board_cfg;
  1274. unsigned char cas_2_0_available;
  1275. unsigned char cas_2_5_available;
  1276. unsigned char cas_3_0_available;
  1277. unsigned char cas_4_0_available;
  1278. unsigned char cas_5_0_available;
  1279. unsigned long sdr_ddrpll;
  1280. /*------------------------------------------------------------------
  1281. * Get the board configuration info.
  1282. *-----------------------------------------------------------------*/
  1283. get_sys_info(&board_cfg);
  1284. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1285. sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
  1286. debug("sdram_freq=%lu\n", sdram_freq);
  1287. /*------------------------------------------------------------------
  1288. * Handle the timing. We need to find the worst case timing of all
  1289. * the dimm modules installed.
  1290. *-----------------------------------------------------------------*/
  1291. t_wr_ns = 0;
  1292. cas_2_0_available = TRUE;
  1293. cas_2_5_available = TRUE;
  1294. cas_3_0_available = TRUE;
  1295. cas_4_0_available = TRUE;
  1296. cas_5_0_available = TRUE;
  1297. max_2_0_tcyc_ns_x_100 = 10;
  1298. max_2_5_tcyc_ns_x_100 = 10;
  1299. max_3_0_tcyc_ns_x_100 = 10;
  1300. max_4_0_tcyc_ns_x_100 = 10;
  1301. max_5_0_tcyc_ns_x_100 = 10;
  1302. sdram_ddr1 = TRUE;
  1303. /* loop through all the DIMM slots on the board */
  1304. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1305. /* If a dimm is installed in a particular slot ... */
  1306. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1307. if (dimm_populated[dimm_num] == SDRAM_DDR1)
  1308. sdram_ddr1 = TRUE;
  1309. else
  1310. sdram_ddr1 = FALSE;
  1311. /* t_wr_ns = max(t_wr_ns, (unsigned long)dimm_spd[dimm_num][36] >> 2); */ /* not used in this loop. */
  1312. cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
  1313. debug("cas_bit[SPD byte 18]=%02lx\n", cas_bit);
  1314. /* For a particular DIMM, grab the three CAS values it supports */
  1315. for (cas_index = 0; cas_index < 3; cas_index++) {
  1316. switch (cas_index) {
  1317. case 0:
  1318. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
  1319. break;
  1320. case 1:
  1321. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
  1322. break;
  1323. default:
  1324. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
  1325. break;
  1326. }
  1327. if ((tcyc_reg & 0x0F) >= 10) {
  1328. if ((tcyc_reg & 0x0F) == 0x0D) {
  1329. /* Convert from hex to decimal */
  1330. cycle_time_ns_x_100[cas_index] =
  1331. (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
  1332. } else {
  1333. printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
  1334. "in slot %d\n", (unsigned int)dimm_num);
  1335. spd_ddr_init_hang ();
  1336. }
  1337. } else {
  1338. /* Convert from hex to decimal */
  1339. cycle_time_ns_x_100[cas_index] =
  1340. (((tcyc_reg & 0xF0) >> 4) * 100) +
  1341. ((tcyc_reg & 0x0F)*10);
  1342. }
  1343. debug("cas_index=%lu: cycle_time_ns_x_100=%lu\n", cas_index,
  1344. cycle_time_ns_x_100[cas_index]);
  1345. }
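/*
 * Example of the SPD tCK decoding above: byte value 0x50 decodes to
 * (5 * 100) + 0 = 500, i.e. 5.00 ns; 0x3D uses the special lower-nibble
 * code 0xD and decodes to (3 * 100) + 75 = 375, i.e. 3.75 ns.
 */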
  1346. /* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
  1347. /* supported for a particular DIMM. */
  1348. cas_index = 0;
  1349. if (sdram_ddr1) {
  1350. /*
  1351. * DDR devices use the following bitmask for CAS latency:
  1352. * Bit 7 6 5 4 3 2 1 0
  1353. * TBD 4.0 3.5 3.0 2.5 2.0 1.5 1.0
  1354. */
  1355. if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
  1356. (cycle_time_ns_x_100[cas_index] != 0)) {
  1357. max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
  1358. cycle_time_ns_x_100[cas_index]);
  1359. cas_index++;
  1360. } else {
  1361. if (cas_index != 0)
  1362. cas_index++;
  1363. cas_4_0_available = FALSE;
  1364. }
  1365. if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
  1366. (cycle_time_ns_x_100[cas_index] != 0)) {
  1367. max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
  1368. cycle_time_ns_x_100[cas_index]);
  1369. cas_index++;
  1370. } else {
  1371. if (cas_index != 0)
  1372. cas_index++;
  1373. cas_3_0_available = FALSE;
  1374. }
  1375. if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
  1376. (cycle_time_ns_x_100[cas_index] != 0)) {
  1377. max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
  1378. cycle_time_ns_x_100[cas_index]);
  1379. cas_index++;
  1380. } else {
  1381. if (cas_index != 0)
  1382. cas_index++;
  1383. cas_2_5_available = FALSE;
  1384. }
  1385. if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
  1386. (cycle_time_ns_x_100[cas_index] != 0)) {
  1387. max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
  1388. cycle_time_ns_x_100[cas_index]);
  1389. cas_index++;
  1390. } else {
  1391. if (cas_index != 0)
  1392. cas_index++;
  1393. cas_2_0_available = FALSE;
  1394. }
  1395. } else {
  1396. /*
  1397. * DDR2 devices use the following bitmask for CAS latency:
  1398. * Bit 7 6 5 4 3 2 1 0
  1399. * TBD 6.0 5.0 4.0 3.0 2.0 TBD TBD
  1400. */
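/*
 * Example: a DDR2 SPD byte 18 of 0x38 (bits 5..3 set) advertises CAS
 * latencies 5, 4 and 3. Per the usual SPD convention, bytes 9/23/25
 * (read into cycle_time_ns_x_100[0..2] above) then give tCK at the
 * highest, second highest and third highest supported CAS latency,
 * which is why cas_index advances as the checks walk down from CL5.
 */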
  1401. if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
  1402. (cycle_time_ns_x_100[cas_index] != 0)) {
  1403. max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
  1404. cycle_time_ns_x_100[cas_index]);
  1405. cas_index++;
  1406. } else {
  1407. if (cas_index != 0)
  1408. cas_index++;
  1409. cas_5_0_available = FALSE;
  1410. }
  1411. if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
  1412. (cycle_time_ns_x_100[cas_index] != 0)) {
  1413. max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
  1414. cycle_time_ns_x_100[cas_index]);
  1415. cas_index++;
  1416. } else {
  1417. if (cas_index != 0)
  1418. cas_index++;
  1419. cas_4_0_available = FALSE;
  1420. }
  1421. if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
  1422. (cycle_time_ns_x_100[cas_index] != 0)) {
  1423. max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
  1424. cycle_time_ns_x_100[cas_index]);
  1425. cas_index++;
  1426. } else {
  1427. if (cas_index != 0)
  1428. cas_index++;
  1429. cas_3_0_available = FALSE;
  1430. }
  1431. }
  1432. }
  1433. }
  1434. /*------------------------------------------------------------------
  1435. * Set the SDRAM mode, SDRAM_MMODE
  1436. *-----------------------------------------------------------------*/
  1437. mfsdram(SDRAM_MMODE, mmode);
  1438. mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
  1439. /* add 10 here because of rounding problems */
  1440. cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
  1441. cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
  1442. cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
  1443. cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
  1444. cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
  1445. debug("cycle_3_0_clk=%lu\n", cycle_3_0_clk);
  1446. debug("cycle_4_0_clk=%lu\n", cycle_4_0_clk);
  1447. debug("cycle_5_0_clk=%lu\n", cycle_5_0_clk);
  1448. if (sdram_ddr1 == TRUE) { /* DDR1 */
  1449. if ((cas_2_0_available == TRUE) && (sdram_freq <= cycle_2_0_clk)) {
  1450. mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
  1451. *selected_cas = DDR_CAS_2;
  1452. } else if ((cas_2_5_available == TRUE) && (sdram_freq <= cycle_2_5_clk)) {
  1453. mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
  1454. *selected_cas = DDR_CAS_2_5;
  1455. } else if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
  1456. mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
  1457. *selected_cas = DDR_CAS_3;
  1458. } else {
  1459. printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
  1460. printf("Only DIMMs DDR1 with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
  1461. printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
  1462. spd_ddr_init_hang ();
  1463. }
  1464. } else { /* DDR2 */
  1465. debug("cas_3_0_available=%d\n", cas_3_0_available);
  1466. debug("cas_4_0_available=%d\n", cas_4_0_available);
  1467. debug("cas_5_0_available=%d\n", cas_5_0_available);
  1468. if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
  1469. mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
  1470. *selected_cas = DDR_CAS_3;
  1471. } else if ((cas_4_0_available == TRUE) && (sdram_freq <= cycle_4_0_clk)) {
  1472. mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
  1473. *selected_cas = DDR_CAS_4;
  1474. } else if ((cas_5_0_available == TRUE) && (sdram_freq <= cycle_5_0_clk)) {
  1475. mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
  1476. *selected_cas = DDR_CAS_5;
  1477. } else {
  1478. printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
  1479. printf("Only DIMMs DDR2 with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
  1480. printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
  1481. printf("cas3=%d cas4=%d cas5=%d\n",
  1482. cas_3_0_available, cas_4_0_available, cas_5_0_available);
  1483. printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
  1484. sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
  1485. spd_ddr_init_hang ();
  1486. }
  1487. }
  1488. if (sdram_ddr1 == TRUE)
  1489. mmode |= SDRAM_MMODE_WR_DDR1;
  1490. else {
  1491. /* loop through all the DIMM slots on the board */
  1492. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1493. /* If a dimm is installed in a particular slot ... */
  1494. if (dimm_populated[dimm_num] != SDRAM_NONE)
  1495. t_wr_ns = max(t_wr_ns,
  1496. spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
  1497. }
  1498. /*
  1499. * convert from nanoseconds to ddr clocks
  1500. * round up if necessary
  1501. */
  1502. t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
  1503. ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
  1504. if (sdram_freq != ddr_check)
  1505. t_wr_clk++;
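/*
 * Worked example of the round-up above (assuming MULDIV64 truncates):
 * SPD byte 36 = 60 quarter-ns gives t_wr_ns = 15; at sdram_freq =
 * 333333333 Hz, t_wr_clk = 333333333 * 15 / 10^9 = 4 (truncated), the
 * ddr_check comparison detects the truncation and bumps t_wr_clk to 5,
 * so SDRAM_MMODE_WR_DDR2_5_CYC is selected below.
 */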
  1506. switch (t_wr_clk) {
  1507. case 0:
  1508. case 1:
  1509. case 2:
  1510. case 3:
  1511. mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
  1512. break;
  1513. case 4:
  1514. mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
  1515. break;
  1516. case 5:
  1517. mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
  1518. break;
  1519. default:
  1520. mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
  1521. break;
  1522. }
  1523. *write_recovery = t_wr_clk;
  1524. }
  1525. debug("CAS latency = %d\n", *selected_cas);
  1526. debug("Write recovery = %d\n", *write_recovery);
  1527. mtsdram(SDRAM_MMODE, mmode);
  1528. }
  1529. /*-----------------------------------------------------------------------------+
  1530. * program_rtr.
  1531. *-----------------------------------------------------------------------------*/
  1532. static void program_rtr(unsigned long *dimm_populated,
  1533. unsigned char *iic0_dimm_addr,
  1534. unsigned long num_dimm_banks)
  1535. {
  1536. PPC4xx_SYS_INFO board_cfg;
  1537. unsigned long max_refresh_rate;
  1538. unsigned long dimm_num;
  1539. unsigned long refresh_rate_type;
  1540. unsigned long refresh_rate;
  1541. unsigned long rint;
  1542. unsigned long sdram_freq;
  1543. unsigned long sdr_ddrpll;
  1544. unsigned long val;
  1545. /*------------------------------------------------------------------
  1546. * Get the board configuration info.
  1547. *-----------------------------------------------------------------*/
  1548. get_sys_info(&board_cfg);
  1549. /*------------------------------------------------------------------
  1550. * Set the SDRAM Refresh Timing Register, SDRAM_RTR
  1551. *-----------------------------------------------------------------*/
  1552. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1553. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  1554. max_refresh_rate = 0;
  1555. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1556. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1557. refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
  1558. refresh_rate_type &= 0x7F;
  1559. switch (refresh_rate_type) {
  1560. case 0:
  1561. refresh_rate = 15625;
  1562. break;
  1563. case 1:
  1564. refresh_rate = 3906;
  1565. break;
  1566. case 2:
  1567. refresh_rate = 7812;
  1568. break;
  1569. case 3:
  1570. refresh_rate = 31250;
  1571. break;
  1572. case 4:
  1573. refresh_rate = 62500;
  1574. break;
  1575. case 5:
  1576. refresh_rate = 125000;
  1577. break;
  1578. default:
  1579. refresh_rate = 0;
  1580. printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
  1581. (unsigned int)dimm_num);
  1582. printf("Replace the DIMM module with a supported DIMM.\n\n");
  1583. spd_ddr_init_hang ();
  1584. break;
  1585. }
  1586. max_refresh_rate = max(max_refresh_rate, refresh_rate);
  1587. }
  1588. }
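/*
 * Example (assuming MULDIV64 truncates): refresh_rate_type 2 selects a
 * 7812 ns (7.8125 us) refresh interval; at sdram_freq = 200000000 Hz
 * the RINT field below becomes 200000000 * 7812 / 10^9 = 1562 memory
 * clocks between refresh commands.
 */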
  1589. rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
  1590. mfsdram(SDRAM_RTR, val);
  1591. mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
  1592. (SDRAM_RTR_RINT_ENCODE(rint)));
  1593. }
  1594. /*------------------------------------------------------------------
  1595. * This routine programs the SDRAM_TRx registers.
  1596. *-----------------------------------------------------------------*/
  1597. static void program_tr(unsigned long *dimm_populated,
  1598. unsigned char *iic0_dimm_addr,
  1599. unsigned long num_dimm_banks)
  1600. {
  1601. unsigned long dimm_num;
  1602. unsigned long sdram_ddr1;
  1603. unsigned long t_rp_ns;
  1604. unsigned long t_rcd_ns;
  1605. unsigned long t_rrd_ns;
  1606. unsigned long t_ras_ns;
  1607. unsigned long t_rc_ns;
  1608. unsigned long t_rfc_ns;
  1609. unsigned long t_wpc_ns;
  1610. unsigned long t_wtr_ns;
  1611. unsigned long t_rpc_ns;
  1612. unsigned long t_rp_clk;
  1613. unsigned long t_rcd_clk;
  1614. unsigned long t_rrd_clk;
  1615. unsigned long t_ras_clk;
  1616. unsigned long t_rc_clk;
  1617. unsigned long t_rfc_clk;
  1618. unsigned long t_wpc_clk;
  1619. unsigned long t_wtr_clk;
  1620. unsigned long t_rpc_clk;
  1621. unsigned long sdtr1, sdtr2, sdtr3;
  1622. unsigned long ddr_check;
  1623. unsigned long sdram_freq;
  1624. unsigned long sdr_ddrpll;
  1625. PPC4xx_SYS_INFO board_cfg;
  1626. /*------------------------------------------------------------------
  1627. * Get the board configuration info.
  1628. *-----------------------------------------------------------------*/
  1629. get_sys_info(&board_cfg);
  1630. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1631. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  1632. /*------------------------------------------------------------------
  1633. * Handle the timing. We need to find the worst case timing of all
  1634. * the dimm modules installed.
  1635. *-----------------------------------------------------------------*/
  1636. t_rp_ns = 0;
  1637. t_rrd_ns = 0;
  1638. t_rcd_ns = 0;
  1639. t_ras_ns = 0;
  1640. t_rc_ns = 0;
  1641. t_rfc_ns = 0;
  1642. t_wpc_ns = 0;
  1643. t_wtr_ns = 0;
  1644. t_rpc_ns = 0;
  1645. sdram_ddr1 = TRUE;
  1646. /* loop through all the DIMM slots on the board */
  1647. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1648. /* If a dimm is installed in a particular slot ... */
  1649. if (dimm_populated[dimm_num] != SDRAM_NONE) {
1650. if (dimm_populated[dimm_num] == SDRAM_DDR1)
1651. sdram_ddr1 = TRUE;
1652. else
1653. sdram_ddr1 = FALSE;
  1654. t_rcd_ns = max(t_rcd_ns, spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
  1655. t_rrd_ns = max(t_rrd_ns, spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
  1656. t_rp_ns = max(t_rp_ns, spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
  1657. t_ras_ns = max(t_ras_ns, spd_read(iic0_dimm_addr[dimm_num], 30));
  1658. t_rc_ns = max(t_rc_ns, spd_read(iic0_dimm_addr[dimm_num], 41));
  1659. t_rfc_ns = max(t_rfc_ns, spd_read(iic0_dimm_addr[dimm_num], 42));
  1660. }
  1661. }
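/*
 * SPD timing units used above: bytes 27 (tRP), 28 (tRRD) and 29 (tRCD)
 * are specified in quarter-nanosecond steps, hence the >> 2 (which
 * drops any fractional quarter-ns); bytes 30 (tRAS), 41 (tRC) and
 * 42 (tRFC) are already in whole nanoseconds.
 */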
  1662. /*------------------------------------------------------------------
  1663. * Set the SDRAM Timing Reg 1, SDRAM_TR1
  1664. *-----------------------------------------------------------------*/
  1665. mfsdram(SDRAM_SDTR1, sdtr1);
  1666. sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
  1667. SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
  1668. /* default values */
  1669. sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
  1670. sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
  1671. /* normal operations */
  1672. sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
  1673. sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
  1674. mtsdram(SDRAM_SDTR1, sdtr1);
  1675. /*------------------------------------------------------------------
  1676. * Set the SDRAM Timing Reg 2, SDRAM_TR2
  1677. *-----------------------------------------------------------------*/
  1678. mfsdram(SDRAM_SDTR2, sdtr2);
  1679. sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK | SDRAM_SDTR2_WTR_MASK |
  1680. SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
  1681. SDRAM_SDTR2_RPC_MASK | SDRAM_SDTR2_RP_MASK |
  1682. SDRAM_SDTR2_RRD_MASK);
  1683. /*
  1684. * convert t_rcd from nanoseconds to ddr clocks
  1685. * round up if necessary
  1686. */
  1687. t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
  1688. ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
  1689. if (sdram_freq != ddr_check)
  1690. t_rcd_clk++;
  1691. switch (t_rcd_clk) {
  1692. case 0:
  1693. case 1:
  1694. sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
  1695. break;
  1696. case 2:
  1697. sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
  1698. break;
  1699. case 3:
  1700. sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
  1701. break;
  1702. case 4:
  1703. sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
  1704. break;
  1705. default:
  1706. sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
  1707. break;
  1708. }
  1709. if (sdram_ddr1 == TRUE) { /* DDR1 */
  1710. if (sdram_freq < 200000000) {
  1711. sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
  1712. sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
  1713. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1714. } else {
  1715. sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
  1716. sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
  1717. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1718. }
  1719. } else { /* DDR2 */
  1720. /* loop through all the DIMM slots on the board */
  1721. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1722. /* If a dimm is installed in a particular slot ... */
  1723. if (dimm_populated[dimm_num] != SDRAM_NONE) {
1724. t_wpc_ns = max(t_wpc_ns, spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
  1725. t_wtr_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
  1726. t_rpc_ns = max(t_rpc_ns, spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
  1727. }
  1728. }
  1729. /*
  1730. * convert from nanoseconds to ddr clocks
  1731. * round up if necessary
  1732. */
  1733. t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
  1734. ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
  1735. if (sdram_freq != ddr_check)
  1736. t_wpc_clk++;
  1737. switch (t_wpc_clk) {
  1738. case 0:
  1739. case 1:
  1740. case 2:
  1741. sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
  1742. break;
  1743. case 3:
  1744. sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
  1745. break;
  1746. case 4:
  1747. sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
  1748. break;
  1749. case 5:
  1750. sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
  1751. break;
  1752. default:
  1753. sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
  1754. break;
  1755. }
  1756. /*
  1757. * convert from nanoseconds to ddr clocks
  1758. * round up if necessary
  1759. */
  1760. t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
  1761. ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
  1762. if (sdram_freq != ddr_check)
  1763. t_wtr_clk++;
  1764. switch (t_wtr_clk) {
  1765. case 0:
  1766. case 1:
  1767. sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
  1768. break;
  1769. case 2:
  1770. sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
  1771. break;
  1772. case 3:
  1773. sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
  1774. break;
  1775. default:
  1776. sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
  1777. break;
  1778. }
  1779. /*
  1780. * convert from nanoseconds to ddr clocks
  1781. * round up if necessary
  1782. */
  1783. t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
  1784. ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
  1785. if (sdram_freq != ddr_check)
  1786. t_rpc_clk++;
  1787. switch (t_rpc_clk) {
  1788. case 0:
  1789. case 1:
  1790. case 2:
  1791. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1792. break;
  1793. case 3:
  1794. sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
  1795. break;
  1796. default:
  1797. sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
  1798. break;
  1799. }
  1800. }
  1801. /* default value */
  1802. sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
  1803. /*
  1804. * convert t_rrd from nanoseconds to ddr clocks
  1805. * round up if necessary
  1806. */
  1807. t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
  1808. ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
  1809. if (sdram_freq != ddr_check)
  1810. t_rrd_clk++;
  1811. if (t_rrd_clk == 3)
  1812. sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
  1813. else
  1814. sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
  1815. /*
  1816. * convert t_rp from nanoseconds to ddr clocks
  1817. * round up if necessary
  1818. */
  1819. t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
  1820. ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
  1821. if (sdram_freq != ddr_check)
  1822. t_rp_clk++;
  1823. switch (t_rp_clk) {
  1824. case 0:
  1825. case 1:
  1826. case 2:
  1827. case 3:
  1828. sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
  1829. break;
  1830. case 4:
  1831. sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
  1832. break;
  1833. case 5:
  1834. sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
  1835. break;
  1836. case 6:
  1837. sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
  1838. break;
  1839. default:
  1840. sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
  1841. break;
  1842. }
  1843. mtsdram(SDRAM_SDTR2, sdtr2);
  1844. /*------------------------------------------------------------------
  1845. * Set the SDRAM Timing Reg 3, SDRAM_TR3
  1846. *-----------------------------------------------------------------*/
  1847. mfsdram(SDRAM_SDTR3, sdtr3);
  1848. sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK | SDRAM_SDTR3_RC_MASK |
  1849. SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
  1850. /*
  1851. * convert t_ras from nanoseconds to ddr clocks
  1852. * round up if necessary
  1853. */
  1854. t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
  1855. ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
  1856. if (sdram_freq != ddr_check)
  1857. t_ras_clk++;
  1858. sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
  1859. /*
  1860. * convert t_rc from nanoseconds to ddr clocks
  1861. * round up if necessary
  1862. */
  1863. t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
  1864. ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
  1865. if (sdram_freq != ddr_check)
  1866. t_rc_clk++;
  1867. sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
  1868. /* default xcs value */
  1869. sdtr3 |= SDRAM_SDTR3_XCS;
  1870. /*
  1871. * convert t_rfc from nanoseconds to ddr clocks
  1872. * round up if necessary
  1873. */
  1874. t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
  1875. ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
  1876. if (sdram_freq != ddr_check)
  1877. t_rfc_clk++;
  1878. sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
  1879. mtsdram(SDRAM_SDTR3, sdtr3);
  1880. }
  1881. /*-----------------------------------------------------------------------------+
  1882. * program_bxcf.
  1883. *-----------------------------------------------------------------------------*/
  1884. static void program_bxcf(unsigned long *dimm_populated,
  1885. unsigned char *iic0_dimm_addr,
  1886. unsigned long num_dimm_banks)
  1887. {
  1888. unsigned long dimm_num;
  1889. unsigned long num_col_addr;
  1890. unsigned long num_ranks;
  1891. unsigned long num_banks;
  1892. unsigned long mode;
  1893. unsigned long ind_rank;
  1894. unsigned long ind;
  1895. unsigned long ind_bank;
  1896. unsigned long bank_0_populated;
  1897. /*------------------------------------------------------------------
  1898. * Set the BxCF regs. First, wipe out the bank config registers.
  1899. *-----------------------------------------------------------------*/
  1900. mtsdram(SDRAM_MB0CF, 0x00000000);
  1901. mtsdram(SDRAM_MB1CF, 0x00000000);
  1902. mtsdram(SDRAM_MB2CF, 0x00000000);
  1903. mtsdram(SDRAM_MB3CF, 0x00000000);
  1904. mode = SDRAM_BXCF_M_BE_ENABLE;
  1905. bank_0_populated = 0;
  1906. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1907. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1908. num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
  1909. num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
  1910. if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
  1911. num_ranks = (num_ranks & 0x0F) +1;
  1912. else
  1913. num_ranks = num_ranks & 0x0F;
  1914. num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
  1915. for (ind_bank = 0; ind_bank < 2; ind_bank++) {
  1916. if (num_banks == 4)
  1917. ind = 0;
  1918. else
  1919. ind = 5 << 8;
  1920. switch (num_col_addr) {
  1921. case 0x08:
  1922. mode |= (SDRAM_BXCF_M_AM_0 + ind);
  1923. break;
  1924. case 0x09:
  1925. mode |= (SDRAM_BXCF_M_AM_1 + ind);
  1926. break;
  1927. case 0x0A:
  1928. mode |= (SDRAM_BXCF_M_AM_2 + ind);
  1929. break;
  1930. case 0x0B:
  1931. mode |= (SDRAM_BXCF_M_AM_3 + ind);
  1932. break;
  1933. case 0x0C:
  1934. mode |= (SDRAM_BXCF_M_AM_4 + ind);
  1935. break;
  1936. default:
  1937. printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
  1938. (unsigned int)dimm_num);
  1939. printf("ERROR: Unsupported value for number of "
  1940. "column addresses: %d.\n", (unsigned int)num_col_addr);
  1941. printf("Replace the DIMM module with a supported DIMM.\n\n");
  1942. spd_ddr_init_hang ();
  1943. }
  1944. }
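/*
 * Example of the addressing-mode selection above: a DIMM reporting 10
 * column addresses (SPD byte 4 = 0x0A) with 4 internal banks selects
 * SDRAM_BXCF_M_AM_2 (ind = 0); with 8 banks the (5 << 8) offset appears
 * to pick the corresponding 8-bank addressing-mode encoding instead.
 */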
  1945. if ((dimm_populated[dimm_num] != SDRAM_NONE)&& (dimm_num ==1))
  1946. bank_0_populated = 1;
  1947. for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
  1948. mtsdram(SDRAM_MB0CF +
  1949. ((dimm_num + bank_0_populated + ind_rank) << 2),
  1950. mode);
  1951. }
  1952. }
  1953. }
  1954. }
  1955. /*------------------------------------------------------------------
  1956. * program memory queue.
  1957. *-----------------------------------------------------------------*/
  1958. static void program_memory_queue(unsigned long *dimm_populated,
  1959. unsigned char *iic0_dimm_addr,
  1960. unsigned long num_dimm_banks)
  1961. {
  1962. unsigned long dimm_num;
  1963. phys_size_t rank_base_addr;
  1964. unsigned long rank_reg;
  1965. phys_size_t rank_size_bytes;
  1966. unsigned long rank_size_id;
  1967. unsigned long num_ranks;
  1968. unsigned long baseadd_size;
  1969. unsigned long i;
  1970. unsigned long bank_0_populated = 0;
  1971. phys_size_t total_size = 0;
  1972. /*------------------------------------------------------------------
  1973. * Reset the rank_base_address.
  1974. *-----------------------------------------------------------------*/
  1975. rank_reg = SDRAM_R0BAS;
  1976. rank_base_addr = 0x00000000;
  1977. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1978. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1979. num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
  1980. if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
  1981. num_ranks = (num_ranks & 0x0F) + 1;
  1982. else
  1983. num_ranks = num_ranks & 0x0F;
  1984. rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
  1985. /*------------------------------------------------------------------
  1986. * Set the sizes
  1987. *-----------------------------------------------------------------*/
  1988. baseadd_size = 0;
  1989. switch (rank_size_id) {
  1990. case 0x01:
  1991. baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
  1992. total_size = 1024;
  1993. break;
  1994. case 0x02:
  1995. baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
  1996. total_size = 2048;
  1997. break;
  1998. case 0x04:
  1999. baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
  2000. total_size = 4096;
  2001. break;
  2002. case 0x08:
  2003. baseadd_size |= SDRAM_RXBAS_SDSZ_32;
  2004. total_size = 32;
  2005. break;
  2006. case 0x10:
  2007. baseadd_size |= SDRAM_RXBAS_SDSZ_64;
  2008. total_size = 64;
  2009. break;
  2010. case 0x20:
  2011. baseadd_size |= SDRAM_RXBAS_SDSZ_128;
  2012. total_size = 128;
  2013. break;
  2014. case 0x40:
  2015. baseadd_size |= SDRAM_RXBAS_SDSZ_256;
  2016. total_size = 256;
  2017. break;
  2018. case 0x80:
  2019. baseadd_size |= SDRAM_RXBAS_SDSZ_512;
  2020. total_size = 512;
  2021. break;
  2022. default:
  2023. printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
  2024. (unsigned int)dimm_num);
  2025. printf("ERROR: Unsupported value for the banksize: %d.\n",
  2026. (unsigned int)rank_size_id);
  2027. printf("Replace the DIMM module with a supported DIMM.\n\n");
  2028. spd_ddr_init_hang ();
  2029. }
  2030. rank_size_bytes = total_size << 20;
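/*
 * Example: SPD byte 31 = 0x40 reports 256 MB per rank, so
 * rank_size_bytes = 256 << 20 = 0x10000000; a two-rank DIMM then gets
 * two consecutive SDRAM_RxBAS entries below with base addresses
 * 0x00000000 and 0x10000000 (assuming it is the first memory found).
 */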
  2031. if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
  2032. bank_0_populated = 1;
  2033. for (i = 0; i < num_ranks; i++) {
  2034. mtdcr_any(rank_reg+i+dimm_num+bank_0_populated,
  2035. (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
  2036. baseadd_size));
  2037. rank_base_addr += rank_size_bytes;
  2038. }
  2039. }
  2040. }
  2041. #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
  2042. defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
  2043. defined(CONFIG_460SX)
  2044. /*
  2045. * Enable high bandwidth access
2046. * This is currently not used, but with this setup it can be
2047. * enabled later on, e.g. by the Linux EMAC driver, for a
2048. * performance gain.
  2049. */
  2050. mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
  2051. mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
  2052. /*
  2053. * Set optimal value for Memory Queue HB/LL Configuration registers
  2054. */
  2055. mtdcr(SDRAM_CONF1HB, (mfdcr(SDRAM_CONF1HB) & ~SDRAM_CONF1HB_MASK) |
  2056. SDRAM_CONF1HB_AAFR | SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE |
  2057. SDRAM_CONF1HB_RPLM | SDRAM_CONF1HB_WRCL);
  2058. mtdcr(SDRAM_CONF1LL, (mfdcr(SDRAM_CONF1LL) & ~SDRAM_CONF1LL_MASK) |
  2059. SDRAM_CONF1LL_AAFR | SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE |
  2060. SDRAM_CONF1LL_RPLM);
  2061. mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
  2062. #endif
  2063. }
  2064. /*-----------------------------------------------------------------------------+
  2065. * is_ecc_enabled.
  2066. *-----------------------------------------------------------------------------*/
  2067. static unsigned long is_ecc_enabled(void)
  2068. {
  2069. unsigned long dimm_num;
  2070. unsigned long ecc;
  2071. unsigned long val;
  2072. ecc = 0;
  2073. /* loop through all the DIMM slots on the board */
  2074. for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
  2075. mfsdram(SDRAM_MCOPT1, val);
  2076. ecc = max(ecc, SDRAM_MCOPT1_MCHK_CHK_DECODE(val));
  2077. }
  2078. return ecc;
  2079. }
  2080. #ifdef CONFIG_DDR_ECC
  2081. /*-----------------------------------------------------------------------------+
  2082. * program_ecc.
  2083. *-----------------------------------------------------------------------------*/
  2084. static void program_ecc(unsigned long *dimm_populated,
  2085. unsigned char *iic0_dimm_addr,
  2086. unsigned long num_dimm_banks,
  2087. unsigned long tlb_word2_i_value)
  2088. {
  2089. unsigned long mcopt1;
  2090. unsigned long mcopt2;
  2091. unsigned long mcstat;
  2092. unsigned long dimm_num;
  2093. unsigned long ecc;
  2094. ecc = 0;
  2095. /* loop through all the DIMM slots on the board */
  2096. for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
  2097. /* If a dimm is installed in a particular slot ... */
  2098. if (dimm_populated[dimm_num] != SDRAM_NONE)
  2099. ecc = max(ecc, spd_read(iic0_dimm_addr[dimm_num], 11));
  2100. }
  2101. if (ecc == 0)
  2102. return;
  2103. if (sdram_memsize() > CONFIG_MAX_MEM_MAPPED) {
  2104. printf("\nWarning: Can't enable ECC on systems with more than 2GB of SDRAM!\n");
  2105. return;
  2106. }
  2107. mfsdram(SDRAM_MCOPT1, mcopt1);
  2108. mfsdram(SDRAM_MCOPT2, mcopt2);
  2109. if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
  2110. /* DDR controller must be enabled and not in self-refresh. */
  2111. mfsdram(SDRAM_MCSTAT, mcstat);
  2112. if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
  2113. && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
  2114. && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
  2115. == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
  2116. program_ecc_addr(0, sdram_memsize(), tlb_word2_i_value);
  2117. }
  2118. }
  2119. return;
  2120. }
  2121. static void wait_ddr_idle(void)
  2122. {
  2123. u32 val;
  2124. do {
  2125. mfsdram(SDRAM_MCSTAT, val);
  2126. } while ((val & SDRAM_MCSTAT_IDLE_MASK) == SDRAM_MCSTAT_IDLE_NOT);
  2127. }
  2128. /*-----------------------------------------------------------------------------+
  2129. * program_ecc_addr.
  2130. *-----------------------------------------------------------------------------*/
  2131. static void program_ecc_addr(unsigned long start_address,
  2132. unsigned long num_bytes,
  2133. unsigned long tlb_word2_i_value)
  2134. {
  2135. unsigned long current_address;
  2136. unsigned long end_address;
  2137. unsigned long address_increment;
  2138. unsigned long mcopt1;
  2139. char str[] = "ECC generation -";
  2140. char slash[] = "\\|/-\\|/-";
  2141. int loop = 0;
  2142. int loopi = 0;
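/*
 * Overall flow: switch the controller to ECC-generate mode, write (or
 * dcbz) the whole range so that check bits are computed and stored for
 * every location, clear the ECC error status, then switch to
 * check-and-report mode so later accesses are verified.
 */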
  2143. current_address = start_address;
  2144. mfsdram(SDRAM_MCOPT1, mcopt1);
  2145. if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
  2146. mtsdram(SDRAM_MCOPT1,
  2147. (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_GEN);
  2148. sync();
  2149. eieio();
  2150. wait_ddr_idle();
  2151. puts(str);
  2152. if (tlb_word2_i_value == TLB_WORD2_I_ENABLE) {
  2153. /* ECC bit set method for non-cached memory */
  2154. if ((mcopt1 & SDRAM_MCOPT1_DMWD_MASK) == SDRAM_MCOPT1_DMWD_32)
  2155. address_increment = 4;
  2156. else
  2157. address_increment = 8;
  2158. end_address = current_address + num_bytes;
  2159. while (current_address < end_address) {
  2160. *((unsigned long *)current_address) = 0x00000000;
  2161. current_address += address_increment;
  2162. if ((loop++ % (2 << 20)) == 0) {
  2163. putc('\b');
  2164. putc(slash[loopi++ % 8]);
  2165. }
  2166. }
  2167. } else {
  2168. /* ECC bit set method for cached memory */
  2169. dcbz_area(start_address, num_bytes);
  2170. /* Write modified dcache lines back to memory */
  2171. clean_dcache_range(start_address, start_address + num_bytes);
  2172. }
  2173. blank_string(strlen(str));
  2174. sync();
  2175. eieio();
  2176. wait_ddr_idle();
2177. /* clear ECC error reporting registers */
  2178. mtsdram(SDRAM_ECCCR, 0xffffffff);
  2179. mtdcr(0x4c, 0xffffffff);
  2180. mtsdram(SDRAM_MCOPT1,
  2181. (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_CHK_REP);
  2182. sync();
  2183. eieio();
  2184. wait_ddr_idle();
  2185. }
  2186. }
  2187. #endif
  2188. #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
  2189. /*-----------------------------------------------------------------------------+
  2190. * program_DQS_calibration.
  2191. *-----------------------------------------------------------------------------*/
  2192. static void program_DQS_calibration(unsigned long *dimm_populated,
  2193. unsigned char *iic0_dimm_addr,
  2194. unsigned long num_dimm_banks)
  2195. {
  2196. unsigned long val;
2197. #ifdef HARD_CODED_DQS /* calibration test with hard-coded values */
  2198. mtsdram(SDRAM_RQDC, 0x80000037);
  2199. mtsdram(SDRAM_RDCC, 0x40000000);
  2200. mtsdram(SDRAM_RFDC, 0x000001DF);
  2201. test();
  2202. #else
  2203. /*------------------------------------------------------------------
  2204. * Program RDCC register
  2205. * Read sample cycle auto-update enable
  2206. *-----------------------------------------------------------------*/
  2207. mfsdram(SDRAM_RDCC, val);
  2208. mtsdram(SDRAM_RDCC,
  2209. (val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
  2210. | SDRAM_RDCC_RSAE_ENABLE);
  2211. /*------------------------------------------------------------------
  2212. * Program RQDC register
  2213. * Internal DQS delay mechanism enable
  2214. *-----------------------------------------------------------------*/
  2215. mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
  2216. /*------------------------------------------------------------------
  2217. * Program RFDC register
  2218. * Set Feedback Fractional Oversample
  2219. * Auto-detect read sample cycle enable
  2220. * Set RFOS to 1/4 of memclk cycle (0x3f)
  2221. *-----------------------------------------------------------------*/
  2222. mfsdram(SDRAM_RFDC, val);
  2223. mtsdram(SDRAM_RFDC,
  2224. (val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
  2225. SDRAM_RFDC_RFFD_MASK))
  2226. | (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x3f) |
  2227. SDRAM_RFDC_RFFD_ENCODE(0)));
  2228. DQS_calibration_process();
  2229. #endif
  2230. }
  2231. static int short_mem_test(void)
  2232. {
  2233. u32 *membase;
  2234. u32 bxcr_num;
  2235. u32 bxcf;
  2236. int i;
  2237. int j;
  2238. phys_size_t base_addr;
  2239. u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
  2240. {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
  2241. 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
  2242. {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
  2243. 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
  2244. {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
  2245. 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
  2246. {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
  2247. 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
  2248. {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
  2249. 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
  2250. {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
  2251. 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
  2252. {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
  2253. 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
  2254. {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
  2255. 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
  2256. int l;
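/*
 * The eight patterns above toggle every data line between complementary
 * values (all 0s/1s, 0xAAAAAAAA/0x55555555, ...). ppcDcbf flushes the
 * cache line after each access (assuming it issues a dcbf), so the
 * writes and read-backs really hit the SDRAM instead of the data cache,
 * and the NUMLOOPS re-reads help catch marginal delay settings.
 */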
  2257. for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
  2258. mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
  2259. /* Banks enabled */
  2260. if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
  2261. /* Bank is enabled */
  2262. /*
2263. * Only run the test on accessible memory (below 2GB)
  2264. */
  2265. base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
  2266. if (base_addr >= CONFIG_MAX_MEM_MAPPED)
  2267. continue;
  2268. /*------------------------------------------------------------------
  2269. * Run the short memory test.
  2270. *-----------------------------------------------------------------*/
  2271. membase = (u32 *)(u32)base_addr;
  2272. for (i = 0; i < NUMMEMTESTS; i++) {
  2273. for (j = 0; j < NUMMEMWORDS; j++) {
  2274. membase[j] = test[i][j];
  2275. ppcDcbf((u32)&(membase[j]));
  2276. }
  2277. sync();
  2278. for (l=0; l<NUMLOOPS; l++) {
  2279. for (j = 0; j < NUMMEMWORDS; j++) {
  2280. if (membase[j] != test[i][j]) {
  2281. ppcDcbf((u32)&(membase[j]));
  2282. return 0;
  2283. }
  2284. ppcDcbf((u32)&(membase[j]));
  2285. }
  2286. sync();
  2287. }
  2288. }
  2289. } /* if bank enabled */
2290. } /* for bxcr_num */
  2291. return 1;
  2292. }
  2293. #ifndef HARD_CODED_DQS
  2294. /*-----------------------------------------------------------------------------+
  2295. * DQS_calibration_process.
  2296. *-----------------------------------------------------------------------------*/
  2297. static void DQS_calibration_process(void)
  2298. {
  2299. unsigned long rfdc_reg;
  2300. unsigned long rffd;
  2301. unsigned long val;
  2302. long rffd_average;
  2303. long max_start;
  2304. long min_end;
  2305. unsigned long begin_rqfd[MAXRANKS];
  2306. unsigned long begin_rffd[MAXRANKS];
  2307. unsigned long end_rqfd[MAXRANKS];
  2308. unsigned long end_rffd[MAXRANKS];
  2309. char window_found;
  2310. unsigned long dlycal;
  2311. unsigned long dly_val;
  2312. unsigned long max_pass_length;
  2313. unsigned long current_pass_length;
  2314. unsigned long current_fail_length;
  2315. unsigned long current_start;
  2316. long max_end;
  2317. unsigned char fail_found;
  2318. unsigned char pass_found;
  2319. #if !defined(CONFIG_DDR_RQDC_FIXED)
  2320. u32 rqdc_reg;
  2321. u32 rqfd;
  2322. u32 rqfd_start;
  2323. u32 rqfd_average;
  2324. int loopi = 0;
  2325. char str[] = "Auto calibration -";
  2326. char slash[] = "\\|/-\\|/-";
  2327. /*------------------------------------------------------------------
  2328. * Test to determine the best read clock delay tuning bits.
  2329. *
  2330. * Before the DDR controller can be used, the read clock delay needs to be
  2331. * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
  2332. * This value cannot be hardcoded into the program because it changes
  2333. * depending on the board's setup and environment.
  2334. * To do this, all delay values are tested to see if they
  2335. * work or not. By doing this, you get groups of fails with groups of
  2336. * passing values. The idea is to find the start and end of a passing
  2337. * window and take the center of it to use as the read clock delay.
  2338. *
  2339. * A failure has to be seen first so that when we hit a pass, we know
2340. * that it is truly the start of the window. If we get passing values
  2341. * to start off with, we don't know if we are at the start of the window.
  2342. *
  2343. * The code assumes that a failure will always be found.
  2344. * If a failure is not found, there is no easy way to get the middle
  2345. * of the passing window. I guess we can pretty much pick any value
  2346. * but some values will be better than others. Since the lowest speed
  2347. * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
  2348. * from experimentation it is safe to say you will always have a failure.
  2349. *-----------------------------------------------------------------*/
2350. /* first fix RQDC[RQFD] to an average 80 degree phase shift to find RFDC[RFFD] */
  2351. rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
  2352. puts(str);
  2353. calibration_loop:
  2354. mfsdram(SDRAM_RQDC, rqdc_reg);
  2355. mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
  2356. SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
  2357. #else /* CONFIG_DDR_RQDC_FIXED */
  2358. /*
  2359. * On Katmai the complete auto-calibration somehow doesn't seem to
  2360. * produce the best results, meaning optimal values for RQFD/RFFD.
  2361. * This was discovered by GDA using a high bandwidth scope,
  2362. * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
  2363. * so now on Katmai "only" RFFD is auto-calibrated.
  2364. */
  2365. mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
  2366. #endif /* CONFIG_DDR_RQDC_FIXED */
  2367. max_start = 0;
  2368. min_end = 0;
  2369. begin_rqfd[0] = 0;
  2370. begin_rffd[0] = 0;
  2371. begin_rqfd[1] = 0;
  2372. begin_rffd[1] = 0;
  2373. end_rqfd[0] = 0;
  2374. end_rffd[0] = 0;
  2375. end_rqfd[1] = 0;
  2376. end_rffd[1] = 0;
  2377. window_found = FALSE;
  2378. max_pass_length = 0;
  2379. max_start = 0;
  2380. max_end = 0;
  2381. current_pass_length = 0;
  2382. current_fail_length = 0;
  2383. current_start = 0;
  2384. window_found = FALSE;
  2385. fail_found = FALSE;
  2386. pass_found = FALSE;
  2387. /*
  2388. * get the delay line calibration register value
  2389. */
  2390. mfsdram(SDRAM_DLCR, dlycal);
  2391. dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
  2392. for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
  2393. mfsdram(SDRAM_RFDC, rfdc_reg);
  2394. rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
  2395. /*------------------------------------------------------------------
  2396. * Set the timing reg for the test.
  2397. *-----------------------------------------------------------------*/
  2398. mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
  2399. /*------------------------------------------------------------------
  2400. * See if the rffd value passed.
  2401. *-----------------------------------------------------------------*/
  2402. if (short_mem_test()) {
  2403. if (fail_found == TRUE) {
  2404. pass_found = TRUE;
  2405. if (current_pass_length == 0)
  2406. current_start = rffd;
  2407. current_fail_length = 0;
  2408. current_pass_length++;
  2409. if (current_pass_length > max_pass_length) {
  2410. max_pass_length = current_pass_length;
  2411. max_start = current_start;
  2412. max_end = rffd;
  2413. }
  2414. }
  2415. } else {
  2416. current_pass_length = 0;
  2417. current_fail_length++;
  2418. if (current_fail_length >= (dly_val >> 2)) {
  2419. if (fail_found == FALSE) {
  2420. fail_found = TRUE;
  2421. } else if (pass_found == TRUE) {
  2422. window_found = TRUE;
  2423. break;
  2424. }
  2425. }
  2426. }
  2427. } /* for rffd */
  2428. /*------------------------------------------------------------------
  2429. * Set the average RFFD value
  2430. *-----------------------------------------------------------------*/
  2431. rffd_average = ((max_start + max_end) >> 1);
  2432. if (rffd_average < 0)
  2433. rffd_average = 0;
  2434. if (rffd_average > SDRAM_RFDC_RFFD_MAX)
  2435. rffd_average = SDRAM_RFDC_RFFD_MAX;
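/*
 * Example: if the longest passing window ran from rffd = 25 to
 * rffd = 75, rffd_average = (25 + 75) >> 1 = 50, clamped above to the
 * valid [0, SDRAM_RFDC_RFFD_MAX] range.
 */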
  2436. /* now fix RFDC[RFFD] found and find RQDC[RQFD] */
  2437. mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
  2438. #if !defined(CONFIG_DDR_RQDC_FIXED)
  2439. max_pass_length = 0;
  2440. max_start = 0;
  2441. max_end = 0;
  2442. current_pass_length = 0;
  2443. current_fail_length = 0;
  2444. current_start = 0;
  2445. window_found = FALSE;
  2446. fail_found = FALSE;
  2447. pass_found = FALSE;
  2448. for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
  2449. mfsdram(SDRAM_RQDC, rqdc_reg);
  2450. rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
  2451. /*------------------------------------------------------------------
  2452. * Set the timing reg for the test.
  2453. *-----------------------------------------------------------------*/
  2454. mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
  2455. /*------------------------------------------------------------------
  2456. * See if the rffd value passed.
  2457. *-----------------------------------------------------------------*/
  2458. if (short_mem_test()) {
  2459. if (fail_found == TRUE) {
  2460. pass_found = TRUE;
  2461. if (current_pass_length == 0)
  2462. current_start = rqfd;
  2463. current_fail_length = 0;
  2464. current_pass_length++;
  2465. if (current_pass_length > max_pass_length) {
  2466. max_pass_length = current_pass_length;
  2467. max_start = current_start;
  2468. max_end = rqfd;
  2469. }
  2470. }
  2471. } else {
  2472. current_pass_length = 0;
  2473. current_fail_length++;
  2474. if (fail_found == FALSE) {
  2475. fail_found = TRUE;
  2476. } else if (pass_found == TRUE) {
  2477. window_found = TRUE;
  2478. break;
  2479. }
  2480. }
  2481. }
  2482. rqfd_average = ((max_start + max_end) >> 1);
  2483. /*------------------------------------------------------------------
  2484. * Make sure we found the valid read passing window. Halt if not
  2485. *-----------------------------------------------------------------*/
  2486. if (window_found == FALSE) {
  2487. if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
  2488. putc('\b');
  2489. putc(slash[loopi++ % 8]);
2490. /* try again with a different RQFD start value */
  2491. rqfd_start++;
  2492. goto calibration_loop;
  2493. }
  2494. printf("\nERROR: Cannot determine a common read delay for the "
  2495. "DIMM(s) installed.\n");
  2496. debug("%s[%d] ERROR : \n", __FUNCTION__,__LINE__);
  2497. ppc4xx_ibm_ddr2_register_dump();
  2498. spd_ddr_init_hang ();
  2499. }
  2500. if (rqfd_average < 0)
  2501. rqfd_average = 0;
  2502. if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
  2503. rqfd_average = SDRAM_RQDC_RQFD_MAX;
  2504. mtsdram(SDRAM_RQDC,
  2505. (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
  2506. SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
  2507. blank_string(strlen(str));
  2508. #endif /* CONFIG_DDR_RQDC_FIXED */
  2509. /*
  2510. * Now complete RDSS configuration as mentioned on page 7 of the AMCC
  2511. * PowerPC440SP/SPe DDR2 application note:
  2512. * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
  2513. */
  2514. mfsdram(SDRAM_RTSR, val);
  2515. if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
  2516. mfsdram(SDRAM_RDCC, val);
  2517. if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
  2518. val += 0x40000000;
  2519. mtsdram(SDRAM_RDCC, val);
  2520. }
  2521. }
  2522. mfsdram(SDRAM_DLCR, val);
  2523. debug("%s[%d] DLCR: 0x%08lX\n", __FUNCTION__, __LINE__, val);
  2524. mfsdram(SDRAM_RQDC, val);
  2525. debug("%s[%d] RQDC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
  2526. mfsdram(SDRAM_RFDC, val);
  2527. debug("%s[%d] RFDC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
  2528. mfsdram(SDRAM_RDCC, val);
  2529. debug("%s[%d] RDCC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
  2530. }
2531. #else /* calibration test with hard-coded values */
  2532. /*-----------------------------------------------------------------------------+
2533. * test (DQS calibration with hard-coded values).
  2534. *-----------------------------------------------------------------------------*/
  2535. static void test(void)
  2536. {
  2537. unsigned long dimm_num;
  2538. unsigned long ecc_temp;
  2539. unsigned long i, j;
  2540. unsigned long *membase;
  2541. unsigned long bxcf[MAXRANKS];
  2542. unsigned long val;
  2543. char window_found;
  2544. char begin_found[MAXDIMMS];
  2545. char end_found[MAXDIMMS];
  2546. char search_end[MAXDIMMS];
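	/*
	 * Data patterns for the short memory test: all-zeros/all-ones words
	 * plus several alternating bit/nibble/byte patterns, written to and
	 * read back from each enabled bank.
	 */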
	unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
		{0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
		 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
		{0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
		 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
		{0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
		 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
		{0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
		 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
		{0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
		 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
		{0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
		 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
		{0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
		 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
		{0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
		 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
	/*------------------------------------------------------------------
	 * Test to determine the best read clock delay tuning bits.
	 *
	 * Before the DDR controller can be used, the read clock delay needs
	 * to be set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
	 * This value cannot be hardcoded into the program because it changes
	 * depending on the board's setup and environment.
	 * To do this, all delay values are tested to see if they
	 * work or not. By doing this, you get groups of fails with groups of
	 * passing values. The idea is to find the start and end of a passing
	 * window and take the center of it to use as the read clock delay.
	 *
	 * A failure has to be seen first so that when we hit a pass, we know
	 * that it is truly the start of the window. If we get passing values
	 * to start off with, we don't know if we are at the start of the window.
	 *
	 * The code assumes that a failure will always be found.
	 * If a failure is not found, there is no easy way to get the middle
	 * of the passing window. I guess we can pretty much pick any value
	 * but some values will be better than others. Since the lowest speed
	 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
	 * from experimentation it is safe to say you will always have a failure.
	 *-----------------------------------------------------------------*/
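	/*
	 * Save the current ECC-check setting and disable ECC checking while
	 * the destructive memory test runs; the original setting is restored
	 * at the end of this function.
	 */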
	mfsdram(SDRAM_MCOPT1, ecc_temp);
	ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
	mfsdram(SDRAM_MCOPT1, val);
	mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
		SDRAM_MCOPT1_MCHK_NON);

	window_found = FALSE;
	begin_found[0] = FALSE;
	end_found[0] = FALSE;
	search_end[0] = FALSE;
	begin_found[1] = FALSE;
	end_found[1] = FALSE;
	search_end[1] = FALSE;
	for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
		mfsdram(SDRAM_MB0CF + (dimm_num << 2), bxcf[dimm_num]);

		/* Banks enabled */
		if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) ==
		    SDRAM_BXCF_M_BE_ENABLE) {
			/* Bank is enabled */
			membase = (unsigned long *)
				(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS + dimm_num)));
			/*------------------------------------------------------------------
			 * Run the short memory test.
			 *-----------------------------------------------------------------*/
			for (i = 0; i < NUMMEMTESTS; i++) {
				for (j = 0; j < NUMMEMWORDS; j++) {
					membase[j] = test[i][j];
					ppcDcbf((u32)&(membase[j]));
				}
				sync();
				for (j = 0; j < NUMMEMWORDS; j++) {
					if (membase[j] != test[i][j]) {
						ppcDcbf((u32)&(membase[j]));
						break;
					}
					ppcDcbf((u32)&(membase[j]));
				}
				sync();
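				/*
				 * A read-back miscompare breaks out of the
				 * inner loop early, so j < NUMMEMWORDS means
				 * this pattern failed; stop testing patterns.
				 */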
				if (j < NUMMEMWORDS)
					break;
			}

			/*------------------------------------------------------------------
			 * See if the rffd value passed.
			 *-----------------------------------------------------------------*/
			if (i < NUMMEMTESTS) {
				if ((end_found[dimm_num] == FALSE) &&
				    (search_end[dimm_num] == TRUE)) {
					end_found[dimm_num] = TRUE;
				}
				if ((end_found[0] == TRUE) &&
				    (end_found[1] == TRUE))
					break;
			} else {
				if (begin_found[dimm_num] == FALSE) {
					begin_found[dimm_num] = TRUE;
					search_end[dimm_num] = TRUE;
				}
			}
		} else {
			begin_found[dimm_num] = TRUE;
			end_found[dimm_num] = TRUE;
		}
	}

	if ((begin_found[0] == TRUE) && (begin_found[1] == TRUE))
		window_found = TRUE;

	/*------------------------------------------------------------------
	 * Make sure we found the valid read passing window. Halt if not
	 *-----------------------------------------------------------------*/
	if (window_found == FALSE) {
		printf("ERROR: Cannot determine a common read delay for the "
		       "DIMM(s) installed.\n");
		spd_ddr_init_hang();
	}

	/*------------------------------------------------------------------
	 * Restore the ECC variable to what it originally was
	 *-----------------------------------------------------------------*/
	mtsdram(SDRAM_MCOPT1,
		(ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
		| ecc_temp);
}
#endif /* !HARD_CODED_DQS */
#endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */
#else /* CONFIG_SPD_EEPROM */
/*-----------------------------------------------------------------------------
 * Function:	initdram
 * Description:	Configures the PPC4xx IBM DDR1/DDR2 SDRAM memory controller.
 *		The configuration is performed using static, compile-time
 *		parameters.
 *		Used on the PPC405EX(r) and PPC460EX/GT.
 *---------------------------------------------------------------------------*/
phys_size_t initdram(int board_type)
{
	/*
	 * Only run this SDRAM init code once. For NAND booting
	 * targets like Kilauea, we call initdram() early from the
	 * 4k NAND booting image (CONFIG_NAND_SPL) from nand_boot().
	 * Later on the NAND U-Boot image runs (CONFIG_NAND_U_BOOT)
	 * which calls initdram() again. This time the controller
	 * mustn't be reconfigured again since we're already running
	 * from SDRAM.
	 */
#if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
	unsigned long val;

#if defined(CONFIG_440)
	mtdcr(SDRAM_R0BAS, CONFIG_SYS_SDRAM_R0BAS);
	mtdcr(SDRAM_R1BAS, CONFIG_SYS_SDRAM_R1BAS);
	mtdcr(SDRAM_R2BAS, CONFIG_SYS_SDRAM_R2BAS);
	mtdcr(SDRAM_R3BAS, CONFIG_SYS_SDRAM_R3BAS);
	mtdcr(SDRAM_PLBADDULL, CONFIG_SYS_SDRAM_PLBADDULL);	/* MQ0_BAUL */
	mtdcr(SDRAM_PLBADDUHB, CONFIG_SYS_SDRAM_PLBADDUHB);	/* MQ0_BAUH */
	mtdcr(SDRAM_CONF1LL, CONFIG_SYS_SDRAM_CONF1LL);
	mtdcr(SDRAM_CONF1HB, CONFIG_SYS_SDRAM_CONF1HB);
	mtdcr(SDRAM_CONFPATHB, CONFIG_SYS_SDRAM_CONFPATHB);
#endif

	/* Set Memory Bank Configuration Registers */
	mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
	mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
	mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
	mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);

	/* Set Memory Clock Timing Register */
	mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);

	/* Set Refresh Time Register */
	mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);

	/* Set SDRAM Timing Registers */
	mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
	mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
	mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);

	/* Set Mode and Extended Mode Registers */
	mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
	mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);

	/* Set Memory Controller Options 1 Register */
	mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);

	/* Set Manual Initialization Control Registers */
	mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
	mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
	mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
	mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
	mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
	mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
	mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
	mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
	mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
	mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
	mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
	mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
	mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
	mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
	mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
	mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);
	/* Set On-Die Termination Registers */
	mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
	mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
	mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);

	/* Set Write Timing Register */
	mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);

	/*
	 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
	 * SDRAM0_MCOPT2[IPTR] = 1
	 */
	mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
			       SDRAM_MCOPT2_IPTR_EXECUTE));

	/*
	 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
	 * completion of initialization.
	 */
	do {
		mfsdram(SDRAM_MCSTAT, val);
	} while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);

	/* Set Delay Control Registers */
	mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);
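	/*
	 * With auto-calibration enabled, the read delay registers (RDCC,
	 * RQDC, RFDC) are determined at runtime by DQS_autocalibration()
	 * below instead of being taken from the board configuration.
	 */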
#if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
	mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
	mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
#endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */

	/*
	 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
	 */
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);
#if defined(CONFIG_440)
	/*
	 * Program TLB entries with caches enabled, for best performance
	 * during auto-calibration and ECC generation.
	 */
	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), 0);
#endif
#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
#if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
	/*------------------------------------------------------------------
	 | DQS calibration.
	 +-----------------------------------------------------------------*/
	DQS_autocalibration();
#endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
#endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */

#if defined(CONFIG_DDR_ECC)
	ecc_init(CONFIG_SYS_SDRAM_BASE, CONFIG_SYS_MBYTES_SDRAM << 20);
#endif /* defined(CONFIG_DDR_ECC) */
#if defined(CONFIG_440)
	/*
	 * Now, after initialization (auto-calibration and ECC generation),
	 * remove the TLB entries with caches enabled and program them again
	 * with the desired cache configuration.
	 */
	remove_tlb(0, (CONFIG_SYS_MBYTES_SDRAM << 20));
	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), MY_TLB_WORD2_I_ENABLE);
#endif
	ppc4xx_ibm_ddr2_register_dump();

#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	/*
	 * Clear potential errors resulting from auto-calibration.
	 * If not done, then we could get an interrupt later on when
	 * exceptions are enabled.
	 */
	set_mcsr(get_mcsr());
#endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
#endif /* !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL) */

	return (CONFIG_SYS_MBYTES_SDRAM << 20);
}
#endif /* CONFIG_SPD_EEPROM */

#if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
#if defined(CONFIG_440)
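/*
 * The mfdcr()/mtdcr() macros encode the DCR number directly into the
 * instruction, so it must be a compile-time constant. These helpers allow
 * the rank base-address DCRs to be accessed with a runtime index by
 * switching over the known SDRAM_R0BAS..SDRAM_R3BAS values.
 */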
u32 mfdcr_any(u32 dcr)
{
	u32 val;

	switch (dcr) {
	case SDRAM_R0BAS + 0:
		val = mfdcr(SDRAM_R0BAS + 0);
		break;
	case SDRAM_R0BAS + 1:
		val = mfdcr(SDRAM_R0BAS + 1);
		break;
	case SDRAM_R0BAS + 2:
		val = mfdcr(SDRAM_R0BAS + 2);
		break;
	case SDRAM_R0BAS + 3:
		val = mfdcr(SDRAM_R0BAS + 3);
		break;
	default:
		printf("DCR %d not defined in case statement!!!\n", dcr);
		val = 0; /* just to satisfy the compiler */
	}

	return val;
}

void mtdcr_any(u32 dcr, u32 val)
{
	switch (dcr) {
	case SDRAM_R0BAS + 0:
		mtdcr(SDRAM_R0BAS + 0, val);
		break;
	case SDRAM_R0BAS + 1:
		mtdcr(SDRAM_R0BAS + 1, val);
		break;
	case SDRAM_R0BAS + 2:
		mtdcr(SDRAM_R0BAS + 2, val);
		break;
	case SDRAM_R0BAS + 3:
		mtdcr(SDRAM_R0BAS + 3, val);
		break;
	default:
		printf("DCR %d not defined in case statement!!!\n", dcr);
	}
}
#endif /* defined(CONFIG_440) */
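/*
 * Erase 'size' characters that were previously printed on the console by
 * backing up over them, overwriting them with spaces and backing up again.
 */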
void blank_string(int size)
{
	int i;

	for (i = 0; i < size; i++)
		putc('\b');
	for (i = 0; i < size; i++)
		putc(' ');
	for (i = 0; i < size; i++)
		putc('\b');
}
#endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */

inline void ppc4xx_ibm_ddr2_register_dump(void)
{
#if defined(DEBUG)
	printf("\nPPC4xx IBM DDR2 Register Dump:\n");

#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R0BAS);
	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R1BAS);
	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R2BAS);
	PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R3BAS);
#endif /* (defined(CONFIG_440SP) || ... */
#if defined(CONFIG_405EX)
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
#endif /* defined(CONFIG_405EX) */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);
#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
	/*
	 * OPART is only used as a trigger register.
	 *
	 * No data is contained in this register, and reading or writing
	 * to it can cause bad things to happen (hangs). Just skip it and
	 * report "N/A".
	 */
	printf("%20s = N/A\n", "SDRAM_OPART");
#endif /* defined(CONFIG_440SP) || ... */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCCR);
#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
#endif /* defined(CONFIG_440SP) || ... */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
#endif /* defined(DEBUG) */
}
#endif /* CONFIG_SDRAM_PPC4xx_IBM_DDR2 */