/*
 * cpu/ppc4xx/44x_spd_ddr2.c
 * This SPD SDRAM detection code supports AMCC PPC44x CPUs with a
 * DDR2 controller (non Denali Core). Those currently are:
 *
 * 405:		405EX(r)
 * 440/460:	440SP/440SPe/460EX/460GT
 *
 * Copyright (c) 2008 Nuovation System Designs, LLC
 * Grant Erickson <gerickson@nuovations.com>
 * (C) Copyright 2007-2008
 * Stefan Roese, DENX Software Engineering, sr@denx.de.
 *
 * COPYRIGHT AMCC CORPORATION 2004
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 *
 */

/* define DEBUG for debugging output (obviously ;-)) */
#if 0
#define DEBUG
#endif
#include <common.h>
#include <command.h>
#include <ppc4xx.h>
#include <i2c.h>
#include <asm/io.h>
#include <asm/processor.h>
#include <asm/mmu.h>
#include <asm/cache.h>

#include "ecc.h"

#if defined(CONFIG_SDRAM_PPC4xx_IBM_DDR2)

#define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic) \
	do { \
		u32 data; \
		mfsdram(SDRAM_##mnemonic, data); \
		printf("%20s[%02x] = 0x%08X\n", \
		       "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
	} while (0)
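/*
 * For example, PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1) expands to a block that
 * reads SDRAM_MCOPT1 through mfsdram() and prints one line of the form
 * "        SDRAM_MCOPT1[xx] = 0x12345678", where "xx" is the DCR offset of
 * the register and the hex word is its current value. The offset and value
 * shown here are placeholders, not values from real hardware.
 */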
#if defined(CONFIG_440)
/*
 * This DDR2 setup code can dynamically set up the TLB entries for the DDR2
 * memory region. Right now the cache should still be disabled in U-Boot
 * because of the EMAC driver, which needs its buffer descriptors located
 * in non-cached memory.
 *
 * If at some time this restriction doesn't apply anymore, just define
 * CONFIG_4xx_DCACHE in the board config file and this code should set up
 * everything correctly.
 */
#ifdef CONFIG_4xx_DCACHE
/* enable caching on SDRAM */
#define MY_TLB_WORD2_I_ENABLE	0
#else
/* disable caching on SDRAM */
#define MY_TLB_WORD2_I_ENABLE	TLB_WORD2_I_ENABLE
#endif /* CONFIG_4xx_DCACHE */
#endif /* CONFIG_440 */
#if defined(CONFIG_SPD_EEPROM)

/*-----------------------------------------------------------------------------+
 * Defines
 *-----------------------------------------------------------------------------*/
#ifndef TRUE
#define TRUE		1
#endif
#ifndef FALSE
#define FALSE		0
#endif

#define SDRAM_DDR1	1
#define SDRAM_DDR2	2
#define SDRAM_NONE	0

#define MAXDIMMS	2
#define MAXRANKS	4
#define MAXBXCF		4
#define MAX_SPD_BYTES	256	/* Max number of bytes on the DIMM's SPD EEPROM */

#define ONE_BILLION	1000000000

#define MULDIV64(m1, m2, d)	(u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
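/*
 * Worked example: MULDIV64(ONE_BILLION, 100, 266666666) evaluates the product
 * 10^11 in 64-bit arithmetic (it would overflow a u32) before dividing, and
 * returns 375, i.e. a 3.75 ns cycle time expressed in units of 10 ps. The
 * 266 MHz memory clock used here is only an illustrative value.
 */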
#define CMD_NOP		(7 << 19)
#define CMD_PRECHARGE	(2 << 19)
#define CMD_REFRESH	(1 << 19)
#define CMD_EMR		(0 << 19)
#define CMD_READ	(5 << 19)
#define CMD_WRITE	(4 << 19)

#define SELECT_MR	(0 << 16)
#define SELECT_EMR	(1 << 16)
#define SELECT_EMR2	(2 << 16)
#define SELECT_EMR3	(3 << 16)

/* MR */
#define DLL_RESET	0x00000100

#define WRITE_RECOV_2	(1 << 9)
#define WRITE_RECOV_3	(2 << 9)
#define WRITE_RECOV_4	(3 << 9)
#define WRITE_RECOV_5	(4 << 9)
#define WRITE_RECOV_6	(5 << 9)

#define BURST_LEN_4	0x00000002

/* EMR */
#define ODT_0_OHM	0x00000000
#define ODT_50_OHM	0x00000044
#define ODT_75_OHM	0x00000004
#define ODT_150_OHM	0x00000040

#define ODS_FULL	0x00000000
#define ODS_REDUCED	0x00000002
#define OCD_CALIB_DEF	0x00000380

/* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
#define ODT_EB0R	(0x80000000 >> 8)
#define ODT_EB0W	(0x80000000 >> 7)
#define CALC_ODT_R(n)	(ODT_EB0R << (n << 1))
#define CALC_ODT_W(n)	(ODT_EB0W << (n << 1))
#define CALC_ODT_RW(n)	(CALC_ODT_R(n) | CALC_ODT_W(n))
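/*
 * Worked example: ODT_EB0R is 0x00800000 and ODT_EB0W is 0x01000000, so for
 * rank n = 1 the shift by (n << 1) = 2 gives CALC_ODT_R(1) = 0x02000000,
 * CALC_ODT_W(1) = 0x04000000 and CALC_ODT_RW(1) = 0x06000000. Each rank thus
 * occupies its own read/write enable bit pair in the CODT/MODTn registers.
 */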
/* Defines for the Read Cycle Delay test */
#define NUMMEMTESTS	8
#define NUMMEMWORDS	8
#define NUMLOOPS	64	/* memory test loops */

/*
 * Newer PPC's like the 440SPe and 460EX/GT can be equipped with more than 2GB
 * of SDRAM. To support such configurations, we "only" map the first 2GB via the
 * TLB's. We need some free virtual address space for the remaining peripherals
 * like SoC devices, FLASH etc.
 *
 * Note that ECC is currently not supported on configurations with more than 2GB
 * SDRAM. This is because we only map the first 2GB on such systems, and therefore
 * the ECC parity byte of the remaining area can't be written.
 */

/*
 * Board-specific platform code can reimplement spd_ddr_init_hang() if needed
 */
void __spd_ddr_init_hang (void)
{
	hang ();
}
void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));

/*
 * To provide an interface for board specific config values in this common
 * DDR setup code, we implement the "weak" default functions here. They return
 * the default value back to the caller.
 *
 * Please see include/configs/yucca.h for an example of a board specific
 * implementation.
 */
u32 __ddr_wrdtr(u32 default_val)
{
	return default_val;
}
u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));

u32 __ddr_clktr(u32 default_val)
{
	return default_val;
}
u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
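/*
 * A board port could override the weak defaults above with its own tuned
 * timing values, for instance (illustrative sketch only; the value returned
 * here is simply the default that initdram() passes in, a real board would
 * substitute its own setting):
 *
 *	u32 ddr_wrdtr(u32 default_val)
 *	{
 *		return SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV;
 *	}
 *
 * Boards that are happy with the defaults define nothing and the weak
 * aliases return default_val unchanged.
 */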
/* Private Structure Definitions */

/* enum only to ease code for cas latency setting */
typedef enum ddr_cas_id {
	DDR_CAS_2	= 20,
	DDR_CAS_2_5	= 25,
	DDR_CAS_3	= 30,
	DDR_CAS_4	= 40,
	DDR_CAS_5	= 50
} ddr_cas_id_t;

/*-----------------------------------------------------------------------------+
 * Prototypes
 *-----------------------------------------------------------------------------*/
static phys_size_t sdram_memsize(void);
static void get_spd_info(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void check_mem_type(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void check_frequency(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void check_rank_number(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void check_voltage_type(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_memory_queue(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_codt(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_mode(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks,
		ddr_cas_id_t *selected_cas,
		int *write_recovery);
static void program_tr(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_rtr(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_bxcf(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_copt1(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
static void program_initplr(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks,
		ddr_cas_id_t selected_cas,
		int write_recovery);
static unsigned long is_ecc_enabled(void);
#ifdef CONFIG_DDR_ECC
static void program_ecc(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks,
		unsigned long tlb_word2_i_value);
static void program_ecc_addr(unsigned long start_address,
		unsigned long num_bytes,
		unsigned long tlb_word2_i_value);
#endif
#if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
static void program_DQS_calibration(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks);
#ifdef HARD_CODED_DQS	/* calibration test with hard-coded values */
static void test(void);
#else
static void DQS_calibration_process(void);
#endif
#endif

int do_reset (cmd_tbl_t *cmdtp, int flag, int argc, char *argv[]);
void dcbz_area(u32 start_address, u32 num_bytes);

static unsigned char spd_read(uchar chip, uint addr)
{
	unsigned char data[2];

	if (i2c_probe(chip) == 0)
		if (i2c_read(chip, addr, 1, data, 1) == 0)
			return data[0];

	return 0;
}
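/*
 * Note that spd_read() returns 0 both for a missing device and for an SPD
 * byte that genuinely reads as zero; callers such as get_spd_info() therefore
 * treat zeroes in the first SPD bytes as "slot not populated". Illustrative
 * use (0x50 is only a typical SPD EEPROM address, the real addresses come
 * from the board's SPD_EEPROM_ADDRESS list):
 *
 *	unsigned char mem_type = spd_read(0x50, 2);
 *
 * A result of 0x07 would indicate DDR1 and 0x08 DDR2, as decoded in
 * check_mem_type() below.
 */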
/*-----------------------------------------------------------------------------+
 * sdram_memsize
 *-----------------------------------------------------------------------------*/
static phys_size_t sdram_memsize(void)
{
	phys_size_t mem_size;
	unsigned long mcopt2;
	unsigned long mcstat;
	unsigned long mb0cf;
	unsigned long sdsz;
	unsigned long i;

	mem_size = 0;

	mfsdram(SDRAM_MCOPT2, mcopt2);
	mfsdram(SDRAM_MCSTAT, mcstat);

	/* DDR controller must be enabled and not in self-refresh. */
	/* Otherwise memsize is zero. */
	if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
	    && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
	    && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
		== (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
		for (i = 0; i < MAXBXCF; i++) {
			mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
			/* Banks enabled */
			if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
				sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
				switch (sdsz) {
				case SDRAM_RXBAS_SDSZ_8:
					mem_size += 8;
					break;
				case SDRAM_RXBAS_SDSZ_16:
					mem_size += 16;
					break;
				case SDRAM_RXBAS_SDSZ_32:
					mem_size += 32;
					break;
				case SDRAM_RXBAS_SDSZ_64:
					mem_size += 64;
					break;
				case SDRAM_RXBAS_SDSZ_128:
					mem_size += 128;
					break;
				case SDRAM_RXBAS_SDSZ_256:
					mem_size += 256;
					break;
				case SDRAM_RXBAS_SDSZ_512:
					mem_size += 512;
					break;
				case SDRAM_RXBAS_SDSZ_1024:
					mem_size += 1024;
					break;
				case SDRAM_RXBAS_SDSZ_2048:
					mem_size += 2048;
					break;
				case SDRAM_RXBAS_SDSZ_4096:
					mem_size += 4096;
					break;
				default:
					printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n",
					       sdsz);
					mem_size = 0;
					break;
				}
			}
		}
	}

	return mem_size << 20;
}
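/*
 * Example: with two enabled ranks of 512 MiB each, the loop above adds 512
 * twice, so mem_size is 1024 and the function returns 1024 << 20, i.e. 1 GiB.
 * The per-bank sizes come from the R0BAS..R3BAS DCRs programmed earlier, not
 * from the SPD data.
 */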
/*-----------------------------------------------------------------------------+
 * initdram. Initializes the 440SP Memory Queue and DDR SDRAM controller.
 * Note: This routine runs from flash with a stack set up in the chip's
 * sram space. It is important that the routine does not require .sbss, .bss or
 * .data sections. It also cannot call routines that require these sections.
 *-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
 * Function:	initdram
 * Description:	Configures SDRAM memory banks for DDR operation.
 *		The Auto Memory Configuration option reads the DDR SDRAM EEPROMs
 *		via the IIC bus and then configures the DDR SDRAM memory
 *		banks appropriately. If Auto Memory Configuration is
 *		not used, it is assumed that no DIMM is plugged in.
 *-----------------------------------------------------------------------------*/
phys_size_t initdram(int board_type)
{
	unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
	unsigned char spd0[MAX_SPD_BYTES];
	unsigned char spd1[MAX_SPD_BYTES];
	unsigned char *dimm_spd[MAXDIMMS];
	unsigned long dimm_populated[MAXDIMMS];
	unsigned long num_dimm_banks;		/* on board dimm banks */
	unsigned long val;
	ddr_cas_id_t selected_cas = DDR_CAS_5;	/* preset to silence compiler */
	int write_recovery;
	phys_size_t dram_size = 0;

	num_dimm_banks = sizeof(iic0_dimm_addr);

	/*------------------------------------------------------------------
	 * Set up an array of SPD buffers.
	 *-----------------------------------------------------------------*/
	dimm_spd[0] = spd0;
	dimm_spd[1] = spd1;

	/*------------------------------------------------------------------
	 * Reset the DDR-SDRAM controller.
	 *-----------------------------------------------------------------*/
	mtsdr(SDR0_SRST, (0x80000000 >> 10));
	mtsdr(SDR0_SRST, 0x00000000);

	/*
	 * Make sure the I2C controller is initialized
	 * before continuing.
	 */

	/* switch to correct I2C bus */
	I2C_SET_BUS(CONFIG_SYS_SPD_BUS_NUM);
	i2c_init(CONFIG_SYS_I2C_SPEED, CONFIG_SYS_I2C_SLAVE);

	/*------------------------------------------------------------------
	 * Clear out the serial presence detect buffers.
	 * Perform IIC reads from the dimm. Fill in the spds.
	 * Check to see if the dimm slots are populated.
	 *-----------------------------------------------------------------*/
	get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the memory type for the dimms plugged.
	 *-----------------------------------------------------------------*/
	check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the frequency supported for the dimms plugged.
	 *-----------------------------------------------------------------*/
	check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the total rank number.
	 *-----------------------------------------------------------------*/
	check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Check the voltage type for the dimms plugged.
	 *-----------------------------------------------------------------*/
	check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM controller options 2 register,
	 * except for enabling of the memory controller.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2,
		(val &
		 ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
		   SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
		   SDRAM_MCOPT2_ISIE_MASK))
		| (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
		   SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
		   SDRAM_MCOPT2_ISIE_ENABLE));

	/*------------------------------------------------------------------
	 * Program SDRAM controller options 1 register.
	 * Note: Does not enable the memory controller.
	 *-----------------------------------------------------------------*/
	program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Set the SDRAM Controller On Die Termination Register
	 *-----------------------------------------------------------------*/
	program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM refresh register.
	 *-----------------------------------------------------------------*/
	program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM mode register.
	 *-----------------------------------------------------------------*/
	program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
		     &selected_cas, &write_recovery);

	/*------------------------------------------------------------------
	 * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_WRDTR, val);
	mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
		ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));

	/*------------------------------------------------------------------
	 * Set the SDRAM Clock Timing Register
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_CLKTR, val);
	mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
		ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));

	/*------------------------------------------------------------------
	 * Program the BxCF registers.
	 *-----------------------------------------------------------------*/
	program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM timing registers.
	 *-----------------------------------------------------------------*/
	program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Set the Extended Mode register
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MEMODE, val);
	mtsdram(SDRAM_MEMODE,
		(val & ~(SDRAM_MEMODE_DIC_MASK | SDRAM_MEMODE_DLL_MASK |
			 SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
		(SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
		 | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));

	/*------------------------------------------------------------------
	 * Program Initialization preload registers.
	 *-----------------------------------------------------------------*/
	program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
			selected_cas, write_recovery);

	/*------------------------------------------------------------------
	 * Delay to ensure 200usec have elapsed since reset.
	 *-----------------------------------------------------------------*/
	udelay(400);
	/*------------------------------------------------------------------
	 * Set the memory queue core base addr.
	 *-----------------------------------------------------------------*/
	program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);

	/*------------------------------------------------------------------
	 * Program SDRAM controller options 2 register
	 * Enable the memory controller.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2,
		(val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
			 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
		SDRAM_MCOPT2_IPTR_EXECUTE);

	/*------------------------------------------------------------------
	 * Wait for the IPTR_EXECUTE init sequence to complete.
	 *-----------------------------------------------------------------*/
	do {
		mfsdram(SDRAM_MCSTAT, val);
	} while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);

	/* enable the controller only after the init sequence completes */
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));

	/* Make sure delay-line calibration is done before proceeding */
	do {
		mfsdram(SDRAM_DLCR, val);
	} while (!(val & SDRAM_DLCR_DLCS_COMPLETE));

	/* get installed memory size */
	dram_size = sdram_memsize();

	/*
	 * Limit size to 2GB
	 */
	if (dram_size > CONFIG_MAX_MEM_MAPPED)
		dram_size = CONFIG_MAX_MEM_MAPPED;

	/* and program tlb entries for this size (dynamic) */

	/*
	 * Program TLB entries with caches enabled, for best performance
	 * during auto-calibration and ECC generation
	 */
	program_tlb(0, 0, dram_size, 0);

	/*------------------------------------------------------------------
	 * DQS calibration.
	 *-----------------------------------------------------------------*/
#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	DQS_autocalibration();
#else
	program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
#endif

#ifdef CONFIG_DDR_ECC
	/*------------------------------------------------------------------
	 * If ecc is enabled, initialize the parity bits.
	 *-----------------------------------------------------------------*/
	program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
#endif

	/*
	 * Now after initialization (auto-calibration and ECC generation)
	 * remove the TLB entries with caches enabled and program again with
	 * the desired cache functionality
	 */
	remove_tlb(0, dram_size);
	program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);

	ppc4xx_ibm_ddr2_register_dump();

	/*
	 * Clear potential errors resulting from auto-calibration.
	 * If not done, then we could get an interrupt later on when
	 * exceptions are enabled.
	 */
	set_mcsr(get_mcsr());

	return sdram_memsize();
}
static void get_spd_info(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long dimm_found;
	unsigned char num_of_bytes;
	unsigned char total_size;

	dimm_found = FALSE;
	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		num_of_bytes = 0;
		total_size = 0;

		num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
		debug("\nspd_read(0x%x) returned %d\n",
		      iic0_dimm_addr[dimm_num], num_of_bytes);
		total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
		debug("spd_read(0x%x) returned %d\n",
		      iic0_dimm_addr[dimm_num], total_size);

		if ((num_of_bytes != 0) && (total_size != 0)) {
			dimm_populated[dimm_num] = TRUE;
			dimm_found = TRUE;
			debug("DIMM slot %lu: populated\n", dimm_num);
		} else {
			dimm_populated[dimm_num] = FALSE;
			debug("DIMM slot %lu: Not populated\n", dimm_num);
		}
	}

	if (dimm_found == FALSE) {
		printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
		spd_ddr_init_hang ();
	}
}
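/*
 * For reference: on a typical DDR2 module, SPD byte 0 (number of SPD bytes
 * written by the vendor) reads as 128 and byte 1 (log2 of the EEPROM size)
 * as 8, i.e. a 256 byte EEPROM. Any pair of non-zero values is accepted
 * here; the exact numbers are not checked.
 */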
void board_add_ram_info(int use_default)
{
	PPC4xx_SYS_INFO board_cfg;
	u32 val;

	if (is_ecc_enabled())
		puts(" (ECC");
	else
		puts(" (ECC not");

	get_sys_info(&board_cfg);

	mfsdr(SDR0_DDR0, val);
	val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
	printf(" enabled, %d MHz", (val * 2) / 1000000);

	mfsdram(SDRAM_MMODE, val);
	val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
	printf(", CL%d)", val);
}
/*------------------------------------------------------------------
 * For the memory DIMMs installed, this routine verifies that they
 * really are DDR specific DIMMs.
 *-----------------------------------------------------------------*/
static void check_mem_type(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long dimm_type;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] == TRUE) {
			dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
			switch (dimm_type) {
			case 1:
				printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
				       "slot %d.\n", (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 2:
				printf("ERROR: EDO DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 3:
				printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 4:
				printf("ERROR: SDRAM DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 5:
				printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 6:
				printf("ERROR: SGRAM DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			case 7:
				debug("DIMM slot %lu: DDR1 SDRAM detected\n", dimm_num);
				dimm_populated[dimm_num] = SDRAM_DDR1;
				break;
			case 8:
				debug("DIMM slot %lu: DDR2 SDRAM detected\n", dimm_num);
				dimm_populated[dimm_num] = SDRAM_DDR2;
				break;
			default:
				printf("ERROR: Unknown DIMM detected in slot %d.\n",
				       (unsigned int)dimm_num);
				printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
				break;
			}
		}
	}

	for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
		if ((dimm_populated[dimm_num - 1] != SDRAM_NONE)
		    && (dimm_populated[dimm_num] != SDRAM_NONE)
		    && (dimm_populated[dimm_num - 1] != dimm_populated[dimm_num])) {
			printf("ERROR: DDR1 and DDR2 DIMM types cannot be mixed.\n");
			spd_ddr_init_hang ();
		}
	}
}
/*------------------------------------------------------------------
 * For the memory DIMMs installed, this routine verifies that the
 * frequency previously calculated is supported.
 *-----------------------------------------------------------------*/
static void check_frequency(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long tcyc_reg;
	unsigned long cycle_time;
	unsigned long calc_cycle_time;
	unsigned long sdram_freq;
	unsigned long sdr_ddrpll;
	PPC4xx_SYS_INFO board_cfg;

	/*------------------------------------------------------------------
	 * Get the board configuration info.
	 *-----------------------------------------------------------------*/
	get_sys_info(&board_cfg);

	mfsdr(SDR0_DDR0, sdr_ddrpll);
	sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));

	/*
	 * calc_cycle_time is calculated from the DDR frequency set by the
	 * board/chip and is expressed in multiples of 10 picoseconds
	 * to match the way the DIMM cycle time is calculated below.
	 */
	calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
			/*
			 * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
			 * the higher order nibble (bits 4-7) designates the cycle time
			 * to a granularity of 1ns;
			 * the value presented by the lower order nibble (bits 0-3)
			 * has a granularity of .1ns and is added to the value designated
			 * by the higher nibble. In addition, four codes of the lower order
			 * nibble are assigned to support +.25, +.33, +.66 and +.75.
			 */
			/* Convert from hex to decimal */
			if ((tcyc_reg & 0x0F) == 0x0D)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
			else if ((tcyc_reg & 0x0F) == 0x0C)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
			else if ((tcyc_reg & 0x0F) == 0x0B)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
			else if ((tcyc_reg & 0x0F) == 0x0A)
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
			else
				cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
					((tcyc_reg & 0x0F) * 10);
			debug("cycle_time=%lu [10 picoseconds]\n", cycle_time);
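			/*
			 * Worked example: a DDR2-533 module reports tcyc_reg = 0x3D
			 * for its highest CAS latency. The high nibble 0x3 gives 300
			 * and the low nibble 0xD adds 75, so cycle_time = 375, i.e.
			 * 3.75 ns. With a 266 MHz memory clock calc_cycle_time is
			 * likewise about 375, so the check below passes.
			 */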
			if (cycle_time > (calc_cycle_time + 10)) {
				/*
				 * The configured SDRAM cycle_time is smaller than the
				 * minimum cycle_time the DIMM supports, i.e. the DDR
				 * clock is too fast for this module.
				 * The additional 100 ps allows for a small uncertainty.
				 */
				printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
				       "slot %d\nwhile the calculated cycle time is %d ps.\n",
				       (unsigned int)(cycle_time * 10),
				       (unsigned int)dimm_num,
				       (unsigned int)(calc_cycle_time * 10));
				printf("Replace the DIMM, or change the DDR frequency via "
				       "strapping bits.\n\n");
				spd_ddr_init_hang ();
			}
		}
	}
}
/*------------------------------------------------------------------
 * For the memory DIMMs installed, this routine verifies that the
 * number of ranks/banks does not exceed the supported maximum.
 *-----------------------------------------------------------------*/
static void check_rank_number(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long dimm_rank;
	unsigned long total_rank = 0;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
			if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
				dimm_rank = (dimm_rank & 0x0F) + 1;
			else
				dimm_rank = dimm_rank & 0x0F;

			if (dimm_rank > MAXRANKS) {
				printf("ERROR: DRAM DIMM detected with %lu ranks in "
				       "slot %lu is not supported.\n", dimm_rank, dimm_num);
				printf("Only %d ranks are supported for all DIMMs.\n", MAXRANKS);
				printf("Replace the DIMM module with a supported DIMM.\n\n");
				spd_ddr_init_hang ();
			} else
				total_rank += dimm_rank;
		}
		if (total_rank > MAXRANKS) {
			printf("ERROR: DRAM DIMMs detected with a total of %d ranks "
			       "for all slots.\n", (unsigned int)total_rank);
			printf("Only %d ranks are supported for all DIMMs.\n", MAXRANKS);
			printf("Remove one of the DIMM modules.\n\n");
			spd_ddr_init_hang ();
		}
	}
}
/*------------------------------------------------------------------
 * Only 2.5V (DDR1) and 1.8V (DDR2) modules are supported.
 * This routine verifies this.
 *-----------------------------------------------------------------*/
static void check_voltage_type(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long voltage_type;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
			switch (voltage_type) {
			case 0x00:
				printf("ERROR: Only 2.5V DDR or 1.8V DDR2 DIMMs are supported.\n");
				printf("This DIMM is 5.0 Volt/TTL.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x01:
				printf("ERROR: Only 2.5V DDR or 1.8V DDR2 DIMMs are supported.\n");
				printf("This DIMM is LVTTL.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x02:
				printf("ERROR: Only 2.5V DDR or 1.8V DDR2 DIMMs are supported.\n");
				printf("This DIMM is 1.5 Volt.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x03:
				printf("ERROR: Only 2.5V DDR or 1.8V DDR2 DIMMs are supported.\n");
				printf("This DIMM is 3.3 Volt/TTL.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			case 0x04:
				/* 2.5 Volt, allowed for DDR1 only */
				break;
			case 0x05:
				/* 1.8 Volt, allowed for DDR2 only */
				break;
			default:
				printf("ERROR: Only 2.5V DDR or 1.8V DDR2 DIMMs are supported.\n");
				printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
				       (unsigned int)dimm_num);
				spd_ddr_init_hang ();
				break;
			}
		}
	}
}
/*-----------------------------------------------------------------------------+
 * program_copt1.
 *-----------------------------------------------------------------------------*/
static void program_copt1(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long dimm_num;
	unsigned long mcopt1;
	unsigned long ecc_enabled;
	unsigned long ecc = 0;
	unsigned long data_width = 0;
	unsigned long dimm_32bit;
	unsigned long dimm_64bit;
	unsigned long registered = 0;
	unsigned long attribute = 0;
	unsigned long buf0, buf1;	/* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
	unsigned long bankcount;
	unsigned long ddrtype;
	unsigned long val;

#ifdef CONFIG_DDR_ECC
	ecc_enabled = TRUE;
#else
	ecc_enabled = FALSE;
#endif
	dimm_32bit = FALSE;
	dimm_64bit = FALSE;
	buf0 = FALSE;
	buf1 = FALSE;

	/*------------------------------------------------------------------
	 * Set memory controller options reg 1, SDRAM_MCOPT1.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT1, val);
	mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
			 SDRAM_MCOPT1_PMU_MASK | SDRAM_MCOPT1_DMWD_MASK |
			 SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
			 SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
			 SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
			 SDRAM_MCOPT1_DREF_MASK);

	mcopt1 |= SDRAM_MCOPT1_QDEP;
	mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
	mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
	mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
	mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
	mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			/* test ecc support */
			ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
			if (ecc != 0x02)	/* ecc not supported */
				ecc_enabled = FALSE;

			/* test bank count */
			bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
			if (bankcount == 0x04)	/* bank count = 4 */
				mcopt1 |= SDRAM_MCOPT1_4_BANKS;
			else			/* bank count = 8 */
				mcopt1 |= SDRAM_MCOPT1_8_BANKS;

			/* test DDR type */
			ddrtype = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2);
			/* test for buffered/unbuffered, registered, differential clocks */
			registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
			attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);

			/* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
			if (dimm_num == 0) {
				if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
					mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
				if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
					mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
				if (registered == 1) {	/* DDR2 always buffered */
					/* TODO: what about above comments ? */
					mcopt1 |= SDRAM_MCOPT1_RDEN;
					buf0 = TRUE;
				} else {
					/* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
					if ((attribute & 0x02) == 0x00) {
						/* buffered not supported */
						buf0 = FALSE;
					} else {
						mcopt1 |= SDRAM_MCOPT1_RDEN;
						buf0 = TRUE;
					}
				}
			} else if (dimm_num == 1) {
				if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
					mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
				if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
					mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
				if (registered == 1) {
					/* DDR2 always buffered */
					mcopt1 |= SDRAM_MCOPT1_RDEN;
					buf1 = TRUE;
				} else {
					if ((attribute & 0x02) == 0x00) {
						/* buffered not supported */
						buf1 = FALSE;
					} else {
						mcopt1 |= SDRAM_MCOPT1_RDEN;
						buf1 = TRUE;
					}
				}
			}

			/* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
			data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
				(((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
			switch (data_width) {
			case 72:
			case 64:
				dimm_64bit = TRUE;
				break;
			case 40:
			case 32:
				dimm_32bit = TRUE;
				break;
			default:
				printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
				       data_width);
				printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
				break;
			}
		}
	}

	/* verify matching properties */
	if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
		if (buf0 != buf1) {
			printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
			spd_ddr_init_hang ();
		}
	}

	if ((dimm_64bit == TRUE) && (dimm_32bit == TRUE)) {
		printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
		spd_ddr_init_hang ();
	} else if ((dimm_64bit == TRUE) && (dimm_32bit == FALSE)) {
		mcopt1 |= SDRAM_MCOPT1_DMWD_64;
	} else if ((dimm_64bit == FALSE) && (dimm_32bit == TRUE)) {
		mcopt1 |= SDRAM_MCOPT1_DMWD_32;
	} else {
		printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
		spd_ddr_init_hang ();
	}

	if (ecc_enabled == TRUE)
		mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
	else
		mcopt1 |= SDRAM_MCOPT1_MCHK_NON;

	mtsdram(SDRAM_MCOPT1, mcopt1);
}
/*-----------------------------------------------------------------------------+
 * program_codt.
 *-----------------------------------------------------------------------------*/
static void program_codt(unsigned long *dimm_populated,
		unsigned char *iic0_dimm_addr,
		unsigned long num_dimm_banks)
{
	unsigned long codt;
	unsigned long modt0 = 0;
	unsigned long modt1 = 0;
	unsigned long modt2 = 0;
	unsigned long modt3 = 0;
	unsigned char dimm_num;
	unsigned char dimm_rank;
	unsigned char total_rank = 0;
	unsigned char total_dimm = 0;
	unsigned char dimm_type = 0;
	unsigned char firstSlot = 0;

	/*------------------------------------------------------------------
	 * Set the SDRAM Controller On Die Termination Register
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_CODT, codt);
	codt &= ~(SDRAM_CODT_DQS_SINGLE_END | SDRAM_CODT_CKSE_SINGLE_END);
	codt |= SDRAM_CODT_IO_NMODE;

	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		if (dimm_populated[dimm_num] != SDRAM_NONE) {
			dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
			if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
				dimm_rank = (dimm_rank & 0x0F) + 1;
				dimm_type = SDRAM_DDR2;
			} else {
				dimm_rank = dimm_rank & 0x0F;
				dimm_type = SDRAM_DDR1;
			}

			total_rank += dimm_rank;
			total_dimm++;
			if ((dimm_num == 0) && (total_dimm == 1))
				firstSlot = TRUE;
			else
				firstSlot = FALSE;
		}
	}

	if (dimm_type == SDRAM_DDR2) {
		codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
		if ((total_dimm == 1) && (firstSlot == TRUE)) {
			if (total_rank == 1) {	/* PUUU */
				codt |= CALC_ODT_R(0);
				modt0 = CALC_ODT_W(0);
				modt1 = 0x00000000;
				modt2 = 0x00000000;
				modt3 = 0x00000000;
			}
			if (total_rank == 2) {	/* PPUU */
				codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
				modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
				modt1 = 0x00000000;
				modt2 = 0x00000000;
				modt3 = 0x00000000;
			}
		} else if ((total_dimm == 1) && (firstSlot != TRUE)) {
			if (total_rank == 1) {	/* UUPU */
				codt |= CALC_ODT_R(2);
				modt0 = 0x00000000;
				modt1 = 0x00000000;
				modt2 = CALC_ODT_W(2);
				modt3 = 0x00000000;
			}
			if (total_rank == 2) {	/* UUPP */
				codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
				modt0 = 0x00000000;
				modt1 = 0x00000000;
				modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
				modt3 = 0x00000000;
			}
		}
		if (total_dimm == 2) {
			if (total_rank == 2) {	/* PUPU */
				codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
				modt0 = CALC_ODT_RW(2);
				modt1 = 0x00000000;
				modt2 = CALC_ODT_RW(0);
				modt3 = 0x00000000;
			}
			if (total_rank == 4) {	/* PPPP */
				codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
					CALC_ODT_R(2) | CALC_ODT_R(3);
				modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
				modt1 = 0x00000000;
				modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
				modt3 = 0x00000000;
			}
		}
	} else {
		codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
		modt0 = 0x00000000;
		modt1 = 0x00000000;
		modt2 = 0x00000000;
		modt3 = 0x00000000;

		if (total_dimm == 1) {
			if (total_rank == 1)
				codt |= 0x00800000;
			if (total_rank == 2)
				codt |= 0x02800000;
		}
		if (total_dimm == 2) {
			if (total_rank == 2)
				codt |= 0x08800000;
			if (total_rank == 4)
				codt |= 0x2a800000;
		}
	}

	debug("nb of dimm %d\n", total_dimm);
	debug("nb of rank %d\n", total_rank);
	if (total_dimm == 1)
		debug("dimm in slot %d\n", firstSlot);

	mtsdram(SDRAM_CODT, codt);
	mtsdram(SDRAM_MODT0, modt0);
	mtsdram(SDRAM_MODT1, modt1);
	mtsdram(SDRAM_MODT2, modt2);
	mtsdram(SDRAM_MODT3, modt3);
}
  1086. /*-----------------------------------------------------------------------------+
  1087. * program_initplr.
  1088. *-----------------------------------------------------------------------------*/
  1089. static void program_initplr(unsigned long *dimm_populated,
  1090. unsigned char *iic0_dimm_addr,
  1091. unsigned long num_dimm_banks,
  1092. ddr_cas_id_t selected_cas,
  1093. int write_recovery)
  1094. {
  1095. u32 cas = 0;
  1096. u32 odt = 0;
  1097. u32 ods = 0;
  1098. u32 mr;
  1099. u32 wr;
  1100. u32 emr;
  1101. u32 emr2;
  1102. u32 emr3;
  1103. int dimm_num;
  1104. int total_dimm = 0;
  1105. /******************************************************
  1106. ** Assumption: if more than one DIMM, all DIMMs are the same
  1107. ** as already checked in check_memory_type
  1108. ******************************************************/
  1109. if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
  1110. mtsdram(SDRAM_INITPLR0, 0x81B80000);
  1111. mtsdram(SDRAM_INITPLR1, 0x81900400);
  1112. mtsdram(SDRAM_INITPLR2, 0x81810000);
  1113. mtsdram(SDRAM_INITPLR3, 0xff800162);
  1114. mtsdram(SDRAM_INITPLR4, 0x81900400);
  1115. mtsdram(SDRAM_INITPLR5, 0x86080000);
  1116. mtsdram(SDRAM_INITPLR6, 0x86080000);
  1117. mtsdram(SDRAM_INITPLR7, 0x81000062);
  1118. } else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
  1119. switch (selected_cas) {
  1120. case DDR_CAS_3:
  1121. cas = 3 << 4;
  1122. break;
  1123. case DDR_CAS_4:
  1124. cas = 4 << 4;
  1125. break;
  1126. case DDR_CAS_5:
  1127. cas = 5 << 4;
  1128. break;
  1129. default:
1130. printf("ERROR: ucode error on selected_cas value %d\n", selected_cas);
  1131. spd_ddr_init_hang ();
  1132. break;
  1133. }
  1134. #if 0
  1135. /*
  1136. * ToDo - Still a problem with the write recovery:
  1137. * On the Corsair CM2X512-5400C4 module, setting write recovery
  1138. * in the INITPLR reg to the value calculated in program_mode()
1139. * results in DDR2 memory that does not work correctly (crash after
1140. * relocation).
1141. *
1142. * So for now, set the write recovery to 3. This seems to work
1143. * on the Corsair module too.
  1144. *
  1145. * 2007-03-01, sr
  1146. */
  1147. switch (write_recovery) {
  1148. case 3:
  1149. wr = WRITE_RECOV_3;
  1150. break;
  1151. case 4:
  1152. wr = WRITE_RECOV_4;
  1153. break;
  1154. case 5:
  1155. wr = WRITE_RECOV_5;
  1156. break;
  1157. case 6:
  1158. wr = WRITE_RECOV_6;
  1159. break;
  1160. default:
1161. printf("ERROR: write recovery not supported (%d)\n", write_recovery);
  1162. spd_ddr_init_hang ();
  1163. break;
  1164. }
  1165. #else
  1166. wr = WRITE_RECOV_3; /* test-only, see description above */
  1167. #endif
  1168. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
  1169. if (dimm_populated[dimm_num] != SDRAM_NONE)
  1170. total_dimm++;
  1171. if (total_dimm == 1) {
  1172. odt = ODT_150_OHM;
  1173. ods = ODS_FULL;
  1174. } else if (total_dimm == 2) {
  1175. odt = ODT_75_OHM;
  1176. ods = ODS_REDUCED;
  1177. } else {
1178. printf("ERROR: Unsupported number of DIMMs (%d)\n", total_dimm);
  1179. spd_ddr_init_hang ();
  1180. }
  1181. mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
  1182. emr = CMD_EMR | SELECT_EMR | odt | ods;
  1183. emr2 = CMD_EMR | SELECT_EMR2;
  1184. emr3 = CMD_EMR | SELECT_EMR3;
  1185. /* NOP - Wait 106 MemClk cycles */
  1186. mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
  1187. SDRAM_INITPLR_IMWT_ENCODE(106));
  1188. udelay(1000);
  1189. /* precharge 4 MemClk cycles */
  1190. mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
  1191. SDRAM_INITPLR_IMWT_ENCODE(4));
  1192. /* EMR2 - Wait tMRD (2 MemClk cycles) */
  1193. mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
  1194. SDRAM_INITPLR_IMWT_ENCODE(2));
  1195. /* EMR3 - Wait tMRD (2 MemClk cycles) */
  1196. mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
  1197. SDRAM_INITPLR_IMWT_ENCODE(2));
  1198. /* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
  1199. mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
  1200. SDRAM_INITPLR_IMWT_ENCODE(2));
  1201. /* MR w/ DLL reset - 200 cycle wait for DLL reset */
  1202. mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
  1203. SDRAM_INITPLR_IMWT_ENCODE(200));
  1204. udelay(1000);
  1205. /* precharge 4 MemClk cycles */
  1206. mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
  1207. SDRAM_INITPLR_IMWT_ENCODE(4));
  1208. /* Refresh 25 MemClk cycles */
  1209. mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1210. SDRAM_INITPLR_IMWT_ENCODE(25));
  1211. /* Refresh 25 MemClk cycles */
  1212. mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1213. SDRAM_INITPLR_IMWT_ENCODE(25));
  1214. /* Refresh 25 MemClk cycles */
  1215. mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1216. SDRAM_INITPLR_IMWT_ENCODE(25));
  1217. /* Refresh 25 MemClk cycles */
  1218. mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1219. SDRAM_INITPLR_IMWT_ENCODE(25));
  1220. /* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
  1221. mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
  1222. SDRAM_INITPLR_IMWT_ENCODE(2));
  1223. /* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
  1224. mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
  1225. SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
  1226. /* EMR OCD Exit */
  1227. mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
  1228. SDRAM_INITPLR_IMWT_ENCODE(2));
  1229. } else {
1230. printf("ERROR: ucode error - unknown DDR type in program_initplr\n");
  1231. spd_ddr_init_hang ();
  1232. }
  1233. }
  1234. /*------------------------------------------------------------------
  1235. * This routine programs the SDRAM_MMODE register.
  1236. * the selected_cas is an output parameter, that will be passed
  1237. * by caller to call the above program_initplr( )
  1238. *-----------------------------------------------------------------*/
  1239. static void program_mode(unsigned long *dimm_populated,
  1240. unsigned char *iic0_dimm_addr,
  1241. unsigned long num_dimm_banks,
  1242. ddr_cas_id_t *selected_cas,
  1243. int *write_recovery)
  1244. {
  1245. unsigned long dimm_num;
  1246. unsigned long sdram_ddr1;
  1247. unsigned long t_wr_ns;
  1248. unsigned long t_wr_clk;
  1249. unsigned long cas_bit;
  1250. unsigned long cas_index;
  1251. unsigned long sdram_freq;
  1252. unsigned long ddr_check;
  1253. unsigned long mmode;
  1254. unsigned long tcyc_reg;
  1255. unsigned long cycle_2_0_clk;
  1256. unsigned long cycle_2_5_clk;
  1257. unsigned long cycle_3_0_clk;
  1258. unsigned long cycle_4_0_clk;
  1259. unsigned long cycle_5_0_clk;
  1260. unsigned long max_2_0_tcyc_ns_x_100;
  1261. unsigned long max_2_5_tcyc_ns_x_100;
  1262. unsigned long max_3_0_tcyc_ns_x_100;
  1263. unsigned long max_4_0_tcyc_ns_x_100;
  1264. unsigned long max_5_0_tcyc_ns_x_100;
  1265. unsigned long cycle_time_ns_x_100[3];
  1266. PPC4xx_SYS_INFO board_cfg;
  1267. unsigned char cas_2_0_available;
  1268. unsigned char cas_2_5_available;
  1269. unsigned char cas_3_0_available;
  1270. unsigned char cas_4_0_available;
  1271. unsigned char cas_5_0_available;
  1272. unsigned long sdr_ddrpll;
  1273. /*------------------------------------------------------------------
  1274. * Get the board configuration info.
  1275. *-----------------------------------------------------------------*/
  1276. get_sys_info(&board_cfg);
  1277. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1278. sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
  1279. debug("sdram_freq=%d\n", sdram_freq);
  1280. /*------------------------------------------------------------------
  1281. * Handle the timing. We need to find the worst case timing of all
  1282. * the dimm modules installed.
  1283. *-----------------------------------------------------------------*/
  1284. t_wr_ns = 0;
  1285. cas_2_0_available = TRUE;
  1286. cas_2_5_available = TRUE;
  1287. cas_3_0_available = TRUE;
  1288. cas_4_0_available = TRUE;
  1289. cas_5_0_available = TRUE;
  1290. max_2_0_tcyc_ns_x_100 = 10;
  1291. max_2_5_tcyc_ns_x_100 = 10;
  1292. max_3_0_tcyc_ns_x_100 = 10;
  1293. max_4_0_tcyc_ns_x_100 = 10;
  1294. max_5_0_tcyc_ns_x_100 = 10;
  1295. sdram_ddr1 = TRUE;
  1296. /* loop through all the DIMM slots on the board */
  1297. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1298. /* If a dimm is installed in a particular slot ... */
  1299. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1300. if (dimm_populated[dimm_num] == SDRAM_DDR1)
  1301. sdram_ddr1 = TRUE;
  1302. else
  1303. sdram_ddr1 = FALSE;
  1304. /* t_wr_ns = max(t_wr_ns, (unsigned long)dimm_spd[dimm_num][36] >> 2); */ /* not used in this loop. */
  1305. cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
  1306. debug("cas_bit[SPD byte 18]=%02x\n", cas_bit);
  1307. /* For a particular DIMM, grab the three CAS values it supports */
  1308. for (cas_index = 0; cas_index < 3; cas_index++) {
  1309. switch (cas_index) {
  1310. case 0:
  1311. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
  1312. break;
  1313. case 1:
  1314. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
  1315. break;
  1316. default:
  1317. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
  1318. break;
  1319. }
  1320. if ((tcyc_reg & 0x0F) >= 10) {
  1321. if ((tcyc_reg & 0x0F) == 0x0D) {
  1322. /* Convert from hex to decimal */
  1323. cycle_time_ns_x_100[cas_index] =
  1324. (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
  1325. } else {
  1326. printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
  1327. "in slot %d\n", (unsigned int)dimm_num);
  1328. spd_ddr_init_hang ();
  1329. }
  1330. } else {
  1331. /* Convert from hex to decimal */
  1332. cycle_time_ns_x_100[cas_index] =
  1333. (((tcyc_reg & 0xF0) >> 4) * 100) +
  1334. ((tcyc_reg & 0x0F)*10);
  1335. }
  1336. debug("cas_index=%d: cycle_time_ns_x_100=%d\n", cas_index,
  1337. cycle_time_ns_x_100[cas_index]);
  1338. }
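/*
 * Worked example (illustrative only): SPD bytes 9/23/25 hold tCK as
 * BCD-like "ns.tenths", with a low nibble of 0x0D meaning ".75". So
 * tcyc_reg = 0x50 decodes to (5 * 100) + (0 * 10) = 500 (5.0 ns),
 * 0x25 to 250 (2.5 ns) and 0x3D to (3 * 100) + 75 = 375 (3.75 ns).
 */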
  1339. /* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
  1340. /* supported for a particular DIMM. */
  1341. cas_index = 0;
  1342. if (sdram_ddr1) {
  1343. /*
  1344. * DDR devices use the following bitmask for CAS latency:
  1345. * Bit 7 6 5 4 3 2 1 0
  1346. * TBD 4.0 3.5 3.0 2.5 2.0 1.5 1.0
  1347. */
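/*
 * Example (illustrative): cas_bit = 0x1C sets bits 4, 3 and 2, i.e.
 * CAS 3.0, 2.5 and 2.0 are supported. The SPD cycle times read above
 * then apply highest latency first: byte 9 is tCK at CAS 3.0,
 * byte 23 at CAS 2.5 and byte 25 at CAS 2.0.
 */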
  1348. if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
  1349. (cycle_time_ns_x_100[cas_index] != 0)) {
  1350. max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
  1351. cycle_time_ns_x_100[cas_index]);
  1352. cas_index++;
  1353. } else {
  1354. if (cas_index != 0)
  1355. cas_index++;
  1356. cas_4_0_available = FALSE;
  1357. }
  1358. if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
  1359. (cycle_time_ns_x_100[cas_index] != 0)) {
  1360. max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
  1361. cycle_time_ns_x_100[cas_index]);
  1362. cas_index++;
  1363. } else {
  1364. if (cas_index != 0)
  1365. cas_index++;
  1366. cas_3_0_available = FALSE;
  1367. }
  1368. if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
  1369. (cycle_time_ns_x_100[cas_index] != 0)) {
  1370. max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
  1371. cycle_time_ns_x_100[cas_index]);
  1372. cas_index++;
  1373. } else {
  1374. if (cas_index != 0)
  1375. cas_index++;
  1376. cas_2_5_available = FALSE;
  1377. }
  1378. if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
  1379. (cycle_time_ns_x_100[cas_index] != 0)) {
  1380. max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
  1381. cycle_time_ns_x_100[cas_index]);
  1382. cas_index++;
  1383. } else {
  1384. if (cas_index != 0)
  1385. cas_index++;
  1386. cas_2_0_available = FALSE;
  1387. }
  1388. } else {
  1389. /*
  1390. * DDR2 devices use the following bitmask for CAS latency:
  1391. * Bit 7 6 5 4 3 2 1 0
  1392. * TBD 6.0 5.0 4.0 3.0 2.0 TBD TBD
  1393. */
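/*
 * Example (illustrative): cas_bit = 0x38 sets bits 5, 4 and 3, i.e.
 * CAS 5, 4 and 3 are supported; byte 9 then holds tCK at CAS 5,
 * byte 23 at CAS 4 and byte 25 at CAS 3.
 */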
  1394. if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
  1395. (cycle_time_ns_x_100[cas_index] != 0)) {
  1396. max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
  1397. cycle_time_ns_x_100[cas_index]);
  1398. cas_index++;
  1399. } else {
  1400. if (cas_index != 0)
  1401. cas_index++;
  1402. cas_5_0_available = FALSE;
  1403. }
  1404. if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
  1405. (cycle_time_ns_x_100[cas_index] != 0)) {
  1406. max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
  1407. cycle_time_ns_x_100[cas_index]);
  1408. cas_index++;
  1409. } else {
  1410. if (cas_index != 0)
  1411. cas_index++;
  1412. cas_4_0_available = FALSE;
  1413. }
  1414. if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
  1415. (cycle_time_ns_x_100[cas_index] != 0)) {
  1416. max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
  1417. cycle_time_ns_x_100[cas_index]);
  1418. cas_index++;
  1419. } else {
  1420. if (cas_index != 0)
  1421. cas_index++;
  1422. cas_3_0_available = FALSE;
  1423. }
  1424. }
  1425. }
  1426. }
  1427. /*------------------------------------------------------------------
  1428. * Set the SDRAM mode, SDRAM_MMODE
  1429. *-----------------------------------------------------------------*/
  1430. mfsdram(SDRAM_MMODE, mmode);
  1431. mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
  1432. /* add 10 here because of rounding problems */
  1433. cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
  1434. cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
  1435. cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
  1436. cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
  1437. cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
  1438. debug("cycle_3_0_clk=%d\n", cycle_3_0_clk);
  1439. debug("cycle_4_0_clk=%d\n", cycle_4_0_clk);
  1440. debug("cycle_5_0_clk=%d\n", cycle_5_0_clk);
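/*
 * Worked example (illustrative): with max_3_0_tcyc_ns_x_100 = 500
 * (a 5.0 ns minimum cycle time at CL3), cycle_3_0_clk becomes
 * 1000000000 * 100 / 500 + 10 = 200000010, i.e. CL3 is usable for
 * any sdram_freq up to roughly 200 MHz in the selection below.
 */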
  1441. if (sdram_ddr1 == TRUE) { /* DDR1 */
  1442. if ((cas_2_0_available == TRUE) && (sdram_freq <= cycle_2_0_clk)) {
  1443. mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
  1444. *selected_cas = DDR_CAS_2;
  1445. } else if ((cas_2_5_available == TRUE) && (sdram_freq <= cycle_2_5_clk)) {
  1446. mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
  1447. *selected_cas = DDR_CAS_2_5;
  1448. } else if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
  1449. mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
  1450. *selected_cas = DDR_CAS_3;
  1451. } else {
  1452. printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1453. printf("Only DDR1 DIMMs with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
  1454. printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
  1455. spd_ddr_init_hang ();
  1456. }
  1457. } else { /* DDR2 */
  1458. debug("cas_3_0_available=%d\n", cas_3_0_available);
  1459. debug("cas_4_0_available=%d\n", cas_4_0_available);
  1460. debug("cas_5_0_available=%d\n", cas_5_0_available);
  1461. if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
  1462. mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
  1463. *selected_cas = DDR_CAS_3;
  1464. } else if ((cas_4_0_available == TRUE) && (sdram_freq <= cycle_4_0_clk)) {
  1465. mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
  1466. *selected_cas = DDR_CAS_4;
  1467. } else if ((cas_5_0_available == TRUE) && (sdram_freq <= cycle_5_0_clk)) {
  1468. mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
  1469. *selected_cas = DDR_CAS_5;
  1470. } else {
  1471. printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1472. printf("Only DDR2 DIMMs with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
  1473. printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
  1474. printf("cas3=%d cas4=%d cas5=%d\n",
  1475. cas_3_0_available, cas_4_0_available, cas_5_0_available);
  1476. printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
  1477. sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
  1478. spd_ddr_init_hang ();
  1479. }
  1480. }
  1481. if (sdram_ddr1 == TRUE)
  1482. mmode |= SDRAM_MMODE_WR_DDR1;
  1483. else {
  1484. /* loop through all the DIMM slots on the board */
  1485. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1486. /* If a dimm is installed in a particular slot ... */
  1487. if (dimm_populated[dimm_num] != SDRAM_NONE)
  1488. t_wr_ns = max(t_wr_ns,
  1489. spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
  1490. }
  1491. /*
  1492. * convert from nanoseconds to ddr clocks
  1493. * round up if necessary
  1494. */
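/*
 * Worked example (illustrative): SPD byte 36 gives tWR in quarter
 * nanoseconds, so a value of 60 yields t_wr_ns = 15. At
 * sdram_freq = 200 MHz, t_wr_clk = 200000000 * 15 / 1000000000 = 3
 * with no remainder (ddr_check equals sdram_freq), and the switch
 * below selects SDRAM_MMODE_WR_DDR2_3_CYC.
 */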
  1495. t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
  1496. ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
  1497. if (sdram_freq != ddr_check)
  1498. t_wr_clk++;
  1499. switch (t_wr_clk) {
  1500. case 0:
  1501. case 1:
  1502. case 2:
  1503. case 3:
  1504. mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
  1505. break;
  1506. case 4:
  1507. mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
  1508. break;
  1509. case 5:
  1510. mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
  1511. break;
  1512. default:
  1513. mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
  1514. break;
  1515. }
  1516. *write_recovery = t_wr_clk;
  1517. }
  1518. debug("CAS latency = %d\n", *selected_cas);
  1519. debug("Write recovery = %d\n", *write_recovery);
  1520. mtsdram(SDRAM_MMODE, mmode);
  1521. }
  1522. /*-----------------------------------------------------------------------------+
  1523. * program_rtr.
  1524. *-----------------------------------------------------------------------------*/
  1525. static void program_rtr(unsigned long *dimm_populated,
  1526. unsigned char *iic0_dimm_addr,
  1527. unsigned long num_dimm_banks)
  1528. {
  1529. PPC4xx_SYS_INFO board_cfg;
  1530. unsigned long max_refresh_rate;
  1531. unsigned long dimm_num;
  1532. unsigned long refresh_rate_type;
  1533. unsigned long refresh_rate;
  1534. unsigned long rint;
  1535. unsigned long sdram_freq;
  1536. unsigned long sdr_ddrpll;
  1537. unsigned long val;
  1538. /*------------------------------------------------------------------
  1539. * Get the board configuration info.
  1540. *-----------------------------------------------------------------*/
  1541. get_sys_info(&board_cfg);
  1542. /*------------------------------------------------------------------
  1543. * Set the SDRAM Refresh Timing Register, SDRAM_RTR
  1544. *-----------------------------------------------------------------*/
  1545. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1546. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  1547. max_refresh_rate = 0;
  1548. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1549. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1550. refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
  1551. refresh_rate_type &= 0x7F;
  1552. switch (refresh_rate_type) {
  1553. case 0:
  1554. refresh_rate = 15625;
  1555. break;
  1556. case 1:
  1557. refresh_rate = 3906;
  1558. break;
  1559. case 2:
  1560. refresh_rate = 7812;
  1561. break;
  1562. case 3:
  1563. refresh_rate = 31250;
  1564. break;
  1565. case 4:
  1566. refresh_rate = 62500;
  1567. break;
  1568. case 5:
  1569. refresh_rate = 125000;
  1570. break;
  1571. default:
  1572. refresh_rate = 0;
  1573. printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
  1574. (unsigned int)dimm_num);
  1575. printf("Replace the DIMM module with a supported DIMM.\n\n");
  1576. spd_ddr_init_hang ();
  1577. break;
  1578. }
  1579. max_refresh_rate = max(max_refresh_rate, refresh_rate);
  1580. }
  1581. }
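/*
 * Worked example (illustrative): the refresh periods above are in
 * nanoseconds, so with max_refresh_rate = 7812 (7.8125 us) and
 * sdram_freq = 200 MHz, rint = 200000000 * 7812 / 1000000000 = 1562
 * memory clocks between refreshes.
 */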
  1582. rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
  1583. mfsdram(SDRAM_RTR, val);
  1584. mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
  1585. (SDRAM_RTR_RINT_ENCODE(rint)));
  1586. }
  1587. /*------------------------------------------------------------------
  1588. * This routine programs the SDRAM_TRx registers.
  1589. *-----------------------------------------------------------------*/
  1590. static void program_tr(unsigned long *dimm_populated,
  1591. unsigned char *iic0_dimm_addr,
  1592. unsigned long num_dimm_banks)
  1593. {
  1594. unsigned long dimm_num;
  1595. unsigned long sdram_ddr1;
  1596. unsigned long t_rp_ns;
  1597. unsigned long t_rcd_ns;
  1598. unsigned long t_rrd_ns;
  1599. unsigned long t_ras_ns;
  1600. unsigned long t_rc_ns;
  1601. unsigned long t_rfc_ns;
  1602. unsigned long t_wpc_ns;
  1603. unsigned long t_wtr_ns;
  1604. unsigned long t_rpc_ns;
  1605. unsigned long t_rp_clk;
  1606. unsigned long t_rcd_clk;
  1607. unsigned long t_rrd_clk;
  1608. unsigned long t_ras_clk;
  1609. unsigned long t_rc_clk;
  1610. unsigned long t_rfc_clk;
  1611. unsigned long t_wpc_clk;
  1612. unsigned long t_wtr_clk;
  1613. unsigned long t_rpc_clk;
  1614. unsigned long sdtr1, sdtr2, sdtr3;
  1615. unsigned long ddr_check;
  1616. unsigned long sdram_freq;
  1617. unsigned long sdr_ddrpll;
  1618. PPC4xx_SYS_INFO board_cfg;
  1619. /*------------------------------------------------------------------
  1620. * Get the board configuration info.
  1621. *-----------------------------------------------------------------*/
  1622. get_sys_info(&board_cfg);
  1623. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1624. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  1625. /*------------------------------------------------------------------
  1626. * Handle the timing. We need to find the worst case timing of all
  1627. * the dimm modules installed.
  1628. *-----------------------------------------------------------------*/
  1629. t_rp_ns = 0;
  1630. t_rrd_ns = 0;
  1631. t_rcd_ns = 0;
  1632. t_ras_ns = 0;
  1633. t_rc_ns = 0;
  1634. t_rfc_ns = 0;
  1635. t_wpc_ns = 0;
  1636. t_wtr_ns = 0;
  1637. t_rpc_ns = 0;
  1638. sdram_ddr1 = TRUE;
  1639. /* loop through all the DIMM slots on the board */
  1640. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1641. /* If a dimm is installed in a particular slot ... */
  1642. if (dimm_populated[dimm_num] != SDRAM_NONE) {
1643. if (dimm_populated[dimm_num] == SDRAM_DDR1)
  1644. sdram_ddr1 = TRUE;
  1645. else
  1646. sdram_ddr1 = FALSE;
  1647. t_rcd_ns = max(t_rcd_ns, spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
  1648. t_rrd_ns = max(t_rrd_ns, spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
  1649. t_rp_ns = max(t_rp_ns, spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
  1650. t_ras_ns = max(t_ras_ns, spd_read(iic0_dimm_addr[dimm_num], 30));
  1651. t_rc_ns = max(t_rc_ns, spd_read(iic0_dimm_addr[dimm_num], 41));
  1652. t_rfc_ns = max(t_rfc_ns, spd_read(iic0_dimm_addr[dimm_num], 42));
  1653. }
  1654. }
  1655. /*------------------------------------------------------------------
  1656. * Set the SDRAM Timing Reg 1, SDRAM_TR1
  1657. *-----------------------------------------------------------------*/
  1658. mfsdram(SDRAM_SDTR1, sdtr1);
  1659. sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
  1660. SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
  1661. /* default values */
  1662. sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
  1663. sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
  1664. /* normal operations */
  1665. sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
  1666. sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
  1667. mtsdram(SDRAM_SDTR1, sdtr1);
  1668. /*------------------------------------------------------------------
  1669. * Set the SDRAM Timing Reg 2, SDRAM_TR2
  1670. *-----------------------------------------------------------------*/
  1671. mfsdram(SDRAM_SDTR2, sdtr2);
  1672. sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK | SDRAM_SDTR2_WTR_MASK |
  1673. SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
  1674. SDRAM_SDTR2_RPC_MASK | SDRAM_SDTR2_RP_MASK |
  1675. SDRAM_SDTR2_RRD_MASK);
  1676. /*
  1677. * convert t_rcd from nanoseconds to ddr clocks
  1678. * round up if necessary
  1679. */
  1680. t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
  1681. ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
  1682. if (sdram_freq != ddr_check)
  1683. t_rcd_clk++;
  1684. switch (t_rcd_clk) {
  1685. case 0:
  1686. case 1:
  1687. sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
  1688. break;
  1689. case 2:
  1690. sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
  1691. break;
  1692. case 3:
  1693. sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
  1694. break;
  1695. case 4:
  1696. sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
  1697. break;
  1698. default:
  1699. sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
  1700. break;
  1701. }
  1702. if (sdram_ddr1 == TRUE) { /* DDR1 */
  1703. if (sdram_freq < 200000000) {
  1704. sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
  1705. sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
  1706. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1707. } else {
  1708. sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
  1709. sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
  1710. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1711. }
  1712. } else { /* DDR2 */
  1713. /* loop through all the DIMM slots on the board */
  1714. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1715. /* If a dimm is installed in a particular slot ... */
  1716. if (dimm_populated[dimm_num] != SDRAM_NONE) {
1717. t_wpc_ns = max(t_wpc_ns, spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
  1718. t_wtr_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
  1719. t_rpc_ns = max(t_rpc_ns, spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
  1720. }
  1721. }
  1722. /*
  1723. * convert from nanoseconds to ddr clocks
  1724. * round up if necessary
  1725. */
  1726. t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
  1727. ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
  1728. if (sdram_freq != ddr_check)
  1729. t_wpc_clk++;
  1730. switch (t_wpc_clk) {
  1731. case 0:
  1732. case 1:
  1733. case 2:
  1734. sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
  1735. break;
  1736. case 3:
  1737. sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
  1738. break;
  1739. case 4:
  1740. sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
  1741. break;
  1742. case 5:
  1743. sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
  1744. break;
  1745. default:
  1746. sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
  1747. break;
  1748. }
  1749. /*
  1750. * convert from nanoseconds to ddr clocks
  1751. * round up if necessary
  1752. */
  1753. t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
  1754. ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
  1755. if (sdram_freq != ddr_check)
  1756. t_wtr_clk++;
  1757. switch (t_wtr_clk) {
  1758. case 0:
  1759. case 1:
  1760. sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
  1761. break;
  1762. case 2:
  1763. sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
  1764. break;
  1765. case 3:
  1766. sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
  1767. break;
  1768. default:
  1769. sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
  1770. break;
  1771. }
  1772. /*
  1773. * convert from nanoseconds to ddr clocks
  1774. * round up if necessary
  1775. */
  1776. t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
  1777. ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
  1778. if (sdram_freq != ddr_check)
  1779. t_rpc_clk++;
  1780. switch (t_rpc_clk) {
  1781. case 0:
  1782. case 1:
  1783. case 2:
  1784. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1785. break;
  1786. case 3:
  1787. sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
  1788. break;
  1789. default:
  1790. sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
  1791. break;
  1792. }
  1793. }
  1794. /* default value */
  1795. sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
  1796. /*
  1797. * convert t_rrd from nanoseconds to ddr clocks
  1798. * round up if necessary
  1799. */
  1800. t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
  1801. ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
  1802. if (sdram_freq != ddr_check)
  1803. t_rrd_clk++;
  1804. if (t_rrd_clk == 3)
  1805. sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
  1806. else
  1807. sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
  1808. /*
  1809. * convert t_rp from nanoseconds to ddr clocks
  1810. * round up if necessary
  1811. */
  1812. t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
  1813. ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
  1814. if (sdram_freq != ddr_check)
  1815. t_rp_clk++;
  1816. switch (t_rp_clk) {
  1817. case 0:
  1818. case 1:
  1819. case 2:
  1820. case 3:
  1821. sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
  1822. break;
  1823. case 4:
  1824. sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
  1825. break;
  1826. case 5:
  1827. sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
  1828. break;
  1829. case 6:
  1830. sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
  1831. break;
  1832. default:
  1833. sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
  1834. break;
  1835. }
  1836. mtsdram(SDRAM_SDTR2, sdtr2);
  1837. /*------------------------------------------------------------------
  1838. * Set the SDRAM Timing Reg 3, SDRAM_TR3
  1839. *-----------------------------------------------------------------*/
  1840. mfsdram(SDRAM_SDTR3, sdtr3);
  1841. sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK | SDRAM_SDTR3_RC_MASK |
  1842. SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
  1843. /*
  1844. * convert t_ras from nanoseconds to ddr clocks
  1845. * round up if necessary
  1846. */
  1847. t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
  1848. ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
  1849. if (sdram_freq != ddr_check)
  1850. t_ras_clk++;
  1851. sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
  1852. /*
  1853. * convert t_rc from nanoseconds to ddr clocks
  1854. * round up if necessary
  1855. */
  1856. t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
  1857. ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
  1858. if (sdram_freq != ddr_check)
  1859. t_rc_clk++;
  1860. sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
  1861. /* default xcs value */
  1862. sdtr3 |= SDRAM_SDTR3_XCS;
  1863. /*
  1864. * convert t_rfc from nanoseconds to ddr clocks
  1865. * round up if necessary
  1866. */
  1867. t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
  1868. ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
  1869. if (sdram_freq != ddr_check)
  1870. t_rfc_clk++;
  1871. sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
  1872. mtsdram(SDRAM_SDTR3, sdtr3);
  1873. }
  1874. /*-----------------------------------------------------------------------------+
  1875. * program_bxcf.
  1876. *-----------------------------------------------------------------------------*/
  1877. static void program_bxcf(unsigned long *dimm_populated,
  1878. unsigned char *iic0_dimm_addr,
  1879. unsigned long num_dimm_banks)
  1880. {
  1881. unsigned long dimm_num;
  1882. unsigned long num_col_addr;
  1883. unsigned long num_ranks;
  1884. unsigned long num_banks;
  1885. unsigned long mode;
  1886. unsigned long ind_rank;
  1887. unsigned long ind;
  1888. unsigned long ind_bank;
  1889. unsigned long bank_0_populated;
  1890. /*------------------------------------------------------------------
  1891. * Set the BxCF regs. First, wipe out the bank config registers.
  1892. *-----------------------------------------------------------------*/
  1893. mtsdram(SDRAM_MB0CF, 0x00000000);
  1894. mtsdram(SDRAM_MB1CF, 0x00000000);
  1895. mtsdram(SDRAM_MB2CF, 0x00000000);
  1896. mtsdram(SDRAM_MB3CF, 0x00000000);
  1897. mode = SDRAM_BXCF_M_BE_ENABLE;
  1898. bank_0_populated = 0;
  1899. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1900. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1901. num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
  1902. num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
  1903. if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
1904. num_ranks = (num_ranks & 0x0F) + 1;
  1905. else
  1906. num_ranks = num_ranks & 0x0F;
  1907. num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
  1908. for (ind_bank = 0; ind_bank < 2; ind_bank++) {
  1909. if (num_banks == 4)
  1910. ind = 0;
  1911. else
  1912. ind = 5 << 8;
  1913. switch (num_col_addr) {
  1914. case 0x08:
  1915. mode |= (SDRAM_BXCF_M_AM_0 + ind);
  1916. break;
  1917. case 0x09:
  1918. mode |= (SDRAM_BXCF_M_AM_1 + ind);
  1919. break;
  1920. case 0x0A:
  1921. mode |= (SDRAM_BXCF_M_AM_2 + ind);
  1922. break;
  1923. case 0x0B:
  1924. mode |= (SDRAM_BXCF_M_AM_3 + ind);
  1925. break;
  1926. case 0x0C:
  1927. mode |= (SDRAM_BXCF_M_AM_4 + ind);
  1928. break;
  1929. default:
  1930. printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
  1931. (unsigned int)dimm_num);
  1932. printf("ERROR: Unsupported value for number of "
  1933. "column addresses: %d.\n", (unsigned int)num_col_addr);
  1934. printf("Replace the DIMM module with a supported DIMM.\n\n");
  1935. spd_ddr_init_hang ();
  1936. }
  1937. }
1938. if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
  1939. bank_0_populated = 1;
  1940. for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
  1941. mtsdram(SDRAM_MB0CF +
  1942. ((dimm_num + bank_0_populated + ind_rank) << 2),
  1943. mode);
  1944. }
  1945. }
  1946. }
  1947. }
  1948. /*------------------------------------------------------------------
  1949. * program memory queue.
  1950. *-----------------------------------------------------------------*/
  1951. static void program_memory_queue(unsigned long *dimm_populated,
  1952. unsigned char *iic0_dimm_addr,
  1953. unsigned long num_dimm_banks)
  1954. {
  1955. unsigned long dimm_num;
  1956. phys_size_t rank_base_addr;
  1957. unsigned long rank_reg;
  1958. phys_size_t rank_size_bytes;
  1959. unsigned long rank_size_id;
  1960. unsigned long num_ranks;
  1961. unsigned long baseadd_size;
  1962. unsigned long i;
  1963. unsigned long bank_0_populated = 0;
  1964. phys_size_t total_size = 0;
  1965. /*------------------------------------------------------------------
  1966. * Reset the rank_base_address.
  1967. *-----------------------------------------------------------------*/
  1968. rank_reg = SDRAM_R0BAS;
  1969. rank_base_addr = 0x00000000;
  1970. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1971. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1972. num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
  1973. if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
  1974. num_ranks = (num_ranks & 0x0F) + 1;
  1975. else
  1976. num_ranks = num_ranks & 0x0F;
  1977. rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
  1978. /*------------------------------------------------------------------
  1979. * Set the sizes
  1980. *-----------------------------------------------------------------*/
  1981. baseadd_size = 0;
  1982. switch (rank_size_id) {
  1983. case 0x01:
  1984. baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
  1985. total_size = 1024;
  1986. break;
  1987. case 0x02:
  1988. baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
  1989. total_size = 2048;
  1990. break;
  1991. case 0x04:
  1992. baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
  1993. total_size = 4096;
  1994. break;
  1995. case 0x08:
  1996. baseadd_size |= SDRAM_RXBAS_SDSZ_32;
  1997. total_size = 32;
  1998. break;
  1999. case 0x10:
  2000. baseadd_size |= SDRAM_RXBAS_SDSZ_64;
  2001. total_size = 64;
  2002. break;
  2003. case 0x20:
  2004. baseadd_size |= SDRAM_RXBAS_SDSZ_128;
  2005. total_size = 128;
  2006. break;
  2007. case 0x40:
  2008. baseadd_size |= SDRAM_RXBAS_SDSZ_256;
  2009. total_size = 256;
  2010. break;
  2011. case 0x80:
  2012. baseadd_size |= SDRAM_RXBAS_SDSZ_512;
  2013. total_size = 512;
  2014. break;
  2015. default:
  2016. printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
  2017. (unsigned int)dimm_num);
  2018. printf("ERROR: Unsupported value for the banksize: %d.\n",
  2019. (unsigned int)rank_size_id);
  2020. printf("Replace the DIMM module with a supported DIMM.\n\n");
  2021. spd_ddr_init_hang ();
  2022. }
  2023. rank_size_bytes = total_size << 20;
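/*
 * Example (illustrative): SPD byte 31 = 0x80 means 512 MB per rank,
 * so rank_size_bytes = 512 << 20 = 0x20000000 and each populated
 * rank below gets its own RxBAS entry, advanced by that amount.
 */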
  2024. if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
  2025. bank_0_populated = 1;
  2026. for (i = 0; i < num_ranks; i++) {
  2027. mtdcr_any(rank_reg+i+dimm_num+bank_0_populated,
  2028. (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
  2029. baseadd_size));
  2030. rank_base_addr += rank_size_bytes;
  2031. }
  2032. }
  2033. }
  2034. #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
  2035. defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
  2036. defined(CONFIG_460SX)
  2037. /*
  2038. * Enable high bandwidth access
  2039. * This is currently not used, but with this setup
  2040. * it is possible to use it later on in e.g. the Linux
  2041. * EMAC driver for performance gain.
  2042. */
  2043. mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
  2044. mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
  2045. /*
  2046. * Set optimal value for Memory Queue HB/LL Configuration registers
  2047. */
  2048. mtdcr(SDRAM_CONF1HB, (mfdcr(SDRAM_CONF1HB) & ~SDRAM_CONF1HB_MASK) |
  2049. SDRAM_CONF1HB_AAFR | SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE |
  2050. SDRAM_CONF1HB_RPLM | SDRAM_CONF1HB_WRCL);
  2051. mtdcr(SDRAM_CONF1LL, (mfdcr(SDRAM_CONF1LL) & ~SDRAM_CONF1LL_MASK) |
  2052. SDRAM_CONF1LL_AAFR | SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE |
  2053. SDRAM_CONF1LL_RPLM);
  2054. mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
  2055. #endif
  2056. }
  2057. /*-----------------------------------------------------------------------------+
  2058. * is_ecc_enabled.
  2059. *-----------------------------------------------------------------------------*/
  2060. static unsigned long is_ecc_enabled(void)
  2061. {
  2062. unsigned long dimm_num;
  2063. unsigned long ecc;
  2064. unsigned long val;
  2065. ecc = 0;
  2066. /* loop through all the DIMM slots on the board */
  2067. for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
  2068. mfsdram(SDRAM_MCOPT1, val);
  2069. ecc = max(ecc, SDRAM_MCOPT1_MCHK_CHK_DECODE(val));
  2070. }
  2071. return ecc;
  2072. }
  2073. #ifdef CONFIG_DDR_ECC
  2074. /*-----------------------------------------------------------------------------+
  2075. * program_ecc.
  2076. *-----------------------------------------------------------------------------*/
  2077. static void program_ecc(unsigned long *dimm_populated,
  2078. unsigned char *iic0_dimm_addr,
  2079. unsigned long num_dimm_banks,
  2080. unsigned long tlb_word2_i_value)
  2081. {
  2082. unsigned long mcopt1;
  2083. unsigned long mcopt2;
  2084. unsigned long mcstat;
  2085. unsigned long dimm_num;
  2086. unsigned long ecc;
  2087. ecc = 0;
  2088. /* loop through all the DIMM slots on the board */
  2089. for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
  2090. /* If a dimm is installed in a particular slot ... */
  2091. if (dimm_populated[dimm_num] != SDRAM_NONE)
  2092. ecc = max(ecc, spd_read(iic0_dimm_addr[dimm_num], 11));
  2093. }
  2094. if (ecc == 0)
  2095. return;
  2096. if (sdram_memsize() > CONFIG_MAX_MEM_MAPPED) {
  2097. printf("\nWarning: Can't enable ECC on systems with more than 2GB of SDRAM!\n");
  2098. return;
  2099. }
  2100. mfsdram(SDRAM_MCOPT1, mcopt1);
  2101. mfsdram(SDRAM_MCOPT2, mcopt2);
  2102. if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
  2103. /* DDR controller must be enabled and not in self-refresh. */
  2104. mfsdram(SDRAM_MCSTAT, mcstat);
  2105. if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
  2106. && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
  2107. && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
  2108. == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
  2109. program_ecc_addr(0, sdram_memsize(), tlb_word2_i_value);
  2110. }
  2111. }
  2112. return;
  2113. }
  2114. static void wait_ddr_idle(void)
  2115. {
  2116. u32 val;
  2117. do {
  2118. mfsdram(SDRAM_MCSTAT, val);
  2119. } while ((val & SDRAM_MCSTAT_IDLE_MASK) == SDRAM_MCSTAT_IDLE_NOT);
  2120. }
  2121. /*-----------------------------------------------------------------------------+
  2122. * program_ecc_addr.
  2123. *-----------------------------------------------------------------------------*/
  2124. static void program_ecc_addr(unsigned long start_address,
  2125. unsigned long num_bytes,
  2126. unsigned long tlb_word2_i_value)
  2127. {
  2128. unsigned long current_address;
  2129. unsigned long end_address;
  2130. unsigned long address_increment;
  2131. unsigned long mcopt1;
  2132. char str[] = "ECC generation -";
  2133. char slash[] = "\\|/-\\|/-";
  2134. int loop = 0;
  2135. int loopi = 0;
  2136. current_address = start_address;
  2137. mfsdram(SDRAM_MCOPT1, mcopt1);
  2138. if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
  2139. mtsdram(SDRAM_MCOPT1,
  2140. (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_GEN);
  2141. sync();
  2142. eieio();
  2143. wait_ddr_idle();
  2144. puts(str);
  2145. if (tlb_word2_i_value == TLB_WORD2_I_ENABLE) {
  2146. /* ECC bit set method for non-cached memory */
  2147. if ((mcopt1 & SDRAM_MCOPT1_DMWD_MASK) == SDRAM_MCOPT1_DMWD_32)
  2148. address_increment = 4;
  2149. else
  2150. address_increment = 8;
  2151. end_address = current_address + num_bytes;
  2152. while (current_address < end_address) {
  2153. *((unsigned long *)current_address) = 0x00000000;
  2154. current_address += address_increment;
  2155. if ((loop++ % (2 << 20)) == 0) {
  2156. putc('\b');
  2157. putc(slash[loopi++ % 8]);
  2158. }
  2159. }
  2160. } else {
  2161. /* ECC bit set method for cached memory */
  2162. dcbz_area(start_address, num_bytes);
  2163. /* Write modified dcache lines back to memory */
  2164. clean_dcache_range(start_address, start_address + num_bytes);
  2165. }
  2166. blank_string(strlen(str));
  2167. sync();
  2168. eieio();
  2169. wait_ddr_idle();
2170. /* clear ECC error reporting registers */
  2171. mtsdram(SDRAM_ECCCR, 0xffffffff);
  2172. mtdcr(0x4c, 0xffffffff);
  2173. mtsdram(SDRAM_MCOPT1,
  2174. (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_CHK_REP);
  2175. sync();
  2176. eieio();
  2177. wait_ddr_idle();
  2178. }
  2179. }
  2180. #endif
  2181. #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
  2182. /*-----------------------------------------------------------------------------+
  2183. * program_DQS_calibration.
  2184. *-----------------------------------------------------------------------------*/
  2185. static void program_DQS_calibration(unsigned long *dimm_populated,
  2186. unsigned char *iic0_dimm_addr,
  2187. unsigned long num_dimm_banks)
  2188. {
  2189. unsigned long val;
  2190. #ifdef HARD_CODED_DQS /* calibration test with hardvalues */
  2191. mtsdram(SDRAM_RQDC, 0x80000037);
  2192. mtsdram(SDRAM_RDCC, 0x40000000);
  2193. mtsdram(SDRAM_RFDC, 0x000001DF);
  2194. test();
  2195. #else
  2196. /*------------------------------------------------------------------
  2197. * Program RDCC register
  2198. * Read sample cycle auto-update enable
  2199. *-----------------------------------------------------------------*/
  2200. mfsdram(SDRAM_RDCC, val);
  2201. mtsdram(SDRAM_RDCC,
  2202. (val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
  2203. | SDRAM_RDCC_RSAE_ENABLE);
  2204. /*------------------------------------------------------------------
  2205. * Program RQDC register
  2206. * Internal DQS delay mechanism enable
  2207. *-----------------------------------------------------------------*/
  2208. mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
  2209. /*------------------------------------------------------------------
  2210. * Program RFDC register
  2211. * Set Feedback Fractional Oversample
  2212. * Auto-detect read sample cycle enable
  2213. * Set RFOS to 1/4 of memclk cycle (0x3f)
  2214. *-----------------------------------------------------------------*/
  2215. mfsdram(SDRAM_RFDC, val);
  2216. mtsdram(SDRAM_RFDC,
  2217. (val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
  2218. SDRAM_RFDC_RFFD_MASK))
  2219. | (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x3f) |
  2220. SDRAM_RFDC_RFFD_ENCODE(0)));
  2221. DQS_calibration_process();
  2222. #endif
  2223. }
  2224. static int short_mem_test(void)
  2225. {
  2226. u32 *membase;
  2227. u32 bxcr_num;
  2228. u32 bxcf;
  2229. int i;
  2230. int j;
  2231. phys_size_t base_addr;
  2232. u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
  2233. {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
  2234. 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
  2235. {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
  2236. 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
  2237. {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
  2238. 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
  2239. {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
  2240. 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
  2241. {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
  2242. 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
  2243. {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
  2244. 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
  2245. {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
  2246. 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
  2247. {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
  2248. 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
  2249. int l;
  2250. for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
  2251. mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
  2252. /* Banks enabled */
  2253. if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
  2254. /* Bank is enabled */
  2255. /*
2256. * Only run test on accessible memory (below 2GB)
  2257. */
  2258. base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
  2259. if (base_addr >= CONFIG_MAX_MEM_MAPPED)
  2260. continue;
  2261. /*------------------------------------------------------------------
  2262. * Run the short memory test.
  2263. *-----------------------------------------------------------------*/
  2264. membase = (u32 *)(u32)base_addr;
  2265. for (i = 0; i < NUMMEMTESTS; i++) {
  2266. for (j = 0; j < NUMMEMWORDS; j++) {
  2267. membase[j] = test[i][j];
  2268. ppcDcbf((u32)&(membase[j]));
  2269. }
  2270. sync();
2271. for (l = 0; l < NUMLOOPS; l++) {
  2272. for (j = 0; j < NUMMEMWORDS; j++) {
  2273. if (membase[j] != test[i][j]) {
  2274. ppcDcbf((u32)&(membase[j]));
  2275. return 0;
  2276. }
  2277. ppcDcbf((u32)&(membase[j]));
  2278. }
  2279. sync();
  2280. }
  2281. }
  2282. } /* if bank enabled */
2283. } /* for bxcr_num */
  2284. return 1;
  2285. }
  2286. #ifndef HARD_CODED_DQS
  2287. /*-----------------------------------------------------------------------------+
  2288. * DQS_calibration_process.
  2289. *-----------------------------------------------------------------------------*/
  2290. static void DQS_calibration_process(void)
  2291. {
  2292. unsigned long rfdc_reg;
  2293. unsigned long rffd;
  2294. unsigned long val;
  2295. long rffd_average;
  2296. long max_start;
  2297. long min_end;
  2298. unsigned long begin_rqfd[MAXRANKS];
  2299. unsigned long begin_rffd[MAXRANKS];
  2300. unsigned long end_rqfd[MAXRANKS];
  2301. unsigned long end_rffd[MAXRANKS];
  2302. char window_found;
  2303. unsigned long dlycal;
  2304. unsigned long dly_val;
  2305. unsigned long max_pass_length;
  2306. unsigned long current_pass_length;
  2307. unsigned long current_fail_length;
  2308. unsigned long current_start;
  2309. long max_end;
  2310. unsigned char fail_found;
  2311. unsigned char pass_found;
  2312. #if !defined(CONFIG_DDR_RQDC_FIXED)
  2313. u32 rqdc_reg;
  2314. u32 rqfd;
  2315. u32 rqfd_start;
  2316. u32 rqfd_average;
  2317. int loopi = 0;
  2318. char str[] = "Auto calibration -";
  2319. char slash[] = "\\|/-\\|/-";
  2320. /*------------------------------------------------------------------
  2321. * Test to determine the best read clock delay tuning bits.
  2322. *
  2323. * Before the DDR controller can be used, the read clock delay needs to be
  2324. * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
  2325. * This value cannot be hardcoded into the program because it changes
  2326. * depending on the board's setup and environment.
  2327. * To do this, all delay values are tested to see if they
  2328. * work or not. By doing this, you get groups of fails with groups of
  2329. * passing values. The idea is to find the start and end of a passing
  2330. * window and take the center of it to use as the read clock delay.
  2331. *
  2332. * A failure has to be seen first so that when we hit a pass, we know
2333. * that it is truly the start of the window. If we get passing values
  2334. * to start off with, we don't know if we are at the start of the window.
  2335. *
  2336. * The code assumes that a failure will always be found.
  2337. * If a failure is not found, there is no easy way to get the middle
  2338. * of the passing window. I guess we can pretty much pick any value
  2339. * but some values will be better than others. Since the lowest speed
  2340. * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
  2341. * from experimentation it is safe to say you will always have a failure.
  2342. *-----------------------------------------------------------------*/
2343. /* first fix RQDC[RQFD] to an average of 80 degree phase shift to find RFDC[RFFD] */
  2344. rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
  2345. puts(str);
  2346. calibration_loop:
  2347. mfsdram(SDRAM_RQDC, rqdc_reg);
  2348. mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
  2349. SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
  2350. #else /* CONFIG_DDR_RQDC_FIXED */
  2351. /*
  2352. * On Katmai the complete auto-calibration somehow doesn't seem to
  2353. * produce the best results, meaning optimal values for RQFD/RFFD.
  2354. * This was discovered by GDA using a high bandwidth scope,
  2355. * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
  2356. * so now on Katmai "only" RFFD is auto-calibrated.
  2357. */
  2358. mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
  2359. #endif /* CONFIG_DDR_RQDC_FIXED */
  2360. max_start = 0;
  2361. min_end = 0;
  2362. begin_rqfd[0] = 0;
  2363. begin_rffd[0] = 0;
  2364. begin_rqfd[1] = 0;
  2365. begin_rffd[1] = 0;
  2366. end_rqfd[0] = 0;
  2367. end_rffd[0] = 0;
  2368. end_rqfd[1] = 0;
  2369. end_rffd[1] = 0;
  2370. window_found = FALSE;
  2371. max_pass_length = 0;
  2372. max_start = 0;
  2373. max_end = 0;
  2374. current_pass_length = 0;
  2375. current_fail_length = 0;
  2376. current_start = 0;
  2377. window_found = FALSE;
  2378. fail_found = FALSE;
  2379. pass_found = FALSE;
  2380. /*
  2381. * get the delay line calibration register value
  2382. */
  2383. mfsdram(SDRAM_DLCR, dlycal);
  2384. dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
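/*
 * Note (added for clarity): dly_val is the decoded delay line
 * calibration value (DLCV) shifted left by two, and the loop below
 * treats a run of at least dly_val >> 2 (i.e. DLCV) consecutive
 * failures as a real failure region, both before and after the
 * passing window.
 */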
  2385. for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
  2386. mfsdram(SDRAM_RFDC, rfdc_reg);
  2387. rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
  2388. /*------------------------------------------------------------------
  2389. * Set the timing reg for the test.
  2390. *-----------------------------------------------------------------*/
  2391. mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
  2392. /*------------------------------------------------------------------
  2393. * See if the rffd value passed.
  2394. *-----------------------------------------------------------------*/
  2395. if (short_mem_test()) {
  2396. if (fail_found == TRUE) {
  2397. pass_found = TRUE;
  2398. if (current_pass_length == 0)
  2399. current_start = rffd;
  2400. current_fail_length = 0;
  2401. current_pass_length++;
  2402. if (current_pass_length > max_pass_length) {
  2403. max_pass_length = current_pass_length;
  2404. max_start = current_start;
  2405. max_end = rffd;
  2406. }
  2407. }
  2408. } else {
  2409. current_pass_length = 0;
  2410. current_fail_length++;
  2411. if (current_fail_length >= (dly_val >> 2)) {
  2412. if (fail_found == FALSE) {
  2413. fail_found = TRUE;
  2414. } else if (pass_found == TRUE) {
  2415. window_found = TRUE;
  2416. break;
  2417. }
  2418. }
  2419. }
  2420. } /* for rffd */
  2421. /*------------------------------------------------------------------
  2422. * Set the average RFFD value
  2423. *-----------------------------------------------------------------*/
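/*
 * Example (illustrative): if the widest passing window ran from
 * rffd = 20 through rffd = 60, the center value programmed below is
 * rffd_average = (20 + 60) >> 1 = 40.
 */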
  2424. rffd_average = ((max_start + max_end) >> 1);
  2425. if (rffd_average < 0)
  2426. rffd_average = 0;
  2427. if (rffd_average > SDRAM_RFDC_RFFD_MAX)
  2428. rffd_average = SDRAM_RFDC_RFFD_MAX;
  2429. /* now fix RFDC[RFFD] found and find RQDC[RQFD] */
  2430. mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
  2431. #if !defined(CONFIG_DDR_RQDC_FIXED)
  2432. max_pass_length = 0;
  2433. max_start = 0;
  2434. max_end = 0;
  2435. current_pass_length = 0;
  2436. current_fail_length = 0;
  2437. current_start = 0;
  2438. window_found = FALSE;
  2439. fail_found = FALSE;
  2440. pass_found = FALSE;
  2441. for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
  2442. mfsdram(SDRAM_RQDC, rqdc_reg);
  2443. rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
  2444. /*------------------------------------------------------------------
  2445. * Set the timing reg for the test.
  2446. *-----------------------------------------------------------------*/
  2447. mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
  2448. /*------------------------------------------------------------------
  2449. * See if the rffd value passed.
  2450. *-----------------------------------------------------------------*/
  2451. if (short_mem_test()) {
  2452. if (fail_found == TRUE) {
  2453. pass_found = TRUE;
  2454. if (current_pass_length == 0)
  2455. current_start = rqfd;
  2456. current_fail_length = 0;
  2457. current_pass_length++;
  2458. if (current_pass_length > max_pass_length) {
  2459. max_pass_length = current_pass_length;
  2460. max_start = current_start;
  2461. max_end = rqfd;
  2462. }
  2463. }
  2464. } else {
  2465. current_pass_length = 0;
  2466. current_fail_length++;
  2467. if (fail_found == FALSE) {
  2468. fail_found = TRUE;
  2469. } else if (pass_found == TRUE) {
  2470. window_found = TRUE;
  2471. break;
  2472. }
  2473. }
  2474. }
  2475. rqfd_average = ((max_start + max_end) >> 1);
  2476. /*------------------------------------------------------------------
  2477. * Make sure we found the valid read passing window. Halt if not
  2478. *-----------------------------------------------------------------*/
  2479. if (window_found == FALSE) {
  2480. if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
  2481. putc('\b');
  2482. putc(slash[loopi++ % 8]);
2483. /* try again with a different RQFD start value */
  2484. rqfd_start++;
  2485. goto calibration_loop;
  2486. }
  2487. printf("\nERROR: Cannot determine a common read delay for the "
  2488. "DIMM(s) installed.\n");
2489. debug("%s[%d] ERROR:\n", __FUNCTION__, __LINE__);
  2490. ppc4xx_ibm_ddr2_register_dump();
  2491. spd_ddr_init_hang ();
  2492. }
  2493. if (rqfd_average < 0)
  2494. rqfd_average = 0;
  2495. if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
  2496. rqfd_average = SDRAM_RQDC_RQFD_MAX;
  2497. mtsdram(SDRAM_RQDC,
  2498. (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
  2499. SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
  2500. blank_string(strlen(str));
  2501. #endif /* CONFIG_DDR_RQDC_FIXED */
  2502. /*
  2503. * Now complete RDSS configuration as mentioned on page 7 of the AMCC
  2504. * PowerPC440SP/SPe DDR2 application note:
  2505. * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
  2506. */
  2507. mfsdram(SDRAM_RTSR, val);
  2508. if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
  2509. mfsdram(SDRAM_RDCC, val);
  2510. if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
  2511. val += 0x40000000;
  2512. mtsdram(SDRAM_RDCC, val);
  2513. }
  2514. }
  2515. mfsdram(SDRAM_DLCR, val);
  2516. debug("%s[%d] DLCR: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2517. mfsdram(SDRAM_RQDC, val);
  2518. debug("%s[%d] RQDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2519. mfsdram(SDRAM_RFDC, val);
  2520. debug("%s[%d] RFDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2521. mfsdram(SDRAM_RDCC, val);
  2522. debug("%s[%d] RDCC: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2523. }
  2524. #else /* calibration test with hardvalues */
  2525. /*-----------------------------------------------------------------------------+
2526. * test - calibration check with hard-coded DQS values.
  2527. *-----------------------------------------------------------------------------*/
  2528. static void test(void)
  2529. {
  2530. unsigned long dimm_num;
  2531. unsigned long ecc_temp;
  2532. unsigned long i, j;
  2533. unsigned long *membase;
  2534. unsigned long bxcf[MAXRANKS];
  2535. unsigned long val;
  2536. char window_found;
  2537. char begin_found[MAXDIMMS];
  2538. char end_found[MAXDIMMS];
  2539. char search_end[MAXDIMMS];
  2540. unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
  2541. {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
  2542. 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
  2543. {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
  2544. 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
  2545. {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
  2546. 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
  2547. {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
  2548. 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
  2549. {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
  2550. 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
  2551. {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
  2552. 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
  2553. {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
  2554. 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
  2555. {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
  2556. 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
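
	/*
	 * Each pattern above alternates a value with its bitwise complement,
	 * so every data line is driven both high and low during the test.
	 */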
	/*------------------------------------------------------------------
	 * Test to determine the best read clock delay tuning bits.
	 *
	 * Before the DDR controller can be used, the read clock delay needs
	 * to be set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
	 * These values cannot be hard-coded into the program because they
	 * change depending on the board's setup and environment.
	 * To find them, all delay values are tested to see whether they work
	 * or not. This yields groups of failing values and groups of passing
	 * values. The idea is to find the start and end of a passing window
	 * and take its center as the read clock delay.
	 *
	 * A failure has to be seen first so that when we hit a pass, we know
	 * that it is truly the start of the window. If we get passing values
	 * right from the start, we don't know whether we are at the start of
	 * the window.
	 *
	 * The code assumes that a failure will always be found.
	 * If no failure is found, there is no easy way to get the middle of
	 * the passing window. We could pick almost any value, but some values
	 * will be better than others. Since the lowest speed we can clock the
	 * DDR interface at is 200 MHz (2x 100 MHz PLB speed), experimentation
	 * shows that a failure will always be seen.
	 *-----------------------------------------------------------------*/
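
	/*
	 * Temporarily disable ECC checking while the test patterns are
	 * written; the original MCHK setting (saved in ecc_temp) is restored
	 * at the end of this function.
	 */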
	mfsdram(SDRAM_MCOPT1, ecc_temp);
	ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
	mfsdram(SDRAM_MCOPT1, val);
	mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
		SDRAM_MCOPT1_MCHK_NON);

	window_found = FALSE;
	begin_found[0] = FALSE;
	end_found[0] = FALSE;
	search_end[0] = FALSE;
	begin_found[1] = FALSE;
	end_found[1] = FALSE;
	search_end[1] = FALSE;

	for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
		mfsdram(SDRAM_MB0CF + (dimm_num << 2), bxcf[dimm_num]);

		/* Banks enabled */
		if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
			/* Bank is enabled */
			membase =
				(unsigned long *)(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS + dimm_num)));

			/*------------------------------------------------------------------
			 * Run the short memory test.
			 *-----------------------------------------------------------------*/
			for (i = 0; i < NUMMEMTESTS; i++) {
				for (j = 0; j < NUMMEMWORDS; j++) {
					membase[j] = test[i][j];
					ppcDcbf((u32)&(membase[j]));
				}
				sync();
				for (j = 0; j < NUMMEMWORDS; j++) {
					if (membase[j] != test[i][j]) {
						ppcDcbf((u32)&(membase[j]));
						break;
					}
					ppcDcbf((u32)&(membase[j]));
				}
				sync();
				if (j < NUMMEMWORDS)
					break;
			}

			/*------------------------------------------------------------------
			 * See if the current read delay settings passed.
			 *-----------------------------------------------------------------*/
			if (i < NUMMEMTESTS) {
				if ((end_found[dimm_num] == FALSE) &&
				    (search_end[dimm_num] == TRUE)) {
					end_found[dimm_num] = TRUE;
				}
				if ((end_found[0] == TRUE) &&
				    (end_found[1] == TRUE))
					break;
			} else {
				if (begin_found[dimm_num] == FALSE) {
					begin_found[dimm_num] = TRUE;
					search_end[dimm_num] = TRUE;
				}
			}
		} else {
			begin_found[dimm_num] = TRUE;
			end_found[dimm_num] = TRUE;
		}
	}
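
	/*
	 * Banks that are not enabled were marked begin/end found above, so
	 * seeing the window begin on both DIMM slots is sufficient here.
	 */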
	if ((begin_found[0] == TRUE) && (begin_found[1] == TRUE))
		window_found = TRUE;

	/*------------------------------------------------------------------
	 * Make sure we found the valid read passing window. Halt if not
	 *-----------------------------------------------------------------*/
	if (window_found == FALSE) {
		printf("ERROR: Cannot determine a common read delay for the "
		       "DIMM(s) installed.\n");
		spd_ddr_init_hang();
	}

	/*------------------------------------------------------------------
	 * Restore the ECC variable to what it originally was
	 *-----------------------------------------------------------------*/
	mtsdram(SDRAM_MCOPT1,
		(ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
		| ecc_temp);
}
#endif /* !HARD_CODED_DQS */
#endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */

#else /* CONFIG_SPD_EEPROM */
/*-----------------------------------------------------------------------------
 * Function:    initdram
 * Description: Configures the PPC4xx (PPC405EX(r) and PPC460EX/GT) IBM
 *              DDR1/DDR2 SDRAM memory controller using static, compile-time
 *              parameters.
 *---------------------------------------------------------------------------*/
phys_size_t initdram(int board_type)
{
	/*
	 * Only run this SDRAM init code once. For NAND booting
	 * targets like Kilauea, we call initdram() early from the
	 * 4k NAND booting image (CONFIG_NAND_SPL) from nand_boot().
	 * Later on the NAND U-Boot image runs (CONFIG_NAND_U_BOOT)
	 * which calls initdram() again. This time the controller
	 * mustn't be reconfigured again since we're already running
	 * from SDRAM.
	 */
#if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
	unsigned long val;

#if defined(CONFIG_440)
	mtdcr(SDRAM_R0BAS, CONFIG_SYS_SDRAM_R0BAS);
	mtdcr(SDRAM_R1BAS, CONFIG_SYS_SDRAM_R1BAS);
	mtdcr(SDRAM_R2BAS, CONFIG_SYS_SDRAM_R2BAS);
	mtdcr(SDRAM_R3BAS, CONFIG_SYS_SDRAM_R3BAS);
	mtdcr(SDRAM_PLBADDULL, CONFIG_SYS_SDRAM_PLBADDULL);	/* MQ0_BAUL */
	mtdcr(SDRAM_PLBADDUHB, CONFIG_SYS_SDRAM_PLBADDUHB);	/* MQ0_BAUH */
	mtdcr(SDRAM_CONF1LL, CONFIG_SYS_SDRAM_CONF1LL);
	mtdcr(SDRAM_CONF1HB, CONFIG_SYS_SDRAM_CONF1HB);
	mtdcr(SDRAM_CONFPATHB, CONFIG_SYS_SDRAM_CONFPATHB);
#endif
	/* Set Memory Bank Configuration Registers */
	mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
	mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
	mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
	mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);

	/* Set Memory Clock Timing Register */
	mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);

	/* Set Refresh Time Register */
	mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);

	/* Set SDRAM Timing Registers */
	mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
	mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
	mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);

	/* Set Mode and Extended Mode Registers */
	mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
	mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);

	/* Set Memory Controller Options 1 Register */
	mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);

	/* Set Manual Initialization Control Registers */
	mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
	mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
	mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
	mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
	mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
	mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
	mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
	mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
	mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
	mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
	mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
	mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
	mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
	mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
	mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
	mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);

	/* Set On-Die Termination Registers */
	mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
	mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
	mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);

	/* Set Write Timing Register */
	mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);

	/*
	 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
	 * SDRAM0_MCOPT2[IPTR] = 1
	 */
	mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
			       SDRAM_MCOPT2_IPTR_EXECUTE));
	/*
	 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
	 * completion of initialization.
	 */
	do {
		mfsdram(SDRAM_MCSTAT, val);
	} while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);

	/* Set Delay Control Registers */
	mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);

#if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
	mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
	mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
#endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
	/*
	 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
	 */
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);

#if defined(CONFIG_440)
	/*
	 * Program TLB entries with caches enabled, for best performance
	 * during auto-calibration and ECC generation
	 */
	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), 0);
#endif

#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
#if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
	/*------------------------------------------------------------------
	 * DQS calibration.
	 *-----------------------------------------------------------------*/
	DQS_autocalibration();
#endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
#endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */

#if defined(CONFIG_DDR_ECC)
	ecc_init(CONFIG_SYS_SDRAM_BASE, CONFIG_SYS_MBYTES_SDRAM << 20);
#endif /* defined(CONFIG_DDR_ECC) */

#if defined(CONFIG_440)
	/*
	 * Now after initialization (auto-calibration and ECC generation)
	 * remove the TLB entries with caches enabled and program again with
	 * desired cache functionality
	 */
	remove_tlb(0, (CONFIG_SYS_MBYTES_SDRAM << 20));
	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), MY_TLB_WORD2_I_ENABLE);
#endif

	ppc4xx_ibm_ddr2_register_dump();

#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	/*
	 * Clear potential errors resulting from auto-calibration.
	 * If not done, then we could get an interrupt later on when
	 * exceptions are enabled.
	 */
	set_mcsr(get_mcsr());
#endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
#endif /* !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL) */

	return (CONFIG_SYS_MBYTES_SDRAM << 20);
}
#endif /* CONFIG_SPD_EEPROM */

#if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
#if defined(CONFIG_440)
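/*
 * The DCR number is encoded directly in the mfdcr/mtdcr instructions, so it
 * must be a compile-time constant. These helpers let a run-time selected
 * SDRAM_RxBAS register be accessed by switching over the few DCRs used here.
 */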
u32 mfdcr_any(u32 dcr)
{
	u32 val;

	switch (dcr) {
	case SDRAM_R0BAS + 0:
		val = mfdcr(SDRAM_R0BAS + 0);
		break;
	case SDRAM_R0BAS + 1:
		val = mfdcr(SDRAM_R0BAS + 1);
		break;
	case SDRAM_R0BAS + 2:
		val = mfdcr(SDRAM_R0BAS + 2);
		break;
	case SDRAM_R0BAS + 3:
		val = mfdcr(SDRAM_R0BAS + 3);
		break;
	default:
		printf("DCR %d not defined in case statement!!!\n", dcr);
		val = 0;	/* just to satisfy the compiler */
	}

	return val;
}

void mtdcr_any(u32 dcr, u32 val)
{
	switch (dcr) {
	case SDRAM_R0BAS + 0:
		mtdcr(SDRAM_R0BAS + 0, val);
		break;
	case SDRAM_R0BAS + 1:
		mtdcr(SDRAM_R0BAS + 1, val);
		break;
	case SDRAM_R0BAS + 2:
		mtdcr(SDRAM_R0BAS + 2, val);
		break;
	case SDRAM_R0BAS + 3:
		mtdcr(SDRAM_R0BAS + 3, val);
		break;
	default:
		printf("DCR %d not defined in case statement!!!\n", dcr);
	}
}
#endif /* defined(CONFIG_440) */
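
/*
 * Erase the last 'size' characters printed on the console: back up over
 * them, overwrite with spaces, then back up again.
 */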
void blank_string(int size)
{
	int i;

	for (i = 0; i < size; i++)
		putc('\b');
	for (i = 0; i < size; i++)
		putc(' ');
	for (i = 0; i < size; i++)
		putc('\b');
}
#endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
inline void ppc4xx_ibm_ddr2_register_dump(void)
{
#if defined(DEBUG)
	printf("\nPPC4xx IBM DDR2 Register Dump:\n");

#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R0BAS);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R1BAS);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R2BAS);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R3BAS);
#endif /* (defined(CONFIG_440SP) || ... */
#if defined(CONFIG_405EX)
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
#endif /* defined(CONFIG_405EX) */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);

#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
	/*
	 * OPART is only used as a trigger register.
	 *
	 * No data is contained in this register, and reading or writing
	 * to it can cause bad things to happen (hangs). Just skip it and
	 * report "N/A".
	 */
	printf("%20s = N/A\n", "SDRAM_OPART");
#endif /* defined(CONFIG_440SP) || ... */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCCR);
#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
#endif /* defined(CONFIG_440SP) || ... */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
#endif /* defined(DEBUG) */
}
#endif /* CONFIG_SDRAM_PPC4xx_IBM_DDR2 */