44x_spd_ddr2.c

  1. /*
  2. * cpu/ppc4xx/44x_spd_ddr2.c
  3. * This SPD SDRAM detection code supports AMCC PPC44x cpu's with a
  4. * DDR2 controller (non Denali Core). Those currently are:
  5. *
  6. * 405: 405EX(r)
  7. * 440/460: 440SP/440SPe/460EX/460GT
  8. *
  9. * Copyright (c) 2008 Nuovation System Designs, LLC
  10. * Grant Erickson <gerickson@nuovations.com>
  11. * (C) Copyright 2007-2008
  12. * Stefan Roese, DENX Software Engineering, sr@denx.de.
  13. *
  14. * COPYRIGHT AMCC CORPORATION 2004
  15. *
  16. * See file CREDITS for list of people who contributed to this
  17. * project.
  18. *
  19. * This program is free software; you can redistribute it and/or
  20. * modify it under the terms of the GNU General Public License as
  21. * published by the Free Software Foundation; either version 2 of
  22. * the License, or (at your option) any later version.
  23. *
  24. * This program is distributed in the hope that it will be useful,
  25. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  26. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  27. * GNU General Public License for more details.
  28. *
  29. * You should have received a copy of the GNU General Public License
  30. * along with this program; if not, write to the Free Software
  31. * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
  32. * MA 02111-1307 USA
  33. *
  34. */
  35. /* define DEBUG for debugging output (obviously ;-)) */
  36. #if 0
  37. #define DEBUG
  38. #endif
  39. #include <common.h>
  40. #include <command.h>
  41. #include <ppc4xx.h>
  42. #include <i2c.h>
  43. #include <asm/io.h>
  44. #include <asm/processor.h>
  45. #include <asm/mmu.h>
  46. #include <asm/cache.h>
  47. #include "ecc.h"
  48. #if defined(CONFIG_SDRAM_PPC4xx_IBM_DDR2)
  49. #define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic) \
  50. do { \
  51. u32 data; \
  52. mfsdram(SDRAM_##mnemonic, data); \
  53. printf("%20s[%02x] = 0x%08X\n", \
  54. "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
  55. } while (0)
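/*
 * Illustrative usage (sketch, not part of the original flow): e.g.
 * PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1); reads SDRAM_MCOPT1 via mfsdram()
 * and prints the register number together with its current contents.
 */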
  56. #if defined(CONFIG_440)
  57. /*
  58. * This DDR2 setup code can dynamically set up the TLB entries for the DDR2
  59. * memory region. Right now the cache should still be disabled in U-Boot
  60. * because of the EMAC driver, which needs its buffer descriptors to be
  61. * located in non-cached memory.
  62. *
  63. * If at some time this restriction doesn't apply anymore, just define
  64. * CONFIG_4xx_DCACHE in the board config file and this code should set up
  65. * everything correctly.
  66. */
  67. #ifdef CONFIG_4xx_DCACHE
  68. /* enable caching on SDRAM */
  69. #define MY_TLB_WORD2_I_ENABLE 0
  70. #else
  71. /* disable caching on SDRAM */
  72. #define MY_TLB_WORD2_I_ENABLE TLB_WORD2_I_ENABLE
  73. #endif /* CONFIG_4xx_DCACHE */
  74. #endif /* CONFIG_440 */
  75. #if defined(CONFIG_SPD_EEPROM)
  76. /*-----------------------------------------------------------------------------+
  77. * Defines
  78. *-----------------------------------------------------------------------------*/
  79. #ifndef TRUE
  80. #define TRUE 1
  81. #endif
  82. #ifndef FALSE
  83. #define FALSE 0
  84. #endif
  85. #define SDRAM_DDR1 1
  86. #define SDRAM_DDR2 2
  87. #define SDRAM_NONE 0
  88. #define MAXDIMMS 2
  89. #define MAXRANKS 4
  90. #define MAXBXCF 4
  91. #define MAX_SPD_BYTES 256 /* Max number of bytes on the DIMM's SPD EEPROM */
  92. #define ONE_BILLION 1000000000
  93. #define MULDIV64(m1, m2, d) (u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
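/*
 * Note: MULDIV64() does the multiplication in 64 bits before dividing, so
 * intermediate products such as ONE_BILLION * 100 (used in check_frequency()
 * below) do not overflow a 32-bit unsigned long.
 */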
  94. #define CMD_NOP (7 << 19)
  95. #define CMD_PRECHARGE (2 << 19)
  96. #define CMD_REFRESH (1 << 19)
  97. #define CMD_EMR (0 << 19)
  98. #define CMD_READ (5 << 19)
  99. #define CMD_WRITE (4 << 19)
  100. #define SELECT_MR (0 << 16)
  101. #define SELECT_EMR (1 << 16)
  102. #define SELECT_EMR2 (2 << 16)
  103. #define SELECT_EMR3 (3 << 16)
  104. /* MR */
  105. #define DLL_RESET 0x00000100
  106. #define WRITE_RECOV_2 (1 << 9)
  107. #define WRITE_RECOV_3 (2 << 9)
  108. #define WRITE_RECOV_4 (3 << 9)
  109. #define WRITE_RECOV_5 (4 << 9)
  110. #define WRITE_RECOV_6 (5 << 9)
  111. #define BURST_LEN_4 0x00000002
  112. /* EMR */
  113. #define ODT_0_OHM 0x00000000
  114. #define ODT_50_OHM 0x00000044
  115. #define ODT_75_OHM 0x00000004
  116. #define ODT_150_OHM 0x00000040
  117. #define ODS_FULL 0x00000000
  118. #define ODS_REDUCED 0x00000002
  119. #define OCD_CALIB_DEF 0x00000380
  120. /* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
  121. #define ODT_EB0R (0x80000000 >> 8)
  122. #define ODT_EB0W (0x80000000 >> 7)
  123. #define CALC_ODT_R(n) (ODT_EB0R << (n << 1))
  124. #define CALC_ODT_W(n) (ODT_EB0W << (n << 1))
  125. #define CALC_ODT_RW(n) (CALC_ODT_R(n) | CALC_ODT_W(n))
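/*
 * Worked example (illustrative): ODT_EB0R is 0x00800000 and ODT_EB0W is
 * 0x01000000, so CALC_ODT_R(0) = 0x00800000, CALC_ODT_W(0) = 0x01000000 and
 * CALC_ODT_RW(0) = 0x01800000. Each increment of n shifts the pair two bits
 * further left, e.g. CALC_ODT_R(1) = 0x02000000 and CALC_ODT_R(2) = 0x08000000.
 */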
  126. /* Defines for the Read Cycle Delay test */
  127. #define NUMMEMTESTS 8
  128. #define NUMMEMWORDS 8
  129. #define NUMLOOPS 64 /* memory test loops */
  130. /*
  131. * Newer PPCs like the 440SPe and 460EX/GT can be equipped with more than 2GB of SDRAM.
  132. * To support such configurations, we "only" map the first 2GB via the TLBs. We
  133. * need some free virtual address space for the remaining peripherals like SoC
  134. * devices, FLASH, etc.
  135. *
  136. * Note that ECC is currently not supported on configurations with more than 2GB
  137. * SDRAM. This is because we only map the first 2GB on such systems, and therefore
  138. * the ECC parity byte of the remaining area can't be written.
  139. */
  140. #ifndef CONFIG_MAX_MEM_MAPPED
  141. #define CONFIG_MAX_MEM_MAPPED ((phys_size_t)2 << 30)
  142. #endif
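/*
 * A board that needs to keep more virtual address space free can predefine
 * CONFIG_MAX_MEM_MAPPED in its board config file, e.g. (illustrative only)
 * as ((phys_size_t)1 << 30) to map just 1GB of SDRAM via the TLBs.
 */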
  143. /*
  144. * Board-specific platform code can reimplement spd_ddr_init_hang() if needed
  145. */
  146. void __spd_ddr_init_hang (void)
  147. {
  148. hang ();
  149. }
  150. void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));
  151. /*
  152. * To provide an interface for board specific config values in this common
  153. * DDR setup code, we implement the "weak" default functions here. They return
  154. * the default value back to the caller.
  155. *
  156. * Please see include/configs/yucca.h for an example of a board-specific
  157. * implementation.
  158. */
  159. u32 __ddr_wrdtr(u32 default_val)
  160. {
  161. return default_val;
  162. }
  163. u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));
  164. u32 __ddr_clktr(u32 default_val)
  165. {
  166. return default_val;
  167. }
  168. u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
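/*
 * Board-specific override sketch (illustrative, not part of this file): since
 * ddr_wrdtr() and ddr_clktr() are weak, a board can provide strong versions
 * that return tuned values instead of the defaults passed in. The constant
 * SDRAM_WRDTR_WTR_180_DEG_ADV below is assumed to be provided by the SoC
 * header; a real board would use whatever phase setting its layout requires.
 */
#if 0
u32 ddr_wrdtr(u32 default_val)
{
	return SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_180_DEG_ADV;
}
#endif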
  169. /* Private Structure Definitions */
  170. /* enum only to ease code for cas latency setting */
  171. typedef enum ddr_cas_id {
  172. DDR_CAS_2 = 20,
  173. DDR_CAS_2_5 = 25,
  174. DDR_CAS_3 = 30,
  175. DDR_CAS_4 = 40,
  176. DDR_CAS_5 = 50
  177. } ddr_cas_id_t;
  178. /*-----------------------------------------------------------------------------+
  179. * Prototypes
  180. *-----------------------------------------------------------------------------*/
  181. static phys_size_t sdram_memsize(void);
  182. static void get_spd_info(unsigned long *dimm_populated,
  183. unsigned char *iic0_dimm_addr,
  184. unsigned long num_dimm_banks);
  185. static void check_mem_type(unsigned long *dimm_populated,
  186. unsigned char *iic0_dimm_addr,
  187. unsigned long num_dimm_banks);
  188. static void check_frequency(unsigned long *dimm_populated,
  189. unsigned char *iic0_dimm_addr,
  190. unsigned long num_dimm_banks);
  191. static void check_rank_number(unsigned long *dimm_populated,
  192. unsigned char *iic0_dimm_addr,
  193. unsigned long num_dimm_banks);
  194. static void check_voltage_type(unsigned long *dimm_populated,
  195. unsigned char *iic0_dimm_addr,
  196. unsigned long num_dimm_banks);
  197. static void program_memory_queue(unsigned long *dimm_populated,
  198. unsigned char *iic0_dimm_addr,
  199. unsigned long num_dimm_banks);
  200. static void program_codt(unsigned long *dimm_populated,
  201. unsigned char *iic0_dimm_addr,
  202. unsigned long num_dimm_banks);
  203. static void program_mode(unsigned long *dimm_populated,
  204. unsigned char *iic0_dimm_addr,
  205. unsigned long num_dimm_banks,
  206. ddr_cas_id_t *selected_cas,
  207. int *write_recovery);
  208. static void program_tr(unsigned long *dimm_populated,
  209. unsigned char *iic0_dimm_addr,
  210. unsigned long num_dimm_banks);
  211. static void program_rtr(unsigned long *dimm_populated,
  212. unsigned char *iic0_dimm_addr,
  213. unsigned long num_dimm_banks);
  214. static void program_bxcf(unsigned long *dimm_populated,
  215. unsigned char *iic0_dimm_addr,
  216. unsigned long num_dimm_banks);
  217. static void program_copt1(unsigned long *dimm_populated,
  218. unsigned char *iic0_dimm_addr,
  219. unsigned long num_dimm_banks);
  220. static void program_initplr(unsigned long *dimm_populated,
  221. unsigned char *iic0_dimm_addr,
  222. unsigned long num_dimm_banks,
  223. ddr_cas_id_t selected_cas,
  224. int write_recovery);
  225. static unsigned long is_ecc_enabled(void);
  226. #ifdef CONFIG_DDR_ECC
  227. static void program_ecc(unsigned long *dimm_populated,
  228. unsigned char *iic0_dimm_addr,
  229. unsigned long num_dimm_banks,
  230. unsigned long tlb_word2_i_value);
  231. static void program_ecc_addr(unsigned long start_address,
  232. unsigned long num_bytes,
  233. unsigned long tlb_word2_i_value);
  234. #endif
  235. #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
  236. static void program_DQS_calibration(unsigned long *dimm_populated,
  237. unsigned char *iic0_dimm_addr,
  238. unsigned long num_dimm_banks);
  239. #ifdef HARD_CODED_DQS /* calibration test with hard-coded values */
  240. static void test(void);
  241. #else
  242. static void DQS_calibration_process(void);
  243. #endif
  244. #endif
  245. int do_reset (cmd_tbl_t *cmdtp, int flag, int argc, char *argv[]);
  246. void dcbz_area(u32 start_address, u32 num_bytes);
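/*
 * Read one byte from the SPD EEPROM at I2C address 'chip', offset 'addr'.
 * Returns 0 if the device does not answer the probe or if the read fails.
 */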
  247. static unsigned char spd_read(uchar chip, uint addr)
  248. {
  249. unsigned char data[2];
  250. if (i2c_probe(chip) == 0)
  251. if (i2c_read(chip, addr, 1, data, 1) == 0)
  252. return data[0];
  253. return 0;
  254. }
  255. /*-----------------------------------------------------------------------------+
  256. * sdram_memsize
  257. *-----------------------------------------------------------------------------*/
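/*
 * Determine the amount of SDRAM (in bytes) configured in the controller by
 * summing the sizes of all enabled banks (MBxCF/RxBAS). Returns 0 if the
 * controller is disabled or still in self-refresh.
 */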
  258. static phys_size_t sdram_memsize(void)
  259. {
  260. phys_size_t mem_size;
  261. unsigned long mcopt2;
  262. unsigned long mcstat;
  263. unsigned long mb0cf;
  264. unsigned long sdsz;
  265. unsigned long i;
  266. mem_size = 0;
  267. mfsdram(SDRAM_MCOPT2, mcopt2);
  268. mfsdram(SDRAM_MCSTAT, mcstat);
  269. /* DDR controller must be enabled and not in self-refresh. */
  270. /* Otherwise memsize is zero. */
  271. if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
  272. && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
  273. && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
  274. == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
  275. for (i = 0; i < MAXBXCF; i++) {
  276. mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
  277. /* Banks enabled */
  278. if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
  279. sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
  280. switch(sdsz) {
  281. case SDRAM_RXBAS_SDSZ_8:
  282. mem_size+=8;
  283. break;
  284. case SDRAM_RXBAS_SDSZ_16:
  285. mem_size+=16;
  286. break;
  287. case SDRAM_RXBAS_SDSZ_32:
  288. mem_size+=32;
  289. break;
  290. case SDRAM_RXBAS_SDSZ_64:
  291. mem_size+=64;
  292. break;
  293. case SDRAM_RXBAS_SDSZ_128:
  294. mem_size+=128;
  295. break;
  296. case SDRAM_RXBAS_SDSZ_256:
  297. mem_size+=256;
  298. break;
  299. case SDRAM_RXBAS_SDSZ_512:
  300. mem_size+=512;
  301. break;
  302. case SDRAM_RXBAS_SDSZ_1024:
  303. mem_size+=1024;
  304. break;
  305. case SDRAM_RXBAS_SDSZ_2048:
  306. mem_size+=2048;
  307. break;
  308. case SDRAM_RXBAS_SDSZ_4096:
  309. mem_size+=4096;
  310. break;
  311. default:
  312. printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n"
  313. , sdsz);
  314. mem_size=0;
  315. break;
  316. }
  317. }
  318. }
  319. }
  320. return mem_size << 20;
  321. }
  322. /*-----------------------------------------------------------------------------+
  323. * initdram. Initializes the 440SP Memory Queue and DDR SDRAM controller.
  324. * Note: This routine runs from flash with a stack set up in the chip's
  325. * sram space. It is important that the routine does not require .sbss, .bss or
  326. * .data sections. It also cannot call routines that require these sections.
  327. *-----------------------------------------------------------------------------*/
  328. /*-----------------------------------------------------------------------------
  329. * Function: initdram
  330. * Description: Configures SDRAM memory banks for DDR operation.
  331. * Auto Memory Configuration option reads the DDR SDRAM EEPROMs
  332. * via the IIC bus and then configures the DDR SDRAM memory
  333. * banks appropriately. If Auto Memory Configuration is
  334. * not used, it is assumed that no DIMM is plugged in.
  335. *-----------------------------------------------------------------------------*/
  336. phys_size_t initdram(int board_type)
  337. {
  338. unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
  339. unsigned char spd0[MAX_SPD_BYTES];
  340. unsigned char spd1[MAX_SPD_BYTES];
  341. unsigned char *dimm_spd[MAXDIMMS];
  342. unsigned long dimm_populated[MAXDIMMS];
  343. unsigned long num_dimm_banks; /* on board dimm banks */
  344. unsigned long val;
  345. ddr_cas_id_t selected_cas = DDR_CAS_5; /* preset to silence compiler */
  346. int write_recovery;
  347. phys_size_t dram_size = 0;
  348. num_dimm_banks = sizeof(iic0_dimm_addr);
  349. /*------------------------------------------------------------------
  350. * Set up an array of SPD data buffers.
  351. *-----------------------------------------------------------------*/
  352. dimm_spd[0] = spd0;
  353. dimm_spd[1] = spd1;
  354. /*------------------------------------------------------------------
  355. * Reset the DDR-SDRAM controller.
  356. *-----------------------------------------------------------------*/
  357. mtsdr(SDR0_SRST, (0x80000000 >> 10));
  358. mtsdr(SDR0_SRST, 0x00000000);
  359. /*
  360. * Make sure I2C controller is initialized
  361. * before continuing.
  362. */
  363. /* switch to correct I2C bus */
  364. I2C_SET_BUS(CONFIG_SYS_SPD_BUS_NUM);
  365. i2c_init(CONFIG_SYS_I2C_SPEED, CONFIG_SYS_I2C_SLAVE);
  366. /*------------------------------------------------------------------
  367. * Clear out the serial presence detect buffers.
  368. * Perform IIC reads from the dimm. Fill in the spds.
  369. * Check to see if the dimm slots are populated
  370. *-----------------------------------------------------------------*/
  371. get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  372. /*------------------------------------------------------------------
  373. * Check the memory type for the dimms plugged.
  374. *-----------------------------------------------------------------*/
  375. check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  376. /*------------------------------------------------------------------
  377. * Check the frequency supported for the dimms plugged.
  378. *-----------------------------------------------------------------*/
  379. check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  380. /*------------------------------------------------------------------
  381. * Check the total rank number.
  382. *-----------------------------------------------------------------*/
  383. check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  384. /*------------------------------------------------------------------
  385. * Check the voltage type for the dimms plugged.
  386. *-----------------------------------------------------------------*/
  387. check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  388. /*------------------------------------------------------------------
  389. * Program SDRAM controller options 2 register
  390. * Except Enabling of the memory controller.
  391. *-----------------------------------------------------------------*/
  392. mfsdram(SDRAM_MCOPT2, val);
  393. mtsdram(SDRAM_MCOPT2,
  394. (val &
  395. ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
  396. SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
  397. SDRAM_MCOPT2_ISIE_MASK))
  398. | (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
  399. SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
  400. SDRAM_MCOPT2_ISIE_ENABLE));
  401. /*------------------------------------------------------------------
  402. * Program SDRAM controller options 1 register
  403. * Note: Does not enable the memory controller.
  404. *-----------------------------------------------------------------*/
  405. program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  406. /*------------------------------------------------------------------
  407. * Set the SDRAM Controller On Die Termination Register
  408. *-----------------------------------------------------------------*/
  409. program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  410. /*------------------------------------------------------------------
  411. * Program SDRAM refresh register.
  412. *-----------------------------------------------------------------*/
  413. program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  414. /*------------------------------------------------------------------
  415. * Program SDRAM mode register.
  416. *-----------------------------------------------------------------*/
  417. program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
  418. &selected_cas, &write_recovery);
  419. /*------------------------------------------------------------------
  420. * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
  421. *-----------------------------------------------------------------*/
  422. mfsdram(SDRAM_WRDTR, val);
  423. mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
  424. ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));
  425. /*------------------------------------------------------------------
  426. * Set the SDRAM Clock Timing Register
  427. *-----------------------------------------------------------------*/
  428. mfsdram(SDRAM_CLKTR, val);
  429. mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
  430. ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));
  431. /*------------------------------------------------------------------
  432. * Program the BxCF registers.
  433. *-----------------------------------------------------------------*/
  434. program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  435. /*------------------------------------------------------------------
  436. * Program SDRAM timing registers.
  437. *-----------------------------------------------------------------*/
  438. program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  439. /*------------------------------------------------------------------
  440. * Set the Extended Mode register
  441. *-----------------------------------------------------------------*/
  442. mfsdram(SDRAM_MEMODE, val);
  443. mtsdram(SDRAM_MEMODE,
  444. (val & ~(SDRAM_MEMODE_DIC_MASK | SDRAM_MEMODE_DLL_MASK |
  445. SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
  446. (SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
  447. | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));
  448. /*------------------------------------------------------------------
  449. * Program Initialization preload registers.
  450. *-----------------------------------------------------------------*/
  451. program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
  452. selected_cas, write_recovery);
  453. /*------------------------------------------------------------------
  454. * Delay to ensure 200usec have elapsed since reset.
  455. *-----------------------------------------------------------------*/
  456. udelay(400);
  457. /*------------------------------------------------------------------
  458. * Set the memory queue core base addr.
  459. *-----------------------------------------------------------------*/
  460. program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  461. /*------------------------------------------------------------------
  462. * Program SDRAM controller options 2 register
  463. * Enable the memory controller.
  464. *-----------------------------------------------------------------*/
  465. mfsdram(SDRAM_MCOPT2, val);
  466. mtsdram(SDRAM_MCOPT2,
  467. (val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
  468. SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
  469. SDRAM_MCOPT2_IPTR_EXECUTE);
  470. /*------------------------------------------------------------------
  471. * Wait for IPTR_EXECUTE init sequence to complete.
  472. *-----------------------------------------------------------------*/
  473. do {
  474. mfsdram(SDRAM_MCSTAT, val);
  475. } while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);
  476. /* enable the controller only after init sequence completes */
  477. mfsdram(SDRAM_MCOPT2, val);
  478. mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));
  479. /* Make sure delay-line calibration is done before proceeding */
  480. do {
  481. mfsdram(SDRAM_DLCR, val);
  482. } while (!(val & SDRAM_DLCR_DLCS_COMPLETE));
  483. /* get installed memory size */
  484. dram_size = sdram_memsize();
  485. /*
  486. * Limit size to 2GB
  487. */
  488. if (dram_size > CONFIG_MAX_MEM_MAPPED)
  489. dram_size = CONFIG_MAX_MEM_MAPPED;
  490. /* and program tlb entries for this size (dynamic) */
  491. /*
  492. * Program TLB entries with caches enabled, for best performance
  493. * while auto-calibrating and ECC generation
  494. */
  495. program_tlb(0, 0, dram_size, 0);
  496. /*------------------------------------------------------------------
  497. * DQS calibration.
  498. *-----------------------------------------------------------------*/
  499. #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
  500. DQS_autocalibration();
  501. #else
  502. program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
  503. #endif
  504. #ifdef CONFIG_DDR_ECC
  505. /*------------------------------------------------------------------
  506. * If ecc is enabled, initialize the parity bits.
  507. *-----------------------------------------------------------------*/
  508. program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
  509. #endif
  510. /*
  511. * Now after initialization (auto-calibration and ECC generation)
  512. * remove the TLB entries with caches enabled and program again with
  513. * desired cache functionality
  514. */
  515. remove_tlb(0, dram_size);
  516. program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);
  517. ppc4xx_ibm_ddr2_register_dump();
  518. /*
  519. * Clear potential errors resulting from auto-calibration.
  520. * If not done, then we could get an interrupt later on when
  521. * exceptions are enabled.
  522. */
  523. set_mcsr(get_mcsr());
  524. return sdram_memsize();
  525. }
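/*
 * Note: in U-Boot of this vintage, initdram() is typically called from the
 * common PowerPC init sequence (init_func_ram() in lib_ppc/board.c), and its
 * return value becomes gd->ram_size.
 */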
  526. static void get_spd_info(unsigned long *dimm_populated,
  527. unsigned char *iic0_dimm_addr,
  528. unsigned long num_dimm_banks)
  529. {
  530. unsigned long dimm_num;
  531. unsigned long dimm_found;
  532. unsigned char num_of_bytes;
  533. unsigned char total_size;
  534. dimm_found = FALSE;
  535. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  536. num_of_bytes = 0;
  537. total_size = 0;
  538. num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
  539. debug("\nspd_read(0x%x) returned %d\n",
  540. iic0_dimm_addr[dimm_num], num_of_bytes);
  541. total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
  542. debug("spd_read(0x%x) returned %d\n",
  543. iic0_dimm_addr[dimm_num], total_size);
  544. if ((num_of_bytes != 0) && (total_size != 0)) {
  545. dimm_populated[dimm_num] = TRUE;
  546. dimm_found = TRUE;
  547. debug("DIMM slot %lu: populated\n", dimm_num);
  548. } else {
  549. dimm_populated[dimm_num] = FALSE;
  550. debug("DIMM slot %lu: Not populated\n", dimm_num);
  551. }
  552. }
  553. if (dimm_found == FALSE) {
  554. printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
  555. spd_ddr_init_hang ();
  556. }
  557. }
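/*
 * Append the ECC state, the effective DDR clock frequency and the CAS latency
 * to the memory size line printed by the common code.
 */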
  558. void board_add_ram_info(int use_default)
  559. {
  560. PPC4xx_SYS_INFO board_cfg;
  561. u32 val;
  562. if (is_ecc_enabled())
  563. puts(" (ECC");
  564. else
  565. puts(" (ECC not");
  566. get_sys_info(&board_cfg);
  567. mfsdr(SDR0_DDR0, val);
  568. val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
  569. printf(" enabled, %d MHz", (val * 2) / 1000000);
  570. mfsdram(SDRAM_MMODE, val);
  571. val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
  572. printf(", CL%d)", val);
  573. }
  574. /*------------------------------------------------------------------
  575. * For the memory DIMMs installed, this routine verifies that they
  576. * really are DDR specific DIMMs.
  577. *-----------------------------------------------------------------*/
  578. static void check_mem_type(unsigned long *dimm_populated,
  579. unsigned char *iic0_dimm_addr,
  580. unsigned long num_dimm_banks)
  581. {
  582. unsigned long dimm_num;
  583. unsigned long dimm_type;
  584. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  585. if (dimm_populated[dimm_num] == TRUE) {
  586. dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
  587. switch (dimm_type) {
  588. case 1:
  589. printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
  590. "slot %d.\n", (unsigned int)dimm_num);
  591. printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
  592. printf("Replace the DIMM module with a supported DIMM.\n\n");
  593. spd_ddr_init_hang ();
  594. break;
  595. case 2:
  596. printf("ERROR: EDO DIMM detected in slot %d.\n",
  597. (unsigned int)dimm_num);
  598. printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
  599. printf("Replace the DIMM module with a supported DIMM.\n\n");
  600. spd_ddr_init_hang ();
  601. break;
  602. case 3:
  603. printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
  604. (unsigned int)dimm_num);
  605. printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
  606. printf("Replace the DIMM module with a supported DIMM.\n\n");
  607. spd_ddr_init_hang ();
  608. break;
  609. case 4:
  610. printf("ERROR: SDRAM DIMM detected in slot %d.\n",
  611. (unsigned int)dimm_num);
  612. printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
  613. printf("Replace the DIMM module with a supported DIMM.\n\n");
  614. spd_ddr_init_hang ();
  615. break;
  616. case 5:
  617. printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
  618. (unsigned int)dimm_num);
  619. printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
  620. printf("Replace the DIMM module with a supported DIMM.\n\n");
  621. spd_ddr_init_hang ();
  622. break;
  623. case 6:
  624. printf("ERROR: SGRAM DIMM detected in slot %d.\n",
  625. (unsigned int)dimm_num);
  626. printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
  627. printf("Replace the DIMM module with a supported DIMM.\n\n");
  628. spd_ddr_init_hang ();
  629. break;
  630. case 7:
  631. debug("DIMM slot %d: DDR1 SDRAM detected\n", dimm_num);
  632. dimm_populated[dimm_num] = SDRAM_DDR1;
  633. break;
  634. case 8:
  635. debug("DIMM slot %d: DDR2 SDRAM detected\n", dimm_num);
  636. dimm_populated[dimm_num] = SDRAM_DDR2;
  637. break;
  638. default:
  639. printf("ERROR: Unknown DIMM detected in slot %d.\n",
  640. (unsigned int)dimm_num);
  641. printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
  642. printf("Replace the DIMM module with a supported DIMM.\n\n");
  643. spd_ddr_init_hang ();
  644. break;
  645. }
  646. }
  647. }
  648. for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
  649. if ((dimm_populated[dimm_num-1] != SDRAM_NONE)
  650. && (dimm_populated[dimm_num] != SDRAM_NONE)
  651. && (dimm_populated[dimm_num-1] != dimm_populated[dimm_num])) {
  652. printf("ERROR: DIMM's DDR1 and DDR2 type can not be mixed.\n");
  653. spd_ddr_init_hang ();
  654. }
  655. }
  656. }
  657. /*------------------------------------------------------------------
  658. * For the memory DIMMs installed, this routine verifies that the
  659. * frequency previously calculated is supported.
  660. *-----------------------------------------------------------------*/
  661. static void check_frequency(unsigned long *dimm_populated,
  662. unsigned char *iic0_dimm_addr,
  663. unsigned long num_dimm_banks)
  664. {
  665. unsigned long dimm_num;
  666. unsigned long tcyc_reg;
  667. unsigned long cycle_time;
  668. unsigned long calc_cycle_time;
  669. unsigned long sdram_freq;
  670. unsigned long sdr_ddrpll;
  671. PPC4xx_SYS_INFO board_cfg;
  672. /*------------------------------------------------------------------
  673. * Get the board configuration info.
  674. *-----------------------------------------------------------------*/
  675. get_sys_info(&board_cfg);
  676. mfsdr(SDR0_DDR0, sdr_ddrpll);
  677. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  678. /*
  679. * calc_cycle_time is calculated from DDR frequency set by board/chip
  680. * and is expressed in multiple of 10 picoseconds
  681. * to match the way DIMM cycle time is calculated below.
  682. */
  683. calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);
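/*
 * Illustrative example: for sdram_freq = 266666666 Hz this gives
 * calc_cycle_time = 100 * 10^9 / 266666666 ~= 375, i.e. 3.75 ns expressed
 * in units of 10 ps.
 */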
  684. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  685. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  686. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
  687. /*
  688. * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
  689. * the higher order nibble (bits 4-7) designates the cycle time
  690. * to a granularity of 1ns;
  691. * the value presented by the lower order nibble (bits 0-3)
  692. * has a granularity of .1ns and is added to the value designated
  693. * by the higher nibble. In addition, four codes of the lower order
  694. * nibble are assigned to support +.25, +.33, +.66 and +.75 ns.
  695. */
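/*
 * Worked example (illustrative): tcyc_reg = 0x3D decodes as 3 ns from the
 * upper nibble plus 0.75 ns for the 0x0D code in the lower nibble, so
 * cycle_time = (3 * 100) + 75 = 375 in units of 10 ps, i.e. 3.75 ns
 * (DDR2-533).
 */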
  696. /* Convert from hex to decimal */
  697. if ((tcyc_reg & 0x0F) == 0x0D)
  698. cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
  699. else if ((tcyc_reg & 0x0F) == 0x0C)
  700. cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
  701. else if ((tcyc_reg & 0x0F) == 0x0B)
  702. cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
  703. else if ((tcyc_reg & 0x0F) == 0x0A)
  704. cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
  705. else
  706. cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
  707. ((tcyc_reg & 0x0F)*10);
  708. debug("cycle_time=%d [10 picoseconds]\n", cycle_time);
  709. if (cycle_time > (calc_cycle_time + 10)) {
  710. /*
  711. * The SDRAM cycle time generated by the controller is too short
  712. * for the installed DIMM.
  713. * The additional 100 ps margin allows for a small uncertainty.
  714. */
  715. printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
  716. "slot %d \n while calculated cycle time is %d ps.\n",
  717. (unsigned int)(cycle_time*10),
  718. (unsigned int)dimm_num,
  719. (unsigned int)(calc_cycle_time*10));
  720. printf("Replace the DIMM, or change DDR frequency via "
  721. "strapping bits.\n\n");
  722. spd_ddr_init_hang ();
  723. }
  724. }
  725. }
  726. }
  727. /*------------------------------------------------------------------
  728. * For the memory DIMMs installed, this routine verifies that no more
  729. * than MAXRANKS ranks are available in total.
  730. *-----------------------------------------------------------------*/
  731. static void check_rank_number(unsigned long *dimm_populated,
  732. unsigned char *iic0_dimm_addr,
  733. unsigned long num_dimm_banks)
  734. {
  735. unsigned long dimm_num;
  736. unsigned long dimm_rank;
  737. unsigned long total_rank = 0;
  738. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  739. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  740. dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
  741. if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
  742. dimm_rank = (dimm_rank & 0x0F) +1;
  743. else
  744. dimm_rank = dimm_rank & 0x0F;
  745. if (dimm_rank > MAXRANKS) {
  746. printf("ERROR: DRAM DIMM detected with %lu ranks in "
  747. "slot %lu is not supported.\n", dimm_rank, dimm_num);
  748. printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
  749. printf("Replace the DIMM module with a supported DIMM.\n\n");
  750. spd_ddr_init_hang ();
  751. } else
  752. total_rank += dimm_rank;
  753. }
  754. if (total_rank > MAXRANKS) {
  755. printf("ERROR: DRAM DIMM detected with a total of %d ranks "
  756. "for all slots.\n", (unsigned int)total_rank);
  757. printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
  758. printf("Remove one of the DIMM modules.\n\n");
  759. spd_ddr_init_hang ();
  760. }
  761. }
  762. }
  763. /*------------------------------------------------------------------
  764. * Only 2.5V (DDR1) and 1.8V (DDR2) modules are supported.
  765. * This routine verifies this.
  766. *-----------------------------------------------------------------*/
  767. static void check_voltage_type(unsigned long *dimm_populated,
  768. unsigned char *iic0_dimm_addr,
  769. unsigned long num_dimm_banks)
  770. {
  771. unsigned long dimm_num;
  772. unsigned long voltage_type;
  773. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  774. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  775. voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
  776. switch (voltage_type) {
  777. case 0x00:
  778. printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
  779. printf("This DIMM is 5.0 Volt/TTL.\n");
  780. printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
  781. (unsigned int)dimm_num);
  782. spd_ddr_init_hang ();
  783. break;
  784. case 0x01:
  785. printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
  786. printf("This DIMM is LVTTL.\n");
  787. printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
  788. (unsigned int)dimm_num);
  789. spd_ddr_init_hang ();
  790. break;
  791. case 0x02:
  792. printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
  793. printf("This DIMM is 1.5 Volt.\n");
  794. printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
  795. (unsigned int)dimm_num);
  796. spd_ddr_init_hang ();
  797. break;
  798. case 0x03:
  799. printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
  800. printf("This DIMM is 3.3 Volt/TTL.\n");
  801. printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
  802. (unsigned int)dimm_num);
  803. spd_ddr_init_hang ();
  804. break;
  805. case 0x04:
  806. /* 2.5 Voltage only for DDR1 */
  807. break;
  808. case 0x05:
  809. /* 1.8 Voltage only for DDR2 */
  810. break;
  811. default:
  812. printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
  813. printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
  814. (unsigned int)dimm_num);
  815. spd_ddr_init_hang ();
  816. break;
  817. }
  818. }
  819. }
  820. }
  821. /*-----------------------------------------------------------------------------+
  822. * program_copt1.
  823. *-----------------------------------------------------------------------------*/
  824. static void program_copt1(unsigned long *dimm_populated,
  825. unsigned char *iic0_dimm_addr,
  826. unsigned long num_dimm_banks)
  827. {
  828. unsigned long dimm_num;
  829. unsigned long mcopt1;
  830. unsigned long ecc_enabled;
  831. unsigned long ecc = 0;
  832. unsigned long data_width = 0;
  833. unsigned long dimm_32bit;
  834. unsigned long dimm_64bit;
  835. unsigned long registered = 0;
  836. unsigned long attribute = 0;
  837. unsigned long buf0, buf1; /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
  838. unsigned long bankcount;
  839. unsigned long ddrtype;
  840. unsigned long val;
  841. #ifdef CONFIG_DDR_ECC
  842. ecc_enabled = TRUE;
  843. #else
  844. ecc_enabled = FALSE;
  845. #endif
  846. dimm_32bit = FALSE;
  847. dimm_64bit = FALSE;
  848. buf0 = FALSE;
  849. buf1 = FALSE;
  850. /*------------------------------------------------------------------
  851. * Set memory controller options reg 1, SDRAM_MCOPT1.
  852. *-----------------------------------------------------------------*/
  853. mfsdram(SDRAM_MCOPT1, val);
  854. mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
  855. SDRAM_MCOPT1_PMU_MASK | SDRAM_MCOPT1_DMWD_MASK |
  856. SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
  857. SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
  858. SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
  859. SDRAM_MCOPT1_DREF_MASK);
  860. mcopt1 |= SDRAM_MCOPT1_QDEP;
  861. mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
  862. mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
  863. mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
  864. mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
  865. mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;
  866. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  867. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  868. /* test ecc support */
  869. ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
  870. if (ecc != 0x02) /* ecc not supported */
  871. ecc_enabled = FALSE;
  872. /* test bank count */
  873. bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
  874. if (bankcount == 0x04) /* bank count = 4 */
  875. mcopt1 |= SDRAM_MCOPT1_4_BANKS;
  876. else /* bank count = 8 */
  877. mcopt1 |= SDRAM_MCOPT1_8_BANKS;
  878. /* test DDR type */
  879. ddrtype = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2);
  880. /* test for buffered/unbuffered, registered, differential clocks */
  881. registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
  882. attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);
  883. /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
  884. if (dimm_num == 0) {
  885. if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
  886. mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
  887. if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
  888. mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
  889. if (registered == 1) { /* DDR2 always buffered */
  890. /* TODO: what about above comments ? */
  891. mcopt1 |= SDRAM_MCOPT1_RDEN;
  892. buf0 = TRUE;
  893. } else {
  894. /* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
  895. if ((attribute & 0x02) == 0x00) {
  896. /* buffered not supported */
  897. buf0 = FALSE;
  898. } else {
  899. mcopt1 |= SDRAM_MCOPT1_RDEN;
  900. buf0 = TRUE;
  901. }
  902. }
  903. }
  904. else if (dimm_num == 1) {
  905. if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
  906. mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
  907. if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
  908. mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
  909. if (registered == 1) {
  910. /* DDR2 always buffered */
  911. mcopt1 |= SDRAM_MCOPT1_RDEN;
  912. buf1 = TRUE;
  913. } else {
  914. if ((attribute & 0x02) == 0x00) {
  915. /* buffered not supported */
  916. buf1 = FALSE;
  917. } else {
  918. mcopt1 |= SDRAM_MCOPT1_RDEN;
  919. buf1 = TRUE;
  920. }
  921. }
  922. }
  923. /* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
  924. data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
  925. (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
  926. switch (data_width) {
  927. case 72:
  928. case 64:
  929. dimm_64bit = TRUE;
  930. break;
  931. case 40:
  932. case 32:
  933. dimm_32bit = TRUE;
  934. break;
  935. default:
  936. printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
  937. data_width);
  938. printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
  939. break;
  940. }
  941. }
  942. }
  943. /* verify matching properties */
  944. if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
  945. if (buf0 != buf1) {
  946. printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
  947. spd_ddr_init_hang ();
  948. }
  949. }
  950. if ((dimm_64bit == TRUE) && (dimm_32bit == TRUE)) {
  951. printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
  952. spd_ddr_init_hang ();
  953. }
  954. else if ((dimm_64bit == TRUE) && (dimm_32bit == FALSE)) {
  955. mcopt1 |= SDRAM_MCOPT1_DMWD_64;
  956. } else if ((dimm_64bit == FALSE) && (dimm_32bit == TRUE)) {
  957. mcopt1 |= SDRAM_MCOPT1_DMWD_32;
  958. } else {
  959. printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
  960. spd_ddr_init_hang ();
  961. }
  962. if (ecc_enabled == TRUE)
  963. mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
  964. else
  965. mcopt1 |= SDRAM_MCOPT1_MCHK_NON;
  966. mtsdram(SDRAM_MCOPT1, mcopt1);
  967. }
  968. /*-----------------------------------------------------------------------------+
  969. * program_codt.
  970. *-----------------------------------------------------------------------------*/
  971. static void program_codt(unsigned long *dimm_populated,
  972. unsigned char *iic0_dimm_addr,
  973. unsigned long num_dimm_banks)
  974. {
  975. unsigned long codt;
  976. unsigned long modt0 = 0;
  977. unsigned long modt1 = 0;
  978. unsigned long modt2 = 0;
  979. unsigned long modt3 = 0;
  980. unsigned char dimm_num;
  981. unsigned char dimm_rank;
  982. unsigned char total_rank = 0;
  983. unsigned char total_dimm = 0;
  984. unsigned char dimm_type = 0;
  985. unsigned char firstSlot = 0;
  986. /*------------------------------------------------------------------
  987. * Set the SDRAM Controller On Die Termination Register
  988. *-----------------------------------------------------------------*/
  989. mfsdram(SDRAM_CODT, codt);
  990. codt |= (SDRAM_CODT_IO_NMODE
  991. & (~SDRAM_CODT_DQS_SINGLE_END
  992. & ~SDRAM_CODT_CKSE_SINGLE_END
  993. & ~SDRAM_CODT_FEEBBACK_RCV_SINGLE_END
  994. & ~SDRAM_CODT_FEEBBACK_DRV_SINGLE_END));
  995. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  996. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  997. dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
  998. if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
  999. dimm_rank = (dimm_rank & 0x0F) + 1;
  1000. dimm_type = SDRAM_DDR2;
  1001. } else {
  1002. dimm_rank = dimm_rank & 0x0F;
  1003. dimm_type = SDRAM_DDR1;
  1004. }
  1005. total_rank += dimm_rank;
  1006. total_dimm++;
  1007. if ((dimm_num == 0) && (total_dimm == 1))
  1008. firstSlot = TRUE;
  1009. else
  1010. firstSlot = FALSE;
  1011. }
  1012. }
  1013. if (dimm_type == SDRAM_DDR2) {
  1014. codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
  1015. if ((total_dimm == 1) && (firstSlot == TRUE)) {
  1016. if (total_rank == 1) { /* PUUU */
  1017. codt |= CALC_ODT_R(0);
  1018. modt0 = CALC_ODT_W(0);
  1019. modt1 = 0x00000000;
  1020. modt2 = 0x00000000;
  1021. modt3 = 0x00000000;
  1022. }
  1023. if (total_rank == 2) { /* PPUU */
  1024. codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
  1025. modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
  1026. modt1 = 0x00000000;
  1027. modt2 = 0x00000000;
  1028. modt3 = 0x00000000;
  1029. }
  1030. } else if ((total_dimm == 1) && (firstSlot != TRUE)) {
  1031. if (total_rank == 1) { /* UUPU */
  1032. codt |= CALC_ODT_R(2);
  1033. modt0 = 0x00000000;
  1034. modt1 = 0x00000000;
  1035. modt2 = CALC_ODT_W(2);
  1036. modt3 = 0x00000000;
  1037. }
  1038. if (total_rank == 2) { /* UUPP */
  1039. codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
  1040. modt0 = 0x00000000;
  1041. modt1 = 0x00000000;
  1042. modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
  1043. modt3 = 0x00000000;
  1044. }
  1045. }
  1046. if (total_dimm == 2) {
  1047. if (total_rank == 2) { /* PUPU */
  1048. codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
  1049. modt0 = CALC_ODT_RW(2);
  1050. modt1 = 0x00000000;
  1051. modt2 = CALC_ODT_RW(0);
  1052. modt3 = 0x00000000;
  1053. }
  1054. if (total_rank == 4) { /* PPPP */
  1055. codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
  1056. CALC_ODT_R(2) | CALC_ODT_R(3);
  1057. modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
  1058. modt1 = 0x00000000;
  1059. modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
  1060. modt3 = 0x00000000;
  1061. }
  1062. }
  1063. } else {
  1064. codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
  1065. modt0 = 0x00000000;
  1066. modt1 = 0x00000000;
  1067. modt2 = 0x00000000;
  1068. modt3 = 0x00000000;
  1069. if (total_dimm == 1) {
  1070. if (total_rank == 1)
  1071. codt |= 0x00800000;
  1072. if (total_rank == 2)
  1073. codt |= 0x02800000;
  1074. }
  1075. if (total_dimm == 2) {
  1076. if (total_rank == 2)
  1077. codt |= 0x08800000;
  1078. if (total_rank == 4)
  1079. codt |= 0x2a800000;
  1080. }
  1081. }
1082. debug("number of DIMMs %d\n", total_dimm);
1083. debug("number of ranks %d\n", total_rank);
1084. if (total_dimm == 1)
1085. debug("DIMM in first slot: %d\n", firstSlot);
  1086. mtsdram(SDRAM_CODT, codt);
  1087. mtsdram(SDRAM_MODT0, modt0);
  1088. mtsdram(SDRAM_MODT1, modt1);
  1089. mtsdram(SDRAM_MODT2, modt2);
  1090. mtsdram(SDRAM_MODT3, modt3);
  1091. }
  1092. /*-----------------------------------------------------------------------------+
  1093. * program_initplr.
  1094. *-----------------------------------------------------------------------------*/
  1095. static void program_initplr(unsigned long *dimm_populated,
  1096. unsigned char *iic0_dimm_addr,
  1097. unsigned long num_dimm_banks,
  1098. ddr_cas_id_t selected_cas,
  1099. int write_recovery)
  1100. {
  1101. u32 cas = 0;
  1102. u32 odt = 0;
  1103. u32 ods = 0;
  1104. u32 mr;
  1105. u32 wr;
  1106. u32 emr;
  1107. u32 emr2;
  1108. u32 emr3;
  1109. int dimm_num;
  1110. int total_dimm = 0;
  1111. /******************************************************
  1112. ** Assumption: if more than one DIMM, all DIMMs are the same
  1113. ** as already checked in check_memory_type
  1114. ******************************************************/
  1115. if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
  1116. mtsdram(SDRAM_INITPLR0, 0x81B80000);
  1117. mtsdram(SDRAM_INITPLR1, 0x81900400);
  1118. mtsdram(SDRAM_INITPLR2, 0x81810000);
  1119. mtsdram(SDRAM_INITPLR3, 0xff800162);
  1120. mtsdram(SDRAM_INITPLR4, 0x81900400);
  1121. mtsdram(SDRAM_INITPLR5, 0x86080000);
  1122. mtsdram(SDRAM_INITPLR6, 0x86080000);
  1123. mtsdram(SDRAM_INITPLR7, 0x81000062);
  1124. } else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
  1125. switch (selected_cas) {
  1126. case DDR_CAS_3:
  1127. cas = 3 << 4;
  1128. break;
  1129. case DDR_CAS_4:
  1130. cas = 4 << 4;
  1131. break;
  1132. case DDR_CAS_5:
  1133. cas = 5 << 4;
  1134. break;
  1135. default:
1136. printf("ERROR: unsupported selected_cas value %d\n", selected_cas);
  1137. spd_ddr_init_hang ();
  1138. break;
  1139. }
  1140. #if 0
  1141. /*
  1142. * ToDo - Still a problem with the write recovery:
  1143. * On the Corsair CM2X512-5400C4 module, setting write recovery
  1144. * in the INITPLR reg to the value calculated in program_mode()
1145. * results in DDR2 memory that does not work correctly (crash after
1146. * relocation).
  1147. *
  1148. * So for now, set the write recovery to 3. This seems to work
1149. * on the Corsair module too.
  1150. *
  1151. * 2007-03-01, sr
  1152. */
  1153. switch (write_recovery) {
  1154. case 3:
  1155. wr = WRITE_RECOV_3;
  1156. break;
  1157. case 4:
  1158. wr = WRITE_RECOV_4;
  1159. break;
  1160. case 5:
  1161. wr = WRITE_RECOV_5;
  1162. break;
  1163. case 6:
  1164. wr = WRITE_RECOV_6;
  1165. break;
  1166. default:
1167. printf("ERROR: write recovery not supported (%d)\n", write_recovery);
  1168. spd_ddr_init_hang ();
  1169. break;
  1170. }
  1171. #else
  1172. wr = WRITE_RECOV_3; /* test-only, see description above */
  1173. #endif
  1174. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
  1175. if (dimm_populated[dimm_num] != SDRAM_NONE)
  1176. total_dimm++;
  1177. if (total_dimm == 1) {
  1178. odt = ODT_150_OHM;
  1179. ods = ODS_FULL;
  1180. } else if (total_dimm == 2) {
  1181. odt = ODT_75_OHM;
  1182. ods = ODS_REDUCED;
  1183. } else {
1184. printf("ERROR: Unsupported number of DIMMs (%d)\n", total_dimm);
  1185. spd_ddr_init_hang ();
  1186. }
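/*
 * Assemble the DDR2 mode register (burst length 4 plus the CAS latency
 * and write recovery determined above) and the extended mode registers
 * (ODT value and output drive strength chosen from the DIMM count).
 */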
  1187. mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
  1188. emr = CMD_EMR | SELECT_EMR | odt | ods;
  1189. emr2 = CMD_EMR | SELECT_EMR2;
  1190. emr3 = CMD_EMR | SELECT_EMR3;
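/*
 * The INITPLR entries below follow the JEDEC DDR2 power-up sequence:
 * NOP, precharge-all, EMR2, EMR3, EMR (DLL enable), MR (DLL reset),
 * precharge-all, four refreshes, MR, then OCD calibration default/exit.
 */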
  1191. /* NOP - Wait 106 MemClk cycles */
  1192. mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
  1193. SDRAM_INITPLR_IMWT_ENCODE(106));
  1194. udelay(1000);
  1195. /* precharge 4 MemClk cycles */
  1196. mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
  1197. SDRAM_INITPLR_IMWT_ENCODE(4));
  1198. /* EMR2 - Wait tMRD (2 MemClk cycles) */
  1199. mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
  1200. SDRAM_INITPLR_IMWT_ENCODE(2));
  1201. /* EMR3 - Wait tMRD (2 MemClk cycles) */
  1202. mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
  1203. SDRAM_INITPLR_IMWT_ENCODE(2));
  1204. /* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
  1205. mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
  1206. SDRAM_INITPLR_IMWT_ENCODE(2));
  1207. /* MR w/ DLL reset - 200 cycle wait for DLL reset */
  1208. mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
  1209. SDRAM_INITPLR_IMWT_ENCODE(200));
  1210. udelay(1000);
  1211. /* precharge 4 MemClk cycles */
  1212. mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
  1213. SDRAM_INITPLR_IMWT_ENCODE(4));
  1214. /* Refresh 25 MemClk cycles */
  1215. mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1216. SDRAM_INITPLR_IMWT_ENCODE(25));
  1217. /* Refresh 25 MemClk cycles */
  1218. mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1219. SDRAM_INITPLR_IMWT_ENCODE(25));
  1220. /* Refresh 25 MemClk cycles */
  1221. mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1222. SDRAM_INITPLR_IMWT_ENCODE(25));
  1223. /* Refresh 25 MemClk cycles */
  1224. mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
  1225. SDRAM_INITPLR_IMWT_ENCODE(25));
  1226. /* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
  1227. mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
  1228. SDRAM_INITPLR_IMWT_ENCODE(2));
  1229. /* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
  1230. mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
  1231. SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
  1232. /* EMR OCD Exit */
  1233. mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
  1234. SDRAM_INITPLR_IMWT_ENCODE(2));
  1235. } else {
1236. printf("ERROR: unknown DDR type in program_initplr\n");
  1237. spd_ddr_init_hang ();
  1238. }
  1239. }
  1240. /*------------------------------------------------------------------
  1241. * This routine programs the SDRAM_MMODE register.
  1242. * the selected_cas is an output parameter, that will be passed
  1243. * by caller to call the above program_initplr( )
  1244. *-----------------------------------------------------------------*/
  1245. static void program_mode(unsigned long *dimm_populated,
  1246. unsigned char *iic0_dimm_addr,
  1247. unsigned long num_dimm_banks,
  1248. ddr_cas_id_t *selected_cas,
  1249. int *write_recovery)
  1250. {
  1251. unsigned long dimm_num;
  1252. unsigned long sdram_ddr1;
  1253. unsigned long t_wr_ns;
  1254. unsigned long t_wr_clk;
  1255. unsigned long cas_bit;
  1256. unsigned long cas_index;
  1257. unsigned long sdram_freq;
  1258. unsigned long ddr_check;
  1259. unsigned long mmode;
  1260. unsigned long tcyc_reg;
  1261. unsigned long cycle_2_0_clk;
  1262. unsigned long cycle_2_5_clk;
  1263. unsigned long cycle_3_0_clk;
  1264. unsigned long cycle_4_0_clk;
  1265. unsigned long cycle_5_0_clk;
  1266. unsigned long max_2_0_tcyc_ns_x_100;
  1267. unsigned long max_2_5_tcyc_ns_x_100;
  1268. unsigned long max_3_0_tcyc_ns_x_100;
  1269. unsigned long max_4_0_tcyc_ns_x_100;
  1270. unsigned long max_5_0_tcyc_ns_x_100;
  1271. unsigned long cycle_time_ns_x_100[3];
  1272. PPC4xx_SYS_INFO board_cfg;
  1273. unsigned char cas_2_0_available;
  1274. unsigned char cas_2_5_available;
  1275. unsigned char cas_3_0_available;
  1276. unsigned char cas_4_0_available;
  1277. unsigned char cas_5_0_available;
  1278. unsigned long sdr_ddrpll;
  1279. /*------------------------------------------------------------------
  1280. * Get the board configuration info.
  1281. *-----------------------------------------------------------------*/
  1282. get_sys_info(&board_cfg);
  1283. mfsdr(SDR0_DDR0, sdr_ddrpll);
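/* DDR frequency = PLB frequency scaled by the DDR PLL multiplier field of SDR0_DDR0 */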
  1284. sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
  1285. debug("sdram_freq=%d\n", sdram_freq);
  1286. /*------------------------------------------------------------------
  1287. * Handle the timing. We need to find the worst case timing of all
  1288. * the dimm modules installed.
  1289. *-----------------------------------------------------------------*/
  1290. t_wr_ns = 0;
  1291. cas_2_0_available = TRUE;
  1292. cas_2_5_available = TRUE;
  1293. cas_3_0_available = TRUE;
  1294. cas_4_0_available = TRUE;
  1295. cas_5_0_available = TRUE;
  1296. max_2_0_tcyc_ns_x_100 = 10;
  1297. max_2_5_tcyc_ns_x_100 = 10;
  1298. max_3_0_tcyc_ns_x_100 = 10;
  1299. max_4_0_tcyc_ns_x_100 = 10;
  1300. max_5_0_tcyc_ns_x_100 = 10;
  1301. sdram_ddr1 = TRUE;
  1302. /* loop through all the DIMM slots on the board */
  1303. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1304. /* If a dimm is installed in a particular slot ... */
  1305. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1306. if (dimm_populated[dimm_num] == SDRAM_DDR1)
  1307. sdram_ddr1 = TRUE;
  1308. else
  1309. sdram_ddr1 = FALSE;
  1310. /* t_wr_ns = max(t_wr_ns, (unsigned long)dimm_spd[dimm_num][36] >> 2); */ /* not used in this loop. */
  1311. cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
  1312. debug("cas_bit[SPD byte 18]=%02x\n", cas_bit);
  1313. /* For a particular DIMM, grab the three CAS values it supports */
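/*
 * SPD bytes 9, 23 and 25 hold the cycle time at the highest, second
 * highest and third highest supported CAS latency. The upper nibble is
 * whole nanoseconds and the lower nibble tenths, with 0x0D meaning
 * 0.75 ns (JEDEC SPD convention, decoded below).
 */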
  1314. for (cas_index = 0; cas_index < 3; cas_index++) {
  1315. switch (cas_index) {
  1316. case 0:
  1317. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
  1318. break;
  1319. case 1:
  1320. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
  1321. break;
  1322. default:
  1323. tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
  1324. break;
  1325. }
  1326. if ((tcyc_reg & 0x0F) >= 10) {
  1327. if ((tcyc_reg & 0x0F) == 0x0D) {
  1328. /* Convert from hex to decimal */
  1329. cycle_time_ns_x_100[cas_index] =
  1330. (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
  1331. } else {
  1332. printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
  1333. "in slot %d\n", (unsigned int)dimm_num);
  1334. spd_ddr_init_hang ();
  1335. }
  1336. } else {
  1337. /* Convert from hex to decimal */
  1338. cycle_time_ns_x_100[cas_index] =
  1339. (((tcyc_reg & 0xF0) >> 4) * 100) +
  1340. ((tcyc_reg & 0x0F)*10);
  1341. }
  1342. debug("cas_index=%d: cycle_time_ns_x_100=%d\n", cas_index,
  1343. cycle_time_ns_x_100[cas_index]);
  1344. }
  1345. /* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
  1346. /* supported for a particular DIMM. */
  1347. cas_index = 0;
  1348. if (sdram_ddr1) {
  1349. /*
  1350. * DDR devices use the following bitmask for CAS latency:
  1351. * Bit 7 6 5 4 3 2 1 0
  1352. * TBD 4.0 3.5 3.0 2.5 2.0 1.5 1.0
  1353. */
  1354. if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
  1355. (cycle_time_ns_x_100[cas_index] != 0)) {
  1356. max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
  1357. cycle_time_ns_x_100[cas_index]);
  1358. cas_index++;
  1359. } else {
  1360. if (cas_index != 0)
  1361. cas_index++;
  1362. cas_4_0_available = FALSE;
  1363. }
  1364. if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
  1365. (cycle_time_ns_x_100[cas_index] != 0)) {
  1366. max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
  1367. cycle_time_ns_x_100[cas_index]);
  1368. cas_index++;
  1369. } else {
  1370. if (cas_index != 0)
  1371. cas_index++;
  1372. cas_3_0_available = FALSE;
  1373. }
  1374. if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
  1375. (cycle_time_ns_x_100[cas_index] != 0)) {
  1376. max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
  1377. cycle_time_ns_x_100[cas_index]);
  1378. cas_index++;
  1379. } else {
  1380. if (cas_index != 0)
  1381. cas_index++;
  1382. cas_2_5_available = FALSE;
  1383. }
  1384. if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
  1385. (cycle_time_ns_x_100[cas_index] != 0)) {
  1386. max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
  1387. cycle_time_ns_x_100[cas_index]);
  1388. cas_index++;
  1389. } else {
  1390. if (cas_index != 0)
  1391. cas_index++;
  1392. cas_2_0_available = FALSE;
  1393. }
  1394. } else {
  1395. /*
  1396. * DDR2 devices use the following bitmask for CAS latency:
  1397. * Bit 7 6 5 4 3 2 1 0
  1398. * TBD 6.0 5.0 4.0 3.0 2.0 TBD TBD
  1399. */
  1400. if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
  1401. (cycle_time_ns_x_100[cas_index] != 0)) {
  1402. max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
  1403. cycle_time_ns_x_100[cas_index]);
  1404. cas_index++;
  1405. } else {
  1406. if (cas_index != 0)
  1407. cas_index++;
  1408. cas_5_0_available = FALSE;
  1409. }
  1410. if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
  1411. (cycle_time_ns_x_100[cas_index] != 0)) {
  1412. max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
  1413. cycle_time_ns_x_100[cas_index]);
  1414. cas_index++;
  1415. } else {
  1416. if (cas_index != 0)
  1417. cas_index++;
  1418. cas_4_0_available = FALSE;
  1419. }
  1420. if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
  1421. (cycle_time_ns_x_100[cas_index] != 0)) {
  1422. max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
  1423. cycle_time_ns_x_100[cas_index]);
  1424. cas_index++;
  1425. } else {
  1426. if (cas_index != 0)
  1427. cas_index++;
  1428. cas_3_0_available = FALSE;
  1429. }
  1430. }
  1431. }
  1432. }
  1433. /*------------------------------------------------------------------
  1434. * Set the SDRAM mode, SDRAM_MMODE
  1435. *-----------------------------------------------------------------*/
  1436. mfsdram(SDRAM_MMODE, mmode);
  1437. mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
  1438. /* add 10 here because of rounding problems */
  1439. cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
  1440. cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
  1441. cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
  1442. cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
  1443. cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
  1444. debug("cycle_3_0_clk=%d\n", cycle_3_0_clk);
  1445. debug("cycle_4_0_clk=%d\n", cycle_4_0_clk);
  1446. debug("cycle_5_0_clk=%d\n", cycle_5_0_clk);
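/*
 * Pick the lowest CAS latency that the DIMMs support and whose maximum
 * clock rate (computed above from the minimum cycle time) still covers
 * the configured DDR frequency.
 */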
  1447. if (sdram_ddr1 == TRUE) { /* DDR1 */
  1448. if ((cas_2_0_available == TRUE) && (sdram_freq <= cycle_2_0_clk)) {
  1449. mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
  1450. *selected_cas = DDR_CAS_2;
  1451. } else if ((cas_2_5_available == TRUE) && (sdram_freq <= cycle_2_5_clk)) {
  1452. mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
  1453. *selected_cas = DDR_CAS_2_5;
  1454. } else if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
  1455. mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
  1456. *selected_cas = DDR_CAS_3;
  1457. } else {
  1458. printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1459. printf("Only DDR1 DIMMs with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
  1460. printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
  1461. spd_ddr_init_hang ();
  1462. }
  1463. } else { /* DDR2 */
  1464. debug("cas_3_0_available=%d\n", cas_3_0_available);
  1465. debug("cas_4_0_available=%d\n", cas_4_0_available);
  1466. debug("cas_5_0_available=%d\n", cas_5_0_available);
  1467. if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
  1468. mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
  1469. *selected_cas = DDR_CAS_3;
  1470. } else if ((cas_4_0_available == TRUE) && (sdram_freq <= cycle_4_0_clk)) {
  1471. mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
  1472. *selected_cas = DDR_CAS_4;
  1473. } else if ((cas_5_0_available == TRUE) && (sdram_freq <= cycle_5_0_clk)) {
  1474. mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
  1475. *selected_cas = DDR_CAS_5;
  1476. } else {
  1477. printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1478. printf("Only DDR2 DIMMs with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
  1479. printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
  1480. printf("cas3=%d cas4=%d cas5=%d\n",
  1481. cas_3_0_available, cas_4_0_available, cas_5_0_available);
  1482. printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
  1483. sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
  1484. spd_ddr_init_hang ();
  1485. }
  1486. }
  1487. if (sdram_ddr1 == TRUE)
  1488. mmode |= SDRAM_MMODE_WR_DDR1;
  1489. else {
  1490. /* loop through all the DIMM slots on the board */
  1491. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1492. /* If a dimm is installed in a particular slot ... */
  1493. if (dimm_populated[dimm_num] != SDRAM_NONE)
  1494. t_wr_ns = max(t_wr_ns,
  1495. spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
  1496. }
  1497. /*
  1498. * convert from nanoseconds to ddr clocks
  1499. * round up if necessary
  1500. */
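/* (if the back-computed value differs, the division truncated, so round the clock count up) */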
  1501. t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
  1502. ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
  1503. if (sdram_freq != ddr_check)
  1504. t_wr_clk++;
  1505. switch (t_wr_clk) {
  1506. case 0:
  1507. case 1:
  1508. case 2:
  1509. case 3:
  1510. mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
  1511. break;
  1512. case 4:
  1513. mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
  1514. break;
  1515. case 5:
  1516. mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
  1517. break;
  1518. default:
  1519. mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
  1520. break;
  1521. }
  1522. *write_recovery = t_wr_clk;
  1523. }
  1524. debug("CAS latency = %d\n", *selected_cas);
  1525. debug("Write recovery = %d\n", *write_recovery);
  1526. mtsdram(SDRAM_MMODE, mmode);
  1527. }
  1528. /*-----------------------------------------------------------------------------+
  1529. * program_rtr.
  1530. *-----------------------------------------------------------------------------*/
  1531. static void program_rtr(unsigned long *dimm_populated,
  1532. unsigned char *iic0_dimm_addr,
  1533. unsigned long num_dimm_banks)
  1534. {
  1535. PPC4xx_SYS_INFO board_cfg;
  1536. unsigned long max_refresh_rate;
  1537. unsigned long dimm_num;
  1538. unsigned long refresh_rate_type;
  1539. unsigned long refresh_rate;
  1540. unsigned long rint;
  1541. unsigned long sdram_freq;
  1542. unsigned long sdr_ddrpll;
  1543. unsigned long val;
  1544. /*------------------------------------------------------------------
  1545. * Get the board configuration info.
  1546. *-----------------------------------------------------------------*/
  1547. get_sys_info(&board_cfg);
  1548. /*------------------------------------------------------------------
  1549. * Set the SDRAM Refresh Timing Register, SDRAM_RTR
  1550. *-----------------------------------------------------------------*/
  1551. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1552. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  1553. max_refresh_rate = 0;
  1554. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1555. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1556. refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
  1557. refresh_rate_type &= 0x7F;
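/*
 * SPD byte 12 encodes the refresh interval; map the code to a period in
 * nanoseconds (15.625 us normal rate plus the reduced/extended variants).
 * The top bit (self-refresh flag) was masked off above.
 */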
  1558. switch (refresh_rate_type) {
  1559. case 0:
  1560. refresh_rate = 15625;
  1561. break;
  1562. case 1:
  1563. refresh_rate = 3906;
  1564. break;
  1565. case 2:
  1566. refresh_rate = 7812;
  1567. break;
  1568. case 3:
  1569. refresh_rate = 31250;
  1570. break;
  1571. case 4:
  1572. refresh_rate = 62500;
  1573. break;
  1574. case 5:
  1575. refresh_rate = 125000;
  1576. break;
  1577. default:
  1578. refresh_rate = 0;
  1579. printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
  1580. (unsigned int)dimm_num);
  1581. printf("Replace the DIMM module with a supported DIMM.\n\n");
  1582. spd_ddr_init_hang ();
  1583. break;
  1584. }
  1585. max_refresh_rate = max(max_refresh_rate, refresh_rate);
  1586. }
  1587. }
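/* convert the refresh interval from nanoseconds to memory-clock ticks and program it into the RTR RINT field */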
  1588. rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
  1589. mfsdram(SDRAM_RTR, val);
  1590. mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
  1591. (SDRAM_RTR_RINT_ENCODE(rint)));
  1592. }
  1593. /*------------------------------------------------------------------
  1594. * This routine programs the SDRAM_TRx registers.
  1595. *-----------------------------------------------------------------*/
  1596. static void program_tr(unsigned long *dimm_populated,
  1597. unsigned char *iic0_dimm_addr,
  1598. unsigned long num_dimm_banks)
  1599. {
  1600. unsigned long dimm_num;
  1601. unsigned long sdram_ddr1;
  1602. unsigned long t_rp_ns;
  1603. unsigned long t_rcd_ns;
  1604. unsigned long t_rrd_ns;
  1605. unsigned long t_ras_ns;
  1606. unsigned long t_rc_ns;
  1607. unsigned long t_rfc_ns;
  1608. unsigned long t_wpc_ns;
  1609. unsigned long t_wtr_ns;
  1610. unsigned long t_rpc_ns;
  1611. unsigned long t_rp_clk;
  1612. unsigned long t_rcd_clk;
  1613. unsigned long t_rrd_clk;
  1614. unsigned long t_ras_clk;
  1615. unsigned long t_rc_clk;
  1616. unsigned long t_rfc_clk;
  1617. unsigned long t_wpc_clk;
  1618. unsigned long t_wtr_clk;
  1619. unsigned long t_rpc_clk;
  1620. unsigned long sdtr1, sdtr2, sdtr3;
  1621. unsigned long ddr_check;
  1622. unsigned long sdram_freq;
  1623. unsigned long sdr_ddrpll;
  1624. PPC4xx_SYS_INFO board_cfg;
  1625. /*------------------------------------------------------------------
  1626. * Get the board configuration info.
  1627. *-----------------------------------------------------------------*/
  1628. get_sys_info(&board_cfg);
  1629. mfsdr(SDR0_DDR0, sdr_ddrpll);
  1630. sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
  1631. /*------------------------------------------------------------------
  1632. * Handle the timing. We need to find the worst case timing of all
  1633. * the dimm modules installed.
  1634. *-----------------------------------------------------------------*/
  1635. t_rp_ns = 0;
  1636. t_rrd_ns = 0;
  1637. t_rcd_ns = 0;
  1638. t_ras_ns = 0;
  1639. t_rc_ns = 0;
  1640. t_rfc_ns = 0;
  1641. t_wpc_ns = 0;
  1642. t_wtr_ns = 0;
  1643. t_rpc_ns = 0;
  1644. sdram_ddr1 = TRUE;
  1645. /* loop through all the DIMM slots on the board */
  1646. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1647. /* If a dimm is installed in a particular slot ... */
  1648. if (dimm_populated[dimm_num] != SDRAM_NONE) {
1649. if (dimm_populated[dimm_num] == SDRAM_DDR1)
  1650. sdram_ddr1 = TRUE;
  1651. else
  1652. sdram_ddr1 = FALSE;
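/*
 * Collect the worst-case (largest) timings over all populated DIMMs:
 * tRCD, tRRD and tRP are stored in quarter-nanosecond units (hence the
 * >> 2), while tRAS, tRC and tRFC are in whole nanoseconds.
 */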
  1653. t_rcd_ns = max(t_rcd_ns, spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
  1654. t_rrd_ns = max(t_rrd_ns, spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
  1655. t_rp_ns = max(t_rp_ns, spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
  1656. t_ras_ns = max(t_ras_ns, spd_read(iic0_dimm_addr[dimm_num], 30));
  1657. t_rc_ns = max(t_rc_ns, spd_read(iic0_dimm_addr[dimm_num], 41));
  1658. t_rfc_ns = max(t_rfc_ns, spd_read(iic0_dimm_addr[dimm_num], 42));
  1659. }
  1660. }
  1661. /*------------------------------------------------------------------
  1662. * Set the SDRAM Timing Reg 1, SDRAM_TR1
  1663. *-----------------------------------------------------------------*/
  1664. mfsdram(SDRAM_SDTR1, sdtr1);
  1665. sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
  1666. SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
  1667. /* default values */
  1668. sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
  1669. sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
  1670. /* normal operations */
  1671. sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
  1672. sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
  1673. mtsdram(SDRAM_SDTR1, sdtr1);
  1674. /*------------------------------------------------------------------
  1675. * Set the SDRAM Timing Reg 2, SDRAM_TR2
  1676. *-----------------------------------------------------------------*/
  1677. mfsdram(SDRAM_SDTR2, sdtr2);
  1678. sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK | SDRAM_SDTR2_WTR_MASK |
  1679. SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
  1680. SDRAM_SDTR2_RPC_MASK | SDRAM_SDTR2_RP_MASK |
  1681. SDRAM_SDTR2_RRD_MASK);
  1682. /*
  1683. * convert t_rcd from nanoseconds to ddr clocks
  1684. * round up if necessary
  1685. */
  1686. t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
  1687. ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
  1688. if (sdram_freq != ddr_check)
  1689. t_rcd_clk++;
  1690. switch (t_rcd_clk) {
  1691. case 0:
  1692. case 1:
  1693. sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
  1694. break;
  1695. case 2:
  1696. sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
  1697. break;
  1698. case 3:
  1699. sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
  1700. break;
  1701. case 4:
  1702. sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
  1703. break;
  1704. default:
  1705. sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
  1706. break;
  1707. }
  1708. if (sdram_ddr1 == TRUE) { /* DDR1 */
  1709. if (sdram_freq < 200000000) {
  1710. sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
  1711. sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
  1712. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1713. } else {
  1714. sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
  1715. sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
  1716. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1717. }
  1718. } else { /* DDR2 */
  1719. /* loop through all the DIMM slots on the board */
  1720. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1721. /* If a dimm is installed in a particular slot ... */
  1722. if (dimm_populated[dimm_num] != SDRAM_NONE) {
1723. t_wpc_ns = max(t_wpc_ns, spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
  1724. t_wtr_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
  1725. t_rpc_ns = max(t_rpc_ns, spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
  1726. }
  1727. }
  1728. /*
  1729. * convert from nanoseconds to ddr clocks
  1730. * round up if necessary
  1731. */
  1732. t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
  1733. ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
  1734. if (sdram_freq != ddr_check)
  1735. t_wpc_clk++;
  1736. switch (t_wpc_clk) {
  1737. case 0:
  1738. case 1:
  1739. case 2:
  1740. sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
  1741. break;
  1742. case 3:
  1743. sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
  1744. break;
  1745. case 4:
  1746. sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
  1747. break;
  1748. case 5:
  1749. sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
  1750. break;
  1751. default:
  1752. sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
  1753. break;
  1754. }
  1755. /*
  1756. * convert from nanoseconds to ddr clocks
  1757. * round up if necessary
  1758. */
  1759. t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
  1760. ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
  1761. if (sdram_freq != ddr_check)
  1762. t_wtr_clk++;
  1763. switch (t_wtr_clk) {
  1764. case 0:
  1765. case 1:
  1766. sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
  1767. break;
  1768. case 2:
  1769. sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
  1770. break;
  1771. case 3:
  1772. sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
  1773. break;
  1774. default:
  1775. sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
  1776. break;
  1777. }
  1778. /*
  1779. * convert from nanoseconds to ddr clocks
  1780. * round up if necessary
  1781. */
  1782. t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
  1783. ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
  1784. if (sdram_freq != ddr_check)
  1785. t_rpc_clk++;
  1786. switch (t_rpc_clk) {
  1787. case 0:
  1788. case 1:
  1789. case 2:
  1790. sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
  1791. break;
  1792. case 3:
  1793. sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
  1794. break;
  1795. default:
  1796. sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
  1797. break;
  1798. }
  1799. }
  1800. /* default value */
  1801. sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
  1802. /*
  1803. * convert t_rrd from nanoseconds to ddr clocks
  1804. * round up if necessary
  1805. */
  1806. t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
  1807. ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
  1808. if (sdram_freq != ddr_check)
  1809. t_rrd_clk++;
  1810. if (t_rrd_clk == 3)
  1811. sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
  1812. else
  1813. sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
  1814. /*
  1815. * convert t_rp from nanoseconds to ddr clocks
  1816. * round up if necessary
  1817. */
  1818. t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
  1819. ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
  1820. if (sdram_freq != ddr_check)
  1821. t_rp_clk++;
  1822. switch (t_rp_clk) {
  1823. case 0:
  1824. case 1:
  1825. case 2:
  1826. case 3:
  1827. sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
  1828. break;
  1829. case 4:
  1830. sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
  1831. break;
  1832. case 5:
  1833. sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
  1834. break;
  1835. case 6:
  1836. sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
  1837. break;
  1838. default:
  1839. sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
  1840. break;
  1841. }
  1842. mtsdram(SDRAM_SDTR2, sdtr2);
  1843. /*------------------------------------------------------------------
  1844. * Set the SDRAM Timing Reg 3, SDRAM_TR3
  1845. *-----------------------------------------------------------------*/
  1846. mfsdram(SDRAM_SDTR3, sdtr3);
  1847. sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK | SDRAM_SDTR3_RC_MASK |
  1848. SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
  1849. /*
  1850. * convert t_ras from nanoseconds to ddr clocks
  1851. * round up if necessary
  1852. */
  1853. t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
  1854. ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
  1855. if (sdram_freq != ddr_check)
  1856. t_ras_clk++;
  1857. sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
  1858. /*
  1859. * convert t_rc from nanoseconds to ddr clocks
  1860. * round up if necessary
  1861. */
  1862. t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
  1863. ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
  1864. if (sdram_freq != ddr_check)
  1865. t_rc_clk++;
  1866. sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
  1867. /* default xcs value */
  1868. sdtr3 |= SDRAM_SDTR3_XCS;
  1869. /*
  1870. * convert t_rfc from nanoseconds to ddr clocks
  1871. * round up if necessary
  1872. */
  1873. t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
  1874. ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
  1875. if (sdram_freq != ddr_check)
  1876. t_rfc_clk++;
  1877. sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
  1878. mtsdram(SDRAM_SDTR3, sdtr3);
  1879. }
  1880. /*-----------------------------------------------------------------------------+
  1881. * program_bxcf.
  1882. *-----------------------------------------------------------------------------*/
  1883. static void program_bxcf(unsigned long *dimm_populated,
  1884. unsigned char *iic0_dimm_addr,
  1885. unsigned long num_dimm_banks)
  1886. {
  1887. unsigned long dimm_num;
  1888. unsigned long num_col_addr;
  1889. unsigned long num_ranks;
  1890. unsigned long num_banks;
  1891. unsigned long mode;
  1892. unsigned long ind_rank;
  1893. unsigned long ind;
  1894. unsigned long ind_bank;
  1895. unsigned long bank_0_populated;
  1896. /*------------------------------------------------------------------
  1897. * Set the BxCF regs. First, wipe out the bank config registers.
  1898. *-----------------------------------------------------------------*/
  1899. mtsdram(SDRAM_MB0CF, 0x00000000);
  1900. mtsdram(SDRAM_MB1CF, 0x00000000);
  1901. mtsdram(SDRAM_MB2CF, 0x00000000);
  1902. mtsdram(SDRAM_MB3CF, 0x00000000);
  1903. mode = SDRAM_BXCF_M_BE_ENABLE;
  1904. bank_0_populated = 0;
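/*
 * For each populated DIMM, derive the BxCF addressing mode from the
 * number of column addresses and banks reported in SPD, then write the
 * same mode word to one MBxCF register per rank.
 */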
  1905. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1906. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1907. num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
  1908. num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
  1909. if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
1910. num_ranks = (num_ranks & 0x0F) + 1;
  1911. else
  1912. num_ranks = num_ranks & 0x0F;
  1913. num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
  1914. for (ind_bank = 0; ind_bank < 2; ind_bank++) {
  1915. if (num_banks == 4)
  1916. ind = 0;
  1917. else
  1918. ind = 5 << 8;
  1919. switch (num_col_addr) {
  1920. case 0x08:
  1921. mode |= (SDRAM_BXCF_M_AM_0 + ind);
  1922. break;
  1923. case 0x09:
  1924. mode |= (SDRAM_BXCF_M_AM_1 + ind);
  1925. break;
  1926. case 0x0A:
  1927. mode |= (SDRAM_BXCF_M_AM_2 + ind);
  1928. break;
  1929. case 0x0B:
  1930. mode |= (SDRAM_BXCF_M_AM_3 + ind);
  1931. break;
  1932. case 0x0C:
  1933. mode |= (SDRAM_BXCF_M_AM_4 + ind);
  1934. break;
  1935. default:
  1936. printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
  1937. (unsigned int)dimm_num);
  1938. printf("ERROR: Unsupported value for number of "
  1939. "column addresses: %d.\n", (unsigned int)num_col_addr);
  1940. printf("Replace the DIMM module with a supported DIMM.\n\n");
  1941. spd_ddr_init_hang ();
  1942. }
  1943. }
1944. if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
  1945. bank_0_populated = 1;
  1946. for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
  1947. mtsdram(SDRAM_MB0CF +
  1948. ((dimm_num + bank_0_populated + ind_rank) << 2),
  1949. mode);
  1950. }
  1951. }
  1952. }
  1953. }
  1954. /*------------------------------------------------------------------
  1955. * program memory queue.
  1956. *-----------------------------------------------------------------*/
  1957. static void program_memory_queue(unsigned long *dimm_populated,
  1958. unsigned char *iic0_dimm_addr,
  1959. unsigned long num_dimm_banks)
  1960. {
  1961. unsigned long dimm_num;
  1962. phys_size_t rank_base_addr;
  1963. unsigned long rank_reg;
  1964. phys_size_t rank_size_bytes;
  1965. unsigned long rank_size_id;
  1966. unsigned long num_ranks;
  1967. unsigned long baseadd_size;
  1968. unsigned long i;
  1969. unsigned long bank_0_populated = 0;
  1970. phys_size_t total_size = 0;
  1971. /*------------------------------------------------------------------
  1972. * Reset the rank_base_address.
  1973. *-----------------------------------------------------------------*/
  1974. rank_reg = SDRAM_R0BAS;
  1975. rank_base_addr = 0x00000000;
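/*
 * Walk the populated DIMMs, decode each rank's size from SPD byte 31
 * and assign contiguous base addresses via the SDRAM_RxBAS memory-queue
 * rank registers.
 */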
  1976. for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
  1977. if (dimm_populated[dimm_num] != SDRAM_NONE) {
  1978. num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
  1979. if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
  1980. num_ranks = (num_ranks & 0x0F) + 1;
  1981. else
  1982. num_ranks = num_ranks & 0x0F;
  1983. rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
  1984. /*------------------------------------------------------------------
  1985. * Set the sizes
  1986. *-----------------------------------------------------------------*/
  1987. baseadd_size = 0;
  1988. switch (rank_size_id) {
  1989. case 0x01:
  1990. baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
  1991. total_size = 1024;
  1992. break;
  1993. case 0x02:
  1994. baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
  1995. total_size = 2048;
  1996. break;
  1997. case 0x04:
  1998. baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
  1999. total_size = 4096;
  2000. break;
  2001. case 0x08:
  2002. baseadd_size |= SDRAM_RXBAS_SDSZ_32;
  2003. total_size = 32;
  2004. break;
  2005. case 0x10:
  2006. baseadd_size |= SDRAM_RXBAS_SDSZ_64;
  2007. total_size = 64;
  2008. break;
  2009. case 0x20:
  2010. baseadd_size |= SDRAM_RXBAS_SDSZ_128;
  2011. total_size = 128;
  2012. break;
  2013. case 0x40:
  2014. baseadd_size |= SDRAM_RXBAS_SDSZ_256;
  2015. total_size = 256;
  2016. break;
  2017. case 0x80:
  2018. baseadd_size |= SDRAM_RXBAS_SDSZ_512;
  2019. total_size = 512;
  2020. break;
  2021. default:
  2022. printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
  2023. (unsigned int)dimm_num);
  2024. printf("ERROR: Unsupported value for the banksize: %d.\n",
  2025. (unsigned int)rank_size_id);
  2026. printf("Replace the DIMM module with a supported DIMM.\n\n");
  2027. spd_ddr_init_hang ();
  2028. }
  2029. rank_size_bytes = total_size << 20;
  2030. if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
  2031. bank_0_populated = 1;
  2032. for (i = 0; i < num_ranks; i++) {
  2033. mtdcr_any(rank_reg+i+dimm_num+bank_0_populated,
  2034. (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
  2035. baseadd_size));
  2036. rank_base_addr += rank_size_bytes;
  2037. }
  2038. }
  2039. }
  2040. #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
  2041. defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
  2042. defined(CONFIG_460SX)
  2043. /*
  2044. * Enable high bandwidth access
  2045. * This is currently not used, but with this setup
  2046. * it is possible to use it later on in e.g. the Linux
  2047. * EMAC driver for performance gain.
  2048. */
  2049. mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
  2050. mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
  2051. /*
  2052. * Set optimal value for Memory Queue HB/LL Configuration registers
  2053. */
  2054. mtdcr(SDRAM_CONF1HB, (mfdcr(SDRAM_CONF1HB) & ~SDRAM_CONF1HB_MASK) |
  2055. SDRAM_CONF1HB_AAFR | SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE |
  2056. SDRAM_CONF1HB_RPLM | SDRAM_CONF1HB_WRCL);
  2057. mtdcr(SDRAM_CONF1LL, (mfdcr(SDRAM_CONF1LL) & ~SDRAM_CONF1LL_MASK) |
  2058. SDRAM_CONF1LL_AAFR | SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE |
  2059. SDRAM_CONF1LL_RPLM);
  2060. mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
  2061. #endif
  2062. }
  2063. /*-----------------------------------------------------------------------------+
  2064. * is_ecc_enabled.
  2065. *-----------------------------------------------------------------------------*/
  2066. static unsigned long is_ecc_enabled(void)
  2067. {
  2068. unsigned long dimm_num;
  2069. unsigned long ecc;
  2070. unsigned long val;
  2071. ecc = 0;
  2072. /* loop through all the DIMM slots on the board */
  2073. for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
  2074. mfsdram(SDRAM_MCOPT1, val);
  2075. ecc = max(ecc, SDRAM_MCOPT1_MCHK_CHK_DECODE(val));
  2076. }
  2077. return ecc;
  2078. }
  2079. #ifdef CONFIG_DDR_ECC
  2080. /*-----------------------------------------------------------------------------+
  2081. * program_ecc.
  2082. *-----------------------------------------------------------------------------*/
  2083. static void program_ecc(unsigned long *dimm_populated,
  2084. unsigned char *iic0_dimm_addr,
  2085. unsigned long num_dimm_banks,
  2086. unsigned long tlb_word2_i_value)
  2087. {
  2088. unsigned long mcopt1;
  2089. unsigned long mcopt2;
  2090. unsigned long mcstat;
  2091. unsigned long dimm_num;
  2092. unsigned long ecc;
  2093. ecc = 0;
  2094. /* loop through all the DIMM slots on the board */
  2095. for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
  2096. /* If a dimm is installed in a particular slot ... */
  2097. if (dimm_populated[dimm_num] != SDRAM_NONE)
  2098. ecc = max(ecc, spd_read(iic0_dimm_addr[dimm_num], 11));
  2099. }
  2100. if (ecc == 0)
  2101. return;
  2102. if (sdram_memsize() > CONFIG_MAX_MEM_MAPPED) {
  2103. printf("\nWarning: Can't enable ECC on systems with more than 2GB of SDRAM!\n");
  2104. return;
  2105. }
  2106. mfsdram(SDRAM_MCOPT1, mcopt1);
  2107. mfsdram(SDRAM_MCOPT2, mcopt2);
  2108. if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
  2109. /* DDR controller must be enabled and not in self-refresh. */
  2110. mfsdram(SDRAM_MCSTAT, mcstat);
  2111. if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
  2112. && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
  2113. && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
  2114. == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
  2115. program_ecc_addr(0, sdram_memsize(), tlb_word2_i_value);
  2116. }
  2117. }
  2118. return;
  2119. }
  2120. static void wait_ddr_idle(void)
  2121. {
  2122. u32 val;
  2123. do {
  2124. mfsdram(SDRAM_MCSTAT, val);
  2125. } while ((val & SDRAM_MCSTAT_IDLE_MASK) == SDRAM_MCSTAT_IDLE_NOT);
  2126. }
  2127. /*-----------------------------------------------------------------------------+
  2128. * program_ecc_addr.
  2129. *-----------------------------------------------------------------------------*/
  2130. static void program_ecc_addr(unsigned long start_address,
  2131. unsigned long num_bytes,
  2132. unsigned long tlb_word2_i_value)
  2133. {
  2134. unsigned long current_address;
  2135. unsigned long end_address;
  2136. unsigned long address_increment;
  2137. unsigned long mcopt1;
  2138. char str[] = "ECC generation -";
  2139. char slash[] = "\\|/-\\|/-";
  2140. int loop = 0;
  2141. int loopi = 0;
  2142. current_address = start_address;
  2143. mfsdram(SDRAM_MCOPT1, mcopt1);
  2144. if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
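/* switch the controller to ECC generate mode so the writes below compute and store check bits */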
  2145. mtsdram(SDRAM_MCOPT1,
  2146. (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_GEN);
  2147. sync();
  2148. eieio();
  2149. wait_ddr_idle();
  2150. puts(str);
  2151. if (tlb_word2_i_value == TLB_WORD2_I_ENABLE) {
  2152. /* ECC bit set method for non-cached memory */
  2153. if ((mcopt1 & SDRAM_MCOPT1_DMWD_MASK) == SDRAM_MCOPT1_DMWD_32)
  2154. address_increment = 4;
  2155. else
  2156. address_increment = 8;
  2157. end_address = current_address + num_bytes;
  2158. while (current_address < end_address) {
  2159. *((unsigned long *)current_address) = 0x00000000;
  2160. current_address += address_increment;
  2161. if ((loop++ % (2 << 20)) == 0) {
  2162. putc('\b');
  2163. putc(slash[loopi++ % 8]);
  2164. }
  2165. }
  2166. } else {
  2167. /* ECC bit set method for cached memory */
  2168. dcbz_area(start_address, num_bytes);
  2169. /* Write modified dcache lines back to memory */
  2170. clean_dcache_range(start_address, start_address + num_bytes);
  2171. }
  2172. blank_string(strlen(str));
  2173. sync();
  2174. eieio();
  2175. wait_ddr_idle();
2176. /* clear the ECC error reporting registers */
  2177. mtsdram(SDRAM_ECCCR, 0xffffffff);
  2178. mtdcr(0x4c, 0xffffffff);
  2179. mtsdram(SDRAM_MCOPT1,
  2180. (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_CHK_REP);
  2181. sync();
  2182. eieio();
  2183. wait_ddr_idle();
  2184. }
  2185. }
  2186. #endif
  2187. #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
  2188. /*-----------------------------------------------------------------------------+
  2189. * program_DQS_calibration.
  2190. *-----------------------------------------------------------------------------*/
  2191. static void program_DQS_calibration(unsigned long *dimm_populated,
  2192. unsigned char *iic0_dimm_addr,
  2193. unsigned long num_dimm_banks)
  2194. {
  2195. unsigned long val;
2196. #ifdef HARD_CODED_DQS /* calibration test with hard-coded values */
  2197. mtsdram(SDRAM_RQDC, 0x80000037);
  2198. mtsdram(SDRAM_RDCC, 0x40000000);
  2199. mtsdram(SDRAM_RFDC, 0x000001DF);
  2200. test();
  2201. #else
  2202. /*------------------------------------------------------------------
  2203. * Program RDCC register
  2204. * Read sample cycle auto-update enable
  2205. *-----------------------------------------------------------------*/
  2206. mfsdram(SDRAM_RDCC, val);
  2207. mtsdram(SDRAM_RDCC,
  2208. (val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
  2209. | SDRAM_RDCC_RSAE_ENABLE);
  2210. /*------------------------------------------------------------------
  2211. * Program RQDC register
  2212. * Internal DQS delay mechanism enable
  2213. *-----------------------------------------------------------------*/
  2214. mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
  2215. /*------------------------------------------------------------------
  2216. * Program RFDC register
  2217. * Set Feedback Fractional Oversample
  2218. * Auto-detect read sample cycle enable
  2219. * Set RFOS to 1/4 of memclk cycle (0x3f)
  2220. *-----------------------------------------------------------------*/
  2221. mfsdram(SDRAM_RFDC, val);
  2222. mtsdram(SDRAM_RFDC,
  2223. (val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
  2224. SDRAM_RFDC_RFFD_MASK))
  2225. | (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x3f) |
  2226. SDRAM_RFDC_RFFD_ENCODE(0)));
  2227. DQS_calibration_process();
  2228. #endif
  2229. }
  2230. static int short_mem_test(void)
  2231. {
  2232. u32 *membase;
  2233. u32 bxcr_num;
  2234. u32 bxcf;
  2235. int i;
  2236. int j;
  2237. phys_size_t base_addr;
  2238. u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
  2239. {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
  2240. 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
  2241. {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
  2242. 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
  2243. {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
  2244. 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
  2245. {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
  2246. 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
  2247. {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
  2248. 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
  2249. {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
  2250. 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
  2251. {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
  2252. 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
  2253. {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
  2254. 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
  2255. int l;
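/*
 * Walk all memory bank configuration registers; for every enabled bank
 * mapped below CONFIG_MAX_MEM_MAPPED, write the test patterns and read
 * them back NUMLOOPS times. Returns 1 on success, 0 on the first mismatch.
 */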
  2256. for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
  2257. mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
  2258. /* Banks enabled */
  2259. if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
  2260. /* Bank is enabled */
  2261. /*
2262. * Only run the test on accessible memory (below 2GB)
  2263. */
  2264. base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
  2265. if (base_addr >= CONFIG_MAX_MEM_MAPPED)
  2266. continue;
  2267. /*------------------------------------------------------------------
  2268. * Run the short memory test.
  2269. *-----------------------------------------------------------------*/
  2270. membase = (u32 *)(u32)base_addr;
  2271. for (i = 0; i < NUMMEMTESTS; i++) {
  2272. for (j = 0; j < NUMMEMWORDS; j++) {
  2273. membase[j] = test[i][j];
  2274. ppcDcbf((u32)&(membase[j]));
  2275. }
  2276. sync();
2277. for (l = 0; l < NUMLOOPS; l++) {
  2278. for (j = 0; j < NUMMEMWORDS; j++) {
  2279. if (membase[j] != test[i][j]) {
  2280. ppcDcbf((u32)&(membase[j]));
  2281. return 0;
  2282. }
  2283. ppcDcbf((u32)&(membase[j]));
  2284. }
  2285. sync();
  2286. }
  2287. }
  2288. } /* if bank enabled */
  2289. } /* for bxcf_num */
  2290. return 1;
  2291. }
  2292. #ifndef HARD_CODED_DQS
  2293. /*-----------------------------------------------------------------------------+
  2294. * DQS_calibration_process.
  2295. *-----------------------------------------------------------------------------*/
  2296. static void DQS_calibration_process(void)
  2297. {
  2298. unsigned long rfdc_reg;
  2299. unsigned long rffd;
  2300. unsigned long val;
  2301. long rffd_average;
  2302. long max_start;
  2303. long min_end;
  2304. unsigned long begin_rqfd[MAXRANKS];
  2305. unsigned long begin_rffd[MAXRANKS];
  2306. unsigned long end_rqfd[MAXRANKS];
  2307. unsigned long end_rffd[MAXRANKS];
  2308. char window_found;
  2309. unsigned long dlycal;
  2310. unsigned long dly_val;
  2311. unsigned long max_pass_length;
  2312. unsigned long current_pass_length;
  2313. unsigned long current_fail_length;
  2314. unsigned long current_start;
  2315. long max_end;
  2316. unsigned char fail_found;
  2317. unsigned char pass_found;
  2318. #if !defined(CONFIG_DDR_RQDC_FIXED)
  2319. u32 rqdc_reg;
  2320. u32 rqfd;
  2321. u32 rqfd_start;
  2322. u32 rqfd_average;
  2323. int loopi = 0;
  2324. char str[] = "Auto calibration -";
  2325. char slash[] = "\\|/-\\|/-";
  2326. /*------------------------------------------------------------------
  2327. * Test to determine the best read clock delay tuning bits.
  2328. *
  2329. * Before the DDR controller can be used, the read clock delay needs to be
  2330. * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
  2331. * This value cannot be hardcoded into the program because it changes
  2332. * depending on the board's setup and environment.
  2333. * To do this, all delay values are tested to see if they
  2334. * work or not. By doing this, you get groups of fails with groups of
  2335. * passing values. The idea is to find the start and end of a passing
  2336. * window and take the center of it to use as the read clock delay.
  2337. *
  2338. * A failure has to be seen first so that when we hit a pass, we know
2339. * that it is truly the start of the window. If we get passing values
  2340. * to start off with, we don't know if we are at the start of the window.
  2341. *
  2342. * The code assumes that a failure will always be found.
  2343. * If a failure is not found, there is no easy way to get the middle
  2344. * of the passing window. I guess we can pretty much pick any value
  2345. * but some values will be better than others. Since the lowest speed
  2346. * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
  2347. * from experimentation it is safe to say you will always have a failure.
  2348. *-----------------------------------------------------------------*/
2349. /* first fix RQDC[RQFD] to an average 80 degree phase shift to find RFDC[RFFD] */
  2350. rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
  2351. puts(str);
  2352. calibration_loop:
  2353. mfsdram(SDRAM_RQDC, rqdc_reg);
  2354. mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
  2355. SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
  2356. #else /* CONFIG_DDR_RQDC_FIXED */
  2357. /*
2358. * On Katmai the complete auto-calibration does not always produce
2359. * optimal values for RQFD/RFFD.
  2360. * This was discovered by GDA using a high bandwidth scope,
  2361. * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
  2362. * so now on Katmai "only" RFFD is auto-calibrated.
  2363. */
  2364. mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
  2365. #endif /* CONFIG_DDR_RQDC_FIXED */
  2366. max_start = 0;
  2367. min_end = 0;
  2368. begin_rqfd[0] = 0;
  2369. begin_rffd[0] = 0;
  2370. begin_rqfd[1] = 0;
  2371. begin_rffd[1] = 0;
  2372. end_rqfd[0] = 0;
  2373. end_rffd[0] = 0;
  2374. end_rqfd[1] = 0;
  2375. end_rffd[1] = 0;
  2376. window_found = FALSE;
  2377. max_pass_length = 0;
  2378. max_start = 0;
  2379. max_end = 0;
  2380. current_pass_length = 0;
  2381. current_fail_length = 0;
  2382. current_start = 0;
  2383. window_found = FALSE;
  2384. fail_found = FALSE;
  2385. pass_found = FALSE;
  2386. /*
  2387. * get the delay line calibration register value
  2388. */
  2389. mfsdram(SDRAM_DLCR, dlycal);
  2390. dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
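/*
 * Sweep all RFFD values. A run of failures at least as long as the
 * decoded delay-line value (dly_val >> 2) must be seen before a passing
 * window is accepted as complete.
 */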
  2391. for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
  2392. mfsdram(SDRAM_RFDC, rfdc_reg);
  2393. rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
  2394. /*------------------------------------------------------------------
  2395. * Set the timing reg for the test.
  2396. *-----------------------------------------------------------------*/
  2397. mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
  2398. /*------------------------------------------------------------------
  2399. * See if the rffd value passed.
  2400. *-----------------------------------------------------------------*/
  2401. if (short_mem_test()) {
  2402. if (fail_found == TRUE) {
  2403. pass_found = TRUE;
  2404. if (current_pass_length == 0)
  2405. current_start = rffd;
  2406. current_fail_length = 0;
  2407. current_pass_length++;
  2408. if (current_pass_length > max_pass_length) {
  2409. max_pass_length = current_pass_length;
  2410. max_start = current_start;
  2411. max_end = rffd;
  2412. }
  2413. }
  2414. } else {
  2415. current_pass_length = 0;
  2416. current_fail_length++;
  2417. if (current_fail_length >= (dly_val >> 2)) {
  2418. if (fail_found == FALSE) {
  2419. fail_found = TRUE;
  2420. } else if (pass_found == TRUE) {
  2421. window_found = TRUE;
  2422. break;
  2423. }
  2424. }
  2425. }
  2426. } /* for rffd */
  2427. /*------------------------------------------------------------------
  2428. * Set the average RFFD value
  2429. *-----------------------------------------------------------------*/
  2430. rffd_average = ((max_start + max_end) >> 1);
  2431. if (rffd_average < 0)
  2432. rffd_average = 0;
  2433. if (rffd_average > SDRAM_RFDC_RFFD_MAX)
  2434. rffd_average = SDRAM_RFDC_RFFD_MAX;
  2435. /* now fix RFDC[RFFD] found and find RQDC[RQFD] */
  2436. mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
  2437. #if !defined(CONFIG_DDR_RQDC_FIXED)
  2438. max_pass_length = 0;
  2439. max_start = 0;
  2440. max_end = 0;
  2441. current_pass_length = 0;
  2442. current_fail_length = 0;
  2443. current_start = 0;
  2444. window_found = FALSE;
  2445. fail_found = FALSE;
  2446. pass_found = FALSE;
  2447. for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
  2448. mfsdram(SDRAM_RQDC, rqdc_reg);
  2449. rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
  2450. /*------------------------------------------------------------------
  2451. * Set the timing reg for the test.
  2452. *-----------------------------------------------------------------*/
  2453. mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
  2454. /*------------------------------------------------------------------
2455. * See if the rqfd value passed.
  2456. *-----------------------------------------------------------------*/
  2457. if (short_mem_test()) {
  2458. if (fail_found == TRUE) {
  2459. pass_found = TRUE;
  2460. if (current_pass_length == 0)
  2461. current_start = rqfd;
  2462. current_fail_length = 0;
  2463. current_pass_length++;
  2464. if (current_pass_length > max_pass_length) {
  2465. max_pass_length = current_pass_length;
  2466. max_start = current_start;
  2467. max_end = rqfd;
  2468. }
  2469. }
  2470. } else {
  2471. current_pass_length = 0;
  2472. current_fail_length++;
  2473. if (fail_found == FALSE) {
  2474. fail_found = TRUE;
  2475. } else if (pass_found == TRUE) {
  2476. window_found = TRUE;
  2477. break;
  2478. }
  2479. }
  2480. }
  2481. rqfd_average = ((max_start + max_end) >> 1);
  2482. /*------------------------------------------------------------------
  2483. * Make sure we found the valid read passing window. Halt if not
  2484. *-----------------------------------------------------------------*/
  2485. if (window_found == FALSE) {
  2486. if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
  2487. putc('\b');
  2488. putc(slash[loopi++ % 8]);
2489. /* try again with a different RQFD start value */
  2490. rqfd_start++;
  2491. goto calibration_loop;
  2492. }
  2493. printf("\nERROR: Cannot determine a common read delay for the "
  2494. "DIMM(s) installed.\n");
2495. debug("%s[%d] ERROR:\n", __FUNCTION__, __LINE__);
  2496. ppc4xx_ibm_ddr2_register_dump();
  2497. spd_ddr_init_hang ();
  2498. }
  2499. if (rqfd_average < 0)
  2500. rqfd_average = 0;
  2501. if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
  2502. rqfd_average = SDRAM_RQDC_RQFD_MAX;
  2503. mtsdram(SDRAM_RQDC,
  2504. (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
  2505. SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
  2506. blank_string(strlen(str));
  2507. #endif /* CONFIG_DDR_RQDC_FIXED */
  2508. /*
  2509. * Now complete RDSS configuration as mentioned on page 7 of the AMCC
  2510. * PowerPC440SP/SPe DDR2 application note:
  2511. * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
  2512. */
  2513. mfsdram(SDRAM_RTSR, val);
  2514. if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
  2515. mfsdram(SDRAM_RDCC, val);
  2516. if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
  2517. val += 0x40000000;
  2518. mtsdram(SDRAM_RDCC, val);
  2519. }
  2520. }
  2521. mfsdram(SDRAM_DLCR, val);
  2522. debug("%s[%d] DLCR: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2523. mfsdram(SDRAM_RQDC, val);
  2524. debug("%s[%d] RQDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2525. mfsdram(SDRAM_RFDC, val);
  2526. debug("%s[%d] RFDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2527. mfsdram(SDRAM_RDCC, val);
  2528. debug("%s[%d] RDCC: 0x%08X\n", __FUNCTION__, __LINE__, val);
  2529. }
2530. #else /* calibration test with hard-coded values */
  2531. /*-----------------------------------------------------------------------------+
2532. * test - DQS calibration check with hard-coded values.
  2533. *-----------------------------------------------------------------------------*/
static void test(void)
{
	unsigned long dimm_num;
	unsigned long ecc_temp;
	unsigned long i, j;
	unsigned long *membase;
	unsigned long bxcf[MAXRANKS];
	unsigned long val;
	char window_found;
	char begin_found[MAXDIMMS];
	char end_found[MAXDIMMS];
	char search_end[MAXDIMMS];
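	/*
	 * Data patterns used by the short memory test below.  Each pattern
	 * fills NUMMEMWORDS consecutive words and is intended to drive
	 * every data bit both high and low (all-zeros/all-ones plus
	 * alternating-complement patterns).
	 */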
	unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
		{0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
		 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
		{0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
		 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
		{0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
		 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
		{0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
		 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
		{0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
		 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
		{0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
		 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
		{0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
		 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
		{0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
		 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };

	/*------------------------------------------------------------------
	 * Test to determine the best read clock delay tuning bits.
	 *
	 * Before the DDR controller can be used, the read clock delay needs
	 * to be set.  This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
	 * This value cannot be hardcoded into the program because it changes
	 * depending on the board's setup and environment.
	 * To do this, all delay values are tested to see if they work or
	 * not.  By doing this, you get groups of fails with groups of
	 * passing values.  The idea is to find the start and end of a
	 * passing window and take the center of it to use as the read
	 * clock delay.
	 *
	 * A failure has to be seen first so that when we hit a pass, we
	 * know that it is truly the start of the window.  If we get passing
	 * values to start off with, we don't know if we are at the start
	 * of the window.
	 *
	 * The code assumes that a failure will always be found.
	 * If a failure is not found, there is no easy way to get the middle
	 * of the passing window.  Any value could be picked, but some
	 * values will be better than others.  Since the lowest speed we can
	 * clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
	 * from experimentation it is safe to say you will always have a
	 * failure.
	 *-----------------------------------------------------------------*/
	mfsdram(SDRAM_MCOPT1, ecc_temp);
	ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
	mfsdram(SDRAM_MCOPT1, val);
	mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
		SDRAM_MCOPT1_MCHK_NON);

	window_found = FALSE;
	begin_found[0] = FALSE;
	end_found[0] = FALSE;
	search_end[0] = FALSE;
	begin_found[1] = FALSE;
	end_found[1] = FALSE;
	search_end[1] = FALSE;

	for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
		mfsdram(SDRAM_MB0CF + (dimm_num << 2), bxcf[dimm_num]);

		/* Banks enabled */
		if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
			/* Bank is enabled */
			membase = (unsigned long *)
				(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS + dimm_num)));

			/*------------------------------------------------------------------
			 * Run the short memory test.
			 *-----------------------------------------------------------------*/
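			/*
			 * Each pattern is written to the start of the bank,
			 * flushed out of the data cache with dcbf so that it
			 * actually reaches SDRAM, and then read back (again
			 * flushing around the access) so the comparison sees
			 * what the controller returns rather than cached data.
			 */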
			for (i = 0; i < NUMMEMTESTS; i++) {
				for (j = 0; j < NUMMEMWORDS; j++) {
					membase[j] = test[i][j];
					ppcDcbf((u32)&(membase[j]));
				}
				sync();
				for (j = 0; j < NUMMEMWORDS; j++) {
					if (membase[j] != test[i][j]) {
						ppcDcbf((u32)&(membase[j]));
						break;
					}
					ppcDcbf((u32)&(membase[j]));
				}
				sync();
				if (j < NUMMEMWORDS)
					break;
			}

			/*------------------------------------------------------------------
			 * See if the currently programmed read delay passed.
			 *-----------------------------------------------------------------*/
			if (i < NUMMEMTESTS) {
				if ((end_found[dimm_num] == FALSE) &&
				    (search_end[dimm_num] == TRUE)) {
					end_found[dimm_num] = TRUE;
				}

				if ((end_found[0] == TRUE) &&
				    (end_found[1] == TRUE))
					break;
			} else {
				if (begin_found[dimm_num] == FALSE) {
					begin_found[dimm_num] = TRUE;
					search_end[dimm_num] = TRUE;
				}
			}
		} else {
			begin_found[dimm_num] = TRUE;
			end_found[dimm_num] = TRUE;
		}
	}

	if ((begin_found[0] == TRUE) && (begin_found[1] == TRUE))
		window_found = TRUE;

	/*------------------------------------------------------------------
	 * Make sure we found the valid read passing window. Halt if not
	 *-----------------------------------------------------------------*/
	if (window_found == FALSE) {
		printf("ERROR: Cannot determine a common read delay for the "
		       "DIMM(s) installed.\n");
		spd_ddr_init_hang();
	}

	/*------------------------------------------------------------------
	 * Restore the ECC variable to what it originally was
	 *-----------------------------------------------------------------*/
	mtsdram(SDRAM_MCOPT1,
		(ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
		| ecc_temp);
}
#endif /* !HARD_CODED_DQS */
#endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */

#else /* CONFIG_SPD_EEPROM */

/*-----------------------------------------------------------------------------
 * Function:    initdram
 * Description: Configures the PPC4xx IBM DDR1/DDR2 SDRAM memory controller.
 *              The configuration is performed using static, compile-
 *              time parameters.
 *              Configures the PPC405EX(r) and PPC460EX/GT
 *---------------------------------------------------------------------------*/
phys_size_t initdram(int board_type)
{
	/*
	 * Only run this SDRAM init code once. For NAND booting
	 * targets like Kilauea, we call initdram() early from the
	 * 4k NAND booting image (CONFIG_NAND_SPL) from nand_boot().
	 * Later on the NAND U-Boot image runs (CONFIG_NAND_U_BOOT)
	 * which calls initdram() again. This time the controller
	 * mustn't be reconfigured again since we're already running
	 * from SDRAM.
	 */
#if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
	unsigned long val;

#if defined(CONFIG_440)
	mtdcr(SDRAM_R0BAS, CONFIG_SYS_SDRAM_R0BAS);
	mtdcr(SDRAM_R1BAS, CONFIG_SYS_SDRAM_R1BAS);
	mtdcr(SDRAM_R2BAS, CONFIG_SYS_SDRAM_R2BAS);
	mtdcr(SDRAM_R3BAS, CONFIG_SYS_SDRAM_R3BAS);
	mtdcr(SDRAM_PLBADDULL, CONFIG_SYS_SDRAM_PLBADDULL);	/* MQ0_BAUL */
	mtdcr(SDRAM_PLBADDUHB, CONFIG_SYS_SDRAM_PLBADDUHB);	/* MQ0_BAUH */
	mtdcr(SDRAM_CONF1LL, CONFIG_SYS_SDRAM_CONF1LL);
	mtdcr(SDRAM_CONF1HB, CONFIG_SYS_SDRAM_CONF1HB);
	mtdcr(SDRAM_CONFPATHB, CONFIG_SYS_SDRAM_CONFPATHB);
#endif

	/* Set Memory Bank Configuration Registers */
	mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
	mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
	mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
	mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);

	/* Set Memory Clock Timing Register */
	mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);

	/* Set Refresh Time Register */
	mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);

	/* Set SDRAM Timing Registers */
	mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
	mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
	mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);

	/* Set Mode and Extended Mode Registers */
	mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
	mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);

	/* Set Memory Controller Options 1 Register */
	mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);

	/* Set Manual Initialization Control Registers */
	mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
	mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
	mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
	mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
	mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
	mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
	mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
	mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
	mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
	mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
	mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
	mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
	mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
	mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
	mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
	mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);

	/* Set On-Die Termination Registers */
	mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
	mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
	mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);

	/* Set Write Timing Register */
	mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);

	/*
	 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
	 * SDRAM0_MCOPT2[IPTR] = 1
	 */
	mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
			       SDRAM_MCOPT2_IPTR_EXECUTE));

	/*
	 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
	 * completion of initialization.
	 */
	do {
		mfsdram(SDRAM_MCSTAT, val);
	} while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);
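	/*
	 * Note that the poll above has no timeout: if the static timing
	 * parameters are wrong the boot simply hangs here, which is usually
	 * the first symptom of a bad CONFIG_SYS_SDRAM0_* setup.
	 */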

	/* Set Delay Control Registers */
	mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);

#if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
	mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
	mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
#endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */

	/*
	 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
	 */
	mfsdram(SDRAM_MCOPT2, val);
	mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);

#if defined(CONFIG_440)
	/*
	 * Program TLB entries with caches enabled, for best performance
	 * during auto-calibration and ECC generation
	 */
	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), 0);
#endif

#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
#if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
	/*------------------------------------------------------------------
	 | DQS calibration.
	 +-----------------------------------------------------------------*/
	DQS_autocalibration();
#endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
#endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */

#if defined(CONFIG_DDR_ECC)
	ecc_init(CONFIG_SYS_SDRAM_BASE, CONFIG_SYS_MBYTES_SDRAM << 20);
#endif /* defined(CONFIG_DDR_ECC) */

#if defined(CONFIG_440)
	/*
	 * Now after initialization (auto-calibration and ECC generation)
	 * remove the TLB entries with caches enabled and program again with
	 * desired cache functionality
	 */
	remove_tlb(0, (CONFIG_SYS_MBYTES_SDRAM << 20));
	program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), MY_TLB_WORD2_I_ENABLE);
#endif

	ppc4xx_ibm_ddr2_register_dump();

#if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
	/*
	 * Clear potential errors resulting from auto-calibration.
	 * If not done, then we could get an interrupt later on when
	 * exceptions are enabled.
	 */
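	/*
	 * MCSR bits are cleared by writing a 1 to them, so writing back the
	 * value just read clears every pending machine-check cause at once.
	 */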
	set_mcsr(get_mcsr());
#endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
#endif /* !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL) */

	return (CONFIG_SYS_MBYTES_SDRAM << 20);
}
#endif /* CONFIG_SPD_EEPROM */

#if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
#if defined(CONFIG_440)
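/*
 * mfdcr()/mtdcr() expand to instructions that encode the DCR number as an
 * immediate operand, so they only work with compile-time constants.  These
 * helpers allow the rank base-address DCRs to be selected at run time by
 * switching over the known SDRAM_R0BAS..SDRAM_R3BAS values.
 */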
u32 mfdcr_any(u32 dcr)
{
	u32 val;

	switch (dcr) {
	case SDRAM_R0BAS + 0:
		val = mfdcr(SDRAM_R0BAS + 0);
		break;
	case SDRAM_R0BAS + 1:
		val = mfdcr(SDRAM_R0BAS + 1);
		break;
	case SDRAM_R0BAS + 2:
		val = mfdcr(SDRAM_R0BAS + 2);
		break;
	case SDRAM_R0BAS + 3:
		val = mfdcr(SDRAM_R0BAS + 3);
		break;
	default:
		printf("DCR %d not defined in case statement!!!\n", dcr);
		val = 0; /* just to satisfy the compiler */
	}

	return val;
}

void mtdcr_any(u32 dcr, u32 val)
{
	switch (dcr) {
	case SDRAM_R0BAS + 0:
		mtdcr(SDRAM_R0BAS + 0, val);
		break;
	case SDRAM_R0BAS + 1:
		mtdcr(SDRAM_R0BAS + 1, val);
		break;
	case SDRAM_R0BAS + 2:
		mtdcr(SDRAM_R0BAS + 2, val);
		break;
	case SDRAM_R0BAS + 3:
		mtdcr(SDRAM_R0BAS + 3, val);
		break;
	default:
		printf("DCR %d not defined in case statement!!!\n", dcr);
	}
}
#endif /* defined(CONFIG_440) */
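
/*
 * Erase 'size' characters that were previously printed on the console by
 * emitting backspaces, overwriting with spaces and backspacing again.
 * Used above to remove the calibration progress string once tuning is done.
 */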
void blank_string(int size)
{
	int i;

	for (i = 0; i < size; i++)
		putc('\b');
	for (i = 0; i < size; i++)
		putc(' ');
	for (i = 0; i < size; i++)
		putc('\b');
}
#endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */

inline void ppc4xx_ibm_ddr2_register_dump(void)
{
#if defined(DEBUG)
	printf("\nPPC4xx IBM DDR2 Register Dump:\n");

#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R0BAS);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R1BAS);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R2BAS);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(R3BAS);
#endif /* (defined(CONFIG_440SP) || ... */
#if defined(CONFIG_405EX)
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
#endif /* defined(CONFIG_405EX) */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);

#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
	/*
	 * OPART is only used as a trigger register.
	 *
	 * No data is contained in this register, and reading or writing
	 * to it can cause bad things to happen (hangs). Just skip it and
	 * report "N/A".
	 */
	printf("%20s = N/A\n", "SDRAM_OPART");
#endif /* defined(CONFIG_440SP) || ... */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCCR);
#if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
     defined(CONFIG_460EX) || defined(CONFIG_460GT))
	PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
#endif /* defined(CONFIG_440SP) || ... */
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
	PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
#endif /* defined(DEBUG) */
}
#endif /* CONFIG_SDRAM_PPC4xx_IBM_DDR2 */