/*
 * Copyright (C) 2018-2022, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <errno.h>
#include <stddef.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/clk.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <drivers/st/stm32mp_ddr.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

#include <platform_def.h>

#define DDRCTL_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp_ddrctl, x), \
		.par_offset = offsetof(struct y, x) \
	}

#define DDRPHY_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp_ddrphy, x), \
		.par_offset = offsetof(struct y, x) \
	}

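/*
 * Each descriptor built with these macros links a controller/PHY register
 * with the matching field of the device tree parameter structure.
 * For instance, DDRCTL_REG(mstr, stm32mp1_ddrctrl_reg) expands to:
 *	{
 *		.name = "mstr",
 *		.offset = offsetof(struct stm32mp_ddrctl, mstr),
 *		.par_offset = offsetof(struct stm32mp1_ddrctrl_reg, mstr)
 *	}
 */
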
/*
 * PARAMETERS: values retrieved from the device tree:
 *             size and order must stay aligned with the binding,
 *             modification NOT ALLOWED!!!
 */
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */
#else
#define DDRCTL_REG_PERF_SIZE	11	/* st,ctl-perf */
#endif

#if STM32MP_DDR_32BIT_INTERFACE
#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#else
#define DDRPHY_REG_REG_SIZE	9	/* st,phy-reg */
#endif
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
#endif
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
#if STM32MP_DDR_32BIT_INTERFACE
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
#endif
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

/*
 * REGISTERS ARRAY: used to parse device tree and interactive mode
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};

static void stm32mp1_ddrphy_idone_wait(struct stm32mp_ddrphy *phy)
{
	uint32_t pgsr;
	int error = 0;
	uint64_t timeout = timeout_init_us(TIMEOUT_US_1S);

	do {
		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);

		VERBOSE(" > [0x%lx] pgsr = 0x%x\n",
			(uintptr_t)&phy->pgsr, pgsr);

		if (timeout_elapsed(timeout)) {
			panic();
		}

		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
			VERBOSE("DQS Gate Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
			VERBOSE("DQS Gate Training Intermittent Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
			VERBOSE("DQS Drift Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
			VERBOSE("Read Valid Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
			VERBOSE("Read Valid Training Intermittent Error\n");
			error++;
		}
	} while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
		(uintptr_t)&phy->pgsr, pgsr);
}

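/*
 * Launch a PHY (PUBL) initialization sequence: the steps requested in 'pir'
 * are started by setting PIR.INIT, then completion is polled via PGSR.IDONE.
 */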
static void stm32mp1_ddrphy_init(struct stm32mp_ddrphy *phy, uint32_t pir)
{
	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;

	mmio_write_32((uintptr_t)&phy->pir, pir_init);
	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
		(uintptr_t)&phy->pir, pir_init,
		mmio_read_32((uintptr_t)&phy->pir));

	/* Need to wait 10 configuration clock cycles before starting to poll */
	udelay(10);

	/* Wait for DRAM initialization and Gate Training Evaluation to complete */
	stm32mp1_ddrphy_idone_wait(phy);
}

/* Wait for quasi-dynamic register update */
static void stm32mp1_wait_operating_mode(struct stm32mp_ddr_priv *priv, uint32_t mode)
{
	uint64_t timeout;
	uint32_t stat;
	int break_loop = 0;

	timeout = timeout_init_us(TIMEOUT_US_1S);
	for ( ; ; ) {
		uint32_t operating_mode;
		uint32_t selref_type;

		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%lx] stat = 0x%x\n",
			(uintptr_t)&priv->ctl->stat, stat);
		if (timeout_elapsed(timeout)) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => also check STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: also handle automatic self-refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}

/* Mode Register Writes (MRW or MRS) */
static void stm32mp1_mode_register_write(struct stm32mp_ddr_priv *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}

/* Switch DDR3 from DLL-on to DLL-off */
static void stm32mp1_ddr3_dll_off(struct stm32mp_ddr_priv *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#if STM32MP_DDR_32BIT_INTERFACE
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#endif

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}

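/*
 * Helpers used around the DQS training step: temporarily disable auto
 * refresh, power-down and the DFI init complete signal, then restore the
 * settings requested by the device tree configuration.
 */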
static void stm32mp1_refresh_disable(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}

static void stm32mp1_refresh_restore(struct stm32mp_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}

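/*
 * Top-level DDR initialization sequence, run with the configuration parsed
 * from the device tree: enable the DDR power supplies, program the
 * DWC_ddr_umctl2 controller and the PUBL PHY, let the PHY run the DRAM
 * initialization, then perform DQS training before enabling the AXI port.
 * Any failure ends in panic().
 */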
void stm32mp1_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *    note: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	stm32mp_ddr_set_reg(priv, REGPHY_REG, &config->p_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REGPHY_TIMING, &config->p_timing, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to PUBL that controller performs SDRAM initialization
	 *    by setting PIR.INIT and PIR.CTLDINIT and poll PGSR.IDONE
	 *    DRAM init is done by PHY, init0.skip_dram_init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. SET DFIMISC.dfi_init_complete_en to 1
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *    by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 *     to run
	 *     RVTRN is executed only on LPDDR2/LPDDR3
	 */
	pir = DDRPHYC_PIR_QSTRN;
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) == 0U) {
		pir |= DDRPHYC_PIR_RVTRN;
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. monitor PUBL PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back the registers from step 8 to their original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	stm32mp_ddr_enable_axi_port(priv->ctl);
}