1 /*
2  * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <arch_helpers.h>
8 #include <debug.h>
9 #include <dram.h>
10 #include <plat_private.h>
11 #include <platform_def.h>
12 #include <pmu_regs.h>
13 #include <rk3399_def.h>
14 #include <secure.h>
15 #include <soc.h>
16 #include <suspend.h>
17 
18 #define PMUGRF_OS_REG0			0x300
19 #define PMUGRF_OS_REG1			0x304
20 #define PMUGRF_OS_REG2			0x308
21 #define PMUGRF_OS_REG3			0x30c
22 
23 #define CRU_SFTRST_DDR_CTRL(ch, n)	((0x1 << (8 + 16 + (ch) * 4)) | \
24 					 ((n) << (8 + (ch) * 4)))
25 #define CRU_SFTRST_DDR_PHY(ch, n)	((0x1 << (9 + 16 + (ch) * 4)) | \
26 					 ((n) << (9 + (ch) * 4)))
27 
28 #define FBDIV_ENC(n)			((n) << 16)
29 #define FBDIV_DEC(n)			(((n) >> 16) & 0xfff)
30 #define POSTDIV2_ENC(n)			((n) << 12)
31 #define POSTDIV2_DEC(n)			(((n) >> 12) & 0x7)
32 #define POSTDIV1_ENC(n)			((n) << 8)
33 #define POSTDIV1_DEC(n)			(((n) >> 8) & 0x7)
34 #define REFDIV_ENC(n)			(n)
35 #define REFDIV_DEC(n)			((n) & 0x3f)
36 
37 /* PMU CRU */
38 #define PMUCRU_RSTNHOLD_CON0		0x120
39 #define PMUCRU_RSTNHOLD_CON1		0x124
40 
41 #define PRESET_GPIO0_HOLD(n)		(((n) << 7) | WMSK_BIT(7))
42 #define PRESET_GPIO1_HOLD(n)		(((n) << 8) | WMSK_BIT(8))
43 
44 #define SYS_COUNTER_FREQ_IN_MHZ		(SYS_COUNTER_FREQ_IN_TICKS / 1000000)
45 
46 __pmusramdata uint32_t dpll_data[PLL_CON_COUNT];
47 __pmusramdata uint32_t cru_clksel_con6;
48 
49 /*
50  * Copy @num registers from @src to @dst
51  */
sram_regcpy(uintptr_t dst,uintptr_t src,uint32_t num)52 static __pmusramfunc void sram_regcpy(uintptr_t dst, uintptr_t src,
53 		uint32_t num)
54 {
55 	while (num--) {
56 		mmio_write_32(dst, mmio_read_32(src));
57 		dst += sizeof(uint32_t);
58 		src += sizeof(uint32_t);
59 	}
60 }
61 
62 /*
63  * Copy @num registers from @src to @dst
64  * This is intentionally a copy of the sram_regcpy function. PMUSRAM functions
65  * cannot be called from code running in DRAM.
66  */
dram_regcpy(uintptr_t dst,uintptr_t src,uint32_t num)67 static void dram_regcpy(uintptr_t dst, uintptr_t src, uint32_t num)
68 {
69 	while (num--) {
70 		mmio_write_32(dst, mmio_read_32(src));
71 		dst += sizeof(uint32_t);
72 		src += sizeof(uint32_t);
73 	}
74 }
75 
sram_get_timer_value(void)76 static __pmusramfunc uint32_t sram_get_timer_value(void)
77 {
78 	/*
79 	 * Generic delay timer implementation expects the timer to be a down
80 	 * counter. We apply bitwise NOT operator to the tick values returned
81 	 * by read_cntpct_el0() to simulate the down counter.
82 	 */
83 	return (uint32_t)(~read_cntpct_el0());
84 }
85 
sram_udelay(uint32_t usec)86 static __pmusramfunc void sram_udelay(uint32_t usec)
87 {
88 	uint32_t start, cnt, delta, delta_us;
89 
90 	/* counter is decreasing */
91 	start = sram_get_timer_value();
92 	do {
93 		cnt = sram_get_timer_value();
94 		if (cnt > start) {
95 			delta = UINT32_MAX - cnt;
96 			delta += start;
97 		} else
98 			delta = start - cnt;
99 		delta_us = (delta * SYS_COUNTER_FREQ_IN_MHZ);
100 	} while (delta_us < usec);
101 }
102 
/*
 * Re-program the SGRF DDR-region clock/bypass bits that are lost across a
 * PD_CENTER power-down.
 */
static __pmusramfunc void configure_sgrf(void)
{
	/*
	 * SGRF_DDR_RGN_DPLL_CLK and SGRF_DDR_RGN_RTC_CLK:
	 * IC ECO bug, need to set this register.
	 *
	 * SGRF_DDR_RGN_BYPS:
	 * After the PD_CENTER suspend/resume, the DDR region
	 * related registers in the SGRF will be reset, we
	 * need to re-initialize them.
	 */
	mmio_write_32(SGRF_BASE + SGRF_DDRRGN_CON0_16(16),
		      SGRF_DDR_RGN_DPLL_CLK |
		      SGRF_DDR_RGN_RTC_CLK |
		      SGRF_DDR_RGN_BYPS);
}
119 
rkclk_ddr_reset(uint32_t channel,uint32_t ctl,uint32_t phy)120 static __pmusramfunc void rkclk_ddr_reset(uint32_t channel, uint32_t ctl,
121 		uint32_t phy)
122 {
123 	channel &= 0x1;
124 	ctl &= 0x1;
125 	phy &= 0x1;
126 	mmio_write_32(CRU_BASE + CRU_SOFTRST_CON(4),
127 		      CRU_SFTRST_DDR_CTRL(channel, ctl) |
128 		      CRU_SFTRST_DDR_PHY(channel, phy));
129 }
130 
/*
 * Pulse the DDR controller/PHY resets of channel @ch:
 * assert both, release the PHY, then release the controller,
 * with a 10us settle time after each step.
 */
static __pmusramfunc void phy_pctrl_reset(uint32_t ch)
{
	/* assert both controller and PHY reset */
	rkclk_ddr_reset(ch, 1, 1);
	sram_udelay(10);
	/* release the PHY reset, keep the controller in reset */
	rkclk_ddr_reset(ch, 1, 0);
	sram_udelay(10);
	/* release the controller reset */
	rkclk_ddr_reset(ch, 0, 0);
	sram_udelay(10);
}
140 
set_cs_training_index(uint32_t ch,uint32_t rank)141 static __pmusramfunc void set_cs_training_index(uint32_t ch, uint32_t rank)
142 {
143 	uint32_t byte;
144 
145 	/* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
146 	for (byte = 0; byte < 4; byte++)
147 		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 24,
148 				   rank << 24);
149 }
150 
select_per_cs_training_index(uint32_t ch,uint32_t rank)151 static __pmusramfunc void select_per_cs_training_index(uint32_t ch,
152 		uint32_t rank)
153 {
154 	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
155 	if ((mmio_read_32(PHY_REG(ch, 84)) >> 16) & 1)
156 		set_cs_training_index(ch, rank);
157 }
158 
override_write_leveling_value(uint32_t ch)159 static __pmusramfunc void override_write_leveling_value(uint32_t ch)
160 {
161 	uint32_t byte;
162 
163 	for (byte = 0; byte < 4; byte++) {
164 		/*
165 		 * PHY_8/136/264/392
166 		 * phy_per_cs_training_multicast_en_X 1bit offset_16
167 		 */
168 		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 16,
169 				   1 << 16);
170 		mmio_clrsetbits_32(PHY_REG(ch, 63 + (128 * byte)),
171 				   0xffff << 16,
172 				   0x200 << 16);
173 	}
174 
175 	/* CTL_200 ctrlupd_req 1bit offset_8 */
176 	mmio_clrsetbits_32(CTL_REG(ch, 200), 0x1 << 8, 0x1 << 8);
177 }
178 
/*
 * Run the PI (PHY-independent) training engine on channel @ch.
 *
 * @training_flag selects which sequences to run (CA training, write
 * leveling, read-gate training, read leveling, wdq leveling);
 * PI_FULL_TRAINING expands to the set appropriate for
 * sdram_params->dramtype.
 *
 * Returns 0 on success, -1 as soon as any training step reports an error
 * (the caller retries the whole bring-up in that case).
 */
static __pmusramfunc int data_training(uint32_t ch,
		struct rk3399_sdram_params *sdram_params,
		uint32_t training_flag)
{
	uint32_t obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	uint32_t rank = sdram_params->ch[ch].rank;
	uint32_t rank_mask;
	uint32_t i, tmp;

	/*
	 * LPDDR4 uses a sparse 4-bit index mask (0x5 / 0xf) while the other
	 * types use one bit per rank — presumably per-die CS indexing on
	 * LPDDR4; confirm against the PI documentation.
	 */
	if (sdram_params->dramtype == LPDDR4)
		rank_mask = (rank == 1) ? 0x5 : 0xf;
	else
		rank_mask = (rank == 1) ? 0x1 : 0x3;

	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
	mmio_setbits_32(PHY_REG(ch, 927), (1 << 22));

	/* expand PI_FULL_TRAINING into the per-DRAM-type sequence set */
	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->dramtype == LPDDR4) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING |
					PI_WDQ_LEVELING;
		} else if (sdram_params->dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/* PI_100 PI_CALVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 100), 0x3 << 8, 0x2 << 8);

			/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 92),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));
			/* poll done/error interrupt bits and the obs status */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 532));
				obs_1 = mmio_read_32(PHY_REG(ch, 660));
				obs_2 = mmio_read_32(PHY_REG(ch, 788));
				if (((obs_0 >> 30) & 0x3) ||
				    ((obs_1 >> 30) & 0x3) ||
				    ((obs_2 >> 30) & 0x3))
					obs_err = 1;
				/* done (bit 11+13) without error (bit 5) */
				if ((((tmp >> 11) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 5) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 5) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 100), 0x3 << 8);
	}

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_60 PI_WRLVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 60), 0x3 << 8, 0x2 << 8);
			/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 59),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));

			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs, if error maybe can not
				 * get leveling done PHY_40/168/296/424
				 * phy_wrlvl_status_obs_X:0:13
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 40));
				obs_1 = mmio_read_32(PHY_REG(ch, 168));
				obs_2 = mmio_read_32(PHY_REG(ch, 296));
				obs_3 = mmio_read_32(PHY_REG(ch, 424));
				if (((obs_0 >> 12) & 0x1) ||
				    ((obs_1 >> 12) & 0x1) ||
				    ((obs_2 >> 12) & 0x1) ||
				    ((obs_3 >> 12) & 0x1))
					obs_err = 1;
				/* done (bit 10+13) without error (bit 4) */
				if ((((tmp >> 10) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 4) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 4) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}

			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		override_write_leveling_value(ch);
		mmio_clrbits_32(PI_REG(ch, 60), 0x3 << 8);
	}

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 24,
					   0x2 << 24);
			/*
			 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
			 * PI_RDLVL_CS:RW:24:2
			 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));

			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_43/171/299/427
				 *     PHY_GTLVL_STATUS_OBS_x:16:8
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 43));
				obs_1 = mmio_read_32(PHY_REG(ch, 171));
				obs_2 = mmio_read_32(PHY_REG(ch, 299));
				obs_3 = mmio_read_32(PHY_REG(ch, 427));
				if (((obs_0 >> (16 + 6)) & 0x3) ||
				    ((obs_1 >> (16 + 6)) & 0x3) ||
				    ((obs_2 >> (16 + 6)) & 0x3) ||
				    ((obs_3 >> (16 + 6)) & 0x3))
					obs_err = 1;
				/* done (bit 9+13) without error (bit 3) */
				if ((((tmp >> 9) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 3) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 3) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 24);
	}

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 16,
					   0x2 << 16);
			/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 8) | (0x3 << 24),
					   (0x1 << 8) | (i << 24));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * make sure status obs not report error bit
				 * PHY_46/174/302/430
				 *     phy_rdlvl_status_obs_X:16:8
				 */
				if ((((tmp >> 8) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 2) & 0x1) == 0x0))
					break;
				else if (((tmp >> 2) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 16);
	}

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/*
			 * disable PI_WDQLVL_VREF_EN before wdq leveling?
			 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
			 */
			mmio_clrbits_32(PI_REG(ch, 181), 0x1 << 8);
			/* PI_124 PI_WDQLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 124), 0x3 << 16,
					   0x2 << 16);
			/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 121),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
				/* done (bit 12+13) without error (bit 6) */
				if ((((tmp >> 12) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 6) & 0x1) == 0x0))
					break;
				else if (((tmp >> 6) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 124), 0x3 << 16);
	}

	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
	mmio_clrbits_32(PHY_REG(ch, 927), (1 << 22));

	return 0;
}
423 
set_ddrconfig(struct rk3399_sdram_params * sdram_params,unsigned char channel,uint32_t ddrconfig)424 static __pmusramfunc void set_ddrconfig(
425 		struct rk3399_sdram_params *sdram_params,
426 		unsigned char channel, uint32_t ddrconfig)
427 {
428 	/* only need to set ddrconfig */
429 	struct rk3399_sdram_channel *ch = &sdram_params->ch[channel];
430 	unsigned int cs0_cap = 0;
431 	unsigned int cs1_cap = 0;
432 
433 	cs0_cap = (1 << (ch->cs0_row + ch->col + ch->bk + ch->bw - 20));
434 	if (ch->rank > 1)
435 		cs1_cap = cs0_cap >> (ch->cs0_row - ch->cs1_row);
436 	if (ch->row_3_4) {
437 		cs0_cap = cs0_cap * 3 / 4;
438 		cs1_cap = cs1_cap * 3 / 4;
439 	}
440 
441 	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICECONF,
442 		      ddrconfig | (ddrconfig << 6));
443 	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICESIZE,
444 		      ((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8));
445 }
446 
/*
 * Restore the NoC/MSCH timing registers of every populated channel, set
 * the address stride, and re-arm the reboot-hold / global-reset controls.
 */
static __pmusramfunc void dram_all_config(
		struct rk3399_sdram_params *sdram_params)
{
	unsigned int i;

	for (i = 0; i < 2; i++) {
		struct rk3399_sdram_channel *info = &sdram_params->ch[i];
		struct rk3399_msch_timings *noc = &info->noc_timings;

		/* col == 0 marks an unpopulated channel */
		if (sdram_params->ch[i].col == 0)
			continue;

		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGA0,
			      noc->ddrtiminga0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGB0,
			      noc->ddrtimingb0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGC0,
			      noc->ddrtimingc0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DEVTODEV0,
			      noc->devtodev0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRMODE, noc->ddrmode.d32);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[i].rank == 1)
			mmio_setbits_32(CTL_REG(i, 276), 1 << 17);
	}

	DDR_STRIDE(sdram_params->stride);

	/* reboot hold register set */
	mmio_write_32(PMUCRU_BASE + CRU_PMU_RSTHOLD_CON(1),
		      CRU_PMU_SGRF_RST_RLS |
		      PRESET_GPIO0_HOLD(1) |
		      PRESET_GPIO1_HOLD(1));
	mmio_clrsetbits_32(CRU_BASE + CRU_GLB_RST_CON, 0x3, 0x3);
}
483 
/*
 * Reload the CTL/PI/PHY register images saved by dmc_suspend() into
 * channel @ch, start the PI and the controller, wait for the PHY
 * PLL/DLL lock status, then restore the remaining PHY registers.
 * The exact write ordering below is required by the controller.
 */
static __pmusramfunc void pctl_cfg(uint32_t ch,
		struct rk3399_sdram_params *sdram_params)
{
	const uint32_t *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const uint32_t *params_pi = sdram_params->pi_regs.denali_pi;
	const struct rk3399_ddr_publ_regs *phy_regs = &sdram_params->phy_regs;
	uint32_t tmp, tmp1, tmp2, i;

	/*
	 * Workaround controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 * (hence CTL_1..N first, then CTL_0).
	 */
	sram_regcpy(CTL_REG(ch, 1), (uintptr_t)&params_ctl[1],
		    CTL_REG_NUM - 1);
	mmio_write_32(CTL_REG(ch, 0), params_ctl[0]);
	sram_regcpy(PI_REG(ch, 0), (uintptr_t)&params_pi[0],
		    PI_REG_NUM);

	/* restore PHY_910..912 before starting the controller */
	sram_regcpy(PHY_REG(ch, 910), (uintptr_t)&phy_regs->phy896[910 - 896],
		    3);

	/* set PWRUP_SREFRESH_EXIT — per the flag name, makes the
	 * controller leave self-refresh on start-up; confirm in the
	 * controller databook */
	mmio_clrsetbits_32(CTL_REG(ch, 68), PWRUP_SREFRESH_EXIT,
				PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	mmio_clrsetbits_32(PHY_REG(ch, 957), 0x3 << 24, 1 << 24);
	/* ensure all register writes above land before START is set */
	dmbst();

	mmio_setbits_32(PI_REG(ch, 0), START);
	mmio_setbits_32(CTL_REG(ch, 0), START);

	/* wait lock */
	while (1) {
		tmp = mmio_read_32(PHY_REG(ch, 920));
		tmp1 = mmio_read_32(PHY_REG(ch, 921));
		tmp2 = mmio_read_32(PHY_REG(ch, 922));
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		     (((tmp1 >> 16) & 0x1) == 0x1) &&
		     (((tmp1 >> 0) & 0x1) == 0x1) &&
		     (((tmp2 >> 0) & 0x1) == 0x1))
			break;
		/* if PLL bypass,don't need wait lock */
		if (mmio_read_32(PHY_REG(ch, 911)) & 0x1)
			break;
	}

	/* restore the remaining PHY register banks */
	sram_regcpy(PHY_REG(ch, 896), (uintptr_t)&phy_regs->phy896[0], 63);

	/* the same phy0 image is replicated across all four data slices */
	for (i = 0; i < 4; i++)
		sram_regcpy(PHY_REG(ch, 128 * i),
			    (uintptr_t)&phy_regs->phy0[0], 91);

	for (i = 0; i < 3; i++)
		sram_regcpy(PHY_REG(ch, 512 + 128 * i),
				(uintptr_t)&phy_regs->phy512[i][0], 38);
}
540 
/*
 * Ask the CIC to switch the DRAM frequency to the other register-copy
 * index, then re-point the PHY at that index and re-run full training
 * on every channel.
 *
 * Returns 0 on success, -1 if training fails on any channel.
 */
static __pmusramfunc int dram_switch_to_next_index(
		struct rk3399_sdram_params *sdram_params)
{
	uint32_t ch, ch_count;
	/* toggle the current frequency-copy index read from CTL_111 */
	uint32_t fn = ((mmio_read_32(CTL_REG(0, 111)) >> 16) + 1) & 0x1;

	/* request the switch via the CIC and wait for the ack (bit 2) */
	mmio_write_32(CIC_BASE + CIC_CTRL0,
		      (((0x3 << 4) | (1 << 2) | 1) << 16) |
		      (fn << 4) | (1 << 2) | 1);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 2)))
		;

	/* complete the handshake and wait for done (bit 0) */
	mmio_write_32(CIC_BASE + CIC_CTRL0, 0x20002);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 0)))
		;

	ch_count = sdram_params->num_channels;

	/* LPDDR4 f2 cann't do training, all training will fail */
	for (ch = 0; ch < ch_count; ch++) {
		mmio_clrsetbits_32(PHY_REG(ch, 896), (0x3 << 8) | 1,
				   fn << 8);

		/* data_training failed */
		if (data_training(ch, sdram_params, PI_FULL_TRAINING))
			return -1;
	}

	return 0;
}
571 
572 /*
573  * Needs to be done for both channels at once in case of a shared reset signal
574  * between channels.
575  */
pctl_start(uint32_t channel_mask,struct rk3399_sdram_params * sdram_params)576 static __pmusramfunc int pctl_start(uint32_t channel_mask,
577 		struct rk3399_sdram_params *sdram_params)
578 {
579 	uint32_t count;
580 	uint32_t byte;
581 
582 	mmio_setbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
583 	mmio_setbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
584 
585 	/* need de-access IO retention before controller START */
586 	if (channel_mask & (1 << 0))
587 		mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 19));
588 	if (channel_mask & (1 << 1))
589 		mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 23));
590 
591 	/* PHY_DLL_RST_EN */
592 	if (channel_mask & (1 << 0))
593 		mmio_clrsetbits_32(PHY_REG(0, 957), 0x3 << 24,
594 				   0x2 << 24);
595 	if (channel_mask & (1 << 1))
596 		mmio_clrsetbits_32(PHY_REG(1, 957), 0x3 << 24,
597 				   0x2 << 24);
598 
599 	/* check ERROR bit */
600 	if (channel_mask & (1 << 0)) {
601 		count = 0;
602 		while (!(mmio_read_32(CTL_REG(0, 203)) & (1 << 3))) {
603 			/* CKE is low, loop 10ms */
604 			if (count > 100)
605 				return -1;
606 
607 			sram_udelay(100);
608 			count++;
609 		}
610 
611 		mmio_clrbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
612 
613 		/* Restore the PHY_RX_CAL_DQS value */
614 		for (byte = 0; byte < 4; byte++)
615 			mmio_clrsetbits_32(PHY_REG(0, 57 + 128 * byte),
616 					   0xfff << 16,
617 					   sdram_params->rx_cal_dqs[0][byte]);
618 	}
619 	if (channel_mask & (1 << 1)) {
620 		count = 0;
621 		while (!(mmio_read_32(CTL_REG(1, 203)) & (1 << 3))) {
622 			/* CKE is low, loop 10ms */
623 			if (count > 100)
624 				return -1;
625 
626 			sram_udelay(100);
627 			count++;
628 		}
629 
630 		mmio_clrbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
631 
632 		/* Restore the PHY_RX_CAL_DQS value */
633 		for (byte = 0; byte < 4; byte++)
634 			mmio_clrsetbits_32(PHY_REG(1, 57 + 128 * byte),
635 					   0xfff << 16,
636 					   sdram_params->rx_cal_dqs[1][byte]);
637 	}
638 
639 	return 0;
640 }
641 
pmusram_restore_pll(int pll_id,uint32_t * src)642 __pmusramfunc static void pmusram_restore_pll(int pll_id, uint32_t *src)
643 {
644 	mmio_write_32((CRU_BASE + CRU_PLL_CON(pll_id, 3)), PLL_SLOW_MODE);
645 
646 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 0), src[0] | REG_SOC_WMSK);
647 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 1), src[1] | REG_SOC_WMSK);
648 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 2), src[2]);
649 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 4), src[4] | REG_SOC_WMSK);
650 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 5), src[5] | REG_SOC_WMSK);
651 
652 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 3), src[3] | REG_SOC_WMSK);
653 
654 	while ((mmio_read_32(CRU_BASE + CRU_PLL_CON(pll_id, 2)) &
655 		(1 << 31)) == 0x0)
656 		;
657 }
658 
/*
 * Snapshot everything dmc_resume() (which runs from PMUSRAM) needs to
 * bring DRAM back: the DPLL and DDR clock-mux settings, the derived DDR
 * frequency/ODT flags, and full images of the CTL, PI and PHY register
 * files.  Runs from normal DRAM before the system suspends.
 */
void dmc_suspend(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	struct rk3399_ddr_publ_regs *phy_regs;
	uint32_t *params_ctl;
	uint32_t *params_pi;
	uint32_t refdiv, postdiv2, postdiv1, fbdiv;
	uint32_t ch, byte, i;

	phy_regs = &sdram_params->phy_regs;
	params_ctl = sdram_params->pctl_regs.denali_ctl;
	params_pi = sdram_params->pi_regs.denali_pi;

	/* save dpll register and ddr clock register value to pmusram */
	cru_clksel_con6 = mmio_read_32(CRU_BASE + CRU_CLKSEL_CON6);
	for (i = 0; i < PLL_CON_COUNT; i++)
		dpll_data[i] = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, i));

	/* decode the PLL divider fields to recompute the DDR frequency */
	fbdiv = dpll_data[0] & 0xfff;
	postdiv2 = POSTDIV2_DEC(dpll_data[1]);
	postdiv1 = POSTDIV1_DEC(dpll_data[1]);
	refdiv = REFDIV_DEC(dpll_data[1]);

	/* 24 MHz crystal reference assumed by the "* 24" factor */
	sdram_params->ddr_freq = ((fbdiv * 24) /
				(refdiv * postdiv1 * postdiv2)) * MHz;

	/* NOTE(review): ddr_freq looks unsigned; "%d" may mismatch — confirm */
	INFO("sdram_params->ddr_freq = %d\n", sdram_params->ddr_freq);
	/* ODT enabled iff PHY_5 bits [18:16] are non-zero */
	sdram_params->odt = (((mmio_read_32(PHY_REG(0, 5)) >> 16) &
			       0x7) != 0) ? 1 : 0;

	/* copy the registers CTL PI and PHY */
	dram_regcpy((uintptr_t)&params_ctl[0], CTL_REG(0, 0), CTL_REG_NUM);

	/* mask DENALI_CTL_00_DATA.START, only copy here, will trigger later */
	params_ctl[0] &= ~(0x1 << 0);

	dram_regcpy((uintptr_t)&params_pi[0], PI_REG(0, 0),
		    PI_REG_NUM);

	/* mask DENALI_PI_00_DATA.START, only copy here, will trigger later*/
	params_pi[0] &= ~(0x1 << 0);

	/* only one data-slice image is saved; pctl_cfg() replicates it */
	dram_regcpy((uintptr_t)&phy_regs->phy0[0],
			    PHY_REG(0, 0), 91);

	for (i = 0; i < 3; i++)
		dram_regcpy((uintptr_t)&phy_regs->phy512[i][0],
			    PHY_REG(0, 512 + 128 * i), 38);

	dram_regcpy((uintptr_t)&phy_regs->phy896[0], PHY_REG(0, 896), 63);

	/* save the upper-half RX_CAL_DQS field of PHY_57/185/313/441 */
	for (ch = 0; ch < sdram_params->num_channels; ch++) {
		for (byte = 0; byte < 4; byte++)
			sdram_params->rx_cal_dqs[ch][byte] = (0xfff << 16) &
				mmio_read_32(PHY_REG(ch, 57 + byte * 128));
	}

	/* set DENALI_PHY_957_DATA.PHY_DLL_RST_EN = 0x1 */
	phy_regs->phy896[957 - 896] &= ~(0x3 << 24);
	phy_regs->phy896[957 - 896] |= 1 << 24;
	/* pre-bias the saved PHY_896 image for the resume path */
	phy_regs->phy896[0] |= 1;
	phy_regs->phy896[0] &= ~(0x3 << 8);
}
722 
/*
 * Bring DRAM back after suspend.  Runs entirely from PMUSRAM (DRAM is
 * not usable yet): restores the DDR clock source and DPLL, resets and
 * reprograms the controller/PHY on every channel, re-trains, and finally
 * restores the NoC timing configuration.  The whole bring-up is retried
 * from scratch whenever controller start or training fails.
 */
__pmusramfunc void dmc_resume(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	uint32_t channel_mask = 0;
	uint32_t channel;

	sram_secure_timer_init();

	/*
	 * we switch ddr clock to abpll when suspend,
	 * we set back to dpll here
	 */
	mmio_write_32(CRU_BASE + CRU_CLKSEL_CON6,
			cru_clksel_con6 | REG_SOC_WMSK);
	pmusram_restore_pll(DPLL_ID, dpll_data);

	configure_sgrf();

retry:
	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		phy_pctrl_reset(channel);
		pctl_cfg(channel, sdram_params);
	}

	/* a channel with col != 0 is populated */
	for (channel = 0; channel < 2; channel++) {
		if (sdram_params->ch[channel].col)
			channel_mask |= 1 << channel;
	}

	if (pctl_start(channel_mask, sdram_params) < 0)
		goto retry;

	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		/* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
		if (sdram_params->dramtype == LPDDR3)
			sram_udelay(10);

		/* If traning fail, retry to do it again. */
		if (data_training(channel, sdram_params, PI_FULL_TRAINING))
			goto retry;

		set_ddrconfig(sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}

	dram_all_config(sdram_params);

	/* Switch to index 1 and prepare for DDR frequency switch. */
	dram_switch_to_next_index(sdram_params);
}
773