]> rtime.felk.cvut.cz Git - sojka/nv-tegra/linux-3.10.git/blob - drivers/platform/tegra/mc/tegra21_emc_cc_r21015.c
8c10cd84eeae82060bc2a3abbf720dba353bcba2
[sojka/nv-tegra/linux-3.10.git] / drivers / platform / tegra / mc / tegra21_emc_cc_r21015.c
1 /*
 2  * drivers/platform/tegra/tegra21_emc_cc_r21015.c
3  *
4  * Copyright (c) 2014, NVIDIA CORPORATION.  All rights reserved.
5  *
6  * This program is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License version 2 as
8  * published by the Free Software Foundation.
9  *
10  * This program is distributed in the hope that it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13  * more details.
14  */
15
16 #include <linux/kernel.h>
17 #include <linux/io.h>
18 #include <linux/clk.h>
19 #include <linux/delay.h>
20 #include <linux/platform_data/tegra_emc_pdata.h>
21
22 /* Select v21015 versions of some functions. */
23 #define __TEGRA_EMC_V21015
24
25 #include <tegra/tegra21_emc.h>
26 #include <tegra/mc-regs-t21x.h>
27
28 #include "iomap.h"
29
30 /*
31  * This clock change is actually equivalent to 21018 now.
32  */
33 #define DVFS_CLOCK_CHANGE_VERSION       21019
34 #define EMC_PRELOCK_VERSION             2101
35
36 void dll_disable(int channel_mode)
37 {
38         u32 emc_cfg_dig_dll;
39
40         emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
41         emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
42         emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
43         emc_timing_update(channel_mode);
44
45         wait_for_update(EMC_CFG_DIG_DLL, EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 0);
46         if (channel_mode == DUAL_CHANNEL)
47                 wait_for_update(EMC_CFG_DIG_DLL,
48                                 EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 1);
49 }
50
51 void dll_enable(int channel_mode)
52 {
53         u32 emc_cfg_dig_dll;
54
55         emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
56         emc_cfg_dig_dll |= EMC_CFG_DIG_DLL_CFG_DLL_EN;
57         emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
58         emc_timing_update(channel_mode);
59
60         wait_for_update(EMC_CFG_DIG_DLL, EMC_CFG_DIG_DLL_CFG_DLL_EN, 1, 0);
61         if (channel_mode == DUAL_CHANNEL)
62                 wait_for_update(EMC_CFG_DIG_DLL,
63                                 EMC_CFG_DIG_DLL_CFG_DLL_EN, 1, 1);
64 }
65
66 /*
67  * When derating is enabled periodic training needs to update both sets of
 68  * tables. This function copies the necessary periodic training settings from
 69  * the current timing into its alternate (derated/normal) timing.
70  */
71 void __update_emc_alt_timing(struct tegra21_emc_table *current_timing)
72 {
73         struct tegra21_emc_table *current_table, *alt_timing;
74         int i;
75
76         /* Only have alternate timings when there are derated tables present. */
77         if (!tegra_emc_table_derated)
78                 return;
79
80         current_table = emc_get_table(dram_over_temp_state);
81         i = current_timing - current_table;
82
83         BUG_ON(i < 0 || i > tegra_emc_table_size);
84
85         if (dram_over_temp_state == DRAM_OVER_TEMP_THROTTLE)
86                 alt_timing = &tegra_emc_table[i];
87         else
88                 alt_timing = &tegra_emc_table_derated[i];
89
90         __emc_copy_table_params(current_timing, alt_timing,
91                                 EMC_COPY_TABLE_PARAM_PERIODIC_FIELDS |
92                                 EMC_COPY_TABLE_PARAM_PTFV_FIELDS);
93 }
94
95 /*
96  * It is possible for periodic training to be skipped during the DVFS change. As
 97  * an example: suppose the DRAM is trained at 20C - the trained_dram_clktree_*
98  * values will reflect this. Now, supposing the EMC goes to 1600MHz and runs for
99  * a while. If the EMC swaps to some other freq, say 204MHz, while the DRAM is
100  * very hot the current_dram_clktree_* values will reflect this. Why is this a
101  * problem? If we go back to 1600MHz and the temp is still very hot then there
102  * will not be a large difference in the osc reading from the DRAM and we won't
103  * do any periodic training during DVFS. Thus we write the 20C trimmers when in
104  * reality we needed to compute new trimmers based on the current temp.
105  *
 106  * This function avoids the above mess by simply making the
107  * current_dram_clktree_* fields the same as trained_dram_clktree_* so that we
108  * always do the periodic calibration if needed.
109  */
110 void __reset_dram_clktree_values(struct tegra21_emc_table *table)
111 {
112 #define __RESET_CLKTREE(TBL, C, D, U)                                   \
113         TBL->current_dram_clktree_c ## C ## d ## D ## u ## U =          \
114                 TBL->trained_dram_clktree_c ## C ## d ## D ## u ## U
115
116         __RESET_CLKTREE(table, 0, 0, 0);
117         __RESET_CLKTREE(table, 0, 0, 1);
118         __RESET_CLKTREE(table, 0, 1, 0);
119         __RESET_CLKTREE(table, 0, 1, 1);
120         __RESET_CLKTREE(table, 1, 0, 0);
121         __RESET_CLKTREE(table, 1, 0, 1);
122         __RESET_CLKTREE(table, 1, 1, 0);
123         __RESET_CLKTREE(table, 1, 1, 1);
124 }
125
126 u32 actual_osc_clocks(u32 in)
127 {
128         if (in < 0x40)
129                 return in * 16;
130         else if (in < 0x80)
131                 return 2048;
132         else if (in < 0xc0)
133                 return 4096;
134         else
135                 return 8192;
136 }
137
/*
 * Read the per-device DRAM oscillator counts (MR19 = MSB, MR18 = LSB,
 * per LPDDR4 DQS-oscillator readout) over both channels, convert them
 * into clock tree delay values, and fold them into
 * next_timing->current_dram_clktree_* when the drift exceeds the
 * table's tree_margin.
 *
 * Returns the largest absolute delta (adel) observed across all
 * measured channel/device combinations, in the same units as the
 * current_dram_clktree_* fields.
 *
 * NOTE(review): assumes MR23 (osc run count) has already been set up by
 * training/DVFS -- see the caller's comment in
 * __do_periodic_emc_compensation_r21015().
 */
static u32 update_clock_tree_delay(struct tegra21_emc_table *last_timing,
				   struct tegra21_emc_table *next_timing,
				   u32 dram_dev_num, u32 channel_mode)
{
	u32 mrr_req = 0, mrr_data = 0;
	/* temp{ch}_{upper/lower byte lane}: assembled 16-bit osc counts. */
	u32 temp0_0 = 0, temp0_1 = 0, temp1_0 = 0, temp1_1 = 0;
	s32 tdel = 0, tmdel = 0, adel = 0;
	u32 cval;
	u32 last_timing_rate_mhz = last_timing->rate / 1000;
	u32 next_timing_rate_mhz = next_timing->rate / 1000;

	/*
	 * Dev0 MSB.
	 * NOTE(review): DEV_SEL value 2 selects device 0 here (value 1
	 * selects device 1 below) -- confirm the encoding in the TRM.
	 */
	mrr_req = (2 << EMC_MRR_DEV_SEL_SHIFT) |
		(19 << EMC_MRR_MA_SHIFT);
	emc_writel(mrr_req, EMC_MRR);

	WARN(wait_for_update(EMC_EMC_STATUS,
			     EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
	     "Timed out waiting for MRR 19 (ch=0)\n");
	if (channel_mode == DUAL_CHANNEL)
		WARN(wait_for_update(EMC_EMC_STATUS,
				     EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
		     "Timed out waiting for MRR 19 (ch=1)\n");

	/*
	 * NOTE(review): the field is masked and then shifted *left* by
	 * EMC_MRR_DATA_SHIFT; this only extracts the data correctly if
	 * the shift is 0 -- verify against the register layout.
	 */
	mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
		EMC_MRR_DATA_SHIFT;

	/* High byte of each lane's 16-bit count. */
	temp0_0 = (mrr_data & 0xff) << 8;
	temp0_1 = mrr_data & 0xff00;

	if (channel_mode == DUAL_CHANNEL) {
		mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
			EMC_MRR_DATA_SHIFT;
		temp1_0 = (mrr_data & 0xff) << 8;
		temp1_1 = mrr_data & 0xff00;
	}

	/*
	 * Dev0 LSB.
	 */
	mrr_req = (mrr_req & ~EMC_MRR_MA_MASK) | (18 << EMC_MRR_MA_SHIFT);
	emc_writel(mrr_req, EMC_MRR);

	WARN(wait_for_update(EMC_EMC_STATUS,
			     EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
	     "Timed out waiting for MRR 18 (ch=0)\n");
	if (channel_mode == DUAL_CHANNEL)
		WARN(wait_for_update(EMC_EMC_STATUS,
				     EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
		     "Timed out waiting for MRR 18 (ch=1)\n");

	mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
		EMC_MRR_DATA_SHIFT;

	/* Merge in the low byte of each lane's count. */
	temp0_0 |= mrr_data & 0xff;
	temp0_1 |= (mrr_data & 0xff00) >> 8;

	if (channel_mode == DUAL_CHANNEL) {
		mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
			EMC_MRR_DATA_SHIFT;
		temp1_0 |= (mrr_data & 0xff);
		temp1_1 |= (mrr_data & 0xff00) >> 8;
	}

	/*
	 * Convert count -> delay and compare against the stored value;
	 * only accept the new measurement if the drift (in taps at the
	 * next rate) exceeds tree_margin. adel tracks the worst drift.
	 * NOTE(review): divides by the raw count -- a zero readout would
	 * be a division by zero.
	 */
	cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
		(last_timing_rate_mhz * 2 * temp0_0);
	tdel = next_timing->current_dram_clktree_c0d0u0 - cval;
	tmdel = (tdel < 0) ? -1 * tdel : tdel;
	adel = tmdel;

	if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
	    next_timing->tree_margin)
		next_timing->current_dram_clktree_c0d0u0 = cval;

	cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
		(last_timing_rate_mhz * 2 * temp0_1);
	tdel = next_timing->current_dram_clktree_c0d0u1 - cval;
	tmdel = (tdel < 0) ? -1 * tdel : tdel;

	if (tmdel > adel)
		adel = tmdel;

	if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
	    next_timing->tree_margin)
		next_timing->current_dram_clktree_c0d0u1 = cval;

	if (channel_mode == DUAL_CHANNEL) {
		cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
			(last_timing_rate_mhz * 2 * temp1_0);
		tdel = next_timing->current_dram_clktree_c1d0u0 - cval;
		tmdel = (tdel < 0) ? -1 * tdel : tdel;
		if (tmdel > adel)
			adel = tmdel;

		if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
		     next_timing->tree_margin)
			next_timing->current_dram_clktree_c1d0u0 = cval;

		cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
			(last_timing_rate_mhz * 2 * temp1_1);
		tdel = next_timing->current_dram_clktree_c1d0u1 - cval;
		tmdel = (tdel < 0) ? -1 * tdel : tdel;

		if (tmdel > adel)
			adel = tmdel;

		if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
		    next_timing->tree_margin)
			next_timing->current_dram_clktree_c1d0u1 = cval;

	}

	/* Single-rank parts have no device 1 to measure. */
	if (dram_dev_num != TWO_RANK)
		goto done;

	/*
	 * Dev1 MSB. The temp*_* variables are reused as scratch for the
	 * second rank; the results land in the *d1u* fields below.
	 */
	mrr_req = (1 << EMC_MRR_DEV_SEL_SHIFT) |
		(19 << EMC_MRR_MA_SHIFT);
	emc_writel(mrr_req, EMC_MRR);

	WARN(wait_for_update(EMC_EMC_STATUS,
			     EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
	     "Timed out waiting for MRR 19 (ch=0)\n");
	if (channel_mode == DUAL_CHANNEL)
		WARN(wait_for_update(EMC_EMC_STATUS,
				     EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
		     "Timed out waiting for MRR 19 (ch=1)\n");

	mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
		EMC_MRR_DATA_SHIFT;

	temp0_0 = (mrr_data & 0xff) << 8;
	temp0_1 = mrr_data & 0xff00;

	if (channel_mode == DUAL_CHANNEL) {
		mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
			EMC_MRR_DATA_SHIFT;
		temp1_0 = (mrr_data & 0xff) << 8;
		temp1_1 = mrr_data & 0xff00;
	}

	/*
	 * Dev1 LSB.
	 */
	mrr_req = (mrr_req & ~EMC_MRR_MA_MASK) | (18 << EMC_MRR_MA_SHIFT);
	emc_writel(mrr_req, EMC_MRR);

	WARN(wait_for_update(EMC_EMC_STATUS,
			     EMC_EMC_STATUS_MRR_DIVLD, 1, 0),
	     "Timed out waiting for MRR 18 (ch=0)\n");
	if (channel_mode == DUAL_CHANNEL)
		WARN(wait_for_update(EMC_EMC_STATUS,
				     EMC_EMC_STATUS_MRR_DIVLD, 1, 1),
		     "Timed out waiting for MRR 18 (ch=1)\n");

	mrr_data = (emc_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
		EMC_MRR_DATA_SHIFT;

	temp0_0 |= mrr_data & 0xff;
	temp0_1 |= (mrr_data & 0xff00) >> 8;

	if (channel_mode == DUAL_CHANNEL) {
		mrr_data = (emc1_readl(EMC_MRR) & EMC_MRR_DATA_MASK) <<
			EMC_MRR_DATA_SHIFT;
		temp1_0 |= (mrr_data & 0xff);
		temp1_1 |= (mrr_data & 0xff00) >> 8;
	}

	cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
		(last_timing_rate_mhz * 2 * temp0_0);
	tdel = next_timing->current_dram_clktree_c0d1u0 - cval;
	tmdel = (tdel < 0) ? -1 * tdel : tdel;
	if (tmdel > adel)
		adel = tmdel;

	if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
	    next_timing->tree_margin)
		next_timing->current_dram_clktree_c0d1u0 = cval;

	cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
		(last_timing_rate_mhz * 2 * temp0_1);
	tdel = next_timing->current_dram_clktree_c0d1u1 - cval;
	tmdel = (tdel < 0) ? -1 * tdel : tdel;
	if (tmdel > adel)
		adel = tmdel;

	if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
	    next_timing->tree_margin)
		next_timing->current_dram_clktree_c0d1u1 = cval;

	if (channel_mode == DUAL_CHANNEL){
		cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
			(last_timing_rate_mhz * 2 * temp1_0);
		tdel = next_timing->current_dram_clktree_c1d1u0 - cval;
		tmdel = (tdel < 0) ? -1 * tdel : tdel;
		if (tmdel > adel)
			adel = tmdel;

		if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
		    next_timing->tree_margin)
			next_timing->current_dram_clktree_c1d1u0 = cval;

		cval = (1000000 * actual_osc_clocks(last_timing->run_clocks)) /
			(last_timing_rate_mhz * 2 * temp1_1);
		tdel = next_timing->current_dram_clktree_c1d1u1 - cval;
		tmdel = (tdel < 0) ? -1 * tdel : tdel;
		if (tmdel > adel)
			adel = tmdel;

		if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
		    next_timing->tree_margin)
			next_timing->current_dram_clktree_c1d1u1 = cval;
	}

done:
	return adel;
}
359
360 void start_periodic_compensation(void)
361 {
362         u32 mpc_req = 0x4b;
363
364         emc_writel(mpc_req, EMC_MPC);
365         mpc_req = emc_readl(EMC_MPC);
366 }
367
/*
 * The per channel registers don't fit in with the normal set up for making
 * *_INDEX style enum fields because there are two identical register names
 * but two channels. Here we define some _INDEX macros to deal with the one
 * place we need per channel distinctions. These index into
 * trim_regs_per_ch[] (see TRIM_REG below).
 */
#define EMC0_EMC_CMD_BRLSHFT_0_INDEX    0
#define EMC1_EMC_CMD_BRLSHFT_1_INDEX    1
#define EMC0_EMC_DATA_BRLSHFT_0_INDEX   2
#define EMC1_EMC_DATA_BRLSHFT_0_INDEX   3
#define EMC0_EMC_DATA_BRLSHFT_1_INDEX   4
#define EMC1_EMC_DATA_BRLSHFT_1_INDEX   5
#define EMC0_EMC_QUSE_BRLSHFT_0_INDEX   6
#define EMC1_EMC_QUSE_BRLSHFT_1_INDEX   7
#define EMC0_EMC_QUSE_BRLSHFT_2_INDEX   8
#define EMC1_EMC_QUSE_BRLSHFT_3_INDEX   9
384
/*
 * Complicated table of registers and fields! Yikes. Essentially this
 * boils down to (reg_field + (reg_field * 64)): the long DQ trimmer
 * value for a byte lane plus 64 taps per barrel-shifter step.
 *
 * NOTE: this macro expands references to a local named "next_timing";
 * it is only usable inside apply_periodic_compensation_trimmer().
 */
#define TRIM_REG(chan, rank, reg, byte)                                 \
	((EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ##        \
	  _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte ## _MASK &     \
	  next_timing->trim_regs[EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ##     \
				 rank ## _ ## reg ## _INDEX]) >>        \
	 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ##         \
	 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte ## _SHIFT)      \
	+                                                               \
	(((EMC_DATA_BRLSHFT_ ## rank ## _RANK ## rank ## _BYTE ##       \
	   byte ## _DATA_BRLSHFT_MASK &                                 \
	   next_timing->trim_regs_per_ch[EMC ## chan ##                 \
			      _EMC_DATA_BRLSHFT_ ## rank ## _INDEX]) >> \
	  EMC_DATA_BRLSHFT_ ## rank ## _RANK ## rank ## _BYTE ##        \
	  byte ## _DATA_BRLSHFT_SHIFT) * 64)
403
/*
 * Compute the temp variable in apply_periodic_compensation_trimmer(). It
 * reduces to (reg_field | reg_field), packing two adjacent byte-lane
 * trimmer values (new[n] and new[n + 1]) into one register image.
 *
 * The original definition ended with a stray line-continuation '\',
 * which made the macro silently absorb the following source line; it is
 * removed here.
 */
#define CALC_TEMP(rank, reg, byte1, byte2, n)                           \
	((new[n] << EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ##     \
	  reg ## _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte1 ## _SHIFT) & \
	 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ##         \
	 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte1 ## _MASK)      \
	|                                                               \
	((new[n + 1] << EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## \
	  reg ## _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte2 ## _SHIFT) & \
	 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK ## rank ## _ ## reg ##         \
	 _OB_DDLL_LONG_DQ_RANK ## rank ## _BYTE ## byte2 ## _MASK)
/*
 * Recompute the value of one trimmer/barrel-shifter register (@offset)
 * for @next_timing, applying the measured clock tree drift
 * (current_dram_clktree_* - trained_dram_clktree_*) to the combined
 * per-byte trimmer values.
 *
 * Returns the packed register value to write to @offset; 0 for an
 * unrecognized offset.
 */
u32 apply_periodic_compensation_trimmer(
			struct tegra21_emc_table *next_timing, u32 offset)
{
	u32 i, temp = 0;
	u32 next_timing_rate_mhz = next_timing->rate / 1000;
	s32 tree_delta[4];
	s32 tree_delta_taps[4];
	/*
	 * Combined trimmer value per byte lane: long DQ taps plus
	 * 64 * brlshft (see TRIM_REG). [0..7] = rank 0, [8..15] = rank 1.
	 * TRIM_REG args are (chan, rank, reg, byte).
	 */
	s32 new[] = {
		TRIM_REG(0, 0, 0, 0),
		TRIM_REG(0, 0, 0, 1),
		TRIM_REG(0, 0, 1, 2),
		TRIM_REG(0, 0, 1, 3),

		TRIM_REG(1, 0, 2, 4),
		TRIM_REG(1, 0, 2, 5),
		TRIM_REG(1, 0, 3, 6),
		TRIM_REG(1, 0, 3, 7),

		TRIM_REG(0, 1, 0, 0),
		TRIM_REG(0, 1, 0, 1),
		TRIM_REG(0, 1, 1, 2),
		TRIM_REG(0, 1, 1, 3),

		TRIM_REG(1, 1, 2, 4),
		TRIM_REG(1, 1, 2, 5),
		TRIM_REG(1, 1, 3, 6),
		TRIM_REG(1, 1, 3, 7)
	};

	/*
	 * First pass: apply the drift (converted to taps at the next
	 * rate) to the affected rank's combined values, then split back
	 * into brlshft steps (/ 64) or residual taps (% 64) depending on
	 * which register is being rebuilt.
	 */
	switch (offset) {
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0:
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1:
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2:
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3:
	case EMC_DATA_BRLSHFT_0:
		/* Rank 0 drift: one delta per channel/update pair. */
		tree_delta[0] = 128 *
			(next_timing->current_dram_clktree_c0d0u0 -
			 next_timing->trained_dram_clktree_c0d0u0);
		tree_delta[1] = 128 *
			(next_timing->current_dram_clktree_c0d0u1 -
			 next_timing->trained_dram_clktree_c0d0u1);
		tree_delta[2] = 128 *
			(next_timing->current_dram_clktree_c1d0u0 -
			 next_timing->trained_dram_clktree_c1d0u0);
		tree_delta[3] = 128 *
			(next_timing->current_dram_clktree_c1d0u1 -
			 next_timing->trained_dram_clktree_c1d0u1);

		tree_delta_taps[0] =
			(tree_delta[0] * (s32)next_timing_rate_mhz) / 1000000;
		tree_delta_taps[1] =
			(tree_delta[1] * (s32)next_timing_rate_mhz) / 1000000;
		tree_delta_taps[2] =
			(tree_delta[2] * (s32)next_timing_rate_mhz) / 1000000;
		tree_delta_taps[3] =
			(tree_delta[3] * (s32)next_timing_rate_mhz) / 1000000;

		/* Only compensate lanes whose drift exceeds the margin. */
		for(i = 0; i < 4; i++) {
			if ((tree_delta_taps[i] > next_timing->tree_margin) ||
			    (tree_delta_taps[i] <
			    (-1 * next_timing->tree_margin))) {
				new[i * 2] = new[i * 2] + tree_delta_taps[i];
				new[i * 2 + 1] = new[i * 2 + 1]
					+ tree_delta_taps[i];
			}
		}

		if (offset == EMC_DATA_BRLSHFT_0) {
			for (i = 0; i < 8; i++)
				new[i] = new[i] / 64;
		} else {
			for (i = 0; i < 8; i++)
				new[i] = new[i] % 64;
		}
		break;

	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0:
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1:
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2:
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3:
	case EMC_DATA_BRLSHFT_1:
		/* Rank 1: same computation on the d1 fields / new[8..15]. */
		tree_delta[0] = 128 *
			(next_timing->current_dram_clktree_c0d1u0 -
			 next_timing->trained_dram_clktree_c0d1u0);
		tree_delta[1] = 128 *
			(next_timing->current_dram_clktree_c0d1u1 -
			 next_timing->trained_dram_clktree_c0d1u1);
		tree_delta[2] = 128 *
			(next_timing->current_dram_clktree_c1d1u0 -
			 next_timing->trained_dram_clktree_c1d1u0);
		tree_delta[3] = 128 *
			(next_timing->current_dram_clktree_c1d1u1 -
			 next_timing->trained_dram_clktree_c1d1u1);

		tree_delta_taps[0] =
			(tree_delta[0] * (s32)next_timing_rate_mhz) / 1000000;
		tree_delta_taps[1] =
			(tree_delta[1] * (s32)next_timing_rate_mhz) / 1000000;
		tree_delta_taps[2] =
			(tree_delta[2] * (s32)next_timing_rate_mhz) / 1000000;
		tree_delta_taps[3] =
			(tree_delta[3] * (s32)next_timing_rate_mhz) / 1000000;

		for(i = 0; i < 4; i++){
			if ((tree_delta_taps[i] > next_timing->tree_margin) ||
			    (tree_delta_taps[i] <
			     (-1 * next_timing->tree_margin))){
				new[8 + i * 2] = new[8 + i * 2] +
					tree_delta_taps[i];
				new[8 + i * 2 + 1] = new[8 + i * 2 + 1] +
					tree_delta_taps[i];
			}
		}

		if (offset == EMC_DATA_BRLSHFT_1) {
			for (i = 0; i < 8; i++)
				new[i + 8] = new[i + 8] / 64;
		} else {
			for(i = 0; i < 8; i++)
				new[i + 8] = new[i + 8] % 64;
		}
		break;
	}

	/* Second pass: pack the adjusted values back into @offset's layout. */
	switch (offset) {
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0:
		/* rank, reg, byte1, byte2, n */
		temp = CALC_TEMP(0, 0, 0, 1, 0);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1:
		temp = CALC_TEMP(0, 1, 2, 3, 2);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2:
		temp = CALC_TEMP(0, 2, 4, 5, 4);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3:
		temp = CALC_TEMP(0, 3, 6, 7, 6);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0:
		temp = CALC_TEMP(1, 0, 0, 1, 8);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1:
		temp = CALC_TEMP(1, 1, 2, 3, 10);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2:
		temp = CALC_TEMP(1, 2, 4, 5, 12);
		break;
	case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3:
		temp = CALC_TEMP(1, 3, 6, 7, 14);
		break;
	case EMC_DATA_BRLSHFT_0:
		temp =
		((new[0] << EMC_DATA_BRLSHFT_0_RANK0_BYTE0_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE0_DATA_BRLSHFT_MASK) |
		((new[1] << EMC_DATA_BRLSHFT_0_RANK0_BYTE1_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE1_DATA_BRLSHFT_MASK) |
		((new[2] << EMC_DATA_BRLSHFT_0_RANK0_BYTE2_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE2_DATA_BRLSHFT_MASK) |
		((new[3] << EMC_DATA_BRLSHFT_0_RANK0_BYTE3_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE3_DATA_BRLSHFT_MASK) |
		((new[4] << EMC_DATA_BRLSHFT_0_RANK0_BYTE4_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE4_DATA_BRLSHFT_MASK) |
		((new[5] << EMC_DATA_BRLSHFT_0_RANK0_BYTE5_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE5_DATA_BRLSHFT_MASK) |
		((new[6] << EMC_DATA_BRLSHFT_0_RANK0_BYTE6_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE6_DATA_BRLSHFT_MASK) |
		((new[7] << EMC_DATA_BRLSHFT_0_RANK0_BYTE7_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_0_RANK0_BYTE7_DATA_BRLSHFT_MASK);
		break;
	case EMC_DATA_BRLSHFT_1:
		temp =
		((new[8] << EMC_DATA_BRLSHFT_1_RANK1_BYTE0_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE0_DATA_BRLSHFT_MASK) |
		((new[9] << EMC_DATA_BRLSHFT_1_RANK1_BYTE1_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE1_DATA_BRLSHFT_MASK) |
		((new[10] <<
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE2_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE2_DATA_BRLSHFT_MASK) |
		((new[11] <<
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE3_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE3_DATA_BRLSHFT_MASK) |
		((new[12] <<
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE4_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE4_DATA_BRLSHFT_MASK) |
		((new[13] <<
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE5_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE5_DATA_BRLSHFT_MASK) |
		((new[14] <<
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE6_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE6_DATA_BRLSHFT_MASK) |
		((new[15] <<
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE7_DATA_BRLSHFT_SHIFT) &
			    EMC_DATA_BRLSHFT_1_RANK1_BYTE7_DATA_BRLSHFT_MASK);
		break;
	default:
		break;
	}

	return temp;
}
619
620 u32 __do_periodic_emc_compensation_r21015(
621                         struct tegra21_emc_table *current_timing)
622 {
623         u32 dram_dev_num;
624         u32 channel_mode;
625         u32 emc_cfg,emc_cfg_o;
626         u32 emc_dbg_o;
627         u32 del, i;
628         u32 list[] = {
629                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0,
630                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1,
631                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2,
632                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3,
633                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0,
634                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1,
635                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2,
636                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3,
637                 EMC_DATA_BRLSHFT_0,
638                 EMC_DATA_BRLSHFT_1
639         };
640         u32 items = ARRAY_SIZE(list);
641         u32 emc_cfg_update;
642
643         if (current_timing->periodic_training) {
644                 channel_mode = !!(current_timing->burst_regs[EMC_FBIO_CFG7_INDEX] &
645                                   (1 << 2));
646                 dram_dev_num = 1 + (mc_readl(MC_EMEM_ADR_CFG) & 0x1);
647
648                 emc_cc_dbg(PER_TRAIN, "Periodic training starting\n");
649
650                 emc_dbg_o = emc_readl(EMC_DBG);
651                 emc_cfg_o = emc_readl(EMC_CFG);
652                 emc_cfg = emc_cfg_o & ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
653                                         EMC_CFG_DRAM_CLKSTOP_PD |
654                                         EMC_CFG_DRAM_CLKSTOP_PD);
655
656                 /*
657                  * 1. Power optimizations should be off.
658                  */
659                 emc_writel(emc_cfg, EMC_CFG);
660
661                 /* Does emc_timing_update() for above changes. */
662                 dll_disable(channel_mode);
663
664                 wait_for_update(EMC_EMC_STATUS,
665                                 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 0);
666                 if (channel_mode)
667                         wait_for_update(EMC_EMC_STATUS,
668                                 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 1);
669
670                 wait_for_update(EMC_EMC_STATUS,
671                                 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 0);
672                 if (channel_mode)
673                         wait_for_update(EMC_EMC_STATUS,
674                                 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 1);
675
676                 emc_cfg_update = emc_readl(EMC_CFG_UPDATE);
677                 emc_writel((emc_cfg_update &
678                             ~EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_MASK) |
679                            (2 << EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_SHIFT),
680                            EMC_CFG_UPDATE);
681
682                 /*
683                  * 2. osc kick off - this assumes training and dvfs have set
684                  *    correct MR23.
685                  */
686                 start_periodic_compensation();
687
688                 /*
689                  * 3. Let dram capture its clock tree delays.
690                  */
691                 udelay((actual_osc_clocks(current_timing->run_clocks) * 1000) /
692                        current_timing->rate + 1);
693
694                 /*
695                  * 4. Check delta wrt previous values (save value if margin
696                  *    exceeds what is set in table).
697                  */
698                 del = update_clock_tree_delay(current_timing, current_timing,
699                                               dram_dev_num, channel_mode);
700
701                 /*
702                  * 5. Apply compensation w.r.t. trained values (if clock tree
703                  *    has drifted more than the set margin).
704                  */
705                 if (current_timing->tree_margin <
706                     ((del * 128 * (current_timing->rate / 1000)) / 1000000)) {
707                         for (i = 0; i < items; i++) {
708                                 u32 tmp = apply_periodic_compensation_trimmer(
709                                                        current_timing, list[i]);
710                                 emc_writel(tmp, list[i]);
711                         }
712                 }
713
714                 emc_writel(emc_cfg_o, EMC_CFG);
715
716                 /*
		 * 6. Timing update actually applies the new trimmers.
718                  */
719                 emc_timing_update(channel_mode);
720
721                 /* 6.1. Restore the UPDATE_DLL_IN_UPDATE field. */
722                 emc_writel(emc_cfg_update, EMC_CFG_UPDATE);
723
724                 /* 6.2. Restore the DLL. */
725                 dll_enable(channel_mode);
726
727                 /*
728                  * 7. Copy over the periodic training registers that we updated
729                  *    here to the corresponding derated/non-derated table.
730                  */
731                 __update_emc_alt_timing(current_timing);
732         }
733
734         return 0;
735 }
736
/*
 * Ramp down the pad macros / IO bricks ahead of an EMC clock change.
 *
 * @clk: the *period* of the source clock, in picoseconds (despite the name).
 * @flip_backward: when non-zero, the roles of @last_timing and @next_timing
 *                 are swapped and the pad settings are taken from
 *                 @next_timing instead of @last_timing.
 * @last_timing: timing table being switched away from.
 * @next_timing: timing table being switched to.
 *
 * All register writes go through ccfifo_writel() so they are replayed by the
 * EMC during the clock change; the third argument is a delay, in clocks,
 * applied before the write. ramp_down_wait accumulates the corresponding
 * total wait in picoseconds and is what this function returns.
 *
 * Source clock period is in picoseconds. Returns the ramp down wait time in
 * picoseconds.
 */
u32 do_dvfs_power_ramp_down(u32 clk, int flip_backward,
			    struct tegra21_emc_table *last_timing,
			    struct tegra21_emc_table *next_timing)
{
	u32 ramp_down_wait = 0;
	u32 pmacro_cmd_pad;
	u32 pmacro_dq_pad;
	u32 pmacro_rfu1;
	u32 pmacro_cfg5;
	u32 pmacro_common_tx;
	u32 seq_wait;

	emc_cc_dbg(PRAMP_DN, "flip_backward = %d\n", flip_backward);

	/*
	 * Start from the pad macro settings of the table we are ramping away
	 * from: normally last_timing, next_timing when playing the sequence
	 * backward.
	 */
	if (flip_backward) {
		pmacro_cmd_pad   = next_timing->
			burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
		pmacro_dq_pad    = next_timing->
			burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
		pmacro_rfu1      = next_timing->
			burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
		pmacro_cfg5      = next_timing->
			burst_regs[EMC_FBIO_CFG5_INDEX];
		pmacro_common_tx = next_timing->
			burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
	} else {
		pmacro_cmd_pad   = last_timing->
			burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
		pmacro_dq_pad    = last_timing->
			burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
		pmacro_rfu1      = last_timing->
			burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
		pmacro_cfg5      = last_timing->
			burst_regs[EMC_FBIO_CFG5_INDEX];
		pmacro_common_tx = last_timing->
			burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
	}

	/* Keep the CMD pads driven while the ramp is in progress. */
	pmacro_cmd_pad |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;

	ccfifo_writel(pmacro_cmd_pad, EMC_PMACRO_CMD_PAD_TX_CTRL, 0);
	/* Disable CMD TX 12 clocks later. */
	ccfifo_writel(pmacro_cfg5 | EMC_FBIO_CFG5_CMD_TX_DIS, EMC_FBIO_CFG5,
		      12);
	ramp_down_wait = 12 * clk;

	/* Number of clocks spanning ~100,000 ps (100 ns) at this period. */
	seq_wait = (100000 / clk) + 1;

	/*
	 * NOTE: @clk is a period, so "clk < 1000000/THRESHOLD" means the
	 * clock is *faster* than 1 GHz/THRESHOLD... the debug strings below
	 * describe the frequency comparison, not the period comparison.
	 */
	if (clk < (1000000 / DVFS_FGCG_HIGH_SPEED_THRESHOLD)) {
		emc_cc_dbg(PRAMP_DN, "clk < FGCG_HIGH_SPEED_THRESHOLD;\n");
		emc_cc_dbg(PRAMP_DN, "  %u vs %u\n", clk,
			   1000000 / DVFS_FGCG_HIGH_SPEED_THRESHOLD);

		if (clk < (1000000 / IOBRICK_DCC_THRESHOLD)) {
			emc_cc_dbg(PRAMP_DN, "clk < IOBRICK_DCC_THRESHOLD;\n");
			emc_cc_dbg(PRAMP_DN, "  %u vs %u\n", clk,
			   1000000 / IOBRICK_DCC_THRESHOLD);

			/*
			 * Presumably moves the E_DCC (duty cycle correction
			 * enable) from the DQ/CMD legs to the DQSP/DQSN legs
			 * before the bricks come down - TODO confirm against
			 * the TRM.
			 */
			pmacro_cmd_pad &=
				~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
				  EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
			pmacro_cmd_pad |=
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC;
			ccfifo_writel(pmacro_cmd_pad,
				      EMC_PMACRO_CMD_PAD_TX_CTRL, seq_wait);
			ramp_down_wait += 100000;

			pmacro_dq_pad &=
			      ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
			pmacro_dq_pad |=
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC;
			ccfifo_writel(pmacro_dq_pad,
				      EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
			/*
			 * NOTE(review): the raw brick-control masks below
			 * (0x01120112, 0x01bf01bf, 0x07ff07ff, 0xffff07ff)
			 * have no named defines; they appear to stage the
			 * brick disables - verify against the TRM.
			 */
			ccfifo_writel(pmacro_rfu1 & ~0x01120112,
				      EMC_PMACRO_BRICK_CTRL_RFU1, 0);
		} else {
			emc_cc_dbg(PRAMP_DN, "clk > IOBRICK_DCC_THRESHOLD\n");
			ccfifo_writel(pmacro_rfu1 & ~0x01120112,
				      EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait);
			ramp_down_wait += 100000;
		}

		ccfifo_writel(pmacro_rfu1 & ~0x01bf01bf,
			      EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait);
		ramp_down_wait += 100000;

		if (clk < (1000000 / IOBRICK_DCC_THRESHOLD)) {
			/* Now drop E_DCC on all legs. */
			pmacro_cmd_pad &=
				~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
				  EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC |
				  EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
				  EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC);
			ccfifo_writel(pmacro_cmd_pad,
				      EMC_PMACRO_CMD_PAD_TX_CTRL, seq_wait);
			ramp_down_wait += 100000;

			pmacro_dq_pad &=
			      ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC);
			ccfifo_writel(pmacro_dq_pad,
				      EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
			ccfifo_writel(pmacro_rfu1 & ~0x07ff07ff,
				      EMC_PMACRO_BRICK_CTRL_RFU1, 0);
		} else {
			ccfifo_writel(pmacro_rfu1 & ~0x07ff07ff,
				      EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait);
			ramp_down_wait += 100000;
		}
	} else {
		/* Fast clock: bring everything down in one shot. */
		emc_cc_dbg(PRAMP_DN, "clk > FGCG_HIGH_SPEED_THRESHOLD\n");
		ccfifo_writel(pmacro_rfu1 & ~0xffff07ff,
			      EMC_PMACRO_BRICK_CTRL_RFU1, seq_wait + 19);
		ramp_down_wait += 100000 + (20 * clk);
	}

	if (clk < (1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD)) {
		emc_cc_dbg(PRAMP_DN, "clk < FGCG_MID_SPEED_THRESHOLD;\n");
		emc_cc_dbg(PRAMP_DN, "  %u vs %u\n", clk,
			   1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD);

		/* Stage down the common pad TX controls. */
		ramp_down_wait += 100000;
		ccfifo_writel(pmacro_common_tx & ~0x5,
			      EMC_PMACRO_COMMON_PAD_TX_CTRL, seq_wait);
		ramp_down_wait += 100000;
		ccfifo_writel(pmacro_common_tx & ~0xf,
			      EMC_PMACRO_COMMON_PAD_TX_CTRL, seq_wait);
		ramp_down_wait += 100000;
		/* Dummy CCFIFO entry: pure delay, no register write. */
		ccfifo_writel(0, 0, seq_wait);
		ramp_down_wait += 100000;
	} else {
		emc_cc_dbg(PRAMP_DN, "clk > FGCG_MID_SPEED_THRESHOLD\n");
		ccfifo_writel(pmacro_common_tx & ~0xf,
			      EMC_PMACRO_COMMON_PAD_TX_CTRL, seq_wait);
	}

	return ramp_down_wait;
}
882
/*
 * Similar to do_dvfs_power_ramp_down() except this does the power ramp up.
 *
 * @clk: the *period* of the destination clock, in picoseconds.
 * @flip_backward: when non-zero, the roles of @last_timing and @next_timing
 *                 are swapped (pad settings come from @last_timing).
 * @last_timing: timing table being switched away from.
 * @next_timing: timing table being switched to.
 *
 * Writes are queued through ccfifo_writel(); the third argument is a delay
 * in clocks before the write. Returns the total ramp up wait time in
 * picoseconds.
 */
noinline u32 do_dvfs_power_ramp_up(u32 clk, int flip_backward,
				   struct tegra21_emc_table *last_timing,
				   struct tegra21_emc_table *next_timing)
{
	u32 pmacro_cmd_pad;
	u32 pmacro_dq_pad;
	u32 pmacro_rfu1;
	u32 pmacro_cfg5;
	u32 pmacro_common_tx;
	u32 ramp_up_wait = 0;

	/*
	 * Pad macro settings come from the table being ramped *to*: normally
	 * next_timing, last_timing when playing the sequence backward. This
	 * mirrors (inverts) the selection in do_dvfs_power_ramp_down().
	 */
	if (flip_backward) {
		pmacro_cmd_pad   = last_timing->
			burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
		pmacro_dq_pad    = last_timing->
			burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
		pmacro_rfu1      = last_timing->
			burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
		pmacro_cfg5      = last_timing->burst_regs[EMC_FBIO_CFG5_INDEX];
		pmacro_common_tx = last_timing->
			burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
	} else {
		pmacro_cmd_pad   = next_timing->
			burst_regs[EMC_PMACRO_CMD_PAD_TX_CTRL_INDEX];
		pmacro_dq_pad    = next_timing->
			burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
		pmacro_rfu1      = next_timing->
			burst_regs[EMC_PMACRO_BRICK_CTRL_RFU1_INDEX];
		pmacro_cfg5      = next_timing->
			burst_regs[EMC_FBIO_CFG5_INDEX];
		pmacro_common_tx = next_timing->
			burst_regs[EMC_PMACRO_COMMON_PAD_TX_CTRL_INDEX];
	}
	/* Keep the CMD pads driven until the end of the ramp. */
	pmacro_cmd_pad |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;

	/* @clk is a period: "clk < 1000000/T" means frequency > 1GHz/T. */
	if (clk < 1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD) {
		/* Bring the common pad TX controls up in two stages. */
		ccfifo_writel(pmacro_common_tx & 0xa,
			      EMC_PMACRO_COMMON_PAD_TX_CTRL, 0);
		ccfifo_writel(pmacro_common_tx & 0xf,
			      EMC_PMACRO_COMMON_PAD_TX_CTRL,
			      (100000 / clk) + 1);
		ramp_up_wait += 100000;
	} else {
		ccfifo_writel(pmacro_common_tx | 0x8,
			      EMC_PMACRO_COMMON_PAD_TX_CTRL, 0);
	}

	if (clk < 1000000 / DVFS_FGCG_HIGH_SPEED_THRESHOLD) {
		if (clk < 1000000 / IOBRICK_DCC_THRESHOLD) {
			/*
			 * Presumably re-enables duty cycle correction on the
			 * DQS legs first (reverse of the ramp down order) -
			 * TODO confirm against the TRM.
			 */
			pmacro_cmd_pad |=
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC;
			pmacro_cmd_pad &=
				~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
				  EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
			ccfifo_writel(pmacro_cmd_pad,
				      EMC_PMACRO_CMD_PAD_TX_CTRL,
				      (100000 / clk) + 1);
			ramp_up_wait += 100000;

			pmacro_dq_pad |=
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC;
			pmacro_dq_pad &=
			       ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
				 EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
			ccfifo_writel(pmacro_dq_pad,
				      EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
			/*
			 * NOTE(review): the raw brick masks (0xfe40fe40,
			 * 0xfeedfeed, 0x06000600) have no named defines;
			 * they stage the brick enables - verify vs the TRM.
			 */
			ccfifo_writel(pmacro_rfu1 & 0xfe40fe40,
				      EMC_PMACRO_BRICK_CTRL_RFU1, 0);
		} else {
			ccfifo_writel(pmacro_rfu1 & 0xfe40fe40,
				      EMC_PMACRO_BRICK_CTRL_RFU1,
				      (100000 / clk) + 1);
			ramp_up_wait += 100000;
		}

		ccfifo_writel(pmacro_rfu1 & 0xfeedfeed,
			      EMC_PMACRO_BRICK_CTRL_RFU1, (100000 / clk) + 1);
		ramp_up_wait += 100000;

		if (clk < 1000000 / IOBRICK_DCC_THRESHOLD) {
			/* Finally enable E_DCC on all legs. */
			pmacro_cmd_pad |=
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
				EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC;
			ccfifo_writel(pmacro_cmd_pad,
				      EMC_PMACRO_CMD_PAD_TX_CTRL,
				      (100000 / clk) + 1);
			ramp_up_wait += 100000;

			pmacro_dq_pad |=
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
				EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC;
			ccfifo_writel(pmacro_dq_pad,
				      EMC_PMACRO_DATA_PAD_TX_CTRL, 0);
			ccfifo_writel(pmacro_rfu1,
				      EMC_PMACRO_BRICK_CTRL_RFU1, 0);
		} else {
			ccfifo_writel(pmacro_rfu1,
				      EMC_PMACRO_BRICK_CTRL_RFU1,
				      (100000 / clk) + 1);
			ramp_up_wait += 100000;
		}

		/* Re-enable CMD TX. */
		ccfifo_writel(pmacro_cfg5 & ~EMC_FBIO_CFG5_CMD_TX_DIS,
			      EMC_FBIO_CFG5, (100000 / clk) + 10);
		ramp_up_wait += 100000 + (10 * clk);
	} else if (clk < 1000000 / DVFS_FGCG_MID_SPEED_THRESHOLD) {
		ccfifo_writel(pmacro_rfu1 | 0x06000600,
			      EMC_PMACRO_BRICK_CTRL_RFU1, (100000 / clk) + 1);
		ccfifo_writel(pmacro_cfg5 & ~EMC_FBIO_CFG5_CMD_TX_DIS,
			      EMC_FBIO_CFG5, (100000 / clk) + 10);
		ramp_up_wait += 100000 + 10 * clk;
	} else {
		ccfifo_writel(pmacro_rfu1 | 0x00000600,
			      EMC_PMACRO_BRICK_CTRL_RFU1, 0);
		ccfifo_writel(pmacro_cfg5 & ~EMC_FBIO_CFG5_CMD_TX_DIS,
			      EMC_FBIO_CFG5, 12);
		ramp_up_wait += 12 * clk;
	}

	/* Release the forced drive on the CMD pads, 5 clocks later. */
	pmacro_cmd_pad &= ~EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
	ccfifo_writel(pmacro_cmd_pad, EMC_PMACRO_CMD_PAD_TX_CTRL, 5);

	return ramp_up_wait;
}
1016
1017 /*
1018  * Change the DLL's input clock. Used during the DLL prelock sequence.
1019  */
1020 void change_dll_src(struct tegra21_emc_table *next_timing, u32 clksrc)
1021 {
1022         u32 out_enb_x;
1023         u32 dll_setting = next_timing->dll_clk_src;
1024         u32 emc_clk_src;
1025         u32 emc_clk_div;
1026
1027         out_enb_x = 0;
1028         emc_clk_src = (clksrc & EMC_CLK_EMC_2X_CLK_SRC_MASK) >>
1029                 EMC_CLK_EMC_2X_CLK_SRC_SHIFT;
1030         emc_clk_div = (clksrc & EMC_CLK_EMC_2X_CLK_DIVISOR_MASK) >>
1031                 EMC_CLK_EMC_2X_CLK_DIVISOR_SHIFT;
1032
1033         dll_setting &= ~(DLL_CLK_EMC_DLL_CLK_SRC_MASK |
1034                          DLL_CLK_EMC_DLL_CLK_DIVISOR_MASK);
1035         dll_setting |= emc_clk_src << DLL_CLK_EMC_DLL_CLK_SRC_SHIFT;
1036         dll_setting |= emc_clk_div << DLL_CLK_EMC_DLL_CLK_DIVISOR_SHIFT;
1037
1038         /* Low jitter and undivided are the same thing. */
1039         dll_setting &= ~DLL_CLK_EMC_DLL_DDLL_CLK_SEL_MASK;
1040         if (emc_clk_src == EMC_CLK_SOURCE_PLLMB_LJ)
1041                 dll_setting |= (PLLM_VCOB <<
1042                                 DLL_CLK_EMC_DLL_DDLL_CLK_SEL_SHIFT);
1043         else if (emc_clk_src == EMC_CLK_SOURCE_PLLM_LJ)
1044                 dll_setting |= (PLLM_VCOA <<
1045                                 DLL_CLK_EMC_DLL_DDLL_CLK_SEL_SHIFT);
1046         else
1047                 dll_setting |= (EMC_DLL_SWITCH_OUT <<
1048                                 DLL_CLK_EMC_DLL_DDLL_CLK_SEL_SHIFT);
1049
1050         /* Now program the clock source. */
1051         emc_cc_dbg(REGS, "clk source: 0x%08x => 0x%p\n", dll_setting,
1052                    clk_base + CLK_RST_CONTROLLER_CLK_SOURCE_EMC_DLL);
1053         writel(dll_setting, clk_base + CLK_RST_CONTROLLER_CLK_SOURCE_EMC_DLL);
1054
1055         if (next_timing->clk_out_enb_x_0_clk_enb_emc_dll) {
1056                 writel(CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1057                        clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_SET);
1058                 emc_cc_dbg(REGS, "out_enb_x_set: 0x%08x => 0x%p\n",
1059                            CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1060                            clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_SET);
1061         } else {
1062                 writel(CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1063                        clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_CLR);
1064                 emc_cc_dbg(REGS, "out_enb_x_clr: 0x%08x => 0x%p\n",
1065                            CLK_OUT_ENB_X_CLK_ENB_EMC_DLL,
1066                            clk_base + CLK_RST_CONTROLLER_CLK_OUT_ENB_X_CLR);
1067         }
1068 }
1069
/*
 * Prelock the DLL.
 *
 * @next_timing: timing table being switched to; supplies EMC_DLL_CFG_0 and
 *               the target rate used to pick the calibration start trim.
 * @dvfs_with_training: unused in this version - the training specific
 *                      prelock code was removed (see step 11 below).
 * @clksrc: EMC clock source register value, forwarded to change_dll_src().
 *
 * Returns the DLL_OUT field of EMC_DIG_DLL_STATUS once the DLL has locked.
 * All the "wait" loops below busy-poll with no timeout.
 */
u32 dll_prelock(struct tegra21_emc_table *next_timing,
		int dvfs_with_training, u32 clksrc)
{
	u32 emc_dig_dll_status;
	u32 dll_locked;
	u32 dll_out;	/* NOTE(review): assigned but never used. */
	u32 emc_cfg_dig_dll;
	u32 emc_dll_cfg_0;
	u32 emc_dll_cfg_1;
	u32 ddllcal_ctrl_start_trim_val;
	u32 dll_en;
	u32 dual_channel_lpddr4_case;
	u32 dll_priv_updated;

	emc_cc_dbg(PRELOCK, "Prelock starting; version: %d\n",
		   EMC_PRELOCK_VERSION);

	/*
	 * Dual channel iff both CH0 and CH1 are enabled in EMC_FBIO_CFG7.
	 * NOTE(review): bitwise '&' of two !!-normalized (0/1) flags acts as
	 * a logical AND here; '&&' would be clearer.
	 */
	dual_channel_lpddr4_case =
		!!(emc_readl(EMC_FBIO_CFG7) & EMC_FBIO_CFG7_CH1_ENABLE) &
		!!(emc_readl(EMC_FBIO_CFG7) & EMC_FBIO_CFG7_CH0_ENABLE);

	emc_dig_dll_status = 0;
	dll_locked = 0;
	dll_out = 0;
	emc_cfg_dig_dll = 0;
	emc_dll_cfg_0 = 0;
	emc_dll_cfg_1 = 0;
	ddllcal_ctrl_start_trim_val = 0;
	dll_en = 0;

	emc_cc_dbg(PRELOCK, "Dual channel LPDDR4: %s\n",
		   dual_channel_lpddr4_case ? "yes" : "no");
	emc_cc_dbg(PRELOCK, "DLL clksrc: 0x%08x\n", clksrc);

	/* Step 1:
	 *   Configure the DLL for prelock: lock limit 3, DLL disabled,
	 *   mode 3, stall all traffic, no stall-until-lock.
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 1\n");
	emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL) &
		~EMC_CFG_DIG_DLL_CFG_DLL_LOCK_LIMIT_MASK;
	emc_cfg_dig_dll |= (3 << EMC_CFG_DIG_DLL_CFG_DLL_LOCK_LIMIT_SHIFT);
	emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
	emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK;
	emc_cfg_dig_dll |= (3 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
	emc_cfg_dig_dll |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
	emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
	emc_cfg_dig_dll &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;

	emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);
	/* Kick off a timing update to latch the DLL config. */
	emc_writel(1, EMC_TIMING_CONTROL);

	/* Step 2:
	 *   Update timings.
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 2\n");
	wait_for_update(EMC_EMC_STATUS,
			EMC_EMC_STATUS_TIMING_UPDATE_STALLED, 0, 0);
	if (dual_channel_lpddr4_case)
		wait_for_update(EMC_EMC_STATUS,
				EMC_EMC_STATUS_TIMING_UPDATE_STALLED, 0, 1);

	/* Step 3:
	 *   Poll channel(s) until DLL_EN is true.
	 *   NOTE(review): the comparison against 1 assumes
	 *   EMC_CFG_DIG_DLL_CFG_DLL_EN is bit 0, so the masked value is
	 *   exactly 0 or 1 - TODO confirm. The loop actually exits when the
	 *   bit reads back as *cleared* (we just disabled the DLL above).
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 3\n");
	do {
		emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
		dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
	} while (dll_en == 1);

	if (dual_channel_lpddr4_case) {
		do {
			emc_cfg_dig_dll = emc1_readl(EMC_CFG_DIG_DLL);
			dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
		} while (dll_en == 1);
	}

	/* Step 4:
	 *   Update DLL calibration filter.
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 4\n");
	emc_dll_cfg_0 = next_timing->burst_regs[EMC_DLL_CFG_0_INDEX];

	emc_writel(emc_dll_cfg_0, EMC_DLL_CFG_0);

	/* Calibration start trim: higher target rates get a shorter trim. */
	if (next_timing->rate >= 400000 && next_timing->rate < 600000)
		ddllcal_ctrl_start_trim_val = 150;
	else if (next_timing->rate >= 600000 && next_timing->rate < 800000)
		ddllcal_ctrl_start_trim_val = 100;
	else if (next_timing->rate >= 800000 && next_timing->rate < 1000000)
		ddllcal_ctrl_start_trim_val = 70;
	else if (next_timing->rate >= 1000000 && next_timing->rate < 1200000)
		ddllcal_ctrl_start_trim_val = 30;
	else
		ddllcal_ctrl_start_trim_val = 20;

	emc_dll_cfg_1 = emc_readl(EMC_DLL_CFG_1);
	/*
	 * NOTE(review): this keeps ONLY the start-trim field of the old
	 * value (clearing every other field) and then ORs in the new trim
	 * without clearing the old trim bits. A conventional field update
	 * would be '&= ~MASK'. This matches the shipped reference sequence,
	 * so confirm the intended semantics before changing it.
	 */
	emc_dll_cfg_1 &= EMC_DLL_CFG_1_DDLLCAL_CTRL_START_TRIM_MASK;
	emc_dll_cfg_1 |= ddllcal_ctrl_start_trim_val;
	emc_writel(emc_dll_cfg_1, EMC_DLL_CFG_1);

	/* Step 8:
	 *   (Skipping some steps to get back inline with reference.)
	 *   Change the DLL clock source.
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 8\n");
	change_dll_src(next_timing, clksrc);

	/* Step 9:
	 *   Enable the DLL and start the prelock state machine.
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 9\n");
	emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
	emc_cfg_dig_dll |= EMC_CFG_DIG_DLL_CFG_DLL_EN;
	emc_writel(emc_cfg_dig_dll, EMC_CFG_DIG_DLL);

	emc_timing_update(dual_channel_lpddr4_case ?
			  DUAL_CHANNEL : SINGLE_CHANNEL);

	/* Poll until the enable reads back as set on each channel. */
	do {
		emc_cfg_dig_dll = emc_readl(EMC_CFG_DIG_DLL);
		dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
	} while (dll_en == 0);

	if (dual_channel_lpddr4_case) {
		do {
			emc_cfg_dig_dll = emc1_readl(EMC_CFG_DIG_DLL);
			dll_en = emc_cfg_dig_dll & EMC_CFG_DIG_DLL_CFG_DLL_EN;
		} while (dll_en == 0);
	}

	/* Step 10:
	 *   Wait for the DLL to lock (and its private value to update).
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 10\n");
	do {
		emc_dig_dll_status = emc_readl(EMC_DIG_DLL_STATUS);
		dll_locked = emc_dig_dll_status & EMC_DIG_DLL_STATUS_DLL_LOCK;
		dll_priv_updated = emc_dig_dll_status &
			EMC_DIG_DLL_STATUS_DLL_PRIV_UPDATED;
	} while (!dll_locked || !dll_priv_updated);

	/* Step 11:
	 *   Prelock training specific code - removed. Should it be ??
	 */

	/* Step 12:
	 *   Done! Return the dll prelock value.
	 */
	emc_cc_dbg(PRELOCK_STEPS, "Step 12\n");
	emc_dig_dll_status = emc_readl(EMC_DIG_DLL_STATUS);
	return emc_dig_dll_status & EMC_DIG_DLL_STATUS_DLL_OUT_MASK;
}
1226
1227 /*
1228  * Do the clock change sequence.
1229  */
1230 void emc_set_clock_r21015(struct tegra21_emc_table *next_timing,
1231                           struct tegra21_emc_table *last_timing,
1232                           int training, u32 clksrc)
1233 {
1234         /*
1235          * This is the timing table for the source frequency. It does _not_
1236          * necessarily correspond to the actual timing values in the EMC at the
1237          * moment. If the boot BCT differs from the table then this can happen.
1238          * However, we need it for accessing the dram_timing_regs (which are not
1239          * really registers) array for the current frequency.
1240          */
1241         struct tegra21_emc_table *fake_timing;
1242
1243         u32 i, tmp;
1244
1245         u32 cya_allow_ref_cc = 0, ref_b4_sref_en = 0, cya_issue_pc_ref = 0;
1246
1247         u32 zqcal_before_cc_cutoff = 2400; /* In picoseconds */
1248         u32 ref_delay_mult;
1249         u32 ref_delay;
1250         s32 zq_latch_dvfs_wait_time;
1251         s32 tZQCAL_lpddr4_fc_adj;
1252         /* Scaled by x1000 */
1253         u32 tFC_lpddr4 = 1000 * next_timing->dram_timing_regs[T_FC_LPDDR4];
1254         /* u32 tVRCG_lpddr4 = next_timing->dram_timing_regs[T_FC_LPDDR4]; */
1255         u32 tZQCAL_lpddr4 = 1000000;
1256
1257         u32 dram_type, dram_dev_num, shared_zq_resistor;
1258         u32 channel_mode;
1259         u32 is_lpddr3;
1260
1261         u32 emc_cfg, emc_sel_dpd_ctrl, emc_cfg_reg;
1262
1263         u32 emc_dbg;
1264         u32 emc_zcal_interval;
1265         u32 emc_zcal_wait_cnt_old;
1266         u32 emc_zcal_wait_cnt_new;
1267         u32 emc_dbg_active;
1268         u32 zq_op;
1269         u32 zcal_wait_time_clocks;
1270         u32 zcal_wait_time_ps;
1271
1272         u32 emc_auto_cal_config;
1273         u32 auto_cal_en;
1274
1275         u32 mr13_catr_enable;
1276
1277         u32 ramp_up_wait = 0, ramp_down_wait = 0;
1278
1279         /* In picoseconds. */
1280         u32 source_clock_period;
1281         u32 destination_clock_period;
1282
1283         u32 emc_dbg_o;
1284         u32 emc_cfg_pipe_clk_o;
1285         u32 emc_pin_o;
1286
1287         u32 mr13_flip_fspwr;
1288         u32 mr13_flip_fspop;
1289
1290         u32 opt_zcal_en_cc;
1291         u32 opt_do_sw_qrst = 1;
1292         u32 opt_dvfs_mode;
1293         u32 opt_dll_mode;
1294         u32 opt_cc_short_zcal = 1;
1295         u32 opt_short_zcal = 1;
1296         u32 save_restore_clkstop_pd = 1;
1297
1298         u32 prelock_dll_en = 0, dll_out;
1299
1300         int next_push, next_dq_e_ivref, next_dqs_e_ivref;
1301
1302         u64 emc_mrw6_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW6;
1303         u64 emc_mrw7_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW7;
1304         u64 emc_mrw8_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW8;
1305         u64 emc_mrw9_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW9;
1306         u64 emc_mrw10_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW10;
1307         u64 emc_mrw10_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW10;
1308         u64 emc_mrw11_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW11;
1309         u64 emc_mrw11_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW11;
1310         u64 emc_mrw12_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW12;
1311         u64 emc_mrw12_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW12;
1312         u64 emc_mrw13_ch0_ab = (u64)IO_ADDRESS(TEGRA_EMC0_BASE) + EMC_MRW13;
1313         u64 emc_mrw13_ch1_ab = (u64)IO_ADDRESS(TEGRA_EMC1_BASE) + EMC_MRW13;
1314         u64 emc_mrw14_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW14;
1315         u64 emc_mrw15_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRW15;
1316
1317         u64 emc_training_ctrl_ab =
1318                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_TRAINING_CTRL;
1319         u64 emc_cfg_ab = (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_CFG;
1320         u64 emc_mrs_wait_cnt_ab =
1321                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_MRS_WAIT_CNT;
1322         u64 emc_zcal_wait_cnt_ab =
1323                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_ZCAL_INTERVAL;
1324         u64 emc_zcal_interval_ab =
1325                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_ZCAL_INTERVAL;
1326         u64 emc_pmacro_autocal_cfg_common_ab =
1327                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_AUTOCAL_CFG_COMMON;
1328         u64 emc_pmacro_data_pad_tx_ctrl_ab =
1329                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_DATA_PAD_TX_CTRL;
1330         u64 emc_pmacro_cmd_pad_tx_ctrl_ab =
1331                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_CMD_PAD_TX_CTRL;
1332         u64 emc_pmacro_brick_ctrl_rfu1_ab =
1333                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_BRICK_CTRL_RFU1;
1334         u64 emc_pmacro_common_pad_tx_ctrl_ab =
1335                 (u64)IO_ADDRESS(TEGRA_EMC_BASE) + EMC_PMACRO_COMMON_PAD_TX_CTRL;
1336         u32 opt_war_200024907;
1337         u32 zq_wait_long;
1338         u32 zq_wait_short;
1339
1340         u32 bg_regulator_switch_complete_wait_clks;
1341         u32 bg_regulator_mode_change;
1342         u32 enable_bglp_regulator;
1343         u32 enable_bg_regulator;
1344
1345         u32 tRTM;
1346         u32 RP_war;
1347         u32 R2P_war;
1348         u32 TRPab_war;
1349         s32 nRTP;
1350         u32 deltaTWATM;
1351         u32 W2P_war;
1352         u32 tRPST;
1353
1354         u32 mrw_req;
1355         u32 adel = 0, compensate_trimmer_applicable = 0;
1356         u32 next_timing_rate_mhz = next_timing->rate / 1000;
1357
1358         static u32 fsp_for_next_freq;
1359
1360         emc_cc_dbg(INFO, "Running clock change.\n");
1361         ccfifo_index = 0;
1362
1363         fake_timing = get_timing_from_freq(last_timing->rate);
1364
1365         fsp_for_next_freq = !fsp_for_next_freq;
1366
1367         dram_type = emc_readl(EMC_FBIO_CFG5) &
1368                 EMC_FBIO_CFG5_DRAM_TYPE_MASK >> EMC_FBIO_CFG5_DRAM_TYPE_SHIFT;
1369         shared_zq_resistor = last_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX] &
1370                 1 << 31; /* needs def */
1371         channel_mode = !!(last_timing->burst_regs[EMC_FBIO_CFG7_INDEX] &
1372                           1 << 2); /* needs def */
1373         opt_zcal_en_cc = (next_timing->burst_regs[EMC_ZCAL_INTERVAL_INDEX] &&
1374                           !last_timing->burst_regs[EMC_ZCAL_INTERVAL_INDEX]) ||
1375                           dram_type == DRAM_TYPE_LPDDR4;
1376         opt_dll_mode = (dram_type == DRAM_TYPE_DDR3) ?
1377                 get_dll_state(next_timing) : DLL_OFF;
1378         is_lpddr3 = (dram_type == DRAM_TYPE_LPDDR2) &&
1379                 next_timing->burst_regs[EMC_FBIO_CFG5_INDEX] &
1380                 1 << 25; /* needs def */
1381         opt_war_200024907 = (dram_type == DRAM_TYPE_LPDDR4);
1382         opt_dvfs_mode = MAN_SR;
1383         dram_dev_num = (mc_readl(MC_EMEM_ADR_CFG) & 0x1) + 1;
1384
1385         emc_cfg_reg = emc_readl(EMC_CFG);
1386         emc_auto_cal_config = emc_readl(EMC_AUTO_CAL_CONFIG);
1387
1388         source_clock_period = 1000000000 / last_timing->rate;
1389         destination_clock_period = 1000000000 / next_timing->rate;
1390
1391         tZQCAL_lpddr4_fc_adj = (source_clock_period > zqcal_before_cc_cutoff) ?
1392                 tZQCAL_lpddr4 / destination_clock_period :
1393                 (tZQCAL_lpddr4 - tFC_lpddr4) / destination_clock_period;
1394         emc_dbg_o = emc_readl(EMC_DBG);
1395         emc_pin_o = emc_readl(EMC_PIN);
1396         emc_cfg_pipe_clk_o = emc_readl(EMC_CFG_PIPE_CLK);
1397         emc_dbg = emc_dbg_o;
1398
1399         emc_cfg = next_timing->burst_regs[EMC_CFG_INDEX];
1400         emc_cfg &= ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
1401                      EMC_CFG_DRAM_CLKSTOP_SR | EMC_CFG_DRAM_CLKSTOP_PD);
1402         emc_sel_dpd_ctrl = next_timing->emc_sel_dpd_ctrl;
1403         emc_sel_dpd_ctrl &= ~(EMC_SEL_DPD_CTRL_CLK_SEL_DPD_EN |
1404                               EMC_SEL_DPD_CTRL_CA_SEL_DPD_EN |
1405                               EMC_SEL_DPD_CTRL_RESET_SEL_DPD_EN |
1406                               EMC_SEL_DPD_CTRL_ODT_SEL_DPD_EN |
1407                               EMC_SEL_DPD_CTRL_DATA_SEL_DPD_EN);
1408
1409         emc_cc_dbg(INFO, "Clock change version: %d\n",
1410                    DVFS_CLOCK_CHANGE_VERSION);
1411         emc_cc_dbg(INFO, "DRAM type = %d\n", dram_type);
1412         emc_cc_dbg(INFO, "DRAM dev #: %d\n", dram_dev_num);
1413         emc_cc_dbg(INFO, "Next EMC clksrc: 0x%08x\n", clksrc);
1414         emc_cc_dbg(INFO, "DLL clksrc:      0x%08x\n", next_timing->dll_clk_src);
1415         emc_cc_dbg(INFO, "last rate: %lu, next rate %lu\n", last_timing->rate,
1416                    next_timing->rate);
1417         emc_cc_dbg(INFO, "last period: %u, next period: %u\n",
1418                    source_clock_period, destination_clock_period);
1419         emc_cc_dbg(INFO, "  shared_zq_resistor: %d\n", !!shared_zq_resistor);
1420         emc_cc_dbg(INFO, "  channel_mode: %d\n", channel_mode);
1421         emc_cc_dbg(INFO, "  opt_dll_mode: %d\n", opt_dll_mode);
1422
1423         /* Step 1:
1424          *   Pre DVFS SW sequence.
1425          */
1426         emc_cc_dbg(STEPS, "Step 1\n");
1427         emc_cc_dbg(STEPS, "Step 1.1: Disable DLL temporarily.\n");
1428         tmp = emc_readl(EMC_CFG_DIG_DLL);
1429         tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
1430         emc_writel(tmp, EMC_CFG_DIG_DLL);
1431
1432         emc_timing_update(channel_mode);
1433         wait_for_update(EMC_CFG_DIG_DLL,
1434                         EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 0);
1435         if (channel_mode)
1436                 wait_for_update(EMC_CFG_DIG_DLL,
1437                                 EMC_CFG_DIG_DLL_CFG_DLL_EN, 0, 1);
1438
1439         emc_cc_dbg(STEPS, "Step 1.2: Disable AUTOCAL temporarily.\n");
1440         emc_auto_cal_config = next_timing->emc_auto_cal_config;
1441         auto_cal_en = emc_auto_cal_config & EMC_AUTO_CAL_CONFIG_AUTO_CAL_ENABLE;
1442         emc_auto_cal_config &= ~EMC_AUTO_CAL_CONFIG_AUTO_CAL_START;
1443         emc_auto_cal_config |=  EMC_AUTO_CAL_CONFIG_AUTO_CAL_MEASURE_STALL;
1444         emc_auto_cal_config |=  EMC_AUTO_CAL_CONFIG_AUTO_CAL_UPDATE_STALL;
1445         emc_auto_cal_config |=  auto_cal_en;
1446         emc_writel(emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
1447         emc_readl(EMC_AUTO_CAL_CONFIG); /* Flush write. */
1448
1449         emc_cc_dbg(STEPS, "Step 1.3: Disable other power features.\n");
1450         emc_set_shadow_bypass(ACTIVE);
1451         emc_writel(emc_cfg, EMC_CFG);
1452         emc_writel(emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
1453         emc_set_shadow_bypass(ASSEMBLY);
1454
1455         if (next_timing->periodic_training) {
1456                 __reset_dram_clktree_values(next_timing);
1457
1458                 wait_for_update(EMC_EMC_STATUS,
1459                                 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 0);
1460                 if (channel_mode)
1461                         wait_for_update(EMC_EMC_STATUS,
1462                                 EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK, 0, 1);
1463
1464                 wait_for_update(EMC_EMC_STATUS,
1465                                 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 0);
1466                 if (channel_mode)
1467                         wait_for_update(EMC_EMC_STATUS,
1468                                 EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK, 0, 1);
1469
1470                 start_periodic_compensation();
1471
1472                 udelay(((1000 * actual_osc_clocks(last_timing->run_clocks)) /
1473                         last_timing->rate) + 2);
1474                 adel = update_clock_tree_delay(fake_timing, next_timing,
1475                                                dram_dev_num, channel_mode);
1476                 compensate_trimmer_applicable =
1477                         next_timing->periodic_training &&
1478                         ((adel * 128 * next_timing_rate_mhz) / 1000000) >
1479                         next_timing->tree_margin;
1480         }
1481
	emc_cc_dbg(SUB_STEPS, "Step 1.1: Bug 200024907 - Patch RP R2P");
	if (opt_war_200024907) {
		/* nRTP is binned by the source clock period: the faster the
		 * current clock, the more clocks from READ to precharge. */
		nRTP = 16;
		if (source_clock_period >= 1000000/1866) /* 535.91 ps */
			nRTP = 14;
		if (source_clock_period >= 1000000/1600) /* 625.00 ps */
			nRTP = 12;
		if (source_clock_period >= 1000000/1333) /* 750.19 ps */
			nRTP = 10;
		if (source_clock_period >= 1000000/1066) /* 938.09 ps */
			nRTP = 8;

		/* 7.5 ns (in source clocks), floor 8 clocks. */
		deltaTWATM = max_t(u32, div_o3(7500, source_clock_period), 8);

		/*
		 * Originally there was a + .5 in the tRPST calculation.
		 * However since we can't do FP in the kernel and the tRTM
		 * computation was in a floating point ceiling function, adding
		 * one to tRTP should be ok. There is no other source of non
		 * integer values, so the result was always going to be
		 * something for the form: f_ceil(N + .5) = N + 1;
		 */
		tRPST = ((last_timing->emc_mrw & 0x80) >> 7);
		tRTM = fake_timing->dram_timing_regs[RL] +
			div_o3(3600, source_clock_period) +
			max_t(u32, div_o3(7500, source_clock_period), 8) +
			tRPST + 1 + nRTP;

		emc_cc_dbg(INFO, "tRTM = %u, EMC_RP = %u\n", tRTM,
			   next_timing->burst_regs[EMC_RP_INDEX]);

		/* Only patch if the current RP can't already cover tRTM. */
		if (last_timing->burst_regs[EMC_RP_INDEX] < tRTM) {
			if (tRTM > (last_timing->burst_regs[EMC_R2P_INDEX] +
				    last_timing->burst_regs[EMC_RP_INDEX])) {
				R2P_war = tRTM -
					last_timing->burst_regs[EMC_RP_INDEX];
				RP_war = last_timing->burst_regs[EMC_RP_INDEX];
				TRPab_war =
				       last_timing->burst_regs[EMC_TRPAB_INDEX];
				/* R2P is clamped to 63 (6-bit field); any
				 * excess is pushed into RP, keeping
				 * TRPab >= RP. */
				if (R2P_war > 63) {
					RP_war = R2P_war +
						last_timing->burst_regs
						[EMC_RP_INDEX] - 63;
					if (TRPab_war < RP_war)
						TRPab_war = RP_war;
					R2P_war = 63;
				}
			} else {
				/* Current R2P + RP already covers tRTM. */
				R2P_war = last_timing->
					burst_regs[EMC_R2P_INDEX];
				RP_war = last_timing->burst_regs[EMC_RP_INDEX];
				TRPab_war =
				       last_timing->burst_regs[EMC_TRPAB_INDEX];
			}

			/* Stretch W2P so that W2P + RP covers deltaTWATM,
			 * with the same clamp-to-63 / spill-into-RP scheme. */
			if (RP_war < deltaTWATM) {
				W2P_war = last_timing->burst_regs[EMC_W2P_INDEX]
					+ deltaTWATM - RP_war;
				if (W2P_war > 63) {
					RP_war = RP_war + W2P_war - 63;
					if (TRPab_war < RP_war)
						TRPab_war = RP_war;
					W2P_war = 63;
				}
			} else {
				W2P_war =
					last_timing->burst_regs[EMC_W2P_INDEX];
			}

			/* XOR here is just an integer != check: write the WAR
			 * values only if at least one actually changed. */
			if((last_timing->burst_regs[EMC_W2P_INDEX] ^ W2P_war) ||
			   (last_timing->burst_regs[EMC_R2P_INDEX] ^ R2P_war) ||
			   (last_timing->burst_regs[EMC_RP_INDEX] ^ RP_war) ||
			   (last_timing->burst_regs[EMC_TRPAB_INDEX] ^
			    TRPab_war)) {
				emc_writel(RP_war, EMC_RP);
				emc_writel(R2P_war, EMC_R2P);
				emc_writel(W2P_war, EMC_W2P);
				emc_writel(TRPab_war, EMC_TRPAB);
			}
			emc_timing_update(DUAL_CHANNEL);
		} else {
			emc_cc_dbg(INFO, "Skipped WAR for bug 200024907\n");
		}
	}
1566
1567         emc_writel(EMC_INTSTATUS_CLKCHANGE_COMPLETE, EMC_INTSTATUS);
1568         emc_set_shadow_bypass(ACTIVE);
1569         emc_writel(emc_cfg, EMC_CFG);
1570         emc_writel(emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
1571         emc_writel(emc_cfg_pipe_clk_o | EMC_CFG_PIPE_CLK_CLK_ALWAYS_ON,
1572                    EMC_CFG_PIPE_CLK);
1573         emc_writel(next_timing->emc_fdpd_ctrl_cmd_no_ramp &
1574                    ~EMC_FDPD_CTRL_CMD_NO_RAMP_CMD_DPD_NO_RAMP_ENABLE,
1575                    EMC_FDPD_CTRL_CMD_NO_RAMP);
1576
1577         bg_regulator_mode_change =
1578                 ((next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1579                   EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) ^
1580                  (last_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1581                   EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD)) ||
1582                 ((next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1583                   EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) ^
1584                  (last_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1585                   EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD));
1586         enable_bglp_regulator =
1587                 (next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1588                  EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) == 0;
1589         enable_bg_regulator =
1590                 (next_timing->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1591                  EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) == 0;
1592
1593         if (bg_regulator_mode_change) {
1594                 if (enable_bg_regulator)
1595                         emc_writel(last_timing->burst_regs
1596                                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1597                                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
1598                                    EMC_PMACRO_BG_BIAS_CTRL_0);
1599                 else
1600                         emc_writel(last_timing->burst_regs
1601                                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1602                                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
1603                                    EMC_PMACRO_BG_BIAS_CTRL_0);
1604
1605         }
1606
1607         /* Check if we need to turn on VREF generator. */
1608         if ((((last_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1609                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 0) &&
1610              ((next_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1611                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 1)) ||
1612             (((last_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1613                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) == 0) &&
1614              ((next_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
1615                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) == 1))) {
1616                 u32 pad_tx_ctrl =
1617                     next_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
1618                 u32 last_pad_tx_ctrl =
1619                     last_timing->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
1620
1621                 next_dqs_e_ivref = pad_tx_ctrl &
1622                         EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF;
1623                 next_dq_e_ivref = pad_tx_ctrl &
1624                         EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF;
1625                 next_push = (last_pad_tx_ctrl &
1626                              ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF &
1627                              ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) |
1628                         next_dq_e_ivref | next_dqs_e_ivref;
1629                 emc_writel(next_push, EMC_PMACRO_DATA_PAD_TX_CTRL);
1630                 udelay(1);
1631         } else if (bg_regulator_mode_change) {
1632                 udelay(1);
1633         }
1634
1635         emc_set_shadow_bypass(ASSEMBLY);
1636
	/* Step 2:
	 *   Prelock the DLL.
	 */
	emc_cc_dbg(STEPS, "Step 2\n");
	if (next_timing->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
	    EMC_CFG_DIG_DLL_CFG_DLL_EN) {
		/* Target timing runs with the DLL enabled: prelock it
		 * against the new clock source now so it is already locked
		 * when the frequency switch happens. */
		emc_cc_dbg(INFO, "Prelock enabled for target frequency.\n");
		dll_out = dll_prelock(next_timing, 0, clksrc);
		emc_cc_dbg(INFO, "DLL out: 0x%03x\n", dll_out);
		prelock_dll_en = 1;
	} else {
		/* Target timing runs DLL-off: disable it up front. */
		emc_cc_dbg(INFO, "Disabling DLL for target frequency.\n");
		dll_disable(channel_mode);
	}
1651
1652         /* Step 3:
1653          *   Prepare autocal for the clock change.
1654          */
1655         emc_cc_dbg(STEPS, "Step 3\n");
1656         emc_set_shadow_bypass(ACTIVE);
1657         emc_writel(next_timing->emc_auto_cal_config2, EMC_AUTO_CAL_CONFIG2);
1658         emc_writel(next_timing->emc_auto_cal_config3, EMC_AUTO_CAL_CONFIG3);
1659         emc_writel(next_timing->emc_auto_cal_config4, EMC_AUTO_CAL_CONFIG4);
1660         emc_writel(next_timing->emc_auto_cal_config5, EMC_AUTO_CAL_CONFIG5);
1661         emc_writel(next_timing->emc_auto_cal_config6, EMC_AUTO_CAL_CONFIG6);
1662         emc_writel(next_timing->emc_auto_cal_config7, EMC_AUTO_CAL_CONFIG7);
1663         emc_writel(next_timing->emc_auto_cal_config8, EMC_AUTO_CAL_CONFIG8);
1664         emc_set_shadow_bypass(ASSEMBLY);
1665
1666         emc_auto_cal_config |= (EMC_AUTO_CAL_CONFIG_AUTO_CAL_COMPUTE_START |
1667                                 auto_cal_en);
1668         emc_writel(emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
1669
1670         /* Step 4:
1671          *   Update EMC_CFG. (??)
1672          */
1673         emc_cc_dbg(STEPS, "Step 4\n");
1674         if (source_clock_period > 50000 && dram_type == DRAM_TYPE_LPDDR4)
1675                 ccfifo_writel(1, EMC_SELF_REF, 0);
1676         else
1677                 emc_writel(next_timing->emc_cfg_2, EMC_CFG_2);
1678
1679         /* Step 5:
1680          *   Prepare reference variables for ZQCAL regs.
1681          */
1682         emc_cc_dbg(STEPS, "Step 5\n");
1683         emc_zcal_interval = 0;
1684         emc_zcal_wait_cnt_old =
1685                 last_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
1686         emc_zcal_wait_cnt_new =
1687                 next_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
1688         emc_zcal_wait_cnt_old &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
1689         emc_zcal_wait_cnt_new &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
1690
1691         if (dram_type == DRAM_TYPE_LPDDR4)
1692                 zq_wait_long = max((u32)1,
1693                                  div_o3(1000000, destination_clock_period));
1694         else if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
1695                 zq_wait_long = max(next_timing->min_mrs_wait,
1696                                  div_o3(360000, destination_clock_period)) + 4;
1697         else if (dram_type == DRAM_TYPE_DDR3)
1698                 zq_wait_long = max((u32)256,
1699                                  div_o3(320000, destination_clock_period) + 2);
1700         else
1701                 zq_wait_long = 0;
1702
1703         if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
1704                 zq_wait_short = max(max(next_timing->min_mrs_wait, (u32)6),
1705                                   div_o3(90000, destination_clock_period)) + 4;
1706         else if (dram_type == DRAM_TYPE_DDR3)
1707                 zq_wait_short = max((u32)64,
1708                                   div_o3(80000, destination_clock_period)) + 2;
1709         else
1710                 zq_wait_short = 0;
1711
1712         /* Step 6:
1713          *   Training code - removed.
1714          */
1715         emc_cc_dbg(STEPS, "Step 6\n");
1716
1717         /* Step 7:
1718          *   Program FSP reference registers and send MRWs to new FSPWR.
1719          */
1720         emc_cc_dbg(STEPS, "Step 7\n");
1721         if (!fsp_for_next_freq) {
1722                 mr13_flip_fspwr = (next_timing->emc_mrw3 & 0xffffff3f) | 0x80;
1723                 mr13_flip_fspop = (next_timing->emc_mrw3 & 0xffffff3f) | 0x00;
1724         } else {
1725                 mr13_flip_fspwr = (next_timing->emc_mrw3 & 0xffffff3f) | 0x40;
1726                 mr13_flip_fspop = (next_timing->emc_mrw3 & 0xffffff3f) | 0xc0;
1727         }
1728
1729         mr13_catr_enable = (mr13_flip_fspwr & 0xFFFFFFFE) | 0x01;
1730         if (dram_dev_num == TWO_RANK)
1731                 mr13_catr_enable =
1732                         (mr13_catr_enable & 0x3fffffff) | 0x80000000;
1733
1734         if (dram_type == DRAM_TYPE_LPDDR4) {
1735                 emc_writel(mr13_flip_fspwr, EMC_MRW3);
1736                 emc_writel(next_timing->emc_mrw, EMC_MRW);
1737                 emc_writel(next_timing->emc_mrw2, EMC_MRW2);
1738         }
1739
1740         /* Step 8:
1741          *   Program the shadow registers.
1742          */
1743         emc_cc_dbg(STEPS, "Step 8\n");
1744         emc_cc_dbg(SUB_STEPS, "Writing burst_regs\n");
1745         for (i = 0; i < next_timing->burst_regs_num; i++) {
1746                 u64 var;
1747                 u32 wval;
1748
1749                 if (!burst_reg_off[i])
1750                         continue;
1751
1752                 var = (u64)burst_reg_off[i];
1753                 wval = next_timing->burst_regs[i];
1754
1755                 if (dram_type != DRAM_TYPE_LPDDR4 &&
1756                     (var == emc_mrw6_ab      || var == emc_mrw7_ab ||
1757                      var == emc_mrw8_ab      || var == emc_mrw9_ab ||
1758                      var == emc_mrw10_ch0_ab || var == emc_mrw10_ch1_ab ||
1759                      var == emc_mrw11_ch0_ab || var == emc_mrw11_ch1_ab ||
1760                      var == emc_mrw12_ch0_ab || var == emc_mrw12_ch1_ab ||
1761                      var == emc_mrw13_ch0_ab || var == emc_mrw13_ch1_ab ||
1762                      var == emc_mrw14_ab     || var == emc_mrw15_ab ||
1763                      var == emc_training_ctrl_ab))
1764                         continue;
1765
1766                 /* Pain... And suffering. */
1767                 if (var == emc_cfg_ab) {
1768                         wval &= ~EMC_CFG_DRAM_ACPD;
1769                         wval &= ~EMC_CFG_DYN_SELF_REF;
1770                         if (dram_type == DRAM_TYPE_LPDDR4) {
1771                                 wval &= ~EMC_CFG_DRAM_CLKSTOP_SR;
1772                                 wval &= ~EMC_CFG_DRAM_CLKSTOP_PD;
1773                         }
1774                 } else if (var == emc_mrs_wait_cnt_ab &&
1775                            dram_type == DRAM_TYPE_LPDDR2 &&
1776                            opt_zcal_en_cc && !opt_cc_short_zcal &&
1777                            opt_short_zcal) {
1778                         wval = (wval & ~(EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK <<
1779                                          EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT)) |
1780                            ((zq_wait_long & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) <<
1781                             EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1782                 } else if (var == emc_zcal_wait_cnt_ab &&
1783                            dram_type == DRAM_TYPE_DDR3 && opt_zcal_en_cc &&
1784                            !opt_cc_short_zcal && opt_short_zcal) {
1785                         wval = (wval & ~(EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK <<
1786                                        EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_SHIFT)) |
1787                             ((zq_wait_long &
1788                               EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK) <<
1789                               EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1790                 } else if (var == emc_zcal_interval_ab && opt_zcal_en_cc) {
1791                         wval = 0; /* EMC_ZCAL_INTERVAL reset value. */
1792                 } else if (var == emc_pmacro_autocal_cfg_common_ab) {
1793                         wval |= EMC_PMACRO_AUTOCAL_CFG_COMMON_E_CAL_BYPASS_DVFS;
1794                 } else if (var == emc_pmacro_data_pad_tx_ctrl_ab) {
1795                         wval &=
1796                              ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
1797                                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
1798                                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
1799                                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
1800                 } else if (var == emc_pmacro_cmd_pad_tx_ctrl_ab) {
1801                         wval |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
1802                         wval &= ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
1803                                   EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
1804                                   EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
1805                                   EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
1806                 } else if (var == emc_pmacro_brick_ctrl_rfu1_ab) {
1807                         wval &= 0xf800f800;
1808                 } else if (var == emc_pmacro_common_pad_tx_ctrl_ab) {
1809                         wval &= 0xfffffff0;
1810                 }
1811
1812                 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1813                            i, wval, (void *)var);
1814                 __raw_writel(wval, (void __iomem *)var);
1815         }
1816
1817         /* SW addition: do EMC refresh adjustment here. */
1818         set_over_temp_timing(next_timing, dram_over_temp_state);
1819
1820         if (dram_type == DRAM_TYPE_LPDDR4) {
1821                 mrw_req = (23 << EMC_MRW_MRW_MA_SHIFT) |
1822                         (next_timing->run_clocks & EMC_MRW_MRW_OP_MASK);
1823                 emc_writel(mrw_req, EMC_MRW);
1824         }
1825
1826         /* Per channel burst registers. */
1827         emc_cc_dbg(SUB_STEPS, "Writing burst_regs_per_ch\n");
1828         for (i = 0; i < next_timing->burst_regs_per_ch_num; i++) {
1829                 if (!burst_perch_reg_off[i])
1830                         continue;
1831
1832                 if (dram_type != DRAM_TYPE_LPDDR4 &&
1833                     ((u64)burst_perch_reg_off[i] == emc_mrw6_ab ||
1834                      (u64)burst_perch_reg_off[i] == emc_mrw7_ab ||
1835                      (u64)burst_perch_reg_off[i] == emc_mrw8_ab ||
1836                      (u64)burst_perch_reg_off[i] == emc_mrw9_ab ||
1837                      (u64)burst_perch_reg_off[i] == emc_mrw10_ch0_ab ||
1838                      (u64)burst_perch_reg_off[i] == emc_mrw10_ch1_ab ||
1839                      (u64)burst_perch_reg_off[i] == emc_mrw11_ch0_ab ||
1840                      (u64)burst_perch_reg_off[i] == emc_mrw11_ch1_ab ||
1841                      (u64)burst_perch_reg_off[i] == emc_mrw12_ch0_ab ||
1842                      (u64)burst_perch_reg_off[i] == emc_mrw12_ch1_ab ||
1843                      (u64)burst_perch_reg_off[i] == emc_mrw13_ch0_ab ||
1844                      (u64)burst_perch_reg_off[i] == emc_mrw13_ch1_ab ||
1845                      (u64)burst_perch_reg_off[i] == emc_mrw14_ab ||
1846                      (u64)burst_perch_reg_off[i] == emc_mrw15_ab))
1847                         continue;
1848
1849                 /* Filter out second channel if not in DUAL_CHANNEL mode. */
1850                 if (channel_mode != DUAL_CHANNEL &&
1851                     (u64)burst_perch_reg_off[i] >=
1852                     (u64)IO_ADDRESS(TEGRA_EMC1_BASE))
1853                         continue;
1854
1855                 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1856                            i, next_timing->burst_regs_per_ch[i],
1857                            burst_perch_reg_off[i]);
1858                 __raw_writel(next_timing->burst_regs_per_ch[i],
1859                              burst_perch_reg_off[i]);
1860         }
1861
1862         /* Vref regs. */
1863         emc_cc_dbg(SUB_STEPS, "Writing vref_regs\n");
1864         for (i = 0; i < next_timing->vref_regs_num; i++) {
1865                 if (!vref_reg_off[i])
1866                         continue;
1867
1868                 if (channel_mode != DUAL_CHANNEL &&
1869                     (u64)vref_reg_off[i] >= (u64)IO_ADDRESS(TEGRA_EMC1_BASE))
1870                         continue;
1871
1872                 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1873                            i, next_timing->vref_regs[i], vref_reg_off[i]);
1874                 __raw_writel(next_timing->vref_regs[i], vref_reg_off[i]);
1875         }
1876
1877         /* Trimmers. */
1878         emc_cc_dbg(SUB_STEPS, "Writing trim_regs\n");
1879         for (i = 0; i < next_timing->trim_regs_num; i++) {
1880                 u64 trim_reg;
1881
1882                 if (!trim_reg_off[i])
1883                         continue;
1884
1885                 trim_reg = (u64)trim_reg_off[i] & 0xfff;
1886                 if (compensate_trimmer_applicable &&
1887                     (trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1888                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1889                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1890                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1891                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1892                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1893                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1894                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1895                      trim_reg == EMC_DATA_BRLSHFT_0 ||
1896                      trim_reg == EMC_DATA_BRLSHFT_1)) {
1897                         u32 reg =
1898                                 apply_periodic_compensation_trimmer(next_timing,
1899                                                                     trim_reg);
1900                         emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n", i, reg,
1901                                    trim_reg_off[i]);
1902                         __raw_writel(reg, trim_reg_off[i]);
1903                 } else {
1904                         emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1905                                    i, next_timing->trim_regs[i],
1906                                    trim_reg_off[i]);
1907                         __raw_writel(next_timing->trim_regs[i],
1908                                      trim_reg_off[i]);
1909                 }
1910
1911         }
1912
1913         /* Per channel trimmers. */
1914         emc_cc_dbg(SUB_STEPS, "Writing trim_regs_per_ch\n");
1915         for (i = 0; i < next_timing->trim_regs_per_ch_num; i++) {
1916                 u64 trim_reg;
1917
1918                 if (!trim_perch_reg_off[i])
1919                         continue;
1920
1921                 if (channel_mode != DUAL_CHANNEL &&
1922                     (u64)vref_reg_off[i] >=
1923                     (u64)IO_ADDRESS(TEGRA_EMC1_BASE))
1924                         continue;
1925
1926                 trim_reg = (u64)trim_perch_reg_off[i] & 0xfff;
1927                 if (compensate_trimmer_applicable &&
1928                     (trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1929                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1930                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1931                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1932                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1933                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1934                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1935                      trim_reg == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1936                      trim_reg == EMC_DATA_BRLSHFT_0 ||
1937                      trim_reg == EMC_DATA_BRLSHFT_1)) {
1938                         u32 reg =
1939                                 apply_periodic_compensation_trimmer(next_timing,
1940                                                             trim_reg);
1941                         emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1942                                    i, reg, trim_perch_reg_off[i]);
1943                         __raw_writel(reg,
1944                                      trim_perch_reg_off[i]);
1945                 } else {
1946                         emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1947                                    i, next_timing->trim_regs_per_ch[i],
1948                                    trim_perch_reg_off[i]);
1949                         __raw_writel(next_timing->trim_regs_per_ch[i],
1950                                      trim_perch_reg_off[i]);
1951                 }
1952         }
1953
1954         emc_cc_dbg(SUB_STEPS, "Writing burst_mc_regs\n");
1955         for (i = 0; i < next_timing->burst_mc_regs_num; i++) {
1956                 emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1957                            i, next_timing->burst_mc_regs[i],
1958                            burst_mc_reg_off[i]);
1959                 __raw_writel(next_timing->burst_mc_regs[i],
1960                              burst_mc_reg_off[i]);
1961         }
1962
1963         /* Registers to be programmed on the faster clock. */
1964         if (next_timing->rate < last_timing->rate) {
1965                 emc_cc_dbg(SUB_STEPS, "Writing la_scale_regs\n");
1966                 for (i = 0; i < next_timing->la_scale_regs_num; i++) {
1967                         emc_cc_dbg(REG_LISTS, "(%u) 0x%08x => 0x%p\n",
1968                                    i, next_timing->la_scale_regs[i],
1969                                    la_scale_off_regs[i]);
1970                         __raw_writel(next_timing->la_scale_regs[i],
1971                                      la_scale_off_regs[i]);
1972                 }
1973         }
1974
1975         /* Flush all the burst register writes. */
1976         wmb();
1977
1978         /* Step 9:
1979          *   LPDDR4 section A.
1980          */
1981         emc_cc_dbg(STEPS, "Step 9\n");
1982         if (dram_type == DRAM_TYPE_LPDDR4) {
1983                 emc_writel(emc_zcal_interval, EMC_ZCAL_INTERVAL);
1984                 emc_writel(emc_zcal_wait_cnt_new, EMC_ZCAL_WAIT_CNT);
1985
1986                 emc_dbg |= (EMC_DBG_WRITE_MUX_ACTIVE |
1987                             EMC_DBG_WRITE_ACTIVE_ONLY);
1988
1989                 emc_writel(emc_dbg, EMC_DBG);
1990                 emc_writel(emc_zcal_interval, EMC_ZCAL_INTERVAL);
1991                 emc_writel(emc_dbg_o, EMC_DBG);
1992         }
1993
1994         /* Step 10:
1995          *   LPDDR4 and DDR3 common section.
1996          */
1997         emc_cc_dbg(STEPS, "Step 10\n");
1998         if (opt_dvfs_mode == MAN_SR || dram_type == DRAM_TYPE_LPDDR4) {
1999                 if (dram_type == DRAM_TYPE_LPDDR4)
2000                         ccfifo_writel(0x101, EMC_SELF_REF, 0);
2001                 else
2002                         ccfifo_writel(0x1, EMC_SELF_REF, 0);
2003
2004                 if (dram_type == DRAM_TYPE_LPDDR4 &&
2005                     source_clock_period <= zqcal_before_cc_cutoff) {
2006                         ccfifo_writel(mr13_flip_fspwr ^ 0x40, EMC_MRW3, 0);
2007                         ccfifo_writel((next_timing->burst_regs[EMC_MRW6_INDEX] &
2008                                        0xFFFF3F3F) |
2009                                       (last_timing->burst_regs[EMC_MRW6_INDEX] &
2010                                        0x0000C0C0), EMC_MRW6, 0);
2011                         ccfifo_writel(
2012                                 (next_timing->burst_regs[EMC_MRW14_INDEX] &
2013                                  0xFFFF0707) |
2014                                 (last_timing->burst_regs[EMC_MRW14_INDEX] &
2015                                  0x00003838), EMC_MRW14, 0);
2016
2017                         if (dram_dev_num == TWO_RANK) {
2018                                 ccfifo_writel(
2019                                       (next_timing->burst_regs[EMC_MRW7_INDEX] &
2020                                        0xFFFF3F3F) |
2021                                       (last_timing->burst_regs[EMC_MRW7_INDEX] &
2022                                        0x0000C0C0), EMC_MRW7, 0);
2023                                 ccfifo_writel(
2024                                      (next_timing->burst_regs[EMC_MRW15_INDEX] &
2025                                       0xFFFF0707) |
2026                                      (last_timing->burst_regs[EMC_MRW15_INDEX] &
2027                                       0x00003838), EMC_MRW15, 0);
2028                         }
2029                         if (opt_zcal_en_cc) {
2030                                 if (dram_dev_num == ONE_RANK)
2031                                         ccfifo_writel(
2032                                                 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2033                                                 EMC_ZQ_CAL_ZQ_CAL_CMD,
2034                                                 EMC_ZQ_CAL, 0);
2035                                 else if (shared_zq_resistor)
2036                                         ccfifo_writel(
2037                                                 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2038                                                 EMC_ZQ_CAL_ZQ_CAL_CMD,
2039                                                 EMC_ZQ_CAL, 0);
2040                                 else
2041                                         ccfifo_writel(EMC_ZQ_CAL_ZQ_CAL_CMD,
2042                                                      EMC_ZQ_CAL, 0);
2043                         }
2044                 }
2045         }
2046
2047         emc_dbg = emc_dbg_o;
2048         if (dram_type == DRAM_TYPE_LPDDR4) {
2049                 ccfifo_writel(mr13_flip_fspop | 0x8, EMC_MRW3,
2050                               (1000 * fake_timing->dram_timing_regs[T_RP]) /
2051                               source_clock_period);
2052                 ccfifo_writel(0, 0, tFC_lpddr4 / source_clock_period);
2053         }
2054
2055         if (dram_type == DRAM_TYPE_LPDDR4 || opt_dvfs_mode != MAN_SR) {
2056                 u32 t = 30 + (cya_allow_ref_cc ?
2057                         (4000 * fake_timing->dram_timing_regs[T_RFC]) +
2058                         ((1000 * fake_timing->dram_timing_regs[T_RP]) /
2059                          source_clock_period) : 0);
2060
2061                 ccfifo_writel(emc_pin_o & ~(EMC_PIN_PIN_CKE_PER_DEV |
2062                                             EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE),
2063                               EMC_PIN, t);
2064         }
2065
2066         ref_delay_mult = 1;
2067         ref_b4_sref_en = 0;
2068         cya_issue_pc_ref = 0;
2069
2070         ref_delay_mult += ref_b4_sref_en   ? 1 : 0;
2071         ref_delay_mult += cya_allow_ref_cc ? 1 : 0;
2072         ref_delay_mult += cya_issue_pc_ref ? 1 : 0;
2073         ref_delay = ref_delay_mult *
2074                 ((1000 * fake_timing->dram_timing_regs[T_RP]
2075                   / source_clock_period) +
2076                  (1000 * fake_timing->dram_timing_regs[T_RFC] /
2077                   source_clock_period)) + 20;
2078
2079         /* Step 11:
2080          *   Ramp down.
2081          */
2082         emc_cc_dbg(STEPS, "Step 11\n");
2083         ccfifo_writel(0x0, EMC_CFG_SYNC,
2084                       dram_type == DRAM_TYPE_LPDDR4 ? 0 : ref_delay);
2085
2086         emc_dbg_active = emc_dbg | (EMC_DBG_WRITE_MUX_ACTIVE | /* Redundant. */
2087                                     EMC_DBG_WRITE_ACTIVE_ONLY);
2088         ccfifo_writel(emc_dbg_active, EMC_DBG, 0);
2089
2090         /* Todo: implement do_dvfs_power_ramp_down */
2091         ramp_down_wait = do_dvfs_power_ramp_down(source_clock_period, 0,
2092                                                  last_timing, next_timing);
2093
2094         /* Step 12:
2095          *   And finally - trigger the clock change.
2096          */
2097         emc_cc_dbg(STEPS, "Step 12\n");
2098         ccfifo_writel(1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE, 0);
2099         emc_dbg_active &= ~EMC_DBG_WRITE_ACTIVE_ONLY;
2100         ccfifo_writel(emc_dbg_active, EMC_DBG, 0);
2101
2102         /* Step 13:
2103          *   Ramp up.
2104          */
2105         /* Todo: implement do_dvfs_power_ramp_up(). */
2106         emc_cc_dbg(STEPS, "Step 13\n");
2107         ramp_up_wait = do_dvfs_power_ramp_up(destination_clock_period, 0,
2108                                              last_timing, next_timing);
2109         ccfifo_writel(emc_dbg, EMC_DBG, 0);
2110
2111         /* Step 14:
2112          *   Bringup CKE pins.
2113          */
2114         emc_cc_dbg(STEPS, "Step 14\n");
2115         if (dram_type == DRAM_TYPE_LPDDR4) {
2116                 u32 r = emc_pin_o | EMC_PIN_PIN_CKE;
2117                 if (dram_dev_num == TWO_RANK)
2118                         ccfifo_writel(r | EMC_PIN_PIN_CKEB |
2119                                       EMC_PIN_PIN_CKE_PER_DEV, EMC_PIN,
2120                                       0);
2121                 else
2122                         ccfifo_writel(r & ~(EMC_PIN_PIN_CKEB |
2123                                             EMC_PIN_PIN_CKE_PER_DEV),
2124                                       EMC_PIN, 0);
2125         }
2126
2127         /* Step 15: (two step 15s ??)
2128          *   Calculate zqlatch wait time; has dependency on ramping times.
2129          */
2130         emc_cc_dbg(STEPS, "Step 15\n");
2131
2132         if (source_clock_period <= zqcal_before_cc_cutoff) {
2133                 s32 t = (s32)(ramp_up_wait + ramp_down_wait) /
2134                         (s32)destination_clock_period;
2135                 zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - t;
2136         } else {
2137                 zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj -
2138                         div_o3(1000 * next_timing->dram_timing_regs[T_PDEX],
2139                                destination_clock_period);
2140         }
2141
2142         emc_cc_dbg(INFO, "tZQCAL_lpddr4_fc_adj = %u\n", tZQCAL_lpddr4_fc_adj);
2143         emc_cc_dbg(INFO, "destination_clock_period = %u\n",
2144                    destination_clock_period);
2145         emc_cc_dbg(INFO, "next_timing->dram_timing_regs[T_PDEX] = %u\n",
2146                    next_timing->dram_timing_regs[T_PDEX]);
2147         emc_cc_dbg(INFO, "zq_latch_dvfs_wait_time = %d\n",
2148                    max_t(s32, 0, zq_latch_dvfs_wait_time));
2149
2150         if (dram_type == DRAM_TYPE_LPDDR4 && opt_zcal_en_cc) {
2151                 if (dram_dev_num == ONE_RANK) {
2152                         if (source_clock_period > zqcal_before_cc_cutoff)
2153                                 ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2154                                    EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
2155                                    div_o3(1000 *
2156                                           next_timing->dram_timing_regs[T_PDEX],
2157                                           destination_clock_period));
2158                         ccfifo_writel((mr13_flip_fspop & 0xFFFFFFF7) |
2159                                    0x0C000000, EMC_MRW3,
2160                                    div_o3(1000 *
2161                                           next_timing->dram_timing_regs[T_PDEX],
2162                                           destination_clock_period));
2163                         ccfifo_writel(0, EMC_SELF_REF, 0);
2164                         ccfifo_writel(0, EMC_REF, 0);
2165                         ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2166                                       EMC_ZQ_CAL_ZQ_LATCH_CMD,
2167                                       EMC_ZQ_CAL,
2168                                       max_t(s32, 0, zq_latch_dvfs_wait_time));
2169                 } else if (shared_zq_resistor) {
2170                         if (source_clock_period > zqcal_before_cc_cutoff)
2171                                 ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2172                                    EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
2173                                    div_o3(1000 *
2174                                           next_timing->dram_timing_regs[T_PDEX],
2175                                           destination_clock_period));
2176
2177                         ccfifo_writel(2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2178                                   EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
2179                                   max_t(s32, 0, zq_latch_dvfs_wait_time) +
2180                                   div_o3(1000 *
2181                                          next_timing->dram_timing_regs[T_PDEX],
2182                                          destination_clock_period));
2183                         ccfifo_writel(1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2184                                       EMC_ZQ_CAL_ZQ_LATCH_CMD,
2185                                       EMC_ZQ_CAL, 0);
2186
2187                         ccfifo_writel((mr13_flip_fspop & 0xfffffff7) |
2188                                       0x0c000000, EMC_MRW3, 0);
2189                         ccfifo_writel(0, EMC_SELF_REF, 0);
2190                         ccfifo_writel(0, EMC_REF, 0);
2191
2192                         ccfifo_writel(1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2193                                       EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
2194                                       tZQCAL_lpddr4 / destination_clock_period);
2195                 } else {
2196                         if (source_clock_period > zqcal_before_cc_cutoff) {
2197                                 ccfifo_writel(EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
2198                                    div_o3(1000 *
2199                                           next_timing->dram_timing_regs[T_PDEX],
2200                                           destination_clock_period));
2201                         }
2202
2203                         ccfifo_writel((mr13_flip_fspop & 0xfffffff7) |
2204                                    0x0c000000, EMC_MRW3,
2205                                    div_o3(1000 *
2206                                           next_timing->dram_timing_regs[T_PDEX],
2207                                           destination_clock_period));
2208                         ccfifo_writel(0, EMC_SELF_REF, 0);
2209                         ccfifo_writel(0, EMC_REF, 0);
2210
2211                         ccfifo_writel(EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
2212                                       max_t(s32, 0, zq_latch_dvfs_wait_time));
2213                 }
2214         }
2215
2216         /* WAR: delay for zqlatch */
2217         ccfifo_writel(0, 0, 10);
2218
2219         /* Step 16:
2220          *   LPDDR4 Conditional Training Kickoff. Removed.
2221          */
2222
2223         /* Step 17:
2224          *   MANSR exit self refresh.
2225          */
2226         emc_cc_dbg(STEPS, "Step 17\n");
2227         if (opt_dvfs_mode == MAN_SR && dram_type != DRAM_TYPE_LPDDR4)
2228                 ccfifo_writel(0, EMC_SELF_REF, 0);
2229
2230         /* Step 18:
2231          *   Send MRWs to LPDDR3/DDR3.
2232          */
2233         emc_cc_dbg(STEPS, "Step 18\n");
2234         if (dram_type == DRAM_TYPE_LPDDR2) {
2235                 ccfifo_writel(next_timing->emc_mrw2, EMC_MRW2, 0);
2236                 ccfifo_writel(next_timing->emc_mrw,  EMC_MRW,  0);
2237                 if (is_lpddr3)
2238                         ccfifo_writel(next_timing->emc_mrw4, EMC_MRW4, 0);
2239         } else if (dram_type == DRAM_TYPE_DDR3) {
2240                 if (opt_dll_mode == DLL_ON)
2241                         ccfifo_writel(next_timing->emc_emrs &
2242                                       ~EMC_EMRS_USE_EMRS_LONG_CNT, EMC_EMRS, 0);
2243                 ccfifo_writel(next_timing->emc_emrs2 &
2244                               ~EMC_EMRS2_USE_EMRS2_LONG_CNT, EMC_EMRS2, 0);
2245                 ccfifo_writel(next_timing->emc_mrs |
2246                               EMC_EMRS_USE_EMRS_LONG_CNT, EMC_MRS, 0);
2247         }
2248
2249         /* Step 19:
2250          *   ZQCAL for LPDDR3/DDR3
2251          */
2252         emc_cc_dbg(STEPS, "Step 19\n");
2253         if (opt_zcal_en_cc) {
2254                 if (dram_type == DRAM_TYPE_LPDDR2) {
2255                         u32 r;
2256
2257                         zq_op = opt_cc_short_zcal  ? 0x56 : 0xAB;
2258                         zcal_wait_time_ps = opt_cc_short_zcal  ? 90000 : 360000;
2259                         zcal_wait_time_clocks = div_o3(zcal_wait_time_ps,
2260                                                     destination_clock_period);
2261                         r = zcal_wait_time_clocks <<
2262                                 EMC_MRS_WAIT_CNT2_MRS_EXT2_WAIT_CNT_SHIFT |
2263                                 zcal_wait_time_clocks <<
2264                                 EMC_MRS_WAIT_CNT2_MRS_EXT1_WAIT_CNT_SHIFT;
2265                         ccfifo_writel(r, EMC_MRS_WAIT_CNT2, 0);
2266                         ccfifo_writel(2 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
2267                                       EMC_MRW_USE_MRW_EXT_CNT |
2268                                       10 << EMC_MRW_MRW_MA_SHIFT |
2269                                       zq_op << EMC_MRW_MRW_OP_SHIFT,
2270                                       EMC_MRW, 0);
2271                         if (dram_dev_num == TWO_RANK) {
2272                                 r = 1 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
2273                                         EMC_MRW_USE_MRW_EXT_CNT |
2274                                         10 << EMC_MRW_MRW_MA_SHIFT |
2275                                         zq_op << EMC_MRW_MRW_OP_SHIFT;
2276                                 ccfifo_writel(r, EMC_MRW, 0);
2277                         }
2278                 } else if (dram_type == DRAM_TYPE_DDR3) {
2279                         zq_op = opt_cc_short_zcal ? 0 : EMC_ZQ_CAL_LONG;
2280                         ccfifo_writel(zq_op | 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2281                                       EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL, 0);
2282                         if (dram_dev_num == TWO_RANK)
2283                                 ccfifo_writel(zq_op |
2284                                               1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
2285                                               EMC_ZQ_CAL_ZQ_CAL_CMD,
2286                                               EMC_ZQ_CAL, 0);
2287                 }
2288         }
2289
2290         if (bg_regulator_mode_change) {
2291                 emc_set_shadow_bypass(ACTIVE);
2292                 bg_regulator_switch_complete_wait_clks =
2293                         ramp_up_wait > 1250000 ? 0 :
2294                         (1250000 - ramp_up_wait) / destination_clock_period;
2295                 ccfifo_writel(next_timing->burst_regs
2296                               [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX],
2297                               EMC_PMACRO_BG_BIAS_CTRL_0,
2298                               bg_regulator_switch_complete_wait_clks);
2299                 emc_set_shadow_bypass(ASSEMBLY);
2300         }
2301
2302         /* Step 20:
2303          *   Issue ref and optional QRST.
2304          */
2305         emc_cc_dbg(STEPS, "Step 20\n");
2306         if (dram_type != DRAM_TYPE_LPDDR4)
2307                 ccfifo_writel(0, EMC_REF, 0);
2308
2309         if (opt_do_sw_qrst) {
2310                 ccfifo_writel(1, EMC_ISSUE_QRST, 0);
2311                 ccfifo_writel(0, EMC_ISSUE_QRST, 2);
2312         }
2313
2314         /* Step 21:
2315          *   Restore ZCAL and ZCAL interval.
2316          */
2317         emc_cc_dbg(STEPS, "Step 21\n");
2318         if (save_restore_clkstop_pd || opt_zcal_en_cc) {
2319                 ccfifo_writel(emc_dbg_o | EMC_DBG_WRITE_MUX_ACTIVE, EMC_DBG, 0);
2320                 if (opt_zcal_en_cc && dram_type != DRAM_TYPE_LPDDR4)
2321                         ccfifo_writel(next_timing->
2322                                       burst_regs[EMC_ZCAL_INTERVAL_INDEX],
2323                                       EMC_ZCAL_INTERVAL, 0);
2324
2325                 if (save_restore_clkstop_pd)
2326                         ccfifo_writel(next_timing->burst_regs[EMC_CFG_INDEX] &
2327                                       ~EMC_CFG_DYN_SELF_REF, EMC_CFG, 0);
2328                 ccfifo_writel(emc_dbg_o, EMC_DBG, 0);
2329         }
2330
2331         /* Step 22:
2332          *   Restore EMC_CFG_PIPE_CLK.
2333          */
2334         emc_cc_dbg(STEPS, "Step 22\n");
2335         ccfifo_writel(emc_cfg_pipe_clk_o, EMC_CFG_PIPE_CLK, 0);
2336
2337         if (bg_regulator_mode_change) {
2338                 if (enable_bg_regulator)
2339                         emc_writel(next_timing->burst_regs
2340                                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
2341                                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
2342                                    EMC_PMACRO_BG_BIAS_CTRL_0);
2343                 else
2344                         emc_writel(next_timing->burst_regs
2345                                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
2346                                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
2347                                    EMC_PMACRO_BG_BIAS_CTRL_0);
2348         }
2349
2350         /* Step 23:
2351          */
2352         emc_cc_dbg(STEPS, "Step 23\n");
2353
2354         /* Fix: rename tmp to something meaningful. */
2355         tmp = emc_readl(EMC_CFG_DIG_DLL);
2356         tmp |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
2357         tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
2358         tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
2359         tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
2360         tmp = (tmp & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
2361                 (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
2362         emc_writel(tmp, EMC_CFG_DIG_DLL);
2363
2364         /* Clock change. Woot. BUG()s out if something fails. */
2365         do_clock_change(clksrc);
2366
2367         /* Step 24:
2368          *   Save training results. Removed.
2369          */
2370
2371         /* Step 25:
2372          *   Program MC updown registers.
2373          */
2374         emc_cc_dbg(STEPS, "Step 25\n");
2375
2376         if (next_timing->rate > last_timing->rate) {
2377                 for (i = 0; i < next_timing->la_scale_regs_num; i++)
2378                         __raw_writel(next_timing->la_scale_regs[i],
2379                                      la_scale_off_regs[i]);
2380                 emc_timing_update(0);
2381         }
2382
2383         /* Step 26:
2384          *   Restore ZCAL registers.
2385          */
2386         emc_cc_dbg(STEPS, "Step 26\n");
2387         if (dram_type == DRAM_TYPE_LPDDR4) {
2388                 emc_set_shadow_bypass(ACTIVE);
2389                 emc_writel(next_timing->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
2390                            EMC_ZCAL_WAIT_CNT);
2391                 emc_writel(next_timing->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
2392                            EMC_ZCAL_INTERVAL);
2393                 emc_set_shadow_bypass(ASSEMBLY);
2394         }
2395
2396         if (dram_type != DRAM_TYPE_LPDDR4 &&
2397             opt_zcal_en_cc && !opt_short_zcal && opt_cc_short_zcal) {
2398                 udelay(2);
2399
2400                 emc_set_shadow_bypass(ACTIVE);
2401                 if (dram_type == DRAM_TYPE_LPDDR2)
2402                         emc_writel(next_timing->
2403                                   burst_regs[EMC_MRS_WAIT_CNT_INDEX],
2404                                   EMC_MRS_WAIT_CNT);
2405                 else if (dram_type == DRAM_TYPE_DDR3)
2406                         emc_writel(next_timing->
2407                                    burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
2408                                    EMC_ZCAL_WAIT_CNT);
2409                 emc_set_shadow_bypass(ASSEMBLY);
2410         }
2411
2412         /* Step 27:
2413          *   Restore EMC_CFG, FDPD registers.
2414          */
2415         emc_cc_dbg(STEPS, "Step 27\n");
2416         emc_set_shadow_bypass(ACTIVE);
2417         emc_writel(next_timing->burst_regs[EMC_CFG_INDEX], EMC_CFG);
2418         emc_set_shadow_bypass(ASSEMBLY);
2419         emc_writel(next_timing->emc_fdpd_ctrl_cmd_no_ramp,
2420                    EMC_FDPD_CTRL_CMD_NO_RAMP);
2421         emc_writel(next_timing->emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
2422
2423         /* Step 28:
2424          *   Training recover. Removed.
2425          */
2426         emc_cc_dbg(STEPS, "Step 28\n");
2427
2428         emc_set_shadow_bypass(ACTIVE);
2429         emc_writel(next_timing->burst_regs[EMC_PMACRO_AUTOCAL_CFG_COMMON_INDEX],
2430                    EMC_PMACRO_AUTOCAL_CFG_COMMON);
2431         emc_set_shadow_bypass(ASSEMBLY);
2432
2433         /* Step 29:
2434          *   Power fix WAR.
2435          */
2436         emc_cc_dbg(STEPS, "Step 29\n");
2437         emc_writel(EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE0 |
2438                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE1 |
2439                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE2 |
2440                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE3 |
2441                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE4 |
2442                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE5 |
2443                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE6 |
2444                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE7,
2445                    EMC_PMACRO_CFG_PM_GLOBAL_0);
2446         emc_writel(EMC_PMACRO_TRAINING_CTRL_0_CH0_TRAINING_E_WRPTR,
2447                    EMC_PMACRO_TRAINING_CTRL_0);
2448         emc_writel(EMC_PMACRO_TRAINING_CTRL_1_CH1_TRAINING_E_WRPTR,
2449                    EMC_PMACRO_TRAINING_CTRL_1);
2450         emc_writel(0, EMC_PMACRO_CFG_PM_GLOBAL_0);
2451
2452         /* Step 30:
2453          *   Re-enable autocal.
2454          */
2455         emc_cc_dbg(STEPS, "Step 30: Re-enable DLL and AUTOCAL\n");
2456         if (next_timing->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
2457             EMC_CFG_DIG_DLL_CFG_DLL_EN) {
2458                 tmp = emc_readl(EMC_CFG_DIG_DLL);
2459                 tmp |=  EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
2460                 tmp |=  EMC_CFG_DIG_DLL_CFG_DLL_EN;
2461                 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
2462                 tmp &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
2463                 tmp =  (tmp & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
2464                         (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
2465                 emc_writel(tmp, EMC_CFG_DIG_DLL);
2466                 emc_timing_update(channel_mode);
2467         }
2468
2469         emc_auto_cal_config = next_timing->emc_auto_cal_config;
2470         emc_writel(emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
2471
2472         /* Step 31:
2473          *   Restore FSP to account for switch back. Only needed in training.
2474          */
2475         emc_cc_dbg(STEPS, "Step 31\n");
2476
2477         /* Step 32:
2478          *   [SW] Update the alternative timing (derated vs normal) table with
2479          * the periodic training values computed during the clock change
2480          * pre-amble.
2481          */
2482         emc_cc_dbg(STEPS, "Step 32: Update alt timing\n");
2483         __update_emc_alt_timing(next_timing);
2484
2485         /* Done! Yay. */
2486 }