
ARM: mvebu: a38x: sync ddr training code with upstream

This syncs drivers/ddr/marvell/a38x/ with the mv_ddr-armada-17.10 branch
of https://github.com/MarvellEmbeddedProcessors/mv-ddr-marvell.git.

The upstream code is incorporated omitting the ddr4 and apn806 code and
folding the nested a38x directory up one level. After that, a
semi-automated step is used to drop unused features with unifdef:

  find drivers/ddr/marvell/a38x/ -name '*.[ch]' | \
    xargs unifdef -m -UMV_DDR -UMV_DDR_ATF -UCONFIG_DDR4 \
		-UCONFIG_APN806 -UCONFIG_MC_STATIC \
		-UCONFIG_MC_STATIC_PRINT -UCONFIG_PHY_STATIC \
		-UCONFIG_64BIT
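
For illustration only (this snippet is made up, not taken from the patch,
and the helper names are hypothetical), unifdef -m -UCONFIG_DDR4 edits a
file in place, dropping the branch guarded by the now-undefined symbol and
keeping the alternative:

  /* before: hypothetical guarded block */
  #if defined(CONFIG_DDR4)
  	val = ddr4_only_setup();	/* dropped by -UCONFIG_DDR4 */
  #else
  	val = ddr3_setup();		/* kept */
  #endif

  /* after unifdef -m -UCONFIG_DDR4 on the same file */
  	val = ddr3_setup();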

INTER_REGS_BASE is updated to be defined as SOC_REGS_PHY_BASE.
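
As a minimal sketch of the resulting definition (the surrounding header is
an assumption, not quoted from the patch):

  /* let the training code address registers via U-Boot's SoC base */
  #define INTER_REGS_BASE	SOC_REGS_PHY_BASE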

Some now-empty files are removed and the ternary license text is
replaced with an SPDX GPL-2.0+ identifier.
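
The replacement header in each remaining file is a single SPDX line,
something like the following (the exact comment style is an assumption,
following U-Boot convention):

  /* SPDX-License-Identifier:	GPL-2.0+ */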

Signed-off-by: Chris Packham <judge.packham@gmail.com>
Signed-off-by: Stefan Roese <sr@denx.de>
Commit:       2b4ffbf6b4 (parent 00a7767766)
Author:       Chris Packham
Date:         2018-05-10 13:28:29 +12:00
Committed by: Stefan Roese

56 changed files with 7952 additions and 5147 deletions


@@ -3,12 +3,6 @@
* Copyright (C) Marvell International Ltd. and its affiliates
*/
#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>
#include "ddr3_init.h"
#define VREF_INITIAL_STEP 3
@@ -16,9 +10,8 @@
#define VREF_MAX_INDEX 7
#define MAX_VALUE (1024 - 1)
#define MIN_VALUE (-MAX_VALUE)
#define GET_RD_SAMPLE_DELAY(data, cs) ((data >> rd_sample_mask[cs]) & 0x1f)
#define GET_RD_SAMPLE_DELAY(data, cs) ((data >> rd_sample_mask[cs]) & 0xf)
u32 ck_delay = (u32)-1, ck_delay_16 = (u32)-1;
u32 ca_delay;
int ddr3_tip_centr_skip_min_win_check = 0;
u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
@@ -48,45 +41,41 @@ static u32 rd_sample_mask[] = {
*/
int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
{
u32 cs_num = 0, max_cs = 0, max_read_sample = 0, min_read_sample = 0x1f;
u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
u32 data_read[MAX_INTERFACE_NUM] = { 0 };
u32 read_sample[MAX_CS_NUM];
u32 val;
u32 pup_index;
int max_phase = MIN_VALUE, current_phase;
enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
struct hws_topology_map *tm = ddr3_get_topology_map();
u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
DUNIT_ODT_CONTROL_REG,
DUNIT_ODT_CTRL_REG,
0 << 8, 0x3 << 8));
CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
READ_DATA_SAMPLE_DELAY,
RD_DATA_SMPL_DLYS_REG,
data_read, MASK_ALL_BITS));
val = data_read[if_id];
max_cs = hws_ddr3_tip_max_cs_get();
for (cs_num = 0; cs_num < max_cs; cs_num++) {
for (cs_num = 0; cs_num < MAX_CS_NUM; cs_num++) {
read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);
/* find maximum of read_samples */
if (read_sample[cs_num] >= max_read_sample) {
if (read_sample[cs_num] == max_read_sample) {
/* search for max phase */;
} else {
max_read_sample = read_sample[cs_num];
if (read_sample[cs_num] == max_read_sample)
max_phase = MIN_VALUE;
}
else
max_read_sample = read_sample[cs_num];
for (pup_index = 0;
pup_index < tm->num_of_bus_per_interface;
pup_index < octets_per_if_num;
pup_index++) {
CHECK_STATUS(ddr3_tip_bus_read
(dev_num, if_id,
ACCESS_TYPE_UNICAST, pup_index,
DDR_PHY_DATA,
RL_PHY_REG + CS_REG_VALUE(cs_num),
RL_PHY_REG(cs_num),
&val));
current_phase = ((int)val & 0xe0) >> 6;
@@ -100,21 +89,19 @@ int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
min_read_sample = read_sample[cs_num];
}
if (min_read_sample <= tm->interface_params[if_id].cas_l) {
min_read_sample = (int)tm->interface_params[if_id].cas_l;
}
min_read_sample = min_read_sample - 1;
max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
if (min_read_sample >= 0xf)
min_read_sample = 0xf;
if (max_read_sample >= 0x1f)
max_read_sample = 0x1f;
CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
ODT_TIMING_LOW,
DDR_ODT_TIMING_LOW_REG,
((min_read_sample - 1) << 12),
0xf << 12));
CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
ODT_TIMING_LOW,
DDR_ODT_TIMING_LOW_REG,
(max_read_sample << 16),
0x1f << 16));
@@ -123,7 +110,7 @@ int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
{
u32 reg_pup = RESULT_DB_PHY_REG_ADDR;
u32 reg_pup = RESULT_PHY_REG;
u32 reg_data;
u32 cs_num;
int i;
@@ -138,7 +125,7 @@ int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
ACCESS_TYPE_UNICAST, i,
DDR_PHY_DATA, reg_pup,
&reg_data));
res[i] = (reg_data >> RESULT_DB_PHY_REG_RX_OFFSET) & 0x1f;
res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
}
return 0;
@@ -176,7 +163,8 @@ int ddr3_tip_vref(u32 dev_num)
u32 copy_start_pattern, copy_end_pattern;
enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
u8 res[4];
struct hws_topology_map *tm = ddr3_get_topology_map();
u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
CHECK_STATUS(ddr3_tip_special_rx(dev_num));
@@ -190,9 +178,9 @@ int ddr3_tip_vref(u32 dev_num)
/* init params */
for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
VALIDATE_ACTIVE(tm->if_act_mask, if_id);
VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
for (pup = 0;
pup < tm->num_of_bus_per_interface; pup++) {
pup < octets_per_if_num; pup++) {
current_vref[pup][if_id] = 0;
last_vref[pup][if_id] = 0;
lim_vref[pup][if_id] = 0;
@@ -228,7 +216,7 @@ int ddr3_tip_vref(u32 dev_num)
}
/* TODO: Set number of active interfaces */
num_pup = tm->num_of_bus_per_interface * MAX_INTERFACE_NUM;
num_pup = octets_per_if_num * MAX_INTERFACE_NUM;
while ((algo_run_flag <= num_pup) & (while_count < 10)) {
while_count++;
@@ -239,13 +227,13 @@ int ddr3_tip_vref(u32 dev_num)
/* Read Valid window results only for non converge pups */
for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
VALIDATE_ACTIVE(tm->if_act_mask, if_id);
VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
if (interface_state[if_id] != 4) {
get_valid_win_rx(dev_num, if_id, res);
for (pup = 0;
pup < tm->num_of_bus_per_interface;
pup < octets_per_if_num;
pup++) {
VALIDATE_ACTIVE
VALIDATE_BUS_ACTIVE
(tm->bus_act_mask, pup);
if (pup_st[pup]
[if_id] ==
@@ -263,14 +251,14 @@ int ddr3_tip_vref(u32 dev_num)
}
for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
VALIDATE_ACTIVE(tm->if_act_mask, if_id);
VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
DEBUG_TRAINING_HW_ALG(
DEBUG_LEVEL_TRACE,
("current_valid_window: IF[ %d ] - ", if_id));
for (pup = 0;
pup < tm->num_of_bus_per_interface; pup++) {
VALIDATE_ACTIVE(tm->bus_act_mask, pup);
pup < octets_per_if_num; pup++) {
VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
("%d ",
current_valid_window
@@ -281,10 +269,10 @@ int ddr3_tip_vref(u32 dev_num)
/* Compare results and respond as function of state */
for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
VALIDATE_ACTIVE(tm->if_act_mask, if_id);
VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
for (pup = 0;
pup < tm->num_of_bus_per_interface; pup++) {
VALIDATE_ACTIVE(tm->bus_act_mask, pup);
pup < octets_per_if_num; pup++) {
VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
if_id, pup,
@@ -609,10 +597,10 @@ int ddr3_tip_vref(u32 dev_num)
}
for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
VALIDATE_ACTIVE(tm->if_act_mask, if_id);
VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
for (pup = 0;
pup < tm->num_of_bus_per_interface; pup++) {
VALIDATE_ACTIVE(tm->bus_act_mask, pup);
pup < octets_per_if_num; pup++) {
VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
CHECK_STATUS(ddr3_tip_bus_read
(dev_num, if_id,
ACCESS_TYPE_UNICAST, pup,
@@ -640,7 +628,7 @@ int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
{
u32 if_id = 0;
u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
struct hws_topology_map *tm = ddr3_get_topology_map();
struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
/*
* ck_delay_table is delaying the of the clock signal only.
@@ -653,22 +641,18 @@ int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
*/
/* Calc ADLL Tap */
if ((ck_delay == -1) || (ck_delay_16 == -1)) {
if (ck_delay == PARAM_UNDEFINED)
DEBUG_TRAINING_HW_ALG(
DEBUG_LEVEL_ERROR,
("ERROR: One of ck_delay values not initialized!!!\n"));
}
("ERROR: ck_delay is not initialized!\n"));
for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
VALIDATE_ACTIVE(tm->if_act_mask, if_id);
/* Calc delay ps in ADLL tap */
if (tm->interface_params[if_id].bus_width ==
BUS_WIDTH_16)
ck_num_adll_tap = ck_delay_16 / adll_tap;
else
ck_num_adll_tap = ck_delay / adll_tap;
VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
/* Calc delay ps in ADLL tap */
ck_num_adll_tap = ck_delay / adll_tap;
ca_num_adll_tap = ca_delay / adll_tap;
data = (ck_num_adll_tap & 0x3f) +
((ca_num_adll_tap & 0x3f) << 10);