@@ -3513,7 +3513,6 @@ int radeon_atom_get_memory_info(struct radeon_device *rdev,
 	u8 frev, crev, i;
 	u16 data_offset, size;
 	union vram_info *vram_info;
-	u8 *p;
 
 	memset(mem_info, 0, sizeof(struct atom_memory_info));
 
@@ -3529,13 +3528,12 @@ int radeon_atom_get_memory_info(struct radeon_device *rdev,
 				if (module_index < vram_info->v1_3.ucNumOfVRAMModule) {
 					ATOM_VRAM_MODULE_V3 *vram_module =
 						(ATOM_VRAM_MODULE_V3 *)vram_info->v1_3.aVramInfo;
-					p = (u8 *)vram_info->v1_3.aVramInfo;
 
 					for (i = 0; i < module_index; i++) {
-						vram_module = (ATOM_VRAM_MODULE_V3 *)p;
 						if (le16_to_cpu(vram_module->usSize) == 0)
 							return -EINVAL;
-						p += le16_to_cpu(vram_module->usSize);
+						vram_module = (ATOM_VRAM_MODULE_V3 *)
+							((u8 *)vram_module + le16_to_cpu(vram_module->usSize));
 					}
 					mem_info->mem_vendor = vram_module->asMemory.ucMemoryVenderID & 0xf;
 					mem_info->mem_type = vram_module->asMemory.ucMemoryType & 0xf0;
@@ -3547,13 +3545,12 @@ int radeon_atom_get_memory_info(struct radeon_device *rdev,
 				if (module_index < vram_info->v1_4.ucNumOfVRAMModule) {
 					ATOM_VRAM_MODULE_V4 *vram_module =
 						(ATOM_VRAM_MODULE_V4 *)vram_info->v1_4.aVramInfo;
-					p = (u8 *)vram_info->v1_4.aVramInfo;
 
 					for (i = 0; i < module_index; i++) {
-						vram_module = (ATOM_VRAM_MODULE_V4 *)p;
 						if (le16_to_cpu(vram_module->usModuleSize) == 0)
 							return -EINVAL;
-						p += le16_to_cpu(vram_module->usModuleSize);
+						vram_module = (ATOM_VRAM_MODULE_V4 *)
+							((u8 *)vram_module + le16_to_cpu(vram_module->usModuleSize));
 					}
 					mem_info->mem_vendor = vram_module->ucMemoryVenderID & 0xf;
 					mem_info->mem_type = vram_module->ucMemoryType & 0xf0;
@@ -3572,13 +3569,12 @@ int radeon_atom_get_memory_info(struct radeon_device *rdev,
 				if (module_index < vram_info->v2_1.ucNumOfVRAMModule) {
 					ATOM_VRAM_MODULE_V7 *vram_module =
 						(ATOM_VRAM_MODULE_V7 *)vram_info->v2_1.aVramInfo;
-					p = (u8 *)vram_info->v2_1.aVramInfo;
 
 					for (i = 0; i < module_index; i++) {
-						vram_module = (ATOM_VRAM_MODULE_V7 *)p;
 						if (le16_to_cpu(vram_module->usModuleSize) == 0)
 							return -EINVAL;
-						p += le16_to_cpu(vram_module->usModuleSize);
+						vram_module = (ATOM_VRAM_MODULE_V7 *)
+							((u8 *)vram_module + le16_to_cpu(vram_module->usModuleSize));
 					}
 					mem_info->mem_vendor = vram_module->ucMemoryVenderID & 0xf;
 					mem_info->mem_type = vram_module->ucMemoryType & 0xf0;
@@ -3628,21 +3624,19 @@ int radeon_atom_get_mclk_range_table(struct radeon_device *rdev,
 				if (module_index < vram_info->v1_4.ucNumOfVRAMModule) {
 					ATOM_VRAM_MODULE_V4 *vram_module =
 						(ATOM_VRAM_MODULE_V4 *)vram_info->v1_4.aVramInfo;
-					ATOM_MEMORY_TIMING_FORMAT *format;
-					p = (u8 *)vram_info->v1_4.aVramInfo;
 
 					for (i = 0; i < module_index; i++) {
-						vram_module = (ATOM_VRAM_MODULE_V4 *)p;
 						if (le16_to_cpu(vram_module->usModuleSize) == 0)
 							return -EINVAL;
-						p += le16_to_cpu(vram_module->usModuleSize);
+						vram_module = (ATOM_VRAM_MODULE_V4 *)
+							((u8 *)vram_module + le16_to_cpu(vram_module->usModuleSize));
 					}
 					mclk_range_table->num_entries = (u8)
-						((vram_module->usModuleSize - offsetof(ATOM_VRAM_MODULE_V4, asMemTiming)) /
+						((le16_to_cpu(vram_module->usModuleSize) - offsetof(ATOM_VRAM_MODULE_V4, asMemTiming)) /
 						 mem_timing_size);
-					p = (u8 *)vram_module->asMemTiming;
+					p = (u8 *)&vram_module->asMemTiming[0];
 					for (i = 0; i < mclk_range_table->num_entries; i++) {
-						format = (ATOM_MEMORY_TIMING_FORMAT *)p;
+						ATOM_MEMORY_TIMING_FORMAT *format = (ATOM_MEMORY_TIMING_FORMAT *)p;
 						mclk_range_table->mclk[i] = le32_to_cpu(format->ulClkRange);
 						p += mem_timing_size;
 					}
@@ -3705,17 +3699,21 @@ int radeon_atom_init_mc_reg_table(struct radeon_device *rdev,
 						(ATOM_MEMORY_SETTING_DATA_BLOCK *)
 						((u8 *)reg_block + (2 * sizeof(u16)) +
 						 le16_to_cpu(reg_block->usRegIndexTblSize));
+					ATOM_INIT_REG_INDEX_FORMAT *format = &reg_block->asRegIndexBuf[0];
 					num_entries = (u8)((le16_to_cpu(reg_block->usRegIndexTblSize)) /
 							sizeof(ATOM_INIT_REG_INDEX_FORMAT)) - 1;
 					if (num_entries > VBIOS_MC_REGISTER_ARRAY_SIZE)
 						return -EINVAL;
-					while (!(reg_block->asRegIndexBuf[i].ucPreRegDataLength & ACCESS_PLACEHOLDER) &&
-					       (i < num_entries)) {
+					while (i < num_entries) {
+						if (format->ucPreRegDataLength & ACCESS_PLACEHOLDER)
+							break;
 						reg_table->mc_reg_address[i].s1 =
-							(u16)(le16_to_cpu(reg_block->asRegIndexBuf[i].usRegIndex));
+							(u16)(le16_to_cpu(format->usRegIndex));
 						reg_table->mc_reg_address[i].pre_reg_data =
-							(u8)(reg_block->asRegIndexBuf[i].ucPreRegDataLength);
+							(u8)(format->ucPreRegDataLength);
 						i++;
+						format = (ATOM_INIT_REG_INDEX_FORMAT *)
+							((u8 *)format + sizeof(ATOM_INIT_REG_INDEX_FORMAT));
 					}
 					reg_table->last = i;
 					while ((*(u32 *)reg_data != END_OF_REG_DATA_BLOCK) &&
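
For context, not part of the patch: every hunk above drops the separate "u8 *p" byte cursor and instead advances the typed module pointer by each record's own size field (usSize/usModuleSize), bailing out if that size is zero. A minimal standalone sketch of that traversal pattern follows, using a hypothetical "record" layout rather than the real ATOM_VRAM_MODULE structures, and omitting the le16_to_cpu byte-swapping the driver needs for the little-endian VBIOS data.

#include <stddef.h>
#include <stdint.h>

/* Hypothetical variable-length record; the real ATOM_VRAM_MODULE_Vx
 * structs carry their total size in usSize/usModuleSize the same way. */
struct record {
	uint16_t size;		/* total size of this record, in bytes */
	uint8_t payload[];	/* rest of the record follows in memory */
};

/* Walk a packed array of variable-length records and return the
 * module_index-th one, or NULL if a zero-sized (corrupt) record is
 * hit, which would otherwise make the walk loop forever. */
static struct record *find_record(struct record *first, unsigned int module_index)
{
	struct record *rec = first;
	unsigned int i;

	for (i = 0; i < module_index; i++) {
		if (rec->size == 0)
			return NULL;
		rec = (struct record *)((uint8_t *)rec + rec->size);
	}
	return rec;
}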