amdgpu/addrlib: Fix invalid access to m_tileTable
Sometimes the client driver passes valid tile info into the address library; in this case, the tile index is computed in HwlPostCheckTileIndex instead of in CiAddrLib::HwlSetupTileCfg. We therefore need to call HwlPostCheckTileIndex to calculate the correct tile index before using it to look up the tile split bytes.
This commit is contained in:
parent
9e40e09089
commit
4a4b7da141
|
@ -1277,14 +1277,25 @@ VOID CiAddrLib::HwlSetupTileInfo(
|
||||||
{
|
{
|
||||||
if (IsMacroTiled(tileMode))
|
if (IsMacroTiled(tileMode))
|
||||||
{
|
{
|
||||||
// Non-depth entries store a split factor
|
UINT_32 tileIndex = static_cast<UINT_32>(pOut->tileIndex);
|
||||||
UINT_32 sampleSplit = m_tileTable[pOut->tileIndex].info.tileSplitBytes;
|
|
||||||
UINT_32 tileBytes1x = BITS_TO_BYTES(bpp * MicroTilePixels * thickness);
|
|
||||||
UINT_32 colorTileSplit = Max(256u, sampleSplit * tileBytes1x);
|
|
||||||
|
|
||||||
if (m_rowSize < colorTileSplit)
|
if ((tileIndex == TileIndexInvalid) && (IsTileInfoAllZero(pTileInfo) == FALSE))
|
||||||
{
|
{
|
||||||
pOut->tcCompatible = FALSE;
|
tileIndex = HwlPostCheckTileIndex(pTileInfo, tileMode, inTileType, tileIndex);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tileIndex != TileIndexInvalid)
|
||||||
|
{
|
||||||
|
ADDR_ASSERT(tileIndex < TileTableSize);
|
||||||
|
// Non-depth entries store a split factor
|
||||||
|
UINT_32 sampleSplit = m_tileTable[tileIndex].info.tileSplitBytes;
|
||||||
|
UINT_32 tileBytes1x = BITS_TO_BYTES(bpp * MicroTilePixels * thickness);
|
||||||
|
UINT_32 colorTileSplit = Max(256u, sampleSplit * tileBytes1x);
|
||||||
|
|
||||||
|
if (m_rowSize < colorTileSplit)
|
||||||
|
{
|
||||||
|
pOut->tcCompatible = FALSE;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
|
|
Loading…
Reference in New Issue