Integrity verification improvements

- Add a cache to prevent validating an IntegrityVerificationStream block twice.
- Add section validation for all hashed NCA sections
This commit is contained in:
Alex Barney 2018-10-09 18:10:44 -05:00
parent 24e6434765
commit fb089b0700
6 changed files with 128 additions and 32 deletions

View file

@ -10,10 +10,19 @@ namespace LibHac
public Stream DataLevel { get; }
public IntegrityCheckLevel IntegrityCheckLevel { get; }
/// <summary>
/// An array of the hash statuses of every block in each level.
/// </summary>
public Validity[][] LevelValidities { get; }
private IntegrityVerificationStream[] IntegrityStreams { get; }
public HierarchicalIntegrityVerificationStream(IntegrityVerificationInfo[] levelInfo, IntegrityCheckLevel integrityCheckLevel)
{
Levels = new Stream[levelInfo.Length];
IntegrityCheckLevel = integrityCheckLevel;
LevelValidities = new Validity[levelInfo.Length - 1][];
IntegrityStreams = new IntegrityVerificationStream[levelInfo.Length - 1];
Levels[0] = levelInfo[0].Data;
@ -22,11 +31,55 @@ namespace LibHac
var levelData = new IntegrityVerificationStream(levelInfo[i], Levels[i - 1], integrityCheckLevel);
Levels[i] = new RandomAccessSectorStream(levelData);
LevelValidities[i - 1] = levelData.BlockValidities;
IntegrityStreams[i - 1] = levelData;
}
DataLevel = Levels[Levels.Length - 1];
}
/// <summary>
/// Checks the hashes of any unchecked blocks and returns the <see cref="Validity"/> of the hash level.
/// </summary>
/// <param name="level">The level of hierarchical hashes to check.</param>
/// <param name="returnOnError">If <see langword="true"/>, return as soon as an invalid block is found.</param>
/// <param name="logger">An optional <see cref="IProgressReport"/> for reporting progress.</param>
/// <returns>The <see cref="Validity"/> of the data of the specified hash level.</returns>
public Validity ValidateLevel(int level, bool returnOnError, IProgressReport logger = null)
{
Validity[] validities = LevelValidities[level];
IntegrityVerificationStream levelStream = IntegrityStreams[level];
// The original position of the stream must be restored when we're done validating
long initialPosition = levelStream.Position;
var buffer = new byte[levelStream.SectorSize];
var result = Validity.Valid;
logger?.SetTotal(levelStream.SectorCount);
for (int i = 0; i < levelStream.SectorCount; i++)
{
if (validities[i] == Validity.Unchecked)
{
levelStream.Position = levelStream.SectorSize * i;
levelStream.Read(buffer, 0, buffer.Length, IntegrityCheckLevel.IgnoreOnInvalid);
}
if (validities[i] == Validity.Invalid)
{
result = Validity.Invalid;
if (returnOnError) break;
}
logger?.ReportAdd(1);
}
logger?.SetTotal(0);
levelStream.Position = initialPosition;
return result;
}
public override void Flush()
{
throw new NotImplementedException();

View file

@ -11,6 +11,7 @@ namespace LibHac
private Stream HashStream { get; }
public IntegrityCheckLevel IntegrityCheckLevel { get; }
public Validity[] BlockValidities { get; }
private byte[] Salt { get; }
private IntegrityStreamType Type { get; }
@ -25,6 +26,8 @@ namespace LibHac
IntegrityCheckLevel = integrityCheckLevel;
Salt = info.Salt;
Type = info.Type;
BlockValidities = new Validity[SectorCount];
}
public override void Flush()
@ -55,12 +58,16 @@ namespace LibHac
throw new NotImplementedException();
}
public override int Read(byte[] buffer, int offset, int count)
public override int Read(byte[] buffer, int offset, int count) =>
Read(buffer, offset, count, IntegrityCheckLevel);
public int Read(byte[] buffer, int offset, int count, IntegrityCheckLevel integrityCheckLevel)
{
HashStream.Position = CurrentSector * DigestSize;
long blockNum = CurrentSector;
HashStream.Position = blockNum * DigestSize;
HashStream.Read(_hashBuffer, 0, DigestSize);
int bytesRead = base.Read(buffer, 0, count);
int bytesRead = base.Read(buffer, offset, count);
int bytesToHash = SectorSize;
if (bytesRead == 0) return 0;
@ -68,14 +75,14 @@ namespace LibHac
// If a hash is zero the data for the entire block is zero
if (Type == IntegrityStreamType.Save && _hashBuffer.IsEmpty())
{
Array.Clear(buffer, 0, SectorSize);
Array.Clear(buffer, offset, SectorSize);
return bytesRead;
}
if (bytesRead < SectorSize)
{
// Pad out unused portion of block
Array.Clear(buffer, bytesRead, SectorSize - bytesRead);
Array.Clear(buffer, offset + bytesRead, SectorSize - bytesRead);
// Partition FS hashes don't pad out an incomplete block
if (Type == IntegrityStreamType.PartitionFs)
@ -83,8 +90,15 @@ namespace LibHac
bytesToHash = bytesRead;
}
}
if (BlockValidities[blockNum] == Validity.Invalid && integrityCheckLevel == IntegrityCheckLevel.ErrorOnInvalid)
{
throw new InvalidDataException("Hash error!");
}
if (IntegrityCheckLevel == IntegrityCheckLevel.None) return bytesRead;
if (integrityCheckLevel == IntegrityCheckLevel.None) return bytesRead;
if (BlockValidities[blockNum] != Validity.Unchecked) return bytesRead;
_hash.Initialize();
@ -93,7 +107,7 @@ namespace LibHac
_hash.TransformBlock(Salt, 0, Salt.Length, null, 0);
}
_hash.TransformBlock(buffer, 0, bytesToHash, null, 0);
_hash.TransformBlock(buffer, offset, bytesToHash, null, 0);
_hash.TransformFinalBlock(buffer, 0, 0);
byte[] hash = _hash.Hash;
@ -104,7 +118,10 @@ namespace LibHac
hash[0x1F] |= 0x80;
}
if (!Util.ArraysEqual(_hashBuffer, hash))
Validity validity = Util.ArraysEqual(_hashBuffer, hash) ? Validity.Valid : Validity.Invalid;
BlockValidities[blockNum] = validity;
if (validity == Validity.Invalid && integrityCheckLevel == IntegrityCheckLevel.ErrorOnInvalid)
{
throw new InvalidDataException("Hash error!");
}
@ -150,9 +167,9 @@ namespace LibHac
/// </summary>
None,
/// <summary>
///
/// Invalid blocks will be marked as invalid when read, and will not cause an error.
/// </summary>
WarnOnInvalid,
IgnoreOnInvalid,
/// <summary>
/// An <see cref="InvalidDataException"/> will be thrown if an integrity check fails.
/// </summary>

View file

@ -182,6 +182,18 @@ namespace LibHac
}
}
/// <summary>
/// Opens one of the sections in the current <see cref="Nca"/> as a <see cref="HierarchicalIntegrityVerificationStream"/>.
/// Only works with sections that have a <see cref="NcaFsHeader.HashType"/> of <see cref="NcaHashType.Ivfc"/> or <see cref="NcaHashType.Sha256"/>.
/// </summary>
/// <param name="index">The index of the NCA section to open. Valid indexes are 0-3.</param>
/// <param name="integrityCheckLevel">The level of integrity checks to be performed when reading the section.</param>
/// <returns>A <see cref="Stream"/> that provides access to the specified section. <see langword="null"/> if the section does not exist,
/// or has no hash metadata.</returns>
/// <exception cref="ArgumentOutOfRangeException">The specified <paramref name="index"/> is outside the valid range.</exception>
public HierarchicalIntegrityVerificationStream OpenHashedSection(int index, IntegrityCheckLevel integrityCheckLevel) =>
OpenSection(index, false, integrityCheckLevel) as HierarchicalIntegrityVerificationStream;
/// <summary>
/// Opens one of the sections in the current <see cref="Nca"/>. For use with <see cref="ContentType.Program"/> type NCAs.
/// </summary>
@ -383,6 +395,7 @@ namespace LibHac
stream.Read(hashTable, 0, hashTable.Length);
sect.MasterHashValidity = Crypto.CheckMemoryHashTable(hashTable, expected, 0, hashTable.Length);
if (sect.Type == SectionType.Romfs) sect.Header.IvfcInfo.LevelHeaders[0].HashValidity = sect.MasterHashValidity;
}
public void Dispose()
@ -470,24 +483,27 @@ namespace LibHac
if (nca.Sections[index] == null) throw new ArgumentOutOfRangeException(nameof(index));
NcaSection sect = nca.Sections[index];
Stream stream = nca.OpenSection(index, false, IntegrityCheckLevel.WarnOnInvalid);
NcaHashType hashType = sect.Header.HashType;
if (hashType != NcaHashType.Sha256 && hashType != NcaHashType.Ivfc) return;
HierarchicalIntegrityVerificationStream stream = nca.OpenHashedSection(index, IntegrityCheckLevel.IgnoreOnInvalid);
if (stream == null) return;
logger?.LogMessage($"Verifying section {index}...");
switch (sect.Header.HashType)
for (int i = 0; i < stream.Levels.Length - 1; i++)
{
case NcaHashType.Sha256:
case NcaHashType.Ivfc:
if (stream is HierarchicalIntegrityVerificationStream ivfc)
{
for (int i = 1; i < ivfc.Levels.Length; i++)
{
logger?.LogMessage($" Verifying IVFC Level {i}...");
ivfc.Levels[i].CopyStream(Stream.Null, ivfc.Levels[i].Length, logger);
}
}
break;
default:
throw new ArgumentOutOfRangeException();
logger?.LogMessage($" Verifying Hash Level {i}...");
Validity result = stream.ValidateLevel(i, true, logger);
if (hashType == NcaHashType.Ivfc)
{
sect.Header.IvfcInfo.LevelHeaders[i].HashValidity = result;
}
else if (hashType == NcaHashType.Sha256 && i == stream.Levels.Length - 2)
{
sect.Header.Sha256Info.HashValidity = result;
}
}
}
}

View file

@ -215,6 +215,8 @@ namespace LibHac
public int BlockSizePower;
public uint Reserved;
public Validity HashValidity = Validity.Unchecked;
public IvfcLevelHeader(BinaryReader reader)
{
LogicalOffset = reader.ReadInt64();
@ -234,6 +236,8 @@ namespace LibHac
public long DataOffset;
public long DataSize;
public Validity HashValidity = Validity.Unchecked;
public Sha256Info(BinaryReader reader)
{
MasterHash = reader.ReadBytes(0x20);
@ -366,7 +370,7 @@ namespace LibHac
Bktr
}
public enum Validity
public enum Validity : byte
{
Unchecked,
Invalid,

View file

@ -15,6 +15,11 @@ namespace LibHac.Streams
/// </summary>
public int SectorSize { get; }
/// <summary>
/// The number of sectors in the stream.
/// </summary>
public int SectorCount { get; }
/// <summary>
/// The maximum number of sectors that can be read or written in a single operation.
/// </summary>
@ -64,6 +69,8 @@ namespace LibHac.Streams
_keepOpen = keepOpen;
_maxBufferSize = MaxSectors * SectorSize;
baseStream.Position = offset;
SectorCount = (int)Util.DivideByRoundUp(_baseStream.Length - _offset, sectorSize);
}
public override void Flush()

View file

@ -169,6 +169,7 @@ namespace hactoolnet
PrintRomfs(sect);
break;
case SectionType.Bktr:
PrintRomfs(sect);
break;
default:
sb.AppendLine(" Unknown/invalid superblock!");
@ -181,9 +182,8 @@ namespace hactoolnet
{
Sha256Info hashInfo = sect.Header.Sha256Info;
PrintItem(sb, colLen, $" Superblock Hash{sect.MasterHashValidity.GetValidityString()}:", hashInfo.MasterHash);
// todo sb.AppendLine($" Hash Table{sect.Pfs0.Validity.GetValidityString()}:");
sb.AppendLine($" Hash Table:");
PrintItem(sb, colLen, $" Master Hash{sect.MasterHashValidity.GetValidityString()}:", hashInfo.MasterHash);
sb.AppendLine($" Hash Table{sect.Header.Sha256Info.HashValidity.GetValidityString()}:");
PrintItem(sb, colLen, " Offset:", $"0x{hashInfo.HashTableOffset:x12}");
PrintItem(sb, colLen, " Size:", $"0x{hashInfo.HashTableSize:x12}");
@ -196,7 +196,7 @@ namespace hactoolnet
{
IvfcHeader ivfcInfo = sect.Header.IvfcInfo;
PrintItem(sb, colLen, $" Superblock Hash{sect.MasterHashValidity.GetValidityString()}:", ivfcInfo.MasterHash);
PrintItem(sb, colLen, $" Master Hash{sect.MasterHashValidity.GetValidityString()}:", ivfcInfo.MasterHash);
PrintItem(sb, colLen, " Magic:", ivfcInfo.Magic);
PrintItem(sb, colLen, " Version:", $"{ivfcInfo.Version:x8}");
@ -210,8 +210,7 @@ namespace hactoolnet
hashOffset = ivfcInfo.LevelHeaders[i - 1].LogicalOffset;
}
// todo sb.AppendLine($" Level {i}{level.HashValidity.GetValidityString()}:");
sb.AppendLine($" Level {i}:");
sb.AppendLine($" Level {i}{level.HashValidity.GetValidityString()}:");
PrintItem(sb, colLen, " Data Offset:", $"0x{level.LogicalOffset:x12}");
PrintItem(sb, colLen, " Data Size:", $"0x{level.HashDataSize:x12}");
PrintItem(sb, colLen, " Hash Offset:", $"0x{hashOffset:x12}");