Add save file integrity verification

Alex Barney 2018-09-20 20:34:40 -05:00
parent ed6c3c2bed
commit e92c686d77
10 changed files with 108 additions and 18 deletions

@@ -19,8 +19,7 @@ namespace LibHac
             for (int i = 1; i < Levels.Length; i++)
             {
-                var levelData = new IntegrityVerificationStream(levelInfo[i].Data, Levels[i - 1],
-                    levelInfo[i].BlockSizePower, enableIntegrityChecks);
+                var levelData = new IntegrityVerificationStream(levelInfo[i], Levels[i - 1], enableIntegrityChecks);
                 Levels[i] = new RandomAccessSectorStream(levelData);
             }
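
Each verification level is now described by a single IntegrityVerificationInfo and is checked against the stream one level above it, which holds its block hashes. As a rough illustration of that wiring (not code from this commit; the helper method, the 0x4000-byte block size, and the masterHash/level1Data/salt parameters are made up for the example):

using System.IO;
using LibHac;

static class IvfcWiringSketch
{
    // Builds a two-level chain: level 1's blocks are verified against the
    // hashes stored in level 0 (the master hash block).
    public static HierarchicalIntegrityVerificationStream BuildTwoLevelChain(
        byte[] masterHash, Stream level1Data, byte[] salt, bool enableIntegrityChecks)
    {
        var levelInfo = new[]
        {
            new IntegrityVerificationInfo
            {
                Data = new MemoryStream(masterHash), // level 0: master hash block
                BlockSizePower = 0,
                Type = IntegrityStreamType.Save
            },
            new IntegrityVerificationInfo
            {
                Data = level1Data,   // level 1: data whose hashes live in level 0
                BlockSizePower = 14, // 0x4000-byte blocks, chosen only for the example
                Salt = salt,         // per-level salt (see the Savefile changes below)
                Type = IntegrityStreamType.Save
            }
        };

        return new HierarchicalIntegrityVerificationStream(levelInfo, enableIntegrityChecks);
    }
}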

@@ -12,14 +12,19 @@ namespace LibHac
         private Stream HashStream { get; }
         public bool EnableIntegrityChecks { get; }
+        private byte[] Salt { get; }
+        private IntegrityStreamType Type { get; }

         private readonly byte[] _hashBuffer = new byte[DigestSize];
         private readonly SHA256 _hash = SHA256.Create();

-        public IntegrityVerificationStream(Stream dataStream, Stream hashStream, int blockSizePower, bool enableIntegrityChecks)
-            : base(dataStream, 1 << blockSizePower)
+        public IntegrityVerificationStream(IntegrityVerificationInfo info, Stream hashStream, bool enableIntegrityChecks)
+            : base(info.Data, 1 << info.BlockSizePower)
         {
             HashStream = hashStream;
             EnableIntegrityChecks = enableIntegrityChecks;
+            Salt = info.Salt;
+            Type = info.Type;
         }

         public override void Flush()
@@ -55,21 +60,42 @@ namespace LibHac
             HashStream.Position = CurrentSector * DigestSize;
             HashStream.Read(_hashBuffer, 0, DigestSize);

+            int bytesRead = base.Read(buffer, 0, count);
+
             // If a hash is zero the data for the entire block is zero
-            if (_hashBuffer.IsEmpty())
+            if (Type == IntegrityStreamType.Save && _hashBuffer.IsEmpty())
             {
                 Array.Clear(buffer, 0, SectorSize);
+                return bytesRead;
             }

-            int bytesRead = base.Read(buffer, 0, count);
-
             if (bytesRead < SectorSize)
             {
                 // Pad out unused portion of block
                 Array.Clear(buffer, bytesRead, SectorSize - bytesRead);
             }

-            if (EnableIntegrityChecks && !Util.ArraysEqual(_hashBuffer, _hash.ComputeHash(buffer)))
+            if (!EnableIntegrityChecks) return bytesRead;
+
+            _hash.Initialize();
+
+            if (Type == IntegrityStreamType.Save)
+            {
+                _hash.TransformBlock(Salt, 0, Salt.Length, null, 0);
+            }
+
+            _hash.TransformBlock(buffer, 0, SectorSize, null, 0);
+            _hash.TransformFinalBlock(buffer, 0, 0);
+
+            byte[] hash = _hash.Hash;
+
+            if (Type == IntegrityStreamType.Save)
+            {
+                // This bit is set on all save hashes
+                hash[0x1F] |= 0x80;
+            }
+
+            if (!Util.ArraysEqual(_hashBuffer, hash))
             {
                 throw new InvalidDataException("Hash error!");
             }
@@ -94,5 +120,13 @@ namespace LibHac
     {
         public Stream Data { get; set; }
         public int BlockSizePower { get; set; }
+        public byte[] Salt { get; set; }
+        public IntegrityStreamType Type { get; set; }
+    }
+
+    public enum IntegrityStreamType
+    {
+        Save,
+        RomFs
     }
 }
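
To summarize the new Read() path for save data: the block is read, the level's salt is fed into the SHA-256 state before the block itself, the high bit of the last hash byte is forced on, and the result is compared with the stored digest; an all-zero stored digest simply marks an all-zero block. A standalone sketch of that check, assuming the caller already has the block data, the stored hash, and the salt (the helper name is made up):

using System;
using System.Linq;
using System.Security.Cryptography;

static class SaveBlockHashSketch
{
    // Illustrative restatement of the verification above; not code from the commit.
    public static bool VerifySaveBlock(byte[] blockData, byte[] storedHash, byte[] salt)
    {
        // An all-zero stored hash means the whole block is zero-filled and is not hashed.
        if (storedHash.All(b => b == 0)) return true;

        using (SHA256 sha = SHA256.Create())
        {
            sha.TransformBlock(salt, 0, salt.Length, null, 0);
            sha.TransformBlock(blockData, 0, blockData.Length, null, 0);
            sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0);

            byte[] hash = sha.Hash;
            hash[0x1F] |= 0x80; // this bit is set on all save hashes

            return hash.SequenceEqual(storedHash);
        }
    }
}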

@@ -167,7 +167,8 @@ namespace LibHac
                 initInfo[i] = new IntegrityVerificationInfo
                 {
                     Data = data,
-                    BlockSizePower = level.BlockSize
+                    BlockSizePower = level.BlockSize,
+                    Type = IntegrityStreamType.RomFs
                 };
             }

@@ -166,9 +166,9 @@ namespace LibHac
         public uint MasterHashSize;
         public uint NumLevels;
         public IvfcLevelHeader[] LevelHeaders = new IvfcLevelHeader[6];
+        public byte[] SaltSource;
         public byte[] MasterHash;

         public IvfcHeader(BinaryReader reader)
         {
             Magic = reader.ReadAscii(4);
@@ -181,7 +181,7 @@ namespace LibHac
                 LevelHeaders[i] = new IvfcLevelHeader(reader);
             }

-            reader.BaseStream.Position += 0x20;
+            SaltSource = reader.ReadBytes(0x20);
             MasterHash = reader.ReadBytes(0x20);
         }
     }

@@ -9,6 +9,7 @@ namespace LibHac.Savefile
         public FsLayout Layout { get; set; }
         public JournalHeader Journal { get; set; }
         public DuplexHeader Duplex { get; set; }
+        public IvfcHeader Ivfc { get; set; }
         public SaveHeader Save { get; set; }

         public RemapHeader FileRemap { get; set; }
@@ -41,6 +42,9 @@ namespace LibHac.Savefile
             reader.BaseStream.Position = 0x300;
             Duplex = new DuplexHeader(reader);

+            reader.BaseStream.Position = 0x344;
+            Ivfc = new IvfcHeader(reader);
+
             reader.BaseStream.Position = 0x408;
             Journal = new JournalHeader(reader);

@@ -1,5 +1,6 @@
 using System.Collections.Generic;
 using System.IO;
+using System.Security.Cryptography;
 using System.Text;
 using LibHac.Streams;
@@ -15,6 +16,8 @@ namespace LibHac.Savefile
         public SharedStreamSource MetaRemapSource { get; }
         private JournalStream JournalStream { get; }
         public SharedStreamSource JournalStreamSource { get; }
+        private HierarchicalIntegrityVerificationStream IvfcStream { get; }
+        public SharedStreamSource IvfcStreamSource { get; }
         private AllocationTable AllocationTable { get; }

         public Stream DuplexL1A { get; }
@@ -38,7 +41,7 @@ namespace LibHac.Savefile
         public DirectoryEntry[] Directories { get; private set; }
         private Dictionary<string, FileEntry> FileDict { get; }

-        public Savefile(Keyset keyset, Stream file, IProgressReport logger = null)
+        public Savefile(Keyset keyset, Stream file, bool enableIntegrityChecks, IProgressReport logger = null)
         {
             SavefileSource = new SharedStreamSource(file);
@@ -102,6 +105,10 @@ namespace LibHac.Savefile
                 layout.JournalDataSizeB + layout.SizeReservedArea);
             JournalStream = new JournalStream(journalData, journalMap, (int)Header.Journal.BlockSize);
             JournalStreamSource = new SharedStreamSource(JournalStream);
+
+            IvfcStream = InitIvfcStream(enableIntegrityChecks);
+            IvfcStreamSource = new SharedStreamSource(IvfcStream);

             ReadFileInfo();

             Dictionary<string, FileEntry> dictionary = new Dictionary<string, FileEntry>();
             foreach (FileEntry entry in Files)
@@ -113,6 +120,40 @@ namespace LibHac.Savefile
             }
         }

+        private HierarchicalIntegrityVerificationStream InitIvfcStream(bool enableIntegrityChecks)
+        {
+            IvfcHeader ivfc = Header.Ivfc;
+
+            const int ivfcLevels = 5;
+            var initInfo = new IntegrityVerificationInfo[ivfcLevels];
+
+            initInfo[0] = new IntegrityVerificationInfo
+            {
+                Data = new MemoryStream(Header.MasterHashA),
+                BlockSizePower = 0,
+                Type = IntegrityStreamType.Save
+            };
+
+            for (int i = 1; i < ivfcLevels; i++)
+            {
+                IvfcLevelHeader level = ivfc.LevelHeaders[i - 1];
+
+                Stream data = i == ivfcLevels - 1
+                    ? (Stream)JournalStream
+                    : MetaRemapSource.CreateStream(level.LogicalOffset, level.HashDataSize);
+
+                initInfo[i] = new IntegrityVerificationInfo
+                {
+                    Data = data,
+                    BlockSizePower = level.BlockSize,
+                    Salt = new HMACSHA256(Encoding.ASCII.GetBytes(SaltSources[i - 1])).ComputeHash(ivfc.SaltSource),
+                    Type = IntegrityStreamType.Save
+                };
+            }
+
+            return new HierarchicalIntegrityVerificationStream(initInfo, enableIntegrityChecks);
+        }
+
         public Stream OpenFile(string filename)
         {
             if (!FileDict.TryGetValue(filename, out FileEntry file))
@@ -135,7 +176,7 @@ namespace LibHac.Savefile
         private AllocationTableStream OpenFatBlock(int blockIndex, long size)
         {
-            return new AllocationTableStream(JournalStreamSource.CreateStream(), AllocationTable, (int)Header.Save.BlockSize, blockIndex, size);
+            return new AllocationTableStream(IvfcStreamSource.CreateStream(), AllocationTable, (int)Header.Save.BlockSize, blockIndex, size);
         }

         public bool FileExists(string filename) => FileDict.ContainsKey(filename);
@@ -242,6 +283,16 @@ namespace LibHac.Savefile
             return true;
         }

+        private string[] SaltSources =
+        {
+            "HierarchicalIntegrityVerificationStorage::Master",
+            "HierarchicalIntegrityVerificationStorage::L1",
+            "HierarchicalIntegrityVerificationStorage::L2",
+            "HierarchicalIntegrityVerificationStorage::L3",
+            "HierarchicalIntegrityVerificationStorage::L4",
+            "HierarchicalIntegrityVerificationStorage::L5"
+        };
+
     }

     public static class SavefileExtensions
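
The Salt passed to each level above is derived by keying HMAC-SHA256 with the fixed salt-source string for that level and hashing the 0x20-byte SaltSource field read from the save's IVFC header. A minimal standalone sketch of that derivation (the helper and its parameter names are made up for illustration):

using System.Security.Cryptography;
using System.Text;

static class SaveSaltSketch
{
    // Mirrors the Salt assignment in InitIvfcStream above; not code from the commit.
    // saltSourceString is one of the SaltSources entries, e.g.
    // "HierarchicalIntegrityVerificationStorage::L1"; ivfcSaltSource is the
    // 0x20-byte SaltSource field from the save's IVFC header.
    public static byte[] DeriveLevelSalt(string saltSourceString, byte[] ivfcSaltSource)
    {
        using (var hmac = new HMACSHA256(Encoding.ASCII.GetBytes(saltSourceString)))
        {
            return hmac.ComputeHash(ivfcSaltSource);
        }
    }
}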

@@ -114,7 +114,7 @@ namespace LibHac
                     string sdPath = "/" + Util.GetRelativePath(file, SaveDir).Replace('\\', '/');
                     var nax0 = new Nax0(Keyset, stream, sdPath, false);
-                    save = new Savefile.Savefile(Keyset, nax0.Stream);
+                    save = new Savefile.Savefile(Keyset, nax0.Stream, false);
                 }
                 catch (Exception ex)
                 {

@@ -102,7 +102,7 @@ namespace NandReader
         private static List<Ticket> ReadTickets(Keyset keyset, Stream savefile)
         {
             var tickets = new List<Ticket>();
-            var save = new Savefile(keyset, savefile);
+            var save = new Savefile(keyset, savefile, false);
             var ticketList = new BinaryReader(save.OpenFile("/ticket_list.bin"));
             var ticketFile = new BinaryReader(save.OpenFile("/ticket.bin"));

@@ -84,7 +84,7 @@ namespace NandReaderGui.ViewModel
         private static List<Ticket> ReadTickets(Keyset keyset, Stream savefile)
         {
             var tickets = new List<Ticket>();
-            var save = new Savefile(keyset, savefile);
+            var save = new Savefile(keyset, savefile, false);
             var ticketList = new BinaryReader(save.OpenFile("/ticket_list.bin"));
             var ticketFile = new BinaryReader(save.OpenFile("/ticket.bin"));
@@ -100,6 +100,7 @@ namespace NandReaderGui.ViewModel
             return tickets;
         }

         private static Keyset OpenKeyset()
         {
             var home = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);

@@ -104,7 +104,7 @@ namespace hactoolnet
         {
             using (var file = new FileStream(ctx.Options.InFile, FileMode.Open, FileAccess.ReadWrite))
             {
-                var save = new Savefile(ctx.Keyset, file, ctx.Logger);
+                var save = new Savefile(ctx.Keyset, file, ctx.Options.EnableHash, ctx.Logger);

                 if (ctx.Options.OutDir != null)
                 {
@@ -136,7 +136,7 @@ namespace hactoolnet
                 save.JournalLayer3Hash.WriteAllBytes(Path.Combine(dir, "L2_6_Layer3Hash"), ctx.Logger);
                 save.JournalFat.WriteAllBytes(Path.Combine(dir, "L2_7_FAT"), ctx.Logger);
-                save.JournalStreamSource.CreateStream().WriteAllBytes(Path.Combine(dir, "L3_0_SaveData"), ctx.Logger);
+                save.IvfcStreamSource.CreateStream().WriteAllBytes(Path.Combine(dir, "L3_0_SaveData"), ctx.Logger);
             }

             if (ctx.Options.SignSave)
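
Callers now choose whether integrity checking is enabled when they open a save, as the hactoolnet change above does with ctx.Options.EnableHash. A rough usage sketch (the class, method, and file names are placeholders, and the keyset is assumed to be loaded elsewhere):

using System.IO;
using LibHac;
using LibHac.Savefile;

static class SaveExtractSketch
{
    // Opens a save with integrity verification enabled and copies one file out of it.
    public static void ExtractWithVerification(Keyset keyset, string savePath, string entryName, string outPath)
    {
        using (var stream = new FileStream(savePath, FileMode.Open, FileAccess.Read))
        {
            var save = new Savefile(keyset, stream, enableIntegrityChecks: true);

            using (Stream file = save.OpenFile(entryName))
            using (var output = new FileStream(outPath, FileMode.Create, FileAccess.Write))
            {
                // Any block whose hash does not match throws InvalidDataException("Hash error!") during the copy.
                file.CopyTo(output);
            }
        }
    }
}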