commit f56500e257
Author: shadowninja108
Date:   2018-10-28 00:07:46 -07:00

37 changed files with 896 additions and 532 deletions

View file

@@ -105,11 +105,6 @@ namespace LibHac
             Counter[8] = (byte)((Counter[8] & 0xF0) | (int)(off & 0x0F));
         }
 
-        public override void Flush()
-        {
-            throw new NotImplementedException();
-        }
-
         public override long Seek(long offset, SeekOrigin origin)
         {
             switch (origin)

View file

@@ -21,6 +21,14 @@ namespace LibHac
             return comp;
         }
 
+        public static byte[] ComputeSha256(byte[] data, int offset, int count)
+        {
+            using (SHA256 sha = SHA256.Create())
+            {
+                return sha.ComputeHash(data, offset, count);
+            }
+        }
+
         public static void DecryptEcb(byte[] key, byte[] src, int srcIndex, byte[] dest, int destIndex, int length)
         {
             using (Aes aes = Aes.Create())
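
The new helper is consumed later in this commit by Savefile.CommitHeader; a minimal sketch of that call pattern (buffer contents elided):

    var hashData = new byte[0x3D00];   // header region to protect, per CommitHeader below
    byte[] hash = Crypto.ComputeSha256(hashData, 0, hashData.Length);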

View file

@@ -39,31 +39,33 @@ namespace LibHac
         }
 
         /// <summary>
-        /// Checks the hashes of any unchecked blocks and returns the <see cref="Validity"/> of the hash level.
+        /// Checks the hashes of any unchecked blocks and returns the <see cref="Validity"/> of the data.
         /// </summary>
-        /// <param name="level">The level of hierarchical hashes to check.</param>
         /// <param name="returnOnError">If <see langword="true"/>, return as soon as an invalid block is found.</param>
         /// <param name="logger">An optional <see cref="IProgressReport"/> for reporting progress.</param>
         /// <returns>The <see cref="Validity"/> of the data of the specified hash level.</returns>
-        public Validity ValidateLevel(int level, bool returnOnError, IProgressReport logger = null)
+        public Validity Validate(bool returnOnError, IProgressReport logger = null)
         {
-            Validity[] validities = LevelValidities[level];
-            IntegrityVerificationStream levelStream = IntegrityStreams[level];
+            Validity[] validities = LevelValidities[LevelValidities.Length - 1];
+            IntegrityVerificationStream stream = IntegrityStreams[IntegrityStreams.Length - 1];
 
-            // The original position of the stream must be restored when we're done validating
-            long initialPosition = levelStream.Position;
+            // Restore the original position of the stream when we're done validating
+            long initialPosition = stream.Position;
 
-            var buffer = new byte[levelStream.SectorSize];
+            long blockSize = stream.SectorSize;
+            int blockCount = (int)Util.DivideByRoundUp(Length, blockSize);
+
+            var buffer = new byte[blockSize];
             var result = Validity.Valid;
 
-            logger?.SetTotal(levelStream.SectorCount);
+            logger?.SetTotal(blockCount);
 
-            for (int i = 0; i < levelStream.SectorCount; i++)
+            for (int i = 0; i < blockCount; i++)
             {
                 if (validities[i] == Validity.Unchecked)
                 {
-                    levelStream.Position = (long)levelStream.SectorSize * i;
-                    levelStream.Read(buffer, 0, buffer.Length, IntegrityCheckLevel.IgnoreOnInvalid);
+                    stream.Position = blockSize * i;
+                    stream.Read(buffer, 0, buffer.Length, IntegrityCheckLevel.IgnoreOnInvalid);
                 }
 
                 if (validities[i] == Validity.Invalid)
@@ -76,13 +78,13 @@ namespace LibHac
             }
 
             logger?.SetTotal(0);
-            levelStream.Position = initialPosition;
+            stream.Position = initialPosition;
             return result;
         }
 
         public override void Flush()
         {
-            throw new NotImplementedException();
+            DataLevel.Flush();
         }
 
         public override long Seek(long offset, SeekOrigin origin)
@@ -115,7 +117,7 @@ namespace LibHac
         public override void Write(byte[] buffer, int offset, int count)
         {
-            throw new NotImplementedException();
+            DataLevel.Write(buffer, offset, count);
         }
 
         public override bool CanRead => DataLevel.CanRead;
@@ -128,4 +130,32 @@ namespace LibHac
             set => DataLevel.Position = value;
         }
     }
+
+    public static class HierarchicalIntegrityVerificationStreamExtensions
+    {
+        internal static void SetLevelValidities(this HierarchicalIntegrityVerificationStream stream, IvfcHeader header)
+        {
+            for (int i = 0; i < stream.Levels.Length - 1; i++)
+            {
+                Validity[] level = stream.LevelValidities[i];
+                var levelValidity = Validity.Valid;
+
+                foreach (Validity block in level)
+                {
+                    if (block == Validity.Invalid)
+                    {
+                        levelValidity = Validity.Invalid;
+                        break;
+                    }
+
+                    if (block == Validity.Unchecked && levelValidity != Validity.Invalid)
+                    {
+                        levelValidity = Validity.Unchecked;
+                    }
+                }
+
+                header.LevelHeaders[i].HashValidity = levelValidity;
+            }
+        }
+    }
 }
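
With the per-level ValidateLevel loop gone, callers validate the data level once and let SetLevelValidities aggregate each level's block results into its IvfcLevelHeader. A sketch of the new calling pattern, using names from this diff (stream being a HierarchicalIntegrityVerificationStream, ivfcHeader its IvfcHeader):

    Validity validity = stream.Validate(true, logger); // reading the data level implicitly checks the parent hash levels
    stream.SetLevelValidities(ivfcHeader);             // record the aggregate per-level results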

View file

@@ -30,11 +30,6 @@ namespace LibHac
             BlockValidities = new Validity[SectorCount];
         }
 
-        public override void Flush()
-        {
-            throw new NotImplementedException();
-        }
-
         public override long Seek(long offset, SeekOrigin origin)
         {
             switch (origin)
@@ -76,6 +71,7 @@ namespace LibHac
             if (Type == IntegrityStreamType.Save && _hashBuffer.IsEmpty())
             {
                 Array.Clear(buffer, offset, SectorSize);
+                BlockValidities[blockNum] = Validity.Valid;
                 return bytesRead;
             }
 
@@ -100,23 +96,7 @@ namespace LibHac
             if (BlockValidities[blockNum] != Validity.Unchecked) return bytesRead;
 
-            _hash.Initialize();
-
-            if (Type == IntegrityStreamType.Save)
-            {
-                _hash.TransformBlock(Salt, 0, Salt.Length, null, 0);
-            }
-
-            _hash.TransformBlock(buffer, offset, bytesToHash, null, 0);
-            _hash.TransformFinalBlock(buffer, 0, 0);
-
-            byte[] hash = _hash.Hash;
-
-            if (Type == IntegrityStreamType.Save)
-            {
-                // This bit is set on all save hashes
-                hash[0x1F] |= 0x80;
-            }
+            byte[] hash = DoHash(buffer, offset, bytesToHash);
 
             Validity validity = Util.ArraysEqual(_hashBuffer, hash) ? Validity.Valid : Validity.Invalid;
             BlockValidities[blockNum] = validity;
@@ -131,7 +111,48 @@ namespace LibHac
         public override void Write(byte[] buffer, int offset, int count)
         {
-            throw new NotImplementedException();
+            long blockNum = CurrentSector;
+            int toWrite = (int)Math.Min(count, Length - Position);
+            byte[] hash = DoHash(buffer, offset, toWrite);
+
+            if (Type == IntegrityStreamType.Save && buffer.IsEmpty())
+            {
+                Array.Clear(hash, 0, DigestSize);
+            }
+
+            base.Write(buffer, offset, count);
+
+            HashStream.Position = blockNum * DigestSize;
+            HashStream.Write(hash, 0, DigestSize);
+        }
+
+        private byte[] DoHash(byte[] buffer, int offset, int count)
+        {
+            _hash.Initialize();
+
+            if (Type == IntegrityStreamType.Save)
+            {
+                _hash.TransformBlock(Salt, 0, Salt.Length, null, 0);
+            }
+
+            _hash.TransformBlock(buffer, offset, count, null, 0);
+            _hash.TransformFinalBlock(buffer, 0, 0);
+
+            byte[] hash = _hash.Hash;
+
+            if (Type == IntegrityStreamType.Save)
+            {
+                // This bit is set on all save hashes
+                hash[0x1F] |= 0x80;
+            }
+
+            return hash;
+        }
+
+        public override void Flush()
+        {
+            HashStream.Flush();
+            base.Flush();
         }
 
         public override bool CanRead => true;
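
For reference, a self-contained sketch of the block hash that DoHash computes for save-type streams: SHA-256 over the salt followed by the block, with the high bit of the final byte forced on. It mirrors the method above but is not part of the commit, and assumes the salt and block bytes are already in hand:

    using System.Security.Cryptography;

    static byte[] SaveBlockHash(byte[] salt, byte[] block)
    {
        using (SHA256 sha = SHA256.Create())
        {
            sha.TransformBlock(salt, 0, salt.Length, null, 0);   // salt is hashed first
            sha.TransformBlock(block, 0, block.Length, null, 0); // then the block data
            sha.TransformFinalBlock(block, 0, 0);                // finalize with an empty block

            byte[] hash = sha.Hash;
            hash[0x1F] |= 0x80; // marker bit set on all save hashes
            return hash;
        }
    }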

View file

@@ -544,6 +544,7 @@ namespace LibHac
     public enum KeyType
     {
+        None,
         Common,
         Unique,
         Title

View file

@@ -16,7 +16,7 @@
     <RepositoryType>git</RepositoryType>
     <RepositoryUrl>https://github.com/Thealexbarney/LibHac</RepositoryUrl>
-    <VersionPrefix>0.1.2</VersionPrefix>
+    <VersionPrefix>0.1.3</VersionPrefix>
     <PathMap>$(MSBuildProjectDirectory)=C:/LibHac/</PathMap>
     <IncludeSymbols>true</IncludeSymbols>
     <IncludeSource>true</IncludeSource>

View file

@@ -75,5 +75,25 @@ namespace LibHac
             base.GetObjectData(info, context);
             info.AddValue(nameof(Name), Name);
         }
+
+        public override string Message
+        {
+            get
+            {
+                string s = base.Message;
+
+                if (Type != KeyType.None)
+                {
+                    s += $"{Environment.NewLine}Key Type: {Type}";
+                }
+
+                if (Name != null)
+                {
+                    s += $"{Environment.NewLine}Key Name: {Name}";
+                }
+
+                return s;
+            }
+        }
     }
 }

View file

@@ -381,7 +381,6 @@ namespace LibHac
             stream.Read(hashTable, 0, hashTable.Length);
 
             sect.MasterHashValidity = Crypto.CheckMemoryHashTable(hashTable, expected, 0, hashTable.Length);
-            if (sect.Header.HashType == NcaHashType.Ivfc) sect.Header.IvfcInfo.LevelHeaders[0].HashValidity = sect.MasterHashValidity;
         }
 
         public void Dispose()
@@ -400,7 +399,21 @@ namespace LibHac
         public int SectionNum { get; set; }
         public long Offset { get; set; }
         public long Size { get; set; }
-        public Validity MasterHashValidity { get; set; }
+
+        public Validity MasterHashValidity
+        {
+            get
+            {
+                if (Header.HashType == NcaHashType.Ivfc) return Header.IvfcInfo.LevelHeaders[0].HashValidity;
+                if (Header.HashType == NcaHashType.Sha256) return Header.Sha256Info.MasterHashValidity;
+                return Validity.Unchecked;
+            }
+            set
+            {
+                if (Header.HashType == NcaHashType.Ivfc) Header.IvfcInfo.LevelHeaders[0].HashValidity = value;
+                if (Header.HashType == NcaHashType.Sha256) Header.Sha256Info.MasterHashValidity = value;
+            }
+        }
 
         public byte[] GetMasterHash()
         {
@@ -489,25 +502,18 @@ namespace LibHac
             if (stream == null) return Validity.Unchecked;
             if (!quiet) logger?.LogMessage($"Verifying section {index}...");
 
-            for (int i = 0; i < stream.Levels.Length - 1; i++)
-            {
-                if (!quiet) logger?.LogMessage($"    Verifying Hash Level {i}...");
-                Validity levelValidity = stream.ValidateLevel(i, true, logger);
+            Validity validity = stream.Validate(true, logger);
 
-                if (hashType == NcaHashType.Ivfc)
-                {
-                    sect.Header.IvfcInfo.LevelHeaders[i].HashValidity = levelValidity;
-                }
-                else if (hashType == NcaHashType.Sha256 && i == stream.Levels.Length - 2)
-                {
-                    sect.Header.Sha256Info.HashValidity = levelValidity;
-                }
+            if (hashType == NcaHashType.Ivfc)
+            {
+                stream.SetLevelValidities(sect.Header.IvfcInfo);
+            }
+            else if (hashType == NcaHashType.Sha256)
+            {
+                sect.Header.Sha256Info.HashValidity = validity;
+            }
 
-                if (levelValidity == Validity.Invalid) return Validity.Invalid;
-            }
-
-            return Validity.Valid;
+            return validity;
         }
     }
 }

View file

@@ -158,8 +158,8 @@ namespace LibHac
     {
         public string Magic;
         public int Version;
-        public uint MasterHashSize;
-        public uint NumLevels;
+        public int MasterHashSize;
+        public int NumLevels;
         public IvfcLevelHeader[] LevelHeaders = new IvfcLevelHeader[6];
         public byte[] SaltSource;
         public byte[] MasterHash;
@@ -167,10 +167,10 @@ namespace LibHac
         public IvfcHeader(BinaryReader reader)
         {
             Magic = reader.ReadAscii(4);
-            Version = reader.ReadInt16();
             reader.BaseStream.Position += 2;
-            MasterHashSize = reader.ReadUInt32();
-            NumLevels = reader.ReadUInt32();
+            Version = reader.ReadInt16();
+            MasterHashSize = reader.ReadInt32();
+            NumLevels = reader.ReadInt32();
 
             for (int i = 0; i < LevelHeaders.Length; i++)
             {
@@ -210,6 +210,7 @@ namespace LibHac
         public long DataOffset;
         public long DataSize;
 
+        public Validity MasterHashValidity = Validity.Unchecked;
         public Validity HashValidity = Validity.Unchecked;
 
         public Sha256Info(BinaryReader reader)

View file

@@ -1,6 +1,6 @@
 using System.IO;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class AllocationTable
     {

View file

@@ -1,6 +1,6 @@
 using System;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class AllocationTableIterator
     {

View file

@@ -1,7 +1,7 @@
 using System;
 using System.IO;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class AllocationTableStream : Stream
     {
@@ -22,7 +22,7 @@ namespace LibHac.Savefile
         public override void Flush()
         {
-            throw new NotImplementedException();
+            Data.Flush();
         }
 
         public override int Read(byte[] buffer, int offset, int count)
@@ -53,7 +53,20 @@ namespace LibHac.Savefile
         public override long Seek(long offset, SeekOrigin origin)
         {
-            throw new NotImplementedException();
+            switch (origin)
+            {
+                case SeekOrigin.Begin:
+                    Position = offset;
+                    break;
+                case SeekOrigin.Current:
+                    Position += offset;
+                    break;
+                case SeekOrigin.End:
+                    Position = Length - offset;
+                    break;
+            }
+
+            return Position;
         }
 
         public override void SetLength(long value)
@@ -63,12 +76,29 @@ namespace LibHac.Savefile
         public override void Write(byte[] buffer, int offset, int count)
         {
-            throw new NotImplementedException();
+            int remaining = count;
+            int outOffset = offset;
+
+            while (remaining > 0)
+            {
+                int remainingInSegment = Iterator.CurrentSegmentSize * BlockSize - SegmentPos;
+                int bytesToWrite = Math.Min(remaining, remainingInSegment);
+                Data.Write(buffer, outOffset, bytesToWrite);
+
+                outOffset += bytesToWrite;
+                remaining -= bytesToWrite;
+
+                if (SegmentPos >= Iterator.CurrentSegmentSize * BlockSize)
+                {
+                    if (!Iterator.MoveNext()) return;
+                    Data.Position = Iterator.PhysicalBlock * BlockSize;
+                }
+            }
         }
 
         public override bool CanRead => true;
         public override bool CanSeek => true;
-        public override bool CanWrite => false;
+        public override bool CanWrite => true;
 
         public override long Length { get; }
         public override long Position
@@ -95,5 +125,11 @@ namespace LibHac.Savefile
                 Data.Position = Iterator.PhysicalBlock * BlockSize + segmentPos;
             }
         }
+
+        protected override void Dispose(bool disposing)
+        {
+            Flush();
+            base.Dispose(disposing);
+        }
     }
 }
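
Given the Seek implementation above, seeking relative to the end takes a positive distance back from Length (Position = Length - offset), rather than Stream's usual Length + offset convention with a negative offset. A usage sketch, fatStream being any AllocationTableStream instance:

    fatStream.Seek(0x10, SeekOrigin.End); // Position becomes Length - 0x10 here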

View file

@@ -2,7 +2,7 @@
 using System.Collections;
 using System.IO;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class DuplexBitmap
     {

View file

@@ -1,7 +1,7 @@
 using System;
 using System.IO;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class DuplexFs : Stream
     {
@@ -28,7 +28,9 @@ namespace LibHac.Savefile
         public override void Flush()
         {
-            throw new NotImplementedException();
+            BitmapStream?.Flush();
+            DataA?.Flush();
+            DataB?.Flush();
         }
 
         public override int Read(byte[] buffer, int offset, int count)
@@ -57,6 +59,30 @@ namespace LibHac.Savefile
             return totalBytesRead;
         }
 
+        public override void Write(byte[] buffer, int offset, int count)
+        {
+            long remaining = Math.Min(count, Length - Position);
+            if (remaining <= 0) return;
+            int inOffset = offset;
+
+            while (remaining > 0)
+            {
+                int blockNum = (int)(Position / BlockSize);
+                int blockPos = (int)(Position % BlockSize);
+                int bytesToWrite = (int)Math.Min(remaining, BlockSize - blockPos);
+
+                Stream data = Bitmap.Bitmap[blockNum] ? DataB : DataA;
+
+                data.Position = blockNum * BlockSize + blockPos;
+                data.Write(buffer, inOffset, bytesToWrite);
+
+                inOffset += bytesToWrite;
+                remaining -= bytesToWrite;
+                Position += bytesToWrite;
+            }
+        }
+
         public override long Seek(long offset, SeekOrigin origin)
         {
             switch (origin)
@@ -80,11 +106,6 @@ namespace LibHac.Savefile
             throw new NotImplementedException();
         }
 
-        public override void Write(byte[] buffer, int offset, int count)
-        {
-            throw new NotImplementedException();
-        }
-
         public override bool CanRead => true;
         public override bool CanSeek => true;
         public override bool CanWrite => false;

View file

@@ -3,7 +3,7 @@ using System.Diagnostics;
 using System.IO;
 using System.Text;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     [DebuggerDisplay("{" + nameof(FullPath) + "}")]
     public abstract class FsEntry

View file

@@ -1,7 +1,8 @@
 using System;
 using System.IO;
+using LibHac.Streams;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class Header
     {
@@ -24,13 +25,17 @@ namespace LibHac.Savefile
         public byte[] DuplexMasterA { get; }
         public byte[] DuplexMasterB { get; }
 
+        public Stream MasterHash { get; }
+
         public Validity SignatureValidity { get; }
         public Validity HeaderHashValidity { get; }
 
         public byte[] Data { get; }
 
-        public Header(Keyset keyset, BinaryReader reader)
+        public Header(Keyset keyset, SharedStreamSource streamSource)
         {
+            var reader = new BinaryReader(streamSource.CreateStream());
+
             reader.BaseStream.Position = 0;
             Data = reader.ReadBytes(0x4000);
             reader.BaseStream.Position = 0;
@@ -65,6 +70,8 @@ namespace LibHac.Savefile
             reader.BaseStream.Position = Layout.IvfcMasterHashOffsetB;
             MasterHashB = reader.ReadBytes((int)Layout.IvfcMasterHashSize);
 
+            MasterHash = streamSource.CreateStream(Layout.IvfcMasterHashOffsetA, Layout.IvfcMasterHashSize);
+
             reader.BaseStream.Position = Layout.DuplexMasterOffsetA;
             DuplexMasterA = reader.ReadBytes((int)Layout.DuplexMasterSize);
             reader.BaseStream.Position = Layout.DuplexMasterOffsetB;
@@ -192,11 +199,11 @@ namespace LibHac.Savefile
     public class RemapHeader
     {
-        public string Magic { get; set; }
-        public uint MagicNum { get; set; }
-        public int MapEntryCount { get; set; }
-        public int MapSegmentCount { get; set; }
-        public int Field10 { get; set; }
+        public string Magic { get; }
+        public uint MagicNum { get; }
+        public int MapEntryCount { get; }
+        public int MapSegmentCount { get; }
+        public int SegmentBits { get; }
 
         public RemapHeader(BinaryReader reader)
         {
@@ -204,7 +211,7 @@ namespace LibHac.Savefile
             MagicNum = reader.ReadUInt32();
             MapEntryCount = reader.ReadInt32();
             MapSegmentCount = reader.ReadInt32();
-            Field10 = reader.ReadInt32();
+            SegmentBits = reader.ReadInt32();
         }
     }
 
@@ -320,7 +327,8 @@ namespace LibHac.Savefile
         public long PhysicalOffset { get; }
         public long Size { get; }
         public int Alignment { get; }
-        public int StorageType { get; }
+        public int Field1C { get; }
+
         public long VirtualOffsetEnd => VirtualOffset + Size;
         public long PhysicalOffsetEnd => PhysicalOffset + Size;
         internal RemapSegment Segment { get; set; }
@@ -332,7 +340,7 @@ namespace LibHac.Savefile
             PhysicalOffset = reader.ReadInt64();
             Size = reader.ReadInt64();
             Alignment = reader.ReadInt32();
-            StorageType = reader.ReadInt32();
+            Field1C = reader.ReadInt32();
         }
     }

View file

@@ -1,7 +1,7 @@
 using System;
 using System.IO;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class JournalStream : Stream
     {
@@ -45,6 +45,25 @@ namespace LibHac.Savefile
             return count;
         }
 
+        public override void Write(byte[] buffer, int offset, int count)
+        {
+            long remaining = Math.Min(Length - Position, count);
+            if (remaining <= 0) return;
+            int inPos = offset;
+
+            while (remaining > 0)
+            {
+                long remainInEntry = BlockSize - Position % BlockSize;
+                int toRead = (int)Math.Min(remaining, remainInEntry);
+
+                BaseStream.Write(buffer, inPos, toRead);
+
+                inPos += toRead;
+                remaining -= toRead;
+                Position += toRead;
+            }
+        }
+
         public override long Seek(long offset, SeekOrigin origin)
         {
             switch (origin)
@@ -64,11 +83,15 @@ namespace LibHac.Savefile
         }
 
         public override void SetLength(long value) => throw new NotSupportedException();
-        public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
-        public override void Flush() => throw new NotSupportedException();
+
+        public override void Flush()
+        {
+            BaseStream.Flush();
+        }
 
         public override bool CanRead => true;
         public override bool CanSeek => true;
-        public override bool CanWrite => false;
+        public override bool CanWrite => true;
         public override long Length { get; }
         public override long Position
         {

View file

@@ -1,6 +1,6 @@
 using System.IO;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class LayeredDuplexFs : Stream
     {
@@ -32,7 +32,7 @@ namespace LibHac.Savefile
         public override void Flush()
         {
-            throw new System.NotImplementedException();
+            DataLayer.Flush();
        }
 
         public override int Read(byte[] buffer, int offset, int count)
@@ -52,7 +52,7 @@ namespace LibHac.Savefile
         public override void Write(byte[] buffer, int offset, int count)
         {
-            throw new System.NotImplementedException();
+            DataLayer.Write(buffer, offset, count);
         }
 
         public override bool CanRead => DataLayer.CanRead;

LibHac/Save/RemapStorage.cs (new file, 116 lines)
View file

@@ -0,0 +1,116 @@
using System;
using System.IO;
using LibHac.Streams;

namespace LibHac.Save
{
    public class RemapStorage
    {
        private SharedStreamSource StreamSource { get; }
        private RemapHeader Header { get; }
        public MapEntry[] MapEntries { get; set; }
        public RemapSegment[] Segments { get; set; }

        /// <summary>
        /// Creates a new <see cref="RemapStorage"/>
        /// </summary>
        /// <param name="data">A <see cref="Stream"/> of the main data of the RemapStream.
        /// The <see cref="RemapStorage"/> object takes complete ownership of the Stream.</param>
        /// <param name="header">The header for this RemapStorage.</param>
        /// <param name="mapEntries">The remapping entries for this RemapStorage.</param>
        public RemapStorage(Stream data, RemapHeader header, MapEntry[] mapEntries)
        {
            StreamSource = new SharedStreamSource(data);
            Header = header;
            MapEntries = mapEntries;

            Segments = InitSegments(Header, MapEntries);
        }

        public Stream OpenStream(long offset, long size)
        {
            int segmentIdx = GetSegmentFromVirtualOffset(offset);
            long segmentOffset = GetOffsetFromVirtualOffset(offset);

            if (segmentIdx > Segments.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(offset));
            }

            RemapSegment segment = Segments[GetSegmentFromVirtualOffset(offset)];

            if (segmentOffset > segment.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(offset));
            }

            Stream stream = new RemapStream(StreamSource.CreateStream(), segment);

            return new SubStream(stream, offset, size);
        }

        public Stream OpenSegmentStream(int segment)
        {
            long offset = ToVirtualOffset(segment, 0);
            long size = Segments[segment].Length;

            return OpenStream(offset, size);
        }

        private static RemapSegment[] InitSegments(RemapHeader header, MapEntry[] mapEntries)
        {
            var segments = new RemapSegment[header.MapSegmentCount];
            int entryIdx = 0;

            for (int i = 0; i < header.MapSegmentCount; i++)
            {
                var seg = new RemapSegment();
                seg.Entries.Add(mapEntries[entryIdx]);
                seg.Offset = mapEntries[entryIdx].VirtualOffset;
                mapEntries[entryIdx].Segment = seg;
                entryIdx++;

                while (entryIdx < mapEntries.Length &&
                       mapEntries[entryIdx - 1].VirtualOffsetEnd == mapEntries[entryIdx].VirtualOffset)
                {
                    mapEntries[entryIdx].Segment = seg;
                    mapEntries[entryIdx - 1].Next = mapEntries[entryIdx];
                    seg.Entries.Add(mapEntries[entryIdx]);
                    entryIdx++;
                }

                seg.Length = seg.Entries[seg.Entries.Count - 1].VirtualOffsetEnd - seg.Entries[0].VirtualOffset;
                segments[i] = seg;
            }

            return segments;
        }

        private int GetSegmentFromVirtualOffset(long virtualOffset)
        {
            return (int)((ulong)virtualOffset >> (64 - Header.SegmentBits));
        }

        private long GetOffsetFromVirtualOffset(long virtualOffset)
        {
            return virtualOffset & GetOffsetMask();
        }

        private long ToVirtualOffset(int segment, long offset)
        {
            long seg = (segment << (64 - Header.SegmentBits)) & GetSegmentMask();
            long off = offset & GetOffsetMask();
            return seg | off;
        }

        private long GetOffsetMask()
        {
            return (1 << (64 - Header.SegmentBits)) - 1;
        }

        private long GetSegmentMask()
        {
            return ~GetOffsetMask();
        }
    }
}
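
The virtual-offset arithmetic above packs a segment index into the high bits of a 64-bit offset. A standalone sketch of the split, assuming an illustrative SegmentBits value of 29 (the real value comes from the RemapHeader); the sketch shifts a long (1L) so the mask is computed in 64 bits:

    const int segmentBits = 29;        // illustrative only; read from RemapHeader in practice
    long virtualOffset = 0x800000000;  // bit 35 set

    long segment = (long)((ulong)virtualOffset >> (64 - segmentBits)); // = 1; the top bits select the segment
    long offsetMask = (1L << (64 - segmentBits)) - 1;                  // low 35 bits hold the in-segment offset
    long segmentOffset = virtualOffset & offsetMask;                   // = 0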

View file

@@ -3,46 +3,21 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 
-namespace LibHac.Savefile
+namespace LibHac.Save
 {
     public class RemapStream : Stream
     {
         private long _position;
         private Stream BaseStream { get; }
-        public MapEntry[] MapEntries { get; set; }
-        public MapEntry CurrentEntry { get; set; }
-        public RemapSegment[] Segments { get; set; }
+        private RemapSegment Segment { get; }
+        private MapEntry CurrentEntry { get; set; }
 
-        public RemapStream(Stream baseStream, MapEntry[] entries, int segmentCount)
+        public RemapStream(Stream baseStream, RemapSegment segment)
         {
             BaseStream = baseStream;
-            MapEntries = entries;
-            Segments = new RemapSegment[segmentCount];
-
-            int entryIdx = 0;
-            for (int i = 0; i < segmentCount; i++)
-            {
-                var seg = new RemapSegment();
-                seg.Entries.Add(MapEntries[entryIdx]);
-                seg.Offset = MapEntries[entryIdx].VirtualOffset;
-                MapEntries[entryIdx].Segment = seg;
-                entryIdx++;
-
-                while (entryIdx < MapEntries.Length &&
-                       MapEntries[entryIdx - 1].VirtualOffsetEnd == MapEntries[entryIdx].VirtualOffset)
-                {
-                    MapEntries[entryIdx].Segment = seg;
-                    MapEntries[entryIdx - 1].Next = MapEntries[entryIdx];
-                    seg.Entries.Add(MapEntries[entryIdx]);
-                    entryIdx++;
-                }
-
-                seg.Length = seg.Entries[seg.Entries.Count - 1].VirtualOffsetEnd - seg.Entries[0].VirtualOffset;
-                Segments[i] = seg;
-            }
-
-            CurrentEntry = GetMapEntry(0);
-            UpdateBaseStreamPosition();
+            Segment = segment;
+            CurrentEntry = segment.Entries[0];
+            Length = segment.Length;
         }
 
         public override int Read(byte[] buffer, int offset, int count)
@@ -68,6 +43,27 @@ namespace LibHac.Savefile
             return count;
         }
 
+        public override void Write(byte[] buffer, int offset, int count)
+        {
+            if (CurrentEntry == null) throw new EndOfStreamException();
+
+            long remaining = Math.Min(CurrentEntry.VirtualOffsetEnd - Position, count);
+            if (remaining <= 0) return;
+            int inPos = offset;
+
+            while (remaining > 0)
+            {
+                long remainInEntry = CurrentEntry.VirtualOffsetEnd - Position;
+                int toWrite = (int)Math.Min(remaining, remainInEntry);
+
+                BaseStream.Write(buffer, inPos, toWrite);
+
+                inPos += toWrite;
+                remaining -= toWrite;
+                Position += toWrite;
+            }
+        }
+
         public override long Seek(long offset, SeekOrigin origin)
         {
             switch (origin)
@@ -91,20 +87,14 @@ namespace LibHac.Savefile
             throw new NotSupportedException();
         }
 
-        public override void Write(byte[] buffer, int offset, int count)
-        {
-            throw new NotSupportedException();
-        }
-
         public override void Flush()
         {
-            throw new NotImplementedException();
+            BaseStream.Flush();
        }
 
         private MapEntry GetMapEntry(long offset)
         {
-            // todo: is O(n) search a possible performance issue?
-            MapEntry entry = MapEntries.FirstOrDefault(x => offset >= x.VirtualOffset && offset < x.VirtualOffsetEnd);
+            MapEntry entry = Segment.Entries.FirstOrDefault(x => offset >= x.VirtualOffset && offset < x.VirtualOffsetEnd);
             if (entry == null) throw new ArgumentOutOfRangeException(nameof(offset));
             return entry;
         }
@@ -120,7 +110,7 @@ namespace LibHac.Savefile
         public override bool CanRead => true;
         public override bool CanSeek => true;
         public override bool CanWrite => false;
-        public override long Length { get; } = -1;
+        public override long Length { get; }
 
         public override long Position
         {

LibHac/Save/SaveFs.cs (new file, 147 lines)
View file

@@ -0,0 +1,147 @@
using System.Collections.Generic;
using System.IO;
using LibHac.Streams;

namespace LibHac.Save
{
    public class SaveFs
    {
        private SharedStreamSource StreamSource { get; }
        private AllocationTable AllocationTable { get; }
        private SaveHeader Header { get; }

        public DirectoryEntry RootDirectory { get; private set; }
        public FileEntry[] Files { get; private set; }
        public DirectoryEntry[] Directories { get; private set; }
        public Dictionary<string, FileEntry> FileDictionary { get; }

        public SaveFs(Stream storage, Stream allocationTable, SaveHeader header)
        {
            StreamSource = new SharedStreamSource(storage);
            AllocationTable = new AllocationTable(allocationTable);
            Header = header;

            ReadFileInfo();
            var dictionary = new Dictionary<string, FileEntry>();
            foreach (FileEntry entry in Files)
            {
                dictionary[entry.FullPath] = entry;
            }

            FileDictionary = dictionary;
        }

        public Stream OpenFile(string filename)
        {
            if (!FileDictionary.TryGetValue(filename, out FileEntry file))
            {
                throw new FileNotFoundException();
            }

            return OpenFile(file);
        }

        public Stream OpenFile(FileEntry file)
        {
            if (file.BlockIndex < 0)
            {
                return Stream.Null;
            }

            return OpenFatBlock(file.BlockIndex, file.FileSize);
        }

        public bool FileExists(string filename) => FileDictionary.ContainsKey(filename);

        public Stream OpenRawSaveFs() => StreamSource.CreateStream();

        private void ReadFileInfo()
        {
            // todo: Query the FAT for the file size when none is given
            AllocationTableStream dirTableStream = OpenFatBlock(Header.DirectoryTableBlock, 1000000);
            AllocationTableStream fileTableStream = OpenFatBlock(Header.FileTableBlock, 1000000);

            DirectoryEntry[] dirEntries = ReadDirEntries(dirTableStream);
            FileEntry[] fileEntries = ReadFileEntries(fileTableStream);

            foreach (DirectoryEntry dir in dirEntries)
            {
                if (dir.NextSiblingIndex != 0) dir.NextSibling = dirEntries[dir.NextSiblingIndex];
                if (dir.FirstChildIndex != 0) dir.FirstChild = dirEntries[dir.FirstChildIndex];
                if (dir.FirstFileIndex != 0) dir.FirstFile = fileEntries[dir.FirstFileIndex];
                if (dir.NextInChainIndex != 0) dir.NextInChain = dirEntries[dir.NextInChainIndex];
                if (dir.ParentDirIndex != 0 && dir.ParentDirIndex < dirEntries.Length)
                    dir.ParentDir = dirEntries[dir.ParentDirIndex];
            }

            foreach (FileEntry file in fileEntries)
            {
                if (file.NextSiblingIndex != 0) file.NextSibling = fileEntries[file.NextSiblingIndex];
                if (file.NextInChainIndex != 0) file.NextInChain = fileEntries[file.NextInChainIndex];
                if (file.ParentDirIndex != 0 && file.ParentDirIndex < dirEntries.Length)
                    file.ParentDir = dirEntries[file.ParentDirIndex];
            }

            RootDirectory = dirEntries[2];

            FileEntry fileChain = fileEntries[1].NextInChain;
            var files = new List<FileEntry>();
            while (fileChain != null)
            {
                files.Add(fileChain);
                fileChain = fileChain.NextInChain;
            }

            DirectoryEntry dirChain = dirEntries[1].NextInChain;
            var dirs = new List<DirectoryEntry>();
            while (dirChain != null)
            {
                dirs.Add(dirChain);
                dirChain = dirChain.NextInChain;
            }

            Files = files.ToArray();
            Directories = dirs.ToArray();

            FsEntry.ResolveFilenames(Files);
            FsEntry.ResolveFilenames(Directories);
        }

        private FileEntry[] ReadFileEntries(Stream stream)
        {
            var reader = new BinaryReader(stream);
            int count = reader.ReadInt32();
            reader.BaseStream.Position -= 4;

            var entries = new FileEntry[count];
            for (int i = 0; i < count; i++)
            {
                entries[i] = new FileEntry(reader);
            }

            return entries;
        }

        private DirectoryEntry[] ReadDirEntries(Stream stream)
        {
            var reader = new BinaryReader(stream);
            int count = reader.ReadInt32();
            reader.BaseStream.Position -= 4;

            var entries = new DirectoryEntry[count];
            for (int i = 0; i < count; i++)
            {
                entries[i] = new DirectoryEntry(reader);
            }

            return entries;
        }

        private AllocationTableStream OpenFatBlock(int blockIndex, long size)
        {
            return new AllocationTableStream(StreamSource.CreateStream(), AllocationTable, (int)Header.BlockSize, blockIndex, size);
        }
    }
}
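
A hypothetical usage sketch for the extracted filesystem class (fs being a constructed SaveFs; the path and output file are assumptions):

    if (fs.FileExists("/file.bin"))
    {
        using (Stream file = fs.OpenFile("/file.bin"))
        using (var output = new FileStream("file.bin", FileMode.Create))
        {
            file.CopyTo(output); // stream the FAT-backed file out to disk
        }
    }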

LibHac/Save/Savefile.cs (new file, 200 lines)
View file

@@ -0,0 +1,200 @@
using System.IO;
using System.Security.Cryptography;
using System.Text;
using LibHac.Streams;

namespace LibHac.Save
{
    public class Savefile
    {
        public Header Header { get; }
        public SharedStreamSource SavefileSource { get; }
        public SharedStreamSource JournalStreamSource { get; }
        private HierarchicalIntegrityVerificationStream IvfcStream { get; }
        public SharedStreamSource IvfcStreamSource { get; }
        public SaveFs SaveFs { get; }
        public RemapStorage DataRemapStorage { get; }
        public RemapStorage MetaRemapStorage { get; }
        public LayeredDuplexFs DuplexData { get; }

        public DirectoryEntry RootDirectory => SaveFs.RootDirectory;
        public FileEntry[] Files => SaveFs.Files;
        public DirectoryEntry[] Directories => SaveFs.Directories;

        public Savefile(Keyset keyset, Stream file, IntegrityCheckLevel integrityCheckLevel)
        {
            SavefileSource = new SharedStreamSource(file);

            Header = new Header(keyset, SavefileSource);
            FsLayout layout = Header.Layout;

            DataRemapStorage = new RemapStorage(SavefileSource.CreateStream(layout.FileMapDataOffset, layout.FileMapDataSize),
                Header.FileRemap, Header.FileMapEntries);

            DuplexData = InitDuplexStream(DataRemapStorage, Header);

            MetaRemapStorage = new RemapStorage(DuplexData, Header.MetaRemap, Header.MetaMapEntries);

            Stream journalTable = MetaRemapStorage.OpenStream(layout.JournalTableOffset, layout.JournalTableSize);

            MappingEntry[] journalMap = JournalStream.ReadMappingEntries(journalTable, Header.Journal.MainDataBlockCount);

            Stream journalData = DataRemapStorage.OpenStream(layout.JournalDataOffset,
                layout.JournalDataSizeB + layout.SizeReservedArea);
            var journalStream = new JournalStream(journalData, journalMap, (int)Header.Journal.BlockSize);
            JournalStreamSource = new SharedStreamSource(journalStream);

            IvfcStream = InitIvfcStream(integrityCheckLevel);

            SaveFs = new SaveFs(IvfcStream, MetaRemapStorage.OpenStream(layout.FatOffset, layout.FatSize), Header.Save);

            IvfcStreamSource = new SharedStreamSource(IvfcStream);
        }

        private static LayeredDuplexFs InitDuplexStream(RemapStorage baseStorage, Header header)
        {
            FsLayout layout = header.Layout;
            var duplexLayers = new DuplexFsLayerInfo[3];

            duplexLayers[0] = new DuplexFsLayerInfo
            {
                DataA = new MemoryStream(header.DuplexMasterA),
                DataB = new MemoryStream(header.DuplexMasterB),
                Info = header.Duplex.Layers[0]
            };

            duplexLayers[1] = new DuplexFsLayerInfo
            {
                DataA = baseStorage.OpenStream(layout.DuplexL1OffsetA, layout.DuplexL1Size),
                DataB = baseStorage.OpenStream(layout.DuplexL1OffsetB, layout.DuplexL1Size),
                Info = header.Duplex.Layers[1]
            };

            duplexLayers[2] = new DuplexFsLayerInfo
            {
                DataA = baseStorage.OpenStream(layout.DuplexDataOffsetA, layout.DuplexDataSize),
                DataB = baseStorage.OpenStream(layout.DuplexDataOffsetB, layout.DuplexDataSize),
                Info = header.Duplex.Layers[2]
            };

            return new LayeredDuplexFs(duplexLayers, layout.DuplexIndex == 1);
        }

        private HierarchicalIntegrityVerificationStream InitIvfcStream(IntegrityCheckLevel integrityCheckLevel)
        {
            IvfcHeader ivfc = Header.Ivfc;

            const int ivfcLevels = 5;
            var initInfo = new IntegrityVerificationInfo[ivfcLevels];

            initInfo[0] = new IntegrityVerificationInfo
            {
                Data = Header.MasterHash,
                BlockSize = 0,
                Type = IntegrityStreamType.Save
            };

            for (int i = 1; i < ivfcLevels; i++)
            {
                IvfcLevelHeader level = ivfc.LevelHeaders[i - 1];

                Stream data = i == ivfcLevels - 1
                    ? JournalStreamSource.CreateStream()
                    : MetaRemapStorage.OpenStream(level.LogicalOffset, level.HashDataSize);

                initInfo[i] = new IntegrityVerificationInfo
                {
                    Data = data,
                    BlockSize = 1 << level.BlockSizePower,
                    Salt = new HMACSHA256(Encoding.ASCII.GetBytes(SaltSources[i - 1])).ComputeHash(ivfc.SaltSource),
                    Type = IntegrityStreamType.Save
                };
            }

            return new HierarchicalIntegrityVerificationStream(initInfo, integrityCheckLevel);
        }

        public Stream OpenFile(string filename)
        {
            return SaveFs.OpenFile(filename);
        }

        public Stream OpenFile(FileEntry file)
        {
            return SaveFs.OpenFile(file);
        }

        public bool FileExists(string filename) => SaveFs.FileExists(filename);

        public bool CommitHeader(Keyset keyset)
        {
            SharedStream headerStream = SavefileSource.CreateStream();

            var hashData = new byte[0x3d00];

            headerStream.Position = 0x300;
            headerStream.Read(hashData, 0, hashData.Length);

            byte[] hash = Crypto.ComputeSha256(hashData, 0, hashData.Length);
            headerStream.Position = 0x108;
            headerStream.Write(hash, 0, hash.Length);

            if (keyset.SaveMacKey.IsEmpty()) return false;

            var cmacData = new byte[0x200];
            var cmac = new byte[0x10];

            headerStream.Position = 0x100;
            headerStream.Read(cmacData, 0, 0x200);

            Crypto.CalculateAesCmac(keyset.SaveMacKey, cmacData, 0, cmac, 0, 0x200);

            headerStream.Position = 0;
            headerStream.Write(cmac, 0, 0x10);

            headerStream.Flush();

            return true;
        }

        public Validity Verify(IProgressReport logger = null)
        {
            Validity validity = IvfcStream.Validate(true, logger);
            IvfcStream.SetLevelValidities(Header.Ivfc);

            return validity;
        }

        private string[] SaltSources =
        {
            "HierarchicalIntegrityVerificationStorage::Master",
            "HierarchicalIntegrityVerificationStorage::L1",
            "HierarchicalIntegrityVerificationStorage::L2",
            "HierarchicalIntegrityVerificationStorage::L3",
            "HierarchicalIntegrityVerificationStorage::L4",
            "HierarchicalIntegrityVerificationStorage::L5"
        };
    }

    public static class SavefileExtensions
    {
        public static void Extract(this Savefile save, string outDir, IProgressReport logger = null)
        {
            foreach (FileEntry file in save.Files)
            {
                Stream stream = save.OpenFile(file);
                string outName = outDir + file.FullPath;
                string dir = Path.GetDirectoryName(outName);
                if (!string.IsNullOrWhiteSpace(dir)) Directory.CreateDirectory(dir);

                using (var outFile = new FileStream(outName, FileMode.Create, FileAccess.ReadWrite))
                {
                    logger?.LogMessage(file.FullPath);
                    stream.CopyStream(outFile, stream.Length, logger);
                }
            }
        }
    }
}
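
Putting the pieces together, a hypothetical end-to-end use of the reorganized classes (keyset, logger, and paths are assumptions):

    using (var file = new FileStream("save.bin", FileMode.Open, FileAccess.ReadWrite))
    {
        var save = new Savefile(keyset, file, IntegrityCheckLevel.IgnoreOnInvalid);

        Validity validity = save.Verify(logger);  // validates the IVFC tree and records per-level results
        save.Extract("outdir", logger);           // extension method defined above
    }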

View file

@@ -1,317 +0,0 @@
(deleted file: the old LibHac.Savefile.Savefile implementation, superseded by LibHac/Save/Savefile.cs above)

using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using LibHac.Streams;

namespace LibHac.Savefile
{
    public class Savefile
    {
        public Header Header { get; }
        private RemapStream FileRemap { get; }
        public SharedStreamSource SavefileSource { get; }
        public SharedStreamSource FileRemapSource { get; }
        private RemapStream MetaRemap { get; }
        public SharedStreamSource MetaRemapSource { get; }
        private JournalStream JournalStream { get; }
        public SharedStreamSource JournalStreamSource { get; }
        private HierarchicalIntegrityVerificationStream IvfcStream { get; }
        public SharedStreamSource IvfcStreamSource { get; }
        private AllocationTable AllocationTable { get; }

        public Stream DuplexL1A { get; }
        public Stream DuplexL1B { get; }
        public Stream DuplexDataA { get; }
        public Stream DuplexDataB { get; }
        public LayeredDuplexFs DuplexData { get; }
        public Stream JournalData { get; }

        public Stream JournalTable { get; }
        public Stream JournalBitmapUpdatedPhysical { get; }
        public Stream JournalBitmapUpdatedVirtual { get; }
        public Stream JournalBitmapUnassigned { get; }
        public Stream JournalLayer1Hash { get; }
        public Stream JournalLayer2Hash { get; }
        public Stream JournalLayer3Hash { get; }
        public Stream JournalFat { get; }

        public DirectoryEntry RootDirectory { get; private set; }
        public FileEntry[] Files { get; private set; }
        public DirectoryEntry[] Directories { get; private set; }
        private Dictionary<string, FileEntry> FileDict { get; }

        public Savefile(Keyset keyset, Stream file, IntegrityCheckLevel integrityCheckLevel)
        {
            SavefileSource = new SharedStreamSource(file);

            using (var reader = new BinaryReader(SavefileSource.CreateStream(), Encoding.Default, true))
            {
                Header = new Header(keyset, reader);
                FsLayout layout = Header.Layout;

                FileRemap = new RemapStream(
                    SavefileSource.CreateStream(layout.FileMapDataOffset, layout.FileMapDataSize),
                    Header.FileMapEntries, Header.FileRemap.MapSegmentCount);

                FileRemapSource = new SharedStreamSource(FileRemap);

                var duplexLayers = new DuplexFsLayerInfo[3];

                duplexLayers[0] = new DuplexFsLayerInfo
                {
                    DataA = new MemoryStream(Header.DuplexMasterA),
                    DataB = new MemoryStream(Header.DuplexMasterB),
                    Info = Header.Duplex.Layers[0]
                };

                duplexLayers[1] = new DuplexFsLayerInfo
                {
                    DataA = FileRemapSource.CreateStream(layout.DuplexL1OffsetA, layout.DuplexL1Size),
                    DataB = FileRemapSource.CreateStream(layout.DuplexL1OffsetB, layout.DuplexL1Size),
                    Info = Header.Duplex.Layers[1]
                };

                duplexLayers[2] = new DuplexFsLayerInfo
                {
                    DataA = FileRemapSource.CreateStream(layout.DuplexDataOffsetA, layout.DuplexDataSize),
                    DataB = FileRemapSource.CreateStream(layout.DuplexDataOffsetB, layout.DuplexDataSize),
                    Info = Header.Duplex.Layers[2]
                };

                DuplexL1A = FileRemapSource.CreateStream(layout.DuplexL1OffsetA, layout.DuplexL1Size);
                DuplexL1B = FileRemapSource.CreateStream(layout.DuplexL1OffsetB, layout.DuplexL1Size);
                DuplexDataA = FileRemapSource.CreateStream(layout.DuplexDataOffsetA, layout.DuplexDataSize);
                DuplexDataB = FileRemapSource.CreateStream(layout.DuplexDataOffsetB, layout.DuplexDataSize);
                JournalData = FileRemapSource.CreateStream(layout.JournalDataOffset, layout.JournalDataSizeB + layout.SizeReservedArea);

                DuplexData = new LayeredDuplexFs(duplexLayers, Header.Layout.DuplexIndex == 1);
                MetaRemap = new RemapStream(DuplexData, Header.MetaMapEntries, Header.MetaRemap.MapSegmentCount);
                MetaRemapSource = new SharedStreamSource(MetaRemap);

                JournalTable = MetaRemapSource.CreateStream(layout.JournalTableOffset, layout.JournalTableSize);
                JournalBitmapUpdatedPhysical = MetaRemapSource.CreateStream(layout.JournalBitmapUpdatedPhysicalOffset, layout.JournalBitmapUpdatedPhysicalSize);
                JournalBitmapUpdatedVirtual = MetaRemapSource.CreateStream(layout.JournalBitmapUpdatedVirtualOffset, layout.JournalBitmapUpdatedVirtualSize);
                JournalBitmapUnassigned = MetaRemapSource.CreateStream(layout.JournalBitmapUnassignedOffset, layout.JournalBitmapUnassignedSize);
                JournalLayer1Hash = MetaRemapSource.CreateStream(layout.IvfcL1Offset, layout.IvfcL1Size);
                JournalLayer2Hash = MetaRemapSource.CreateStream(layout.IvfcL2Offset, layout.IvfcL2Size);
                JournalLayer3Hash = MetaRemapSource.CreateStream(layout.IvfcL3Offset, layout.IvfcL3Size);
                JournalFat = MetaRemapSource.CreateStream(layout.FatOffset, layout.FatSize);
                AllocationTable = new AllocationTable(JournalFat);

                MappingEntry[] journalMap = JournalStream.ReadMappingEntries(JournalTable, Header.Journal.MainDataBlockCount);

                SharedStream journalData = FileRemapSource.CreateStream(layout.JournalDataOffset,
                    layout.JournalDataSizeB + layout.SizeReservedArea);
                JournalStream = new JournalStream(journalData, journalMap, (int)Header.Journal.BlockSize);
                JournalStreamSource = new SharedStreamSource(JournalStream);

                IvfcStream = InitIvfcStream(integrityCheckLevel);
                IvfcStreamSource = new SharedStreamSource(IvfcStream);

                ReadFileInfo();

                var dictionary = new Dictionary<string, FileEntry>();
                foreach (FileEntry entry in Files)
                {
                    dictionary[entry.FullPath] = entry;
                }

                FileDict = dictionary;
            }
        }

        private HierarchicalIntegrityVerificationStream InitIvfcStream(IntegrityCheckLevel integrityCheckLevel)
        {
            IvfcHeader ivfc = Header.Ivfc;

            const int ivfcLevels = 5;
            var initInfo = new IntegrityVerificationInfo[ivfcLevels];

            initInfo[0] = new IntegrityVerificationInfo
            {
                Data = new MemoryStream(Header.MasterHashA),
                BlockSize = 0,
                Type = IntegrityStreamType.Save
            };

            for (int i = 1; i < ivfcLevels; i++)
            {
                IvfcLevelHeader level = ivfc.LevelHeaders[i - 1];

                Stream data = i == ivfcLevels - 1
                    ? (Stream)JournalStream
                    : MetaRemapSource.CreateStream(level.LogicalOffset, level.HashDataSize);

                initInfo[i] = new IntegrityVerificationInfo
                {
                    Data = data,
                    BlockSize = 1 << level.BlockSizePower,
                    Salt = new HMACSHA256(Encoding.ASCII.GetBytes(SaltSources[i - 1])).ComputeHash(ivfc.SaltSource),
                    Type = IntegrityStreamType.Save
                };
            }

            return new HierarchicalIntegrityVerificationStream(initInfo, integrityCheckLevel);
        }

        public Stream OpenFile(string filename)
        {
            if (!FileDict.TryGetValue(filename, out FileEntry file))
            {
                throw new FileNotFoundException();
            }

            return OpenFile(file);
        }

        public Stream OpenFile(FileEntry file)
        {
            if (file.BlockIndex < 0)
            {
                return Stream.Null;
            }

            return OpenFatBlock(file.BlockIndex, file.FileSize);
        }

        private AllocationTableStream OpenFatBlock(int blockIndex, long size)
        {
            return new AllocationTableStream(IvfcStreamSource.CreateStream(), AllocationTable, (int)Header.Save.BlockSize, blockIndex, size);
        }

        public bool FileExists(string filename) => FileDict.ContainsKey(filename);

        private void ReadFileInfo()
        {
            // todo: Query the FAT for the file size when none is given
            AllocationTableStream dirTableStream = OpenFatBlock(Header.Save.DirectoryTableBlock, 1000000);
            AllocationTableStream fileTableStream = OpenFatBlock(Header.Save.FileTableBlock, 1000000);

            DirectoryEntry[] dirEntries = ReadDirEntries(dirTableStream);
            FileEntry[] fileEntries = ReadFileEntries(fileTableStream);

            foreach (DirectoryEntry dir in dirEntries)
            {
                if (dir.NextSiblingIndex != 0) dir.NextSibling = dirEntries[dir.NextSiblingIndex];
                if (dir.FirstChildIndex != 0) dir.FirstChild = dirEntries[dir.FirstChildIndex];
                if (dir.FirstFileIndex != 0) dir.FirstFile = fileEntries[dir.FirstFileIndex];
                if (dir.NextInChainIndex != 0) dir.NextInChain = dirEntries[dir.NextInChainIndex];
                if (dir.ParentDirIndex != 0 && dir.ParentDirIndex < dirEntries.Length)
                    dir.ParentDir = dirEntries[dir.ParentDirIndex];
            }

            foreach (FileEntry file in fileEntries)
            {
                if (file.NextSiblingIndex != 0) file.NextSibling = fileEntries[file.NextSiblingIndex];
                if (file.NextInChainIndex != 0) file.NextInChain = fileEntries[file.NextInChainIndex];
                if (file.ParentDirIndex != 0 && file.ParentDirIndex < dirEntries.Length)
                    file.ParentDir = dirEntries[file.ParentDirIndex];
            }

            RootDirectory = dirEntries[2];

            FileEntry fileChain = fileEntries[1].NextInChain;
            var files = new List<FileEntry>();
            while (fileChain != null)
            {
                files.Add(fileChain);
                fileChain = fileChain.NextInChain;
            }

            DirectoryEntry dirChain = dirEntries[1].NextInChain;
            var dirs = new List<DirectoryEntry>();
            while (dirChain != null)
            {
                dirs.Add(dirChain);
                dirChain = dirChain.NextInChain;
            }

            Files = files.ToArray();
            Directories = dirs.ToArray();

            FsEntry.ResolveFilenames(Files);
            FsEntry.ResolveFilenames(Directories);
        }

        private FileEntry[] ReadFileEntries(Stream stream)
        {
            var reader = new BinaryReader(stream);
            int count = reader.ReadInt32();
            reader.BaseStream.Position -= 4;

            var entries = new FileEntry[count];
            for (int i = 0; i < count; i++)
            {
                entries[i] = new FileEntry(reader);
            }

            return entries;
        }

        private DirectoryEntry[] ReadDirEntries(Stream stream)
        {
            var reader = new BinaryReader(stream);
            int count = reader.ReadInt32();
            reader.BaseStream.Position -= 4;

            var entries = new DirectoryEntry[count];
            for (int i = 0; i < count; i++)
            {
                entries[i] = new DirectoryEntry(reader);
            }

            return entries;
        }

        public bool SignHeader(Keyset keyset)
        {
            if (keyset.SaveMacKey.IsEmpty()) return false;

            var data = new byte[0x200];
            var cmac = new byte[0x10];

            SharedStream headerStream = SavefileSource.CreateStream();
            headerStream.Position = 0x100;
            headerStream.Read(data, 0, 0x200);

            Crypto.CalculateAesCmac(keyset.SaveMacKey, data, 0, cmac, 0, 0x200);

            headerStream.Position = 0;
            headerStream.Write(cmac, 0, 0x10);

            return true;
        }

        private string[] SaltSources =
        {
            "HierarchicalIntegrityVerificationStorage::Master",
            "HierarchicalIntegrityVerificationStorage::L1",
            "HierarchicalIntegrityVerificationStorage::L2",
            "HierarchicalIntegrityVerificationStorage::L3",
            "HierarchicalIntegrityVerificationStorage::L4",
            "HierarchicalIntegrityVerificationStorage::L5"
        };
    }

    public static class SavefileExtensions
    {
        public static void Extract(this Savefile save, string outDir, IProgressReport logger = null)
        {
            foreach (FileEntry file in save.Files)
            {
                Stream stream = save.OpenFile(file);
                string outName = outDir + file.FullPath;
                string dir = Path.GetDirectoryName(outName);
                if (!string.IsNullOrWhiteSpace(dir)) Directory.CreateDirectory(dir);

                using (var outFile = new FileStream(outName, FileMode.Create, FileAccess.ReadWrite))
                {
                    logger?.LogMessage(file.FullPath);
                    stream.CopyStream(outFile, stream.Length, logger);
                }
            }
        }
    }
}

View file

@@ -44,8 +44,9 @@ namespace LibHac.Streams
             if (_readBytes == 0 || !_bufferDirty) return;
 
             _baseStream.Position = _currentSector * _bufferSize;
-            _baseStream.Write(_buffer, 0, _readBytes);
+            _baseStream.Write(_buffer, 0, _bufferSize);
 
+            _bufferPos = 0;
             _readBytes = 0;
             _bufferDirty = false;
         }
@@ -53,6 +54,7 @@ namespace LibHac.Streams
         public override void Flush()
         {
             WriteSectorIfDirty();
+            _baseStream.Flush();
         }
 
         public override int Read(byte[] buffer, int offset, int count)

View file

@@ -112,7 +112,9 @@ namespace LibHac.Streams
         public override void Write(byte[] buffer, int offset, int count)
         {
             ValidateSize(count);
-            _baseStream.Write(buffer, offset, count);
+
+            int toWrite = (int)Math.Min(count, Length - Position);
+            _baseStream.Write(buffer, offset, toWrite);
 
             CurrentSector += count / SectorSize;
         }

View file

@@ -32,7 +32,13 @@ namespace LibHac.Streams
             return new SharedStream(this, offset, length);
         }
 
-        public void Flush() => BaseStream.Flush();
+        public void Flush()
+        {
+            lock (Locker)
+            {
+                BaseStream.Flush();
+            }
+        }
 
         public int Read(long readOffset, byte[] buffer, int bufferOffset, int count)
         {

View file

@@ -32,6 +32,14 @@ namespace LibHac.Streams
             return BaseStream.Read(buffer, offset, count);
         }
 
+        public override void Write(byte[] buffer, int offset, int count)
+        {
+            long remaining = Math.Min(Length - Position, count);
+            if (remaining <= 0) return;
+
+            BaseStream.Write(buffer, offset, (int)remaining);
+        }
+
         public override long Length { get; }
         public override bool CanRead => BaseStream.CanRead;
         public override bool CanWrite => BaseStream.CanWrite;
@@ -70,10 +78,5 @@ namespace LibHac.Streams
         public override void SetLength(long value) => throw new NotSupportedException();
         public override void Flush() => BaseStream.Flush();
-
-        public override void Write(byte[] buffer, int offset, int count)
-        {
-            throw new NotImplementedException();
-        }
     }
 }

View file

@@ -4,6 +4,7 @@ using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Text;
+using LibHac.Save;
 using LibHac.Streams;
 
 namespace LibHac
@@ -16,7 +17,7 @@ namespace LibHac
         public string SaveDir { get; }
 
         public Dictionary<string, Nca> Ncas { get; } = new Dictionary<string, Nca>(StringComparer.OrdinalIgnoreCase);
-        public Dictionary<string, Savefile.Savefile> Saves { get; } = new Dictionary<string, Savefile.Savefile>(StringComparer.OrdinalIgnoreCase);
+        public Dictionary<string, Savefile> Saves { get; } = new Dictionary<string, Savefile>(StringComparer.OrdinalIgnoreCase);
         public Dictionary<ulong, Title> Titles { get; } = new Dictionary<ulong, Title>();
         public Dictionary<ulong, Application> Applications { get; } = new Dictionary<ulong, Application>();
 
@@ -117,7 +118,7 @@ namespace LibHac
             foreach (string file in files)
             {
-                Savefile.Savefile save = null;
+                Savefile save = null;
                 string saveName = Path.GetFileNameWithoutExtension(file);
 
                 try
@@ -126,7 +127,7 @@ namespace LibHac
                     string sdPath = "/" + Util.GetRelativePath(file, SaveDir).Replace('\\', '/');
                     var nax0 = new Nax0(Keyset, stream, sdPath, false);
-                    save = new Savefile.Savefile(Keyset, nax0.Stream, IntegrityCheckLevel.None);
+                    save = new Savefile(Keyset, nax0.Stream, IntegrityCheckLevel.None);
                 }
                 catch (Exception ex)
                 {

View file

@@ -4,7 +4,7 @@ using System.Collections.Generic;
 using System.IO;
 using LibHac;
 using LibHac.Nand;
-using LibHac.Savefile;
+using LibHac.Save;
 
 namespace NandReader
 {

View file

@@ -7,7 +7,7 @@ using GalaSoft.MvvmLight;
 using GalaSoft.MvvmLight.Command;
 using LibHac;
 using LibHac.Nand;
-using LibHac.Savefile;
+using LibHac.Save;
 using LibHac.Streams;
 
 namespace NandReaderGui.ViewModel

View file

@@ -44,6 +44,7 @@ namespace hactoolnet
             new CliOption("listapps", 0, (o, a) => o.ListApps = true),
             new CliOption("listtitles", 0, (o, a) => o.ListTitles = true),
             new CliOption("listromfs", 0, (o, a) => o.ListRomFs = true),
+            new CliOption("listfiles", 0, (o, a) => o.ListFiles = true),
             new CliOption("sign", 0, (o, a) => o.SignSave = true),
             new CliOption("title", 1, (o, a) => o.TitleId = ParseTitleId(a[0])),
         };
@@ -203,6 +204,7 @@ namespace hactoolnet
             sb.AppendLine("  --outdir <dir>       Specify directory path to save contents to.");
             sb.AppendLine("  --debugoutdir <dir>  Specify directory path to save intermediate data to for debugging.");
             sb.AppendLine("  --sign               Sign the save file. (Requires device_key in key file)");
+            sb.AppendLine("  --listfiles          List files in save file.");
             sb.AppendLine("Keygen options:");
             sb.AppendLine("  --outdir <dir>       Specify directory path to save key files to.");

View file

@@ -34,11 +34,19 @@ namespace hactoolnet
         public bool ListApps;
         public bool ListTitles;
         public bool ListRomFs;
+        public bool ListFiles;
         public bool SignSave;
         public ulong TitleId;

-        public IntegrityCheckLevel IntegrityLevel =>
-            EnableHash ? IntegrityCheckLevel.ErrorOnInvalid : IntegrityCheckLevel.None;
+        public IntegrityCheckLevel IntegrityLevel
+        {
+            get
+            {
+                if (Validate) return IntegrityCheckLevel.IgnoreOnInvalid;
+                if (EnableHash) return IntegrityCheckLevel.ErrorOnInvalid;
+                return IntegrityCheckLevel.None;
+            }
+        }
     }

     internal enum FileType
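Note: the expanded getter encodes a precedence among the flags: Validate wins over EnableHash, which wins over no checking. A small sketch of that ordering, assuming Validate and EnableHash are the existing boolean fields on Options (the values are illustrative):

    var opts = new Options { Validate = true, EnableHash = true };

    // Validation asks for blocks to be checked but for reads to continue on a
    // mismatch, so it takes priority over ErrorOnInvalid.
    System.Diagnostics.Debug.Assert(opts.IntegrityLevel == IntegrityCheckLevel.IgnoreOnInvalid);

    opts.Validate = false;
    // With only EnableHash set, an invalid block becomes a read error.
    System.Diagnostics.Debug.Assert(opts.IntegrityLevel == IntegrityCheckLevel.ErrorOnInvalid);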

View file

@ -1,4 +1,5 @@
using System.Text; using System;
using System.Text;
using LibHac; using LibHac;
namespace hactoolnet namespace hactoolnet
@ -26,5 +27,42 @@ namespace hactoolnet
default: return string.Empty; default: return string.Empty;
} }
} }
public static void PrintIvfcHash(StringBuilder sb, int colLen, int indentSize, IvfcHeader ivfcInfo, IntegrityStreamType type)
{
string prefix = new string(' ', indentSize);
string prefix2 = new string(' ', indentSize + 4);
if (type == IntegrityStreamType.RomFs)
PrintItem(sb, colLen, $"{prefix}Master Hash{ivfcInfo.LevelHeaders[0].HashValidity.GetValidityString()}:", ivfcInfo.MasterHash);
PrintItem(sb, colLen, $"{prefix}Magic:", ivfcInfo.Magic);
PrintItem(sb, colLen, $"{prefix}Version:", ivfcInfo.Version);
if (type == IntegrityStreamType.Save)
PrintItem(sb, colLen, $"{prefix}Salt Seed:", ivfcInfo.SaltSource);
int levelCount = Math.Max(ivfcInfo.NumLevels - 1, 0);
if (type == IntegrityStreamType.Save) levelCount = 4;
int offsetLen = type == IntegrityStreamType.Save ? 16 : 12;
for (int i = 0; i < levelCount; i++)
{
IvfcLevelHeader level = ivfcInfo.LevelHeaders[i];
long hashOffset = 0;
if (i != 0)
{
hashOffset = ivfcInfo.LevelHeaders[i - 1].LogicalOffset;
}
sb.AppendLine($"{prefix}Level {i}{level.HashValidity.GetValidityString()}:");
PrintItem(sb, colLen, $"{prefix2}Data Offset:", $"0x{level.LogicalOffset.ToString($"x{offsetLen}")}");
PrintItem(sb, colLen, $"{prefix2}Data Size:", $"0x{level.HashDataSize.ToString($"x{offsetLen}")}");
PrintItem(sb, colLen, $"{prefix2}Hash Offset:", $"0x{hashOffset.ToString($"x{offsetLen}")}");
PrintItem(sb, colLen, $"{prefix2}Hash BlockSize:", $"0x{1 << level.BlockSizePower:x8}");
}
}
} }
} }
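Note: the new shared printer adapts its output to the stream type: Save headers get a salt seed, a fixed four-level listing, and 16-digit offsets, while RomFs headers get a master hash and 12-digit offsets. A hedged usage sketch, assuming a loaded Savefile (save), an NcaSection (sect), and a column width of 20, all illustrative:

    var sb = new StringBuilder();

    // Save IVFC info, indented four spaces (matches the ProcessSave call below).
    PrintIvfcHash(sb, 20, 4, save.Header.Ivfc, IntegrityStreamType.Save);

    // RomFs section info, indented eight spaces (matches the ProcessNca call below).
    PrintIvfcHash(sb, 20, 8, sect.Header.IvfcInfo, IntegrityStreamType.RomFs);

    Console.Write(sb.ToString());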

View file

@@ -174,7 +174,7 @@ namespace hactoolnet
                         PrintSha256Hash(sect);
                         break;
                     case NcaHashType.Ivfc:
-                        PrintIvfcHash(sect);
+                        PrintIvfcHash(sb, colLen, 8, sect.Header.IvfcInfo, IntegrityStreamType.RomFs);
                         break;
                     default:
                         sb.AppendLine("        Unknown/invalid superblock!");
@@ -196,32 +196,6 @@ namespace hactoolnet
                 PrintItem(sb, colLen, "        PFS0 Offset:", $"0x{hashInfo.DataOffset:x12}");
                 PrintItem(sb, colLen, "        PFS0 Size:", $"0x{hashInfo.DataSize:x12}");
             }
-
-            void PrintIvfcHash(NcaSection sect)
-            {
-                IvfcHeader ivfcInfo = sect.Header.IvfcInfo;
-
-                PrintItem(sb, colLen, $"        Master Hash{sect.MasterHashValidity.GetValidityString()}:", ivfcInfo.MasterHash);
-                PrintItem(sb, colLen, "        Magic:", ivfcInfo.Magic);
-                PrintItem(sb, colLen, "        Version:", $"{ivfcInfo.Version:x8}");
-
-                for (int i = 0; i < Romfs.IvfcMaxLevel; i++)
-                {
-                    IvfcLevelHeader level = ivfcInfo.LevelHeaders[i];
-                    long hashOffset = 0;
-
-                    if (i != 0)
-                    {
-                        hashOffset = ivfcInfo.LevelHeaders[i - 1].LogicalOffset;
-                    }
-
-                    sb.AppendLine($"        Level {i}{level.HashValidity.GetValidityString()}:");
-                    PrintItem(sb, colLen, "            Data Offset:", $"0x{level.LogicalOffset:x12}");
-                    PrintItem(sb, colLen, "            Data Size:", $"0x{level.HashDataSize:x12}");
-                    PrintItem(sb, colLen, "            Hash Offset:", $"0x{hashOffset:x12}");
-                    PrintItem(sb, colLen, "            Hash BlockSize:", $"0x{1 << level.BlockSizePower:x8}");
-                }
-            }
         }
     }
 }
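Note: for IntegrityStreamType.RomFs the shared printer keeps the 12-digit offset formatting the removed local method used (offsetLen = 12); the level count, however, is now derived from ivfcInfo.NumLevels rather than the fixed Romfs.IvfcMaxLevel.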

View file

@@ -1,9 +1,8 @@
 using System;
 using System.IO;
-using System.Linq;
 using System.Text;
 using LibHac;
-using LibHac.Savefile;
+using LibHac.Save;
 using static hactoolnet.Print;

 namespace hactoolnet
@@ -16,6 +15,11 @@ namespace hactoolnet
             {
                 var save = new Savefile(ctx.Keyset, file, ctx.Options.IntegrityLevel);

+                if (ctx.Options.Validate)
+                {
+                    save.Verify(ctx.Logger);
+                }
+
                 if (ctx.Options.OutDir != null)
                 {
                     save.Extract(ctx.Options.OutDir, ctx.Logger);
@@ -26,32 +30,50 @@ namespace hactoolnet
                 string dir = ctx.Options.DebugOutDir;
                 Directory.CreateDirectory(dir);

+                FsLayout layout = save.Header.Layout;
+
                 File.WriteAllBytes(Path.Combine(dir, "L0_0_MasterHashA"), save.Header.MasterHashA);
                 File.WriteAllBytes(Path.Combine(dir, "L0_1_MasterHashB"), save.Header.MasterHashB);
                 File.WriteAllBytes(Path.Combine(dir, "L0_2_DuplexMasterA"), save.Header.DuplexMasterA);
                 File.WriteAllBytes(Path.Combine(dir, "L0_3_DuplexMasterB"), save.Header.DuplexMasterB);
-                save.DuplexL1A.WriteAllBytes(Path.Combine(dir, "L0_4_DuplexL1A"), ctx.Logger);
-                save.DuplexL1B.WriteAllBytes(Path.Combine(dir, "L0_5_DuplexL1B"), ctx.Logger);
-                save.DuplexDataA.WriteAllBytes(Path.Combine(dir, "L0_6_DuplexDataA"), ctx.Logger);
-                save.DuplexDataB.WriteAllBytes(Path.Combine(dir, "L0_7_DuplexDataB"), ctx.Logger);
-                save.JournalData.WriteAllBytes(Path.Combine(dir, "L0_9_JournalData"), ctx.Logger);
+                Stream duplexL1A = save.DataRemapStorage.OpenStream(layout.DuplexL1OffsetA, layout.DuplexL1Size);
+                Stream duplexL1B = save.DataRemapStorage.OpenStream(layout.DuplexL1OffsetB, layout.DuplexL1Size);
+                Stream duplexDataA = save.DataRemapStorage.OpenStream(layout.DuplexDataOffsetA, layout.DuplexDataSize);
+                Stream duplexDataB = save.DataRemapStorage.OpenStream(layout.DuplexDataOffsetB, layout.DuplexDataSize);
+                Stream journalData = save.DataRemapStorage.OpenStream(layout.JournalDataOffset, layout.JournalDataSizeB + layout.SizeReservedArea);
+
+                duplexL1A.WriteAllBytes(Path.Combine(dir, "L0_4_DuplexL1A"), ctx.Logger);
+                duplexL1B.WriteAllBytes(Path.Combine(dir, "L0_5_DuplexL1B"), ctx.Logger);
+                duplexDataA.WriteAllBytes(Path.Combine(dir, "L0_6_DuplexDataA"), ctx.Logger);
+                duplexDataB.WriteAllBytes(Path.Combine(dir, "L0_7_DuplexDataB"), ctx.Logger);
+                journalData.WriteAllBytes(Path.Combine(dir, "L0_9_JournalData"), ctx.Logger);
                 save.DuplexData.WriteAllBytes(Path.Combine(dir, "L1_0_DuplexData"), ctx.Logger);
-                save.JournalTable.WriteAllBytes(Path.Combine(dir, "L2_0_JournalTable"), ctx.Logger);
-                save.JournalBitmapUpdatedPhysical.WriteAllBytes(Path.Combine(dir, "L2_1_JournalBitmapUpdatedPhysical"), ctx.Logger);
-                save.JournalBitmapUpdatedVirtual.WriteAllBytes(Path.Combine(dir, "L2_2_JournalBitmapUpdatedVirtual"), ctx.Logger);
-                save.JournalBitmapUnassigned.WriteAllBytes(Path.Combine(dir, "L2_3_JournalBitmapUnassigned"), ctx.Logger);
-                save.JournalLayer1Hash.WriteAllBytes(Path.Combine(dir, "L2_4_Layer1Hash"), ctx.Logger);
-                save.JournalLayer2Hash.WriteAllBytes(Path.Combine(dir, "L2_5_Layer2Hash"), ctx.Logger);
-                save.JournalLayer3Hash.WriteAllBytes(Path.Combine(dir, "L2_6_Layer3Hash"), ctx.Logger);
-                save.JournalFat.WriteAllBytes(Path.Combine(dir, "L2_7_FAT"), ctx.Logger);
+
+                Stream journalTable = save.MetaRemapStorage.OpenStream(layout.JournalTableOffset, layout.JournalTableSize);
+                Stream journalBitmapUpdatedPhysical = save.MetaRemapStorage.OpenStream(layout.JournalBitmapUpdatedPhysicalOffset, layout.JournalBitmapUpdatedPhysicalSize);
+                Stream journalBitmapUpdatedVirtual = save.MetaRemapStorage.OpenStream(layout.JournalBitmapUpdatedVirtualOffset, layout.JournalBitmapUpdatedVirtualSize);
+                Stream journalBitmapUnassigned = save.MetaRemapStorage.OpenStream(layout.JournalBitmapUnassignedOffset, layout.JournalBitmapUnassignedSize);
+                Stream journalLayer1Hash = save.MetaRemapStorage.OpenStream(layout.IvfcL1Offset, layout.IvfcL1Size);
+                Stream journalLayer2Hash = save.MetaRemapStorage.OpenStream(layout.IvfcL2Offset, layout.IvfcL2Size);
+                Stream journalLayer3Hash = save.MetaRemapStorage.OpenStream(layout.IvfcL3Offset, layout.IvfcL3Size);
+                Stream journalFat = save.MetaRemapStorage.OpenStream(layout.FatOffset, layout.FatSize);
+
+                journalTable.WriteAllBytes(Path.Combine(dir, "L2_0_JournalTable"), ctx.Logger);
+                journalBitmapUpdatedPhysical.WriteAllBytes(Path.Combine(dir, "L2_1_JournalBitmapUpdatedPhysical"), ctx.Logger);
+                journalBitmapUpdatedVirtual.WriteAllBytes(Path.Combine(dir, "L2_2_JournalBitmapUpdatedVirtual"), ctx.Logger);
+                journalBitmapUnassigned.WriteAllBytes(Path.Combine(dir, "L2_3_JournalBitmapUnassigned"), ctx.Logger);
+                journalLayer1Hash.WriteAllBytes(Path.Combine(dir, "L2_4_Layer1Hash"), ctx.Logger);
+                journalLayer2Hash.WriteAllBytes(Path.Combine(dir, "L2_5_Layer2Hash"), ctx.Logger);
+                journalLayer3Hash.WriteAllBytes(Path.Combine(dir, "L2_6_Layer3Hash"), ctx.Logger);
+                journalFat.WriteAllBytes(Path.Combine(dir, "L2_7_FAT"), ctx.Logger);
+
                 save.IvfcStreamSource.CreateStream().WriteAllBytes(Path.Combine(dir, "L3_0_SaveData"), ctx.Logger);
             }

             if (ctx.Options.SignSave)
             {
-                if (save.SignHeader(ctx.Keyset))
+                if (save.CommitHeader(ctx.Keyset))
                 {
                     ctx.Logger.LogMessage("Successfully signed save file");
                 }
@@ -61,6 +83,14 @@ namespace hactoolnet
                 }
             }

+            if (ctx.Options.ListFiles)
+            {
+                foreach (FileEntry fileEntry in save.Files)
+                {
+                    ctx.Logger.LogMessage(fileEntry.FullPath);
+                }
+            }
+
             ctx.Logger.LogMessage(save.Print());
         }
     }
@@ -72,7 +102,7 @@ namespace hactoolnet
             sb.AppendLine();
             sb.AppendLine("Savefile:");
-            PrintItem(sb, colLen, "CMAC Signature:", save.Header.Cmac);
+            PrintItem(sb, colLen, $"CMAC Signature{save.Header.SignatureValidity.GetValidityString()}:", save.Header.Cmac);
             PrintItem(sb, colLen, "Title ID:", $"{save.Header.ExtraData.TitleId:x16}");
             PrintItem(sb, colLen, "User ID:", save.Header.ExtraData.UserId);
             PrintItem(sb, colLen, "Save ID:", $"{save.Header.ExtraData.SaveId:x16}");
@@ -82,17 +112,9 @@ namespace hactoolnet
             PrintItem(sb, colLen, "Save Data Size:", $"0x{save.Header.ExtraData.DataSize:x16} ({Util.GetBytesReadable(save.Header.ExtraData.DataSize)})");
             PrintItem(sb, colLen, "Journal Size:", $"0x{save.Header.ExtraData.JournalSize:x16} ({Util.GetBytesReadable(save.Header.ExtraData.JournalSize)})");
             PrintItem(sb, colLen, $"Header Hash{save.Header.HeaderHashValidity.GetValidityString()}:", save.Header.Layout.Hash);
-            PrintItem(sb, colLen, "IVFC Salt Seed:", save.Header.Ivfc.SaltSource);
             PrintItem(sb, colLen, "Number of Files:", save.Files.Length);
-            if (save.Files.Length > 0 && save.Files.Length < 100)
-            {
-                sb.AppendLine("Files:");
-                foreach (FileEntry file in save.Files.OrderBy(x => x.FullPath))
-                {
-                    sb.AppendLine(file.FullPath);
-                }
-            }
+            PrintIvfcHash(sb, colLen, 4, save.Header.Ivfc, IntegrityStreamType.Save);

             return sb.ToString();
         }
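Note: the debug dump no longer reads pre-built sub-stream properties off Savefile; each region is now opened on demand from the appropriate remap storage using the offsets and sizes in FsLayout. A minimal sketch of the pattern, assuming save, dir, and logger are in scope (names are illustrative):

    FsLayout layout = save.Header.Layout;

    // FAT and other metadata regions come from the meta remap storage.
    Stream fat = save.MetaRemapStorage.OpenStream(layout.FatOffset, layout.FatSize);
    fat.WriteAllBytes(Path.Combine(dir, "L2_7_FAT"), logger);

    // Duplex and journal data regions come from the data remap storage.
    Stream journal = save.DataRemapStorage.OpenStream(
        layout.JournalDataOffset, layout.JournalDataSizeB + layout.SizeReservedArea);
    journal.WriteAllBytes(Path.Combine(dir, "L0_9_JournalData"), logger);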

View file

@@ -4,7 +4,7 @@ using System.IO;
 using System.Linq;
 using System.Text;
 using LibHac;
-using LibHac.Savefile;
+using LibHac.Save;

 namespace hactoolnet
 {

View file

@@ -7,7 +7,7 @@
   </PropertyGroup>

   <PropertyGroup>
-    <VersionPrefix>0.1.2</VersionPrefix>
+    <VersionPrefix>0.1.3</VersionPrefix>
     <PathMap>$(MSBuildProjectDirectory)=C:/hactoolnet/</PathMap>
   </PropertyGroup>