Add NsoReader

Alex Barney 2020-03-26 22:49:35 -07:00
parent 9cc60e22ec
commit f59c7c6a84
3 changed files with 255 additions and 0 deletions

@@ -0,0 +1,84 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using LibHac.Common;

namespace LibHac.Loader
{
    [StructLayout(LayoutKind.Explicit, Size = 0x100)]
    public struct NsoHeader
    {
        public const int SegmentCount = 3;

        [FieldOffset(0x00)] public uint Magic;
        [FieldOffset(0x04)] public uint Version;
        [FieldOffset(0x08)] public uint Reserved08;
        [FieldOffset(0x0C)] public Flag Flags;

        [FieldOffset(0x10)] public uint TextFileOffset;
        [FieldOffset(0x14)] public uint TextMemoryOffset;
        [FieldOffset(0x18)] public uint TextSize;

        [FieldOffset(0x1C)] public uint ModuleNameOffset;

        [FieldOffset(0x20)] public uint RoFileOffset;
        [FieldOffset(0x24)] public uint RoMemoryOffset;
        [FieldOffset(0x28)] public uint RoSize;

        [FieldOffset(0x2C)] public uint ModuleNameSize;

        [FieldOffset(0x30)] public uint DataFileOffset;
        [FieldOffset(0x34)] public uint DataMemoryOffset;
        [FieldOffset(0x38)] public uint DataSize;

        [FieldOffset(0x3C)] public uint BssSize;
        [FieldOffset(0x40)] public Buffer32 ModuleId;

        // Size of the sections in the NSO file
        [FieldOffset(0x60)] public uint TextFileSize;
        [FieldOffset(0x64)] public uint RoFileSize;
        [FieldOffset(0x68)] public uint DataFileSize;

        [FieldOffset(0x6C)] private byte _reserved6C;

        [FieldOffset(0x88)] public uint ApiInfoOffset;
        [FieldOffset(0x8C)] public uint ApiInfoSize;
        [FieldOffset(0x90)] public uint DynStrOffset;
        [FieldOffset(0x94)] public uint DynStrSize;
        [FieldOffset(0x98)] public uint DynSymOffset;
        [FieldOffset(0x9C)] public uint DynSymSize;

        [FieldOffset(0xA0)] public Buffer32 TextHash;
        [FieldOffset(0xC0)] public Buffer32 RoHash;
        [FieldOffset(0xE0)] public Buffer32 DataHash;

        public Span<SegmentHeader> Segments =>
            SpanHelpers.CreateSpan(ref Unsafe.As<uint, SegmentHeader>(ref TextFileOffset), SegmentCount);

        public Span<uint> CompressedSizes => SpanHelpers.CreateSpan(ref TextFileSize, SegmentCount);

        public Span<Buffer32> SegmentHashes => SpanHelpers.CreateSpan(ref TextHash, SegmentCount);

        public Span<byte> Reserved6C => SpanHelpers.CreateSpan(ref _reserved6C, 0x1C);

        [Flags]
        public enum Flag
        {
            TextCompress = 1 << 0,
            RoCompress = 1 << 1,
            DataCompress = 1 << 2,
            TextHash = 1 << 3,
            RoHash = 1 << 4,
            DataHash = 1 << 5
        }

        [StructLayout(LayoutKind.Sequential, Size = 0x10)]
        public struct SegmentHeader
        {
            public uint FileOffset;
            public uint MemoryOffset;
            public uint Size;
        }
    }
}
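
For reference, a minimal usage sketch (not part of this commit) of how the explicit layout exposes the three segments: the Segments, CompressedSizes, and SegmentHashes properties each create a span over three consecutive fields, so the text/ro/data records can be indexed uniformly. The PrintSegments helper and headerBytes buffer below are hypothetical, and System, System.Runtime.InteropServices and LibHac.Loader are assumed to be in scope.

// Sketch only. Assumes headerBytes holds at least the 0x100-byte NSO header.
static void PrintSegments(ReadOnlySpan<byte> headerBytes)
{
    NsoHeader header = MemoryMarshal.Read<NsoHeader>(headerBytes);

    for (int i = 0; i < NsoHeader.SegmentCount; i++)
    {
        // Segments[i] overlays the Text/Ro/Data offset and size fields starting at 0x10;
        // CompressedSizes[i] overlays TextFileSize/RoFileSize/DataFileSize at 0x60.
        NsoHeader.SegmentHeader seg = header.Segments[i];
        bool isCompressed = ((int)header.Flags & (1 << i)) != 0;

        Console.WriteLine($"Segment {i}: file offset 0x{seg.FileOffset:X}, memory offset 0x{seg.MemoryOffset:X}, " +
                          $"decompressed size 0x{seg.Size:X}, size in file 0x{header.CompressedSizes[i]:X}, compressed: {isCompressed}");
    }
}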

@@ -0,0 +1,104 @@
using System;
using System.Runtime.CompilerServices;
using LibHac.Common;
using LibHac.Fs;

namespace LibHac.Loader
{
    public class NsoReader
    {
        private IFile NsoFile { get; set; }

        public NsoHeader Header;

        public Result Initialize(IFile nsoFile)
        {
            Result rc = nsoFile.Read(out long bytesRead, 0, SpanHelpers.AsByteSpan(ref Header), ReadOption.None);
            if (rc.IsFailure()) return rc;

            if (bytesRead != Unsafe.SizeOf<NsoHeader>())
                return ResultLoader.InvalidNso.Log();

            NsoFile = nsoFile;
            return Result.Success;
        }

        public Result GetSegmentSize(SegmentType segment, out uint size)
        {
            switch (segment)
            {
                case SegmentType.Text:
                case SegmentType.Ro:
                case SegmentType.Data:
                    size = Header.Segments[(int)segment].Size;
                    return Result.Success;
                default:
                    size = default;
                    return ResultLibHac.ArgumentOutOfRange.Log();
            }
        }

        public Result ReadSegment(SegmentType segment, Span<byte> buffer)
        {
            Result rc = GetSegmentSize(segment, out uint segmentSize);
            if (rc.IsFailure()) return rc;

            if (buffer.Length < segmentSize)
                return ResultLibHac.BufferTooSmall.Log();

            bool isCompressed = (((int)Header.Flags >> (int)segment) & 1) != 0;
            bool checkHash = (((int)Header.Flags >> (int)segment) & 8) != 0;

            return ReadSegmentImpl(ref Header.Segments[(int)segment], Header.CompressedSizes[(int)segment],
                Header.SegmentHashes[(int)segment], isCompressed, checkHash, buffer);
        }

        private Result ReadSegmentImpl(ref NsoHeader.SegmentHeader segment, uint fileSize, Buffer32 fileHash,
            bool isCompressed, bool checkHash, Span<byte> buffer)
        {
            // Select read size based on compression.
            if (!isCompressed)
            {
                fileSize = segment.Size;
            }

            // Validate size.
            if (fileSize > segment.Size)
                return ResultLoader.InvalidNso.Log();

            // Load data from file.
            uint loadAddress = isCompressed ? (uint)buffer.Length - fileSize : 0;

            Result rc = NsoFile.Read(out long bytesRead, segment.FileOffset, buffer.Slice((int)loadAddress), ReadOption.None);
            if (rc.IsFailure()) return rc;

            if (bytesRead != fileSize)
                return ResultLoader.InvalidNso.Log();

            // Uncompress if necessary.
            if (isCompressed)
            {
                Lz4.Decompress(buffer.Slice((int)loadAddress), buffer);
            }

            // Check hash if necessary.
            if (checkHash)
            {
                Buffer32 hash = default;
                Crypto.Sha256.GenerateSha256Hash(buffer.Slice(0, (int)segment.Size), hash.Bytes);

                if (hash.Bytes.SequenceCompareTo(fileHash.Bytes) != 0)
                    return ResultLoader.InvalidNso.Log();
            }

            return Result.Success;
        }

        public enum SegmentType
        {
            Text = 0,
            Ro = 1,
            Data = 2
        }
    }
}
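
A rough usage sketch (not part of this commit; the ReadTextSegment helper is hypothetical and nsoFile is assumed to be an IFile that was already opened, for example from a LibHac IFileSystem): initialize the reader, query the decompressed size of the segment, then let ReadSegment handle LZ4 decompression and the optional SHA-256 check according to the header flags.

// Sketch only.
static Result ReadTextSegment(IFile nsoFile, out byte[] text)
{
    text = default;

    var reader = new NsoReader();

    Result rc = reader.Initialize(nsoFile);
    if (rc.IsFailure()) return rc;

    rc = reader.GetSegmentSize(NsoReader.SegmentType.Text, out uint size);
    if (rc.IsFailure()) return rc;

    // ReadSegment requires the buffer to be at least the decompressed segment size.
    text = new byte[size];
    return reader.ReadSegment(NsoReader.SegmentType.Text, text);
}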

@@ -76,5 +76,72 @@ namespace LibHac
            return dec;
        }

        public static void Decompress(ReadOnlySpan<byte> cmp, Span<byte> dec)
        {
            int cmpPos = 0;
            int decPos = 0;

            int GetLength(int length, ReadOnlySpan<byte> cmp)
            {
                byte sum;

                if (length == 0xf)
                {
                    do
                    {
                        length += sum = cmp[cmpPos++];
                    } while (sum == 0xff);
                }

                return length;
            }

            do
            {
                byte token = cmp[cmpPos++];

                int encCount = (token >> 0) & 0xf;
                int litCount = (token >> 4) & 0xf;

                // Copy literal chunk
                litCount = GetLength(litCount, cmp);

                cmp.Slice(cmpPos, litCount).CopyTo(dec.Slice(decPos));

                cmpPos += litCount;
                decPos += litCount;

                if (cmpPos >= cmp.Length)
                {
                    break;
                }

                // Copy compressed chunk
                int back = cmp[cmpPos++] << 0 |
                           cmp[cmpPos++] << 8;

                encCount = GetLength(encCount, cmp) + 4;

                int encPos = decPos - back;

                if (encCount <= back)
                {
                    dec.Slice(encPos, encCount).CopyTo(dec.Slice(decPos));

                    decPos += encCount;
                }
                else
                {
                    while (encCount-- > 0)
                    {
                        dec[decPos++] = dec[encPos++];
                    }
                }
            } while (cmpPos < cmp.Length &&
                     decPos < dec.Length);
        }
    }
}
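
To illustrate the raw LZ4 block format this overload consumes, here is a small hand-assembled block (not part of this commit; the byte values are an assumption chosen for the example): each token byte packs the literal count in its high nibble and the match length minus 4 in its low nibble, the literals follow the token, and a match is a little-endian 16-bit back offset into the output produced so far.

// Sketch only: 0x22 = 2 literals ('A', 'B') followed by a match of length 2 + 4 = 6
// at back offset 2; 0x20 = a final run of 2 literals ('C', 'D') with no match.
byte[] compressed = { 0x22, (byte)'A', (byte)'B', 0x02, 0x00, 0x20, (byte)'C', (byte)'D' };
byte[] output = new byte[10];

Lz4.Decompress(compressed, output);

// Prints "ABABABABCD". The 6-byte match is copied byte by byte because its length
// exceeds the back offset of 2, so it overlaps data it is still producing.
Console.WriteLine(System.Text.Encoding.ASCII.GetString(output));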