Change IStorage.Length to IStorage.GetSize()

Alex Barney 2019-03-18 14:16:20 -05:00
parent 7050b9a681
commit 4557665805
34 changed files with 118 additions and 84 deletions
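For orientation, a minimal before/after sketch of the call-site change applied throughout these files; the helper below is illustrative and not part of the commit:

using LibHac.IO;

internal static class SizeExample
{
    internal static long GetStorageSize(IStorage storage)
    {
        // Before this commit: long size = storage.Length;  (property; -1 when the size is not representable)
        // After this commit: the same value comes from a method call, with the same -1 convention,
        // and a new SetSize(long) member is added to IStorage alongside GetSize().
        return storage.GetSize();
    }
}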

@@ -23,8 +23,6 @@ namespace LibHac.IO
_tempBuffer = new byte[sectorSize];
_key1 = key.Slice(0, BlockSize).ToArray();
_key2 = key.Slice(BlockSize, BlockSize).ToArray();
Length = baseStorage.Length;
}
public Aes128XtsStorage(IStorage baseStorage, Span<byte> key1, Span<byte> key2, int sectorSize, bool leaveOpen)
@@ -37,11 +35,8 @@ namespace LibHac.IO
_tempBuffer = new byte[sectorSize];
_key1 = key1.ToArray();
_key2 = key2.ToArray();
Length = baseStorage.Length;
}
public override long Length { get; }
protected override void ReadImpl(Span<byte> destination, long offset)
{
int size = destination.Length;

@@ -8,6 +8,7 @@ namespace LibHac.IO
{
private IStorage BaseStorage { get; }
private int BlockSize { get; }
private readonly long _length;
private LinkedList<CacheBlock> Blocks { get; } = new LinkedList<CacheBlock>();
private Dictionary<long, LinkedListNode<CacheBlock>> BlockDict { get; } = new Dictionary<long, LinkedListNode<CacheBlock>>();
@@ -16,7 +17,7 @@ namespace LibHac.IO
{
BaseStorage = baseStorage;
BlockSize = blockSize;
Length = BaseStorage.Length;
_length = BaseStorage.GetSize();
if (!leaveOpen) ToDispose.Add(BaseStorage);
@@ -30,7 +31,7 @@ namespace LibHac.IO
public CachedStorage(SectorStorage baseStorage, int cacheSize, bool leaveOpen)
: this(baseStorage, baseStorage.SectorSize, cacheSize, leaveOpen) { }
protected override void ReadImpl(Span<byte> destination, long offset)
{
long remaining = destination.Length;
@@ -96,7 +97,7 @@ namespace LibHac.IO
BaseStorage.Flush();
}
public override long Length { get; }
public override long GetSize() => _length;
private CacheBlock GetBlock(long blockIndex)
{
@@ -132,9 +133,9 @@ namespace LibHac.IO
long offset = index * BlockSize;
int length = BlockSize;
if (Length != -1)
if (_length != -1)
{
length = (int)Math.Min(Length - offset, length);
length = (int)Math.Min(_length - offset, length);
}
BaseStorage.Read(block.Buffer.AsSpan(0, length), offset);

@@ -6,7 +6,7 @@ namespace LibHac.IO
public class ConcatenationStorage : StorageBase
{
private ConcatSource[] Sources { get; }
public override long Length { get; }
private long _length;
public ConcatenationStorage(IList<IStorage> sources, bool leaveOpen)
{
@@ -16,12 +16,12 @@ namespace LibHac.IO
long length = 0;
for (int i = 0; i < sources.Count; i++)
{
if (sources[i].Length < 0) throw new ArgumentException("Sources must have an explicit length.");
Sources[i] = new ConcatSource(sources[i], length, sources[i].Length);
length += sources[i].Length;
if (sources[i].GetSize() < 0) throw new ArgumentException("Sources must have an explicit length.");
Sources[i] = new ConcatSource(sources[i], length, sources[i].GetSize());
length += sources[i].GetSize();
}
Length = length;
_length = length;
}
protected override void ReadImpl(Span<byte> destination, long offset)
@@ -78,9 +78,11 @@ namespace LibHac.IO
}
}
public override long GetSize() => _length;
private int FindSource(long offset)
{
if (offset < 0 || offset >= Length)
if (offset < 0 || offset >= _length)
throw new ArgumentOutOfRangeException(nameof(offset), offset, "The Storage does not contain this offset.");
int lo = 0;

@@ -42,7 +42,7 @@ namespace LibHac.IO
}
sources.Add(segment.Storage);
offset = segment.Offset + segment.Storage.Length;
offset = segment.Offset + segment.Storage.GetSize();
}
return new ConcatenationStorage(sources, true);

@@ -21,14 +21,14 @@ namespace LibHac.IO
{
Delta = delta;
if (Delta.Length < 0x40) throw new InvalidDataException("Delta file is too small.");
if (Delta.GetSize() < 0x40) throw new InvalidDataException("Delta file is too small.");
Header = new DeltaFragmentHeader(delta.AsFile(OpenMode.Read));
if (Header.Magic != Ndv0Magic) throw new InvalidDataException("NDV0 magic value is missing.");
long fragmentSize = Header.FragmentHeaderSize + Header.FragmentBodySize;
if (Delta.Length < fragmentSize)
if (Delta.GetSize() < fragmentSize)
{
throw new InvalidDataException($"Delta file is smaller than the header indicates. (0x{fragmentSize} bytes)");
}
@@ -40,7 +40,7 @@ namespace LibHac.IO
{
Original = baseStorage;
if (Original.Length != Header.OriginalSize)
if (Original.GetSize() != Header.OriginalSize)
{
throw new InvalidDataException($"Original file size does not match the size in the delta header. (0x{Header.OriginalSize} bytes)");
}

@@ -26,6 +26,6 @@ namespace LibHac.IO
BaseFile.Flush();
}
public override long Length => BaseFile.GetSize();
public override long GetSize() => BaseFile.GetSize();
}
}

@@ -16,7 +16,8 @@ namespace LibHac.IO
/// An array of the hash statuses of every block in each level.
/// </summary>
public Validity[][] LevelValidities { get; }
public override long Length { get; }
private long _length;
private IntegrityVerificationStorage[] IntegrityStorages { get; }
@@ -33,7 +34,7 @@ namespace LibHac.IO
{
var levelData = new IntegrityVerificationStorage(levelInfo[i], Levels[i - 1], integrityCheckLevel, leaveOpen);
int cacheCount = Math.Min((int)Util.DivideByRoundUp(levelData.Length, levelInfo[i].BlockSize), 4);
int cacheCount = Math.Min((int)Util.DivideByRoundUp(levelData.GetSize(), levelInfo[i].BlockSize), 4);
Levels[i] = new CachedStorage(levelData, cacheCount, leaveOpen);
LevelValidities[i - 1] = levelData.BlockValidities;
@@ -41,7 +42,7 @@ namespace LibHac.IO
}
DataLevel = Levels[Levels.Length - 1];
Length = DataLevel.Length;
_length = DataLevel.GetSize();
if (!leaveOpen) ToDispose.Add(DataLevel);
}
@@ -106,6 +107,8 @@ namespace LibHac.IO
DataLevel.Flush();
}
public override long GetSize() => _length;
/// <summary>
/// Checks the hashes of any unchecked blocks and returns the <see cref="Validity"/> of the data.
/// </summary>
@@ -118,7 +121,7 @@ namespace LibHac.IO
IntegrityVerificationStorage storage = IntegrityStorages[IntegrityStorages.Length - 1];
long blockSize = storage.SectorSize;
int blockCount = (int)Util.DivideByRoundUp(Length, blockSize);
int blockCount = (int)Util.DivideByRoundUp(_length, blockSize);
var buffer = new byte[blockSize];
var result = Validity.Valid;
@@ -129,7 +132,7 @@ namespace LibHac.IO
{
if (validities[i] == Validity.Unchecked)
{
int toRead = (int)Math.Min(storage.Length - blockSize * i, buffer.Length);
int toRead = (int)Math.Min(storage.GetSize() - blockSize * i, buffer.Length);
storage.Read(buffer.AsSpan(0, toRead), blockSize * i, IntegrityCheckLevel.IgnoreOnInvalid);
}

@@ -31,9 +31,16 @@ namespace LibHac.IO
void Flush();
/// <summary>
/// The length of the <see cref="IStorage"/>. -1 will be returned if
/// Sets the size of the current IStorage.
/// </summary>
/// <param name="size">The desired size of the current IStorage in bytes.</param>
void SetSize(long size);
/// <summary>
/// The size of the <see cref="IStorage"/>. -1 will be returned if
/// the <see cref="IStorage"/> cannot be represented as a sequence of contiguous bytes.
/// </summary>
long Length { get; }
/// <returns>The size of the <see cref="IStorage"/> in bytes.</returns>
long GetSize();
}
}
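A short consumer-side sketch of the revised interface, assuming only the members visible in the hunk above; the PrintSize helper is illustrative:

using System;
using LibHac.IO;

internal static class StorageInfo
{
    internal static void PrintSize(IStorage storage)
    {
        long size = storage.GetSize(); // replaces the old Length property
        if (size == -1)
        {
            // Same sentinel as before: the storage cannot be represented as contiguous bytes.
            Console.WriteLine("Storage has no explicit size.");
            return;
        }

        Console.WriteLine($"Storage size: 0x{size:x} bytes");
    }
}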

@@ -11,6 +11,7 @@ namespace LibHac.IO
private List<IStorage> Sources { get; } = new List<IStorage>();
private BucketTree<RelocationEntry> BucketTree { get; }
private long _length;
public IndirectStorage(IStorage bucketTreeHeader, IStorage bucketTreeData, bool leaveOpen, params IStorage[] sources)
{
@@ -23,7 +24,7 @@ namespace LibHac.IO
RelocationEntries = BucketTree.GetEntryList();
RelocationOffsets = RelocationEntries.Select(x => x.Offset).ToList();
Length = BucketTree.BucketOffsets.OffsetEnd;
_length = BucketTree.BucketOffsets.OffsetEnd;
}
protected override void ReadImpl(Span<byte> destination, long offset)
@@ -62,7 +63,7 @@ namespace LibHac.IO
throw new NotImplementedException();
}
public override long Length { get; }
public override long GetSize() => _length;
private RelocationEntry GetRelocationEntry(long offset)
{

@@ -128,7 +128,7 @@ namespace LibHac.IO
long blockIndex = offset / SectorSize;
long hashPos = blockIndex * DigestSize;
int toWrite = (int)Math.Min(source.Length, Length - offset);
int toWrite = (int)Math.Min(source.Length, GetSize() - offset);
byte[] dataBuffer = ArrayPool<byte>.Shared.Rent(SectorSize);
try

@@ -36,6 +36,6 @@ namespace LibHac.IO
Storage.Flush();
}
public override long Length => Storage.Length;
public override long GetSize() => Storage.GetSize();
}
}

@@ -77,7 +77,7 @@ namespace LibHac.IO
private void SetCapacity(int value)
{
if (value < Length)
if (value < _length)
throw new ArgumentOutOfRangeException(nameof(value), "Capacity is smaller than the current length.");
if (!_isExpandable && value != _capacity) throw new NotSupportedException("MemoryStorage is not expandable.");
@@ -85,7 +85,7 @@ namespace LibHac.IO
if (_isExpandable && value != _capacity)
{
var newBuffer = new byte[value];
Buffer.BlockCopy(_buffer, 0, newBuffer, 0, (int)Length);
Buffer.BlockCopy(_buffer, 0, newBuffer, 0, _length);
_buffer = newBuffer;
_capacity = value;
@@ -94,6 +94,6 @@ namespace LibHac.IO
public override void Flush() { }
public override long Length => _length;
public override long GetSize() => _length;
}
}

@@ -106,10 +106,10 @@ namespace LibHac.IO.NcaUtils
long offset = sect.Offset;
long size = sect.Size;
if (!Util.IsSubRange(offset, size, BaseStorage.Length))
if (!Util.IsSubRange(offset, size, BaseStorage.GetSize()))
{
throw new InvalidDataException(
$"Section offset (0x{offset:x}) and length (0x{size:x}) fall outside the total NCA length (0x{BaseStorage.Length:x}).");
$"Section offset (0x{offset:x}) and length (0x{size:x}) fall outside the total NCA length (0x{BaseStorage.GetSize():x}).");
}
return BaseStorage.Slice(offset, size);

@@ -8,9 +8,10 @@ namespace LibHac.IO
public class NullStorage : StorageBase
{
public NullStorage() { }
public NullStorage(long length) => Length = length;
public NullStorage(long length) => _length = length;
private long _length;
public override long Length { get; }
protected override void ReadImpl(Span<byte> destination, long offset)
{
destination.Clear();
@@ -23,5 +24,7 @@ namespace LibHac.IO
public override void Flush()
{
}
public override long GetSize() => _length;
}
}

@@ -81,7 +81,7 @@ namespace LibHac.IO.RomFs
sources.Add(new MemoryStorage(header));
sources.AddRange(Sources);
long fileLength = sources.Sum(x => x.Length);
long fileLength = sources.Sum(x => x.GetSize());
headerWriter.Write((long)HeaderSize);

@@ -9,13 +9,13 @@ namespace LibHac.IO.Save
private int InitialBlock { get; }
private AllocationTable Fat { get; }
public override long Length { get; }
private long _length;
public AllocationTableStorage(IStorage data, AllocationTable table, int blockSize, int initialBlock, long length)
{
BaseStorage = data;
BlockSize = blockSize;
Length = length;
_length = length;
Fat = table;
InitialBlock = initialBlock;
}
@@ -78,5 +78,7 @@ namespace LibHac.IO.Save
{
BaseStorage.Flush();
}
public override long GetSize() => _length;
}
}

@@ -10,6 +10,8 @@ namespace LibHac.IO.Save
private IStorage DataB { get; }
private DuplexBitmap Bitmap { get; }
private long _length;
public DuplexStorage(IStorage dataA, IStorage dataB, IStorage bitmap, int blockSize)
{
DataA = dataA;
@@ -17,8 +19,8 @@ namespace LibHac.IO.Save
BitmapStorage = bitmap;
BlockSize = blockSize;
Bitmap = new DuplexBitmap(BitmapStorage, (int)(bitmap.Length * 8));
Length = DataA.Length;
Bitmap = new DuplexBitmap(BitmapStorage, (int)(bitmap.GetSize() * 8));
_length = DataA.GetSize();
}
protected override void ReadImpl(Span<byte> destination, long offset)
@@ -74,6 +76,6 @@ namespace LibHac.IO.Save
DataB?.Flush();
}
public override long Length { get; }
public override long GetSize() => _length;
}
}

@@ -6,6 +6,7 @@ namespace LibHac.IO.Save
{
private DuplexStorage[] Layers { get; }
private DuplexStorage DataLayer { get; }
private long _length;
public HierarchicalDuplexStorage(DuplexFsLayerInfo[] layers, bool masterBit)
{
@@ -28,7 +29,7 @@ namespace LibHac.IO.Save
}
DataLayer = Layers[Layers.Length - 1];
Length = DataLayer.Length;
_length = DataLayer.GetSize();
}
protected override void ReadImpl(Span<byte> destination, long offset)
@@ -46,7 +47,7 @@ namespace LibHac.IO.Save
DataLayer.Flush();
}
public override long Length { get; }
public override long GetSize() => _length;
}
public class DuplexFsLayerInfo

@@ -12,7 +12,8 @@ namespace LibHac.IO.Save
public JournalHeader Header { get; }
public int BlockSize { get; }
public override long Length { get; }
private long _length;
public JournalStorage(IStorage baseStorage, IStorage header, JournalMapParams mapInfo, bool leaveOpen)
{
@@ -24,7 +25,7 @@ namespace LibHac.IO.Save
Map = new JournalMap(mapHeader, mapInfo);
BlockSize = (int)Header.BlockSize;
Length = Header.TotalSize - Header.JournalSize;
_length = Header.TotalSize - Header.JournalSize;
if (!leaveOpen) ToDispose.Add(baseStorage);
}
@@ -80,6 +81,8 @@ namespace LibHac.IO.Save
BaseStorage.Flush();
}
public override long GetSize() => _length;
public IStorage GetBaseStorage() => BaseStorage.AsReadOnly();
public IStorage GetHeaderStorage() => HeaderStorage.AsReadOnly();
}

@@ -14,8 +14,6 @@ namespace LibHac.IO.Save
public MapEntry[] MapEntries { get; set; }
public RemapSegment[] Segments { get; set; }
public override long Length { get; } = -1;
/// <summary>
/// Creates a new <see cref="RemapStorage"/>
/// </summary>
@@ -102,6 +100,8 @@ namespace LibHac.IO.Save
BaseStorage.Flush();
}
public override long GetSize() => -1;
public IStorage GetBaseStorage() => BaseStorage.AsReadOnly();
public IStorage GetHeaderStorage() => HeaderStorage.AsReadOnly();
public IStorage GetMapEntryStorage() => MapEntryStorage.AsReadOnly();

@@ -9,12 +9,14 @@ namespace LibHac.IO
public int SectorSize { get; }
public int SectorCount { get; }
private long _length;
public SectorStorage(IStorage baseStorage, int sectorSize, bool leaveOpen)
{
BaseStorage = baseStorage;
SectorSize = sectorSize;
SectorCount = (int)Util.DivideByRoundUp(BaseStorage.Length, sectorSize);
Length = baseStorage.Length;
SectorCount = (int)Util.DivideByRoundUp(BaseStorage.GetSize(), sectorSize);
_length = baseStorage.GetSize();
if (!leaveOpen) ToDispose.Add(BaseStorage);
}
@@ -36,7 +38,7 @@ namespace LibHac.IO
BaseStorage.Flush();
}
public override long Length { get; }
public override long GetSize() => _length;
/// <summary>
/// Validates that the size is a multiple of the sector size

@@ -12,7 +12,7 @@ namespace LibHac.IO
protected abstract void ReadImpl(Span<byte> destination, long offset);
protected abstract void WriteImpl(ReadOnlySpan<byte> source, long offset);
public abstract void Flush();
public abstract long Length { get; }
public abstract long GetSize();
public void Read(Span<byte> destination, long offset)
{
@@ -26,6 +26,11 @@ namespace LibHac.IO
WriteImpl(source, offset);
}
public virtual void SetSize(long size)
{
throw new NotSupportedException();
}
protected virtual void Dispose(bool disposing)
{
if (_isDisposed) return;
@@ -53,10 +58,11 @@ namespace LibHac.IO
if (_isDisposed) throw new ObjectDisposedException(null);
if (span == null) throw new ArgumentNullException(nameof(span));
if (offset < 0) throw new ArgumentOutOfRangeException(nameof(offset), "Argument must be non-negative.");
long length = GetSize();
if (Length != -1 && !CanAutoExpand)
if (length != -1 && !CanAutoExpand)
{
if (offset + span.Length > Length) throw new ArgumentException("The given offset and count exceed the length of the Storage");
if (offset + span.Length > length) throw new ArgumentException("The given offset and count exceed the length of the Storage");
}
}
}
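Under the revised StorageBase contract above, GetSize() is abstract and SetSize() is virtual with a NotSupportedException default. A minimal subclass sketch mirroring the pattern this commit applies to NullStorage and friends, assuming the members shown in this hunk are the required overrides; the class name is illustrative:

using System;
using LibHac.IO;

public class ConstantZeroStorage : StorageBase
{
    private long _length;

    public ConstantZeroStorage(long length) => _length = length;

    protected override void ReadImpl(Span<byte> destination, long offset) => destination.Clear();

    protected override void WriteImpl(ReadOnlySpan<byte> source, long offset) { }

    public override void Flush() { }

    // Every storage now reports its size through GetSize(); -1 means "no explicit size".
    public override long GetSize() => _length;

    // Resizable storages override SetSize instead of inheriting the throwing default.
    public override void SetSize(long size) => _length = size;
}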

@@ -28,12 +28,14 @@ namespace LibHac.IO
public static IStorage Slice(this IStorage storage, long start)
{
if (storage.Length == -1)
long length = storage.GetSize();
if (length == -1)
{
return storage.Slice(start, storage.Length);
return storage.Slice(start, length);
}
return storage.Slice(start, storage.Length - start);
return storage.Slice(start, length - start);
}
public static IStorage Slice(this IStorage storage, long start, long length)
@@ -53,7 +55,7 @@ namespace LibHac.IO
public static IStorage AsReadOnly(this IStorage storage, bool leaveOpen)
{
return new SubStorage(storage, 0, storage.Length, leaveOpen, FileAccess.Read);
return new SubStorage(storage, 0, storage.GetSize(), leaveOpen, FileAccess.Read);
}
public static Stream AsStream(this IStorage storage) => new StorageStream(storage, FileAccess.ReadWrite, true);
@@ -65,7 +67,7 @@ namespace LibHac.IO
public static void CopyTo(this IStorage input, IStorage output, IProgressReport progress = null)
{
const int bufferSize = 81920;
long remaining = Math.Min(input.Length, output.Length);
long remaining = Math.Min(input.GetSize(), output.GetSize());
if (remaining < 0) throw new ArgumentException("Storage must have an explicit length");
progress?.SetTotal(remaining);
@@ -99,7 +101,7 @@ namespace LibHac.IO
{
using (var outFile = new FileStream(filename, FileMode.Create, FileAccess.Write))
{
input.CopyToStream(outFile, input.Length, progress);
input.CopyToStream(outFile, input.GetSize(), progress);
}
}
@@ -107,7 +109,7 @@ namespace LibHac.IO
{
if (storage == null) return new byte[0];
var arr = new byte[storage.Length];
var arr = new byte[storage.GetSize()];
storage.CopyTo(new MemoryStorage(arr));
return arr;
}
@@ -116,7 +118,7 @@ namespace LibHac.IO
{
if (storage == null) return new T[0];
var arr = new T[storage.Length / Marshal.SizeOf<T>()];
var arr = new T[storage.GetSize() / Marshal.SizeOf<T>()];
Span<byte> dest = MemoryMarshal.Cast<T, byte>(arr.AsSpan());
storage.Read(dest, 0);
@@ -143,7 +145,7 @@ namespace LibHac.IO
}
}
public static void CopyToStream(this IStorage input, Stream output) => CopyToStream(input, output, input.Length);
public static void CopyToStream(this IStorage input, Stream output) => CopyToStream(input, output, input.GetSize());
public static IStorage AsStorage(this Stream stream)
{
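To show how the size-aware extensions above read after the rename, a brief usage sketch; MemoryStorage, Slice, ToArray, and CopyToStream are taken from elsewhere in this commit, while the class, method, and file names below are placeholders:

using System.IO;
using LibHac.IO;

internal static class StorageExtensionsDemo
{
    internal static void Run()
    {
        IStorage storage = new MemoryStorage(new byte[0x1000]);

        // Slice and ToArray now consult GetSize() internally.
        IStorage firstHalf = storage.Slice(0, storage.GetSize() / 2);
        byte[] copy = firstHalf.ToArray();

        // CopyToStream takes an explicit byte count, here the full size of the storage.
        using (var outFile = new FileStream("out.bin", FileMode.Create, FileAccess.Write))
        {
            storage.CopyToStream(outFile, storage.GetSize());
        }
    }
}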

@@ -35,7 +35,7 @@ namespace LibHac.IO
public override long GetSize()
{
return BaseStorage.Length;
return BaseStorage.GetSize();
}
public override void SetSize(long size)

@@ -12,7 +12,7 @@ namespace LibHac.IO
{
BaseStorage = baseStorage;
LeaveOpen = leaveOpen;
Length = baseStorage.Length;
Length = baseStorage.GetSize();
CanRead = access.HasFlag(FileAccess.Read);
CanWrite = access.HasFlag(FileAccess.Write);

@@ -11,12 +11,12 @@ namespace LibHac.IO
{
private Stream BaseStream { get; }
private object Locker { get; } = new object();
public override long Length { get; }
private long _length;
public StreamStorage(Stream baseStream, bool leaveOpen)
{
BaseStream = baseStream;
Length = BaseStream.Length;
_length = BaseStream.Length;
if (!leaveOpen) ToDispose.Add(BaseStream);
}
@@ -91,5 +91,7 @@ namespace LibHac.IO
BaseStream.Flush();
}
}
public override long GetSize() => _length;
}
}

@@ -7,21 +7,21 @@ namespace LibHac.IO
{
private IStorage BaseStorage { get; }
private long Offset { get; }
public override long Length { get; }
private FileAccess Access { get; } = FileAccess.ReadWrite;
private long _length;
public SubStorage(IStorage baseStorage, long offset, long length)
{
BaseStorage = baseStorage;
Offset = offset;
Length = length;
_length = length;
}
public SubStorage(SubStorage baseStorage, long offset, long length)
{
BaseStorage = baseStorage.BaseStorage;
Offset = baseStorage.Offset + offset;
Length = length;
_length = length;
}
public SubStorage(IStorage baseStorage, long offset, long length, bool leaveOpen)
@@ -38,13 +38,13 @@ namespace LibHac.IO
protected override void ReadImpl(Span<byte> destination, long offset)
{
if((Access & FileAccess.Read) == 0) throw new InvalidOperationException("Storage is not readable");
if ((Access & FileAccess.Read) == 0) throw new InvalidOperationException("Storage is not readable");
BaseStorage.Read(destination, offset + Offset);
}
protected override void WriteImpl(ReadOnlySpan<byte> source, long offset)
{
if((Access & FileAccess.Write) == 0) throw new InvalidOperationException("Storage is not writable");
if ((Access & FileAccess.Write) == 0) throw new InvalidOperationException("Storage is not writable");
BaseStorage.Write(source, offset + Offset);
}
@@ -52,5 +52,7 @@ namespace LibHac.IO
{
BaseStorage.Flush();
}
public override long GetSize() => _length;
}
}

@@ -44,7 +44,7 @@ namespace LibHac
public byte[] DecompressSection(int index)
{
IStorage compStream = OpenSection(index);
var compressed = new byte[compStream.Length];
var compressed = new byte[compStream.GetSize()];
compStream.Read(compressed, 0);
return DecompressBlz(compressed);

@@ -25,7 +25,7 @@ namespace hactoolnet
encryptWatch.Stop();
logger.SetTotal(0);
string rate = Util.GetBytesReadable((long)(src.Length * iterations / encryptWatch.Elapsed.TotalSeconds));
string rate = Util.GetBytesReadable((long)(src.GetSize() * iterations / encryptWatch.Elapsed.TotalSeconds));
logger.LogMessage($"{label}{rate}/s");
}

@@ -51,7 +51,7 @@ namespace hactoolnet
using (var outFile = new FileStream(ctx.Options.OutFile, FileMode.OpenOrCreate, FileAccess.ReadWrite))
{
IStorage patchedStorage = delta.GetPatchedStorage();
patchedStorage.CopyToStream(outFile, patchedStorage.Length, ctx.Logger);
patchedStorage.CopyToStream(outFile, patchedStorage.GetSize(), ctx.Logger);
}
}
}

@@ -23,7 +23,7 @@ namespace hactoolnet
using (var outFile = new FileStream(ctx.Options.OutFile, FileMode.Create, FileAccess.ReadWrite))
{
romfs.CopyToStream(outFile, romfs.Length, ctx.Logger);
romfs.CopyToStream(outFile, romfs.GetSize(), ctx.Logger);
}
ctx.Logger.LogMessage($"Finished writing {ctx.Options.OutFile}");
@@ -50,7 +50,7 @@ namespace hactoolnet
using (var outFile = new FileStream(ctx.Options.OutFile, FileMode.Create, FileAccess.ReadWrite))
{
partitionFs.CopyToStream(outFile, partitionFs.Length, ctx.Logger);
partitionFs.CopyToStream(outFile, partitionFs.GetSize(), ctx.Logger);
}
ctx.Logger.LogMessage($"Finished writing {ctx.Options.OutFile}");

@@ -83,9 +83,9 @@ namespace hactoolnet
{
long bytesToRead = 1024L * 1024 * 1024 * 5;
IStorage storage = nca.OpenStorage(NcaSectionType.Data, ctx.Options.IntegrityLevel);
var dest = new NullStorage(storage.Length);
var dest = new NullStorage(storage.GetSize());
int iterations = (int)(bytesToRead / storage.Length) + 1;
int iterations = (int)(bytesToRead / storage.GetSize()) + 1;
ctx.Logger.LogMessage(iterations.ToString());
ctx.Logger.StartNewStopWatch();

@@ -94,7 +94,7 @@ namespace hactoolnet
using (var outStream = new FileStream(ctx.Options.NspOut, FileMode.Create, FileAccess.ReadWrite))
{
IStorage builtPfs = builder.Build(PartitionFileSystemType.Standard);
builtPfs.CopyToStream(outStream, builtPfs.Length, ctx.Logger);
builtPfs.CopyToStream(outStream, builtPfs.GetSize(), ctx.Logger);
}
}
}

@@ -30,7 +30,7 @@ namespace hactoolnet
{
using (var outFile = new FileStream(ctx.Options.RomfsOut, FileMode.Create, FileAccess.ReadWrite))
{
romfsStorage.CopyToStream(outFile, romfsStorage.Length, ctx.Logger);
romfsStorage.CopyToStream(outFile, romfsStorage.GetSize(), ctx.Logger);
}
}