Commit bb1d0b5

Initial compressed save support
Goz3rr committed Oct 14, 2019
1 parent c6c335e commit bb1d0b5
Showing 10 changed files with 253 additions and 120 deletions.
18 changes: 17 additions & 1 deletion SatisfactorySaveParser/BinaryIOExtensions.cs
@@ -1,4 +1,5 @@
-using SatisfactorySaveParser.Structures;
+using SatisfactorySaveParser.Save;
+using SatisfactorySaveParser.Structures;
 using System.IO;
 using System.Linq;
 using System.Text;
@@ -99,5 +100,20 @@ public static void Write(this BinaryWriter writer, Vector4 vec)
             writer.Write(vec.Z);
             writer.Write(vec.W);
         }
+
+        public static ChunkInfo ReadChunkInfo(this BinaryReader reader)
+        {
+            return new ChunkInfo()
+            {
+                CompressedSize = reader.ReadInt64(),
+                UncompressedSize = reader.ReadInt64()
+            };
+        }
+
+        public static void Write(this BinaryWriter writer, ChunkInfo info)
+        {
+            writer.Write(info.CompressedSize);
+            writer.Write(info.UncompressedSize);
+        }
     }
 }
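
Read and Write stay symmetric here: a ChunkInfo is just two little-endian Int64s. As a rough sketch of how the two extensions combine when walking a compressed save (ValidateChunkHeader is a hypothetical helper for illustration, not part of this commit; Trace comes from System.Diagnostics):

        // Sketch: consume the three ChunkInfo records that prefix each compressed
        // chunk and return the sub-chunk whose sizes describe the zlib payload.
        private static ChunkInfo ValidateChunkHeader(BinaryReader reader)
        {
            var magic = reader.ReadChunkInfo();
            Trace.Assert(magic.CompressedSize == ChunkInfo.Magic);

            var summary = reader.ReadChunkInfo();   // totals for the whole chunk
            var subChunk = reader.ReadChunkInfo();  // single sub-chunk per chunk

            Trace.Assert(subChunk.UncompressedSize == summary.UncompressedSize);
            return subChunk;
        }
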
255 changes: 181 additions & 74 deletions SatisfactorySaveParser/SatisfactorySave.cs
@@ -1,4 +1,5 @@
-using NLog;
+using Ionic.Zlib;
+using NLog;
 using SatisfactorySaveParser.Save;
 using SatisfactorySaveParser.Structures;
 using System;
@@ -50,60 +51,107 @@ public SatisfactorySave(string file)
             {
                 Header = FSaveHeader.Parse(reader);
 
+                if (Header.SaveVersion < FSaveCustomVersion.SaveFileIsCompressed)
+                {
+                    LoadData(reader);
+                }
+                else
+                {
+                    using (var buffer = new MemoryStream())
+                    {
+                        var uncompressedSize = 0L;
+
+                        while (stream.Position < stream.Length)
+                        {
+                            var header = reader.ReadChunkInfo();
+                            Trace.Assert(header.CompressedSize == ChunkInfo.Magic);
+
+                            var summary = reader.ReadChunkInfo();
+
+                            var subChunk = reader.ReadChunkInfo();
+                            Trace.Assert(subChunk.UncompressedSize == summary.UncompressedSize);
+
+                            var startPosition = stream.Position;
+                            using (var zStream = new ZlibStream(stream, CompressionMode.Decompress, true))
+                            {
+                                zStream.CopyTo(buffer);
+                            }
+
+                            // ZlibStream appears to read more bytes than it uses (probably because of buffering), so fix up the input stream position manually
+                            stream.Position = startPosition + subChunk.CompressedSize;
+
+                            uncompressedSize += subChunk.UncompressedSize;
+                        }
+
+                        buffer.Position = 0;
+                        using (var bufferReader = new BinaryReader(buffer))
+                        {
+                            var dataLength = bufferReader.ReadInt32();
+                            Trace.Assert(uncompressedSize == dataLength + 4);
+
+                            LoadData(bufferReader);
+                        }
+                    }
+                }
+            }
+        }
+
+        private void LoadData(BinaryReader reader)
+        {
             // Does not need to be a public property because it's equal to Entries.Count
             var totalSaveObjects = reader.ReadUInt32();
             log.Info($"Save contains {totalSaveObjects} object headers");
 
             // Saved entities loop
             for (int i = 0; i < totalSaveObjects; i++)
             {
                 var type = reader.ReadInt32();
                 switch (type)
                 {
                     case SaveEntity.TypeID:
                         Entries.Add(new SaveEntity(reader));
                         break;
                     case SaveComponent.TypeID:
                         Entries.Add(new SaveComponent(reader));
                         break;
                     default:
                         throw new InvalidOperationException($"Unexpected type {type}");
                 }
             }
 
             var totalSaveObjectData = reader.ReadInt32();
             log.Info($"Save contains {totalSaveObjectData} object data");
             Trace.Assert(Entries.Count == totalSaveObjects);
             Trace.Assert(Entries.Count == totalSaveObjectData);
 
             for (int i = 0; i < Entries.Count; i++)
             {
                 var len = reader.ReadInt32();
                 var before = reader.BaseStream.Position;
 
 #if DEBUG
                 //log.Trace($"Reading {len} bytes @ {before} for {Entries[i].TypePath}");
 #endif
 
                 Entries[i].ParseData(len, reader);
                 var after = reader.BaseStream.Position;
 
                 if (before + len != after)
                 {
                     throw new InvalidOperationException($"Expected {len} bytes read but got {after - before}");
                 }
             }
 
             var collectedObjectsCount = reader.ReadInt32();
             log.Info($"Save contains {collectedObjectsCount} collected objects");
             for (int i = 0; i < collectedObjectsCount; i++)
             {
                 CollectedObjects.Add(new ObjectReference(reader));
             }
 
             log.Debug($"Read {reader.BaseStream.Position} of total {reader.BaseStream.Length} bytes");
             Trace.Assert(reader.BaseStream.Position == reader.BaseStream.Length);
         }

public void Save()
@@ -123,55 +171,114 @@ public void Save(string file)
 
             Header.Serialize(writer);
 
+            if (Header.SaveVersion < FSaveCustomVersion.SaveFileIsCompressed)
+            {
+                SaveData(writer);
+            }
+            else
+            {
+                using (var buffer = new MemoryStream())
+                using (var bufferWriter = new BinaryWriter(buffer))
+                {
+                    bufferWriter.Write(0); // Placeholder size
+
+                    SaveData(bufferWriter);
+
+                    buffer.Position = 0;
+                    bufferWriter.Write((int)buffer.Length - 4);
+                    buffer.Position = 0;
+
+                    for (var i = 0; i < (int)Math.Ceiling((double)buffer.Length / ChunkInfo.ChunkSize); i++)
+                    {
+                        using (var zBuffer = new MemoryStream())
+                        {
+                            var remaining = (int)Math.Min(ChunkInfo.ChunkSize, buffer.Length - (ChunkInfo.ChunkSize * i));
+
+                            using (var zStream = new ZlibStream(zBuffer, CompressionMode.Compress, CompressionLevel.Level6, true))
+                            {
+                                var tmpBuf = new byte[remaining];
+                                buffer.Read(tmpBuf, 0, remaining);
+                                zStream.Write(tmpBuf, 0, remaining);
+                            }
+
+                            writer.Write(new ChunkInfo()
+                            {
+                                CompressedSize = ChunkInfo.Magic,
+                                UncompressedSize = remaining
+                            });
+
+                            writer.Write(new ChunkInfo()
+                            {
+                                CompressedSize = zBuffer.Length,
+                                UncompressedSize = remaining
+                            });
+
+                            writer.Write(new ChunkInfo()
+                            {
+                                CompressedSize = zBuffer.Length,
+                                UncompressedSize = remaining
+                            });
+
+                            //writer.Write(tmpBuf);
+                            //zBuffer.CopyTo(stream);
+                            writer.Write(zBuffer.ToArray());
+                        }
+                    }
+                }
+            }
+        }
+
+        private void SaveData(BinaryWriter writer)
+        {
             writer.Write(Entries.Count);
 
             var entities = Entries.Where(e => e is SaveEntity).ToArray();
             for (var i = 0; i < entities.Length; i++)
             {
                 writer.Write(SaveEntity.TypeID);
                 entities[i].SerializeHeader(writer);
             }
 
             var components = Entries.Where(e => e is SaveComponent).ToArray();
             for (var i = 0; i < components.Length; i++)
             {
                 writer.Write(SaveComponent.TypeID);
                 components[i].SerializeHeader(writer);
             }
 
             writer.Write(entities.Length + components.Length);
 
             using (var ms = new MemoryStream())
             using (var dataWriter = new BinaryWriter(ms))
             {
                 for (var i = 0; i < entities.Length; i++)
                 {
                     entities[i].SerializeData(dataWriter);
 
                     var bytes = ms.ToArray();
                     writer.Write(bytes.Length);
                     writer.Write(bytes);
 
                     ms.SetLength(0);
                 }
                 for (var i = 0; i < components.Length; i++)
                 {
                     components[i].SerializeData(dataWriter);
 
                     var bytes = ms.ToArray();
                     writer.Write(bytes.Length);
                     writer.Write(bytes);
 
                     ms.SetLength(0);
                 }
             }
 
             writer.Write(CollectedObjects.Count);
             foreach (var collectedObject in CollectedObjects)
             {
                 writer.WriteLengthPrefixedString(collectedObject.LevelName);
                 writer.WriteLengthPrefixedString(collectedObject.PathName);
             }
         }
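
One framing detail on the write path that is easy to miss in the diff: SaveData serializes the whole body into a buffer behind a 4-byte placeholder, the placeholder is then backpatched with the body length (buffer.Length - 4), and the buffer is split into ChunkSize slices, each deflated and emitted behind its three ChunkInfo records. A condensed sketch of the per-slice step (CompressChunk is a hypothetical name; assumes Ionic.Zlib from DotNetZip, as referenced in the csproj change below):

        // Sketch: deflate one slice of the serialized body and emit it with the
        // same three-record prefix the loader expects. Illustration only.
        private static void CompressChunk(BinaryWriter writer, byte[] slice)
        {
            using (var zBuffer = new MemoryStream())
            {
                using (var zStream = new ZlibStream(zBuffer, CompressionMode.Compress, CompressionLevel.Level6, true))
                {
                    zStream.Write(slice, 0, slice.Length);
                }

                // Magic marker pair, whole-chunk summary, then the single sub-chunk.
                writer.Write(new ChunkInfo() { CompressedSize = ChunkInfo.Magic, UncompressedSize = slice.Length });
                writer.Write(new ChunkInfo() { CompressedSize = zBuffer.Length, UncompressedSize = slice.Length });
                writer.Write(new ChunkInfo() { CompressedSize = zBuffer.Length, UncompressedSize = slice.Length });
                writer.Write(zBuffer.ToArray());
            }
        }
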
4 changes: 4 additions & 0 deletions SatisfactorySaveParser/SatisfactorySaveParser.csproj
@@ -31,6 +31,9 @@
     <WarningLevel>4</WarningLevel>
   </PropertyGroup>
   <ItemGroup>
+    <Reference Include="DotNetZip, Version=1.13.4.0, Culture=neutral, PublicKeyToken=6583c7c814667745, processorArchitecture=MSIL">
+      <HintPath>..\packages\DotNetZip.1.13.4\lib\net40\DotNetZip.dll</HintPath>
+    </Reference>
     <Reference Include="NLog, Version=4.0.0.0, Culture=neutral, PublicKeyToken=5120e14c03d0593c, processorArchitecture=MSIL">
       <HintPath>..\packages\NLog.4.6.2\lib\net45\NLog.dll</HintPath>
     </Reference>
@@ -85,6 +88,7 @@
     <Compile Include="SaveComponent.cs" />
     <Compile Include="SaveEntity.cs" />
     <Compile Include="SaveObject.cs" />
+    <Compile Include="Save\ChunkInfo.cs" />
     <Compile Include="Save\EIntroTutorialSteps.cs" />
     <Compile Include="Save\ESessionVisibility.cs" />
     <Compile Include="Save\FSaveCustomVersion.cs" />
13 changes: 13 additions & 0 deletions SatisfactorySaveParser/Save/ChunkInfo.cs
@@ -0,0 +1,13 @@
+using System.IO;
+
+namespace SatisfactorySaveParser.Save
+{
+    public class ChunkInfo
+    {
+        public const long Magic = 0x9E2A83C1;
+        public const int ChunkSize = 131072; // 128 KiB
+
+        public long CompressedSize { get; set; }
+        public long UncompressedSize { get; set; }
+    }
+}
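
A note on the constants: the Magic value 0x9E2A83C1 matches Unreal Engine's package file tag, which the engine writes ahead of every compressed chunk, and the 128 KiB ChunkSize matches the engine's standard compression chunk size. Both values are dictated by the game's save format rather than chosen by this parser.
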
3 changes: 3 additions & 0 deletions SatisfactorySaveParser/Save/FSaveCustomVersion.cs
@@ -65,6 +65,9 @@ public enum FSaveCustomVersion
         // 2019-07-24 Due to a bug in the network optimizations the legs data were trashed; resetting the legs to zero is the best option.
         ResetFactoryLegs,
 
+        // 2019-08-28 The large portion of the save file is now compressed. The header is still intact at the start of the file but after that it is compressed with ZLIB.
+        SaveFileIsCompressed,
+
         // -----<new versions can be added above this line>-------------------------------------------------
         VersionPlusOne,
         LatestVersion = VersionPlusOne - 1
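
Taken together with the reader and writer changes above, the on-disk layout this version bump implies is roughly the following (a sketch reconstructed from this commit, not an authoritative spec):

        // Compressed save layout (sketch):
        //   FSaveHeader                               -- still plain at the start of the file
        //   repeat until end of file:
        //     ChunkInfo { Magic, uncompressed }       -- CompressedSize field holds 0x9E2A83C1
        //     ChunkInfo { compressed, uncompressed }  -- whole-chunk summary
        //     ChunkInfo { compressed, uncompressed }  -- single sub-chunk (mirrors the summary)
        //     byte[compressed]                        -- zlib stream, inflating to at most 131072 bytes
        //   the inflated chunks concatenate to: Int32 bodyLength, then the regular save body
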
1 change: 1 addition & 0 deletions SatisfactorySaveParser/packages.config
@@ -1,4 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
 <packages>
+  <package id="DotNetZip" version="1.13.4" targetFramework="net462" />
   <package id="NLog" version="4.6.2" targetFramework="net462" />
 </packages>