diff --git a/AssetTools.NET/Standard/AssetsBundleFileFormat/AssetBundleFile.cs b/AssetTools.NET/Standard/AssetsBundleFileFormat/AssetBundleFile.cs
index a7b9231..104cdda 100644
--- a/AssetTools.NET/Standard/AssetsBundleFileFormat/AssetBundleFile.cs
+++ b/AssetTools.NET/Standard/AssetsBundleFileFormat/AssetBundleFile.cs
@@ -5,6 +5,9 @@
using System;
using System.Collections.Generic;
using System.IO;
+#if !NET35
+using System.Threading.Tasks;
+#endif
namespace AssetsTools.NET
{
@@ -27,6 +30,23 @@ public class AssetBundleFile
///
public bool DataIsCompressed { get; set; }
+ private static int _lz4ParallelPackBatchSize = 32;
+ /// <summary>
+ /// Number of 0x20000 blocks processed per parallel batch in LZ4/LZ4Fast Pack/Unpack.
+ /// Used on non-NET35 targets only. Setting a value less than 1 throws <see cref="ArgumentOutOfRangeException"/>.
+ /// </summary>
+ public static int Lz4ParallelPackBatchSize
+ {
+ get => _lz4ParallelPackBatchSize;
+ set
+ {
+ if (value <= 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), "Lz4ParallelPackBatchSize must be greater than 0.");
+ }
+ _lz4ParallelPackBatchSize = value;
+ }
+ }
public AssetsFileReader Reader;
///
@@ -485,6 +505,7 @@ public void Pack(AssetsFileWriter writer, AssetBundleCompressionType compType,
case AssetBundleCompressionType.LZ4Fast:
{
// compress into 0x20000 blocks
+ const int blockSize = 0x20000;
BinaryReader bundleDataReader = new BinaryReader(bundleDataStream);
Stream writeStream;
@@ -493,7 +514,75 @@ public void Pack(AssetsFileWriter writer, AssetBundleCompressionType compType,
else
writeStream = GetTempFileStream();
- byte[] uncompressedBlock = bundleDataReader.ReadBytes(0x20000);
+#if !NET35
+ int totalBlockCount = (int)((bundleDataReader.BaseStream.Length + blockSize - 1) / blockSize);
+ int completedBlockCount = 0;
+
+ while (true)
+ {
+ List<byte[]> uncompressedBlocks = new List<byte[]>(_lz4ParallelPackBatchSize);
+ for (int i = 0; i < _lz4ParallelPackBatchSize; i++)
+ {
+ byte[] block = bundleDataReader.ReadBytes(blockSize);
+ if (block.Length == 0)
+ break;
+ uncompressedBlocks.Add(block);
+ }
+
+ if (uncompressedBlocks.Count == 0)
+ break;
+
+ // each outputBlock is a byte[]
+ byte[][] outputBlocks = new byte[uncompressedBlocks.Count][];
+ AssetBundleBlockInfo[] outputBlockInfos = new AssetBundleBlockInfo[uncompressedBlocks.Count];
+
+ Parallel.For(0, uncompressedBlocks.Count, i =>
+ {
+ byte[] uncompressedBlock = uncompressedBlocks[i];
+ byte[] compressedBlock = compType == AssetBundleCompressionType.LZ4Fast
+ ? LZ4Codec.Encode32(uncompressedBlock, 0, uncompressedBlock.Length)
+ : LZ4Codec.Encode32HC(uncompressedBlock, 0, uncompressedBlock.Length);
+
+ if (compressedBlock.Length > uncompressedBlock.Length)
+ {
+ outputBlocks[i] = uncompressedBlock;
+ outputBlockInfos[i] = new AssetBundleBlockInfo()
+ {
+ CompressedSize = (uint)uncompressedBlock.Length,
+ DecompressedSize = (uint)uncompressedBlock.Length,
+ Flags = 0x00
+ };
+ }
+ else
+ {
+ outputBlocks[i] = compressedBlock;
+ outputBlockInfos[i] = new AssetBundleBlockInfo()
+ {
+ CompressedSize = (uint)compressedBlock.Length,
+ DecompressedSize = (uint)uncompressedBlock.Length,
+ Flags = 0x03
+ };
+ }
+ });
+
+ for (int i = 0; i < outputBlocks.Length; i++)
+ {
+ byte[] outputBlock = outputBlocks[i];
+ AssetBundleBlockInfo blockInfo = outputBlockInfos[i];
+
+ writeStream.Write(outputBlock, 0, outputBlock.Length);
+ totalCompressedSize += blockInfo.CompressedSize;
+ newBlocks.Add(blockInfo);
+ }
+
+ completedBlockCount += outputBlocks.Length;
+ if (progress != null && totalBlockCount > 0)
+ {
+ progress.SetProgress((float)completedBlockCount / totalBlockCount);
+ }
+ }
+#else
+ byte[] uncompressedBlock = bundleDataReader.ReadBytes(blockSize);
while (uncompressedBlock.Length != 0)
{
byte[] compressedBlock = compType == AssetBundleCompressionType.LZ4Fast
@@ -536,8 +625,9 @@ public void Pack(AssetsFileWriter writer, AssetBundleCompressionType compType,
newBlocks.Add(blockInfo);
}
- uncompressedBlock = bundleDataReader.ReadBytes(0x20000);
+ uncompressedBlock = bundleDataReader.ReadBytes(blockSize);
}
+#endif
if (!blockDirAtEnd)
newStreams.Add(writeStream);