Updater: moved the SharpCompress code into a separate embedded library. This seems to stop some anti-virus programs from detecting the updater as a trojan/virus.

Updated ZDoom_DECORATE.cfg (A_SetInventory).
This commit is contained in:
MaxED 2016-10-14 18:01:58 +00:00
parent 70d35bf1d6
commit ebb372bc27
71 changed files with 115 additions and 10692 deletions

View file

@ -294,6 +294,7 @@ keywords
A_DropInventory = "A_DropInventory(str type)";
A_DropItem = "A_DropItem(str item[, int dropamount = -1[, int chance = 256]])\nThe calling actor drops the specified item.\nThis works in a similar way to the DropItem actor property.";
A_SelectWeapon = "bool A_SelectWeapon(str type)";
A_SetInventory = "bool A_SetInventory(string type, int count[, int ptr = AAPTR_DEFAULT[, bool beyondMax = false]])";
A_RadiusGive = "int A_RadiusGive(str item, float distance, int flags[, int amount = 0[, str filter = \"None\"[, str species = \"None\"[, int mindist = 0[, int limit = 0]]]]])\nflags: RGF flags.";
//Weapon functions
A_WeaponReady = "A_WeaponReady[(int flags = 0)]\nflags: WRF flags.";

Binary file not shown.

View file

@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Security.Cryptography;
//Source: http://www.codeproject.com/Articles/528178/Load-DLL-From-Embedded-Resource
namespace mxd.GZDBUpdater
{
public static class EmbeddedAssembly
{
// Version 1.3
private static Dictionary<string, Assembly> dic;
public static void Load(string embeddedResource, string fileName)
{
if(dic == null) dic = new Dictionary<string, Assembly>();
byte[] ba;
Assembly asm;
Assembly curAsm = Assembly.GetExecutingAssembly();
using (Stream stm = curAsm.GetManifestResourceStream(embeddedResource))
{
// Either the file does not exist or it is not marked as an embedded resource
if(stm == null) throw new Exception(embeddedResource + " is not found in Embedded Resources.");
// Get byte[] from the file from embedded resource
ba = new byte[(int) stm.Length];
stm.Read(ba, 0, (int) stm.Length);
try
{
asm = Assembly.Load(ba);
// Add the assembly/dll into dictionary
dic.Add(asm.FullName, asm);
return;
}
catch
{
// Purposely do nothing
// Unmanaged dll or assembly cannot be loaded directly from byte[]
// Let the process fall through for next part
}
}
bool fileOk;
string tempFile;
using (SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider())
{
string fileHash = BitConverter.ToString(sha1.ComputeHash(ba)).Replace("-", string.Empty);
tempFile = Path.GetTempPath() + fileName;
if(File.Exists(tempFile))
{
byte[] bb = File.ReadAllBytes(tempFile);
string fileHash2 = BitConverter.ToString(sha1.ComputeHash(bb)).Replace("-", string.Empty);
fileOk = (fileHash == fileHash2);
}
else
{
fileOk = false;
}
}
if(!fileOk)
{
File.WriteAllBytes(tempFile, ba);
}
asm = Assembly.LoadFile(tempFile);
dic.Add(asm.FullName, asm);
}
public static Assembly Get(string assemblyFullName)
{
if(dic == null || dic.Count == 0) return null;
if(dic.ContainsKey(assemblyFullName)) return dic[assemblyFullName];
return null;
}
}
}
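
Note (not part of this commit): the helper above is meant to be wired up from the application entry point, which is exactly what the Program.cs change further down does. A minimal sketch, assuming the DLL is embedded under the resource name used below:

// Load the embedded DLL once at startup, before any SharpCompress type is used,
// then let the runtime resolve it via AppDomain.AssemblyResolve.
EmbeddedAssembly.Load("mxd.GZDBUpdater.SharpCompressStripped.dll", "SharpCompressStripped.dll");
AppDomain.CurrentDomain.AssemblyResolve += (sender, args) => EmbeddedAssembly.Get(args.Name);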

View file

@ -1,20 +1,29 @@
using System;
using System.Reflection;
using System.Windows.Forms;
namespace mxd.GZDBUpdater
{
static class Program
{
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
EmbeddedAssembly.Load("mxd.GZDBUpdater.SharpCompressStripped.dll", "SharpCompressStripped.dll");
AppDomain.CurrentDomain.AssemblyResolve += AssemblyResolve;
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
MainForm form = new MainForm();
if(!form.IsDisposed) Application.Run(form);
}
private static Assembly AssemblyResolve(object sender, ResolveEventArgs args)
{
return EmbeddedAssembly.Get(args.Name);
}
}
}

View file

@ -29,5 +29,5 @@ using System.Runtime.InteropServices;
// Build Number
// Revision
//
[assembly: AssemblyVersion("1.0.0.2")]
[assembly: AssemblyFileVersion("1.0.0.2")]
[assembly: AssemblyVersion("1.0.0.3")]
[assembly: AssemblyFileVersion("1.0.0.3")]

View file

@ -1,127 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
public event EventHandler<EventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; private set; }
private bool disposed;
#if !NO_FILE
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
if(!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
#endif
public ArchiveType Type { get; private set; }
/// <summary>
/// Returns a ReadOnlyCollection of all the entries across the one or many parts of the archive.
/// </summary>
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
public int NumEntries { get { return lazyEntries.Count; } } //mxd
/// <summary>
/// Returns a ReadOnlyCollection of all the volumes across the one or many parts of the archive.
/// </summary>
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
//public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
//public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
//protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
//IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
//IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
public virtual void Dispose()
{
if(!disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public IReader ExtractAllEntries()
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
protected abstract IReader CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (the archive saves space by sharing compression state across entries, which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid { get { return false; } }
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
}
}

View file

@ -1,40 +0,0 @@
using System;
using System.IO;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public class ArchiveFactory
{
/// <summary>
/// Opens an archive from a file path to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath)
{
//filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), new ReaderOptions());
}
/// <summary>
/// Opens an archive from a FileInfo object pointing to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options)
{
using(var stream = fileInfo.OpenRead())
{
if(SevenZipArchive.IsSevenZipFile(stream))
{
stream.Dispose();
return SevenZipArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
}
}
}
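
Note (not part of this commit): a hypothetical caller showing how the stripped-down API above would typically be consumed; the class, method and path below are illustrative only.

using System;
using SharpCompress.Archives;
using SharpCompress.Readers;

static class UpdatePackage // hypothetical helper, not in the commit
{
    static void Inspect(string path)
    {
        // Only 7-Zip is recognized by the stripped ArchiveFactory above.
        using (IArchive archive = ArchiveFactory.Open(path))
        {
            Console.WriteLine("Entries: " + archive.NumEntries);
            IReader reader = archive.ExtractAllEntries(); // solid archives extract sequentially
            // ...iterate the reader to write entries to disk
            // (the reader's members are not shown in this diff).
        }
    }
}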

View file

@ -1,14 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public interface IArchive : IDisposable
{
int NumEntries { get; } //mxd
IReader ExtractAllEntries();
bool IsComplete { get; }
}
}

View file

@ -1,24 +0,0 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Archives
{
public interface IArchiveEntry : IEntry
{
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
//Stream OpenEntryStream();
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.
/// </summary>
bool IsComplete { get; }
/// <summary>
/// The archive instance this entry belongs to
/// </summary>
IArchive Archive { get; }
}
}

View file

@ -1,11 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Archives
{
internal interface IArchiveExtractionListener //: IExtractionListener
{
void EnsureEntriesLoaded();
//void FireEntryExtractionBegin(IArchiveEntry entry);
//void FireEntryExtractionEnd(IArchiveEntry entry);
}
}

View file

@ -1,142 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip
{
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
#if !NO_FILE
/// <summary>
/// Opens a 7-Zip archive from a FileInfo object pointing to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions)
{
//fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions);
}
#endif
#if !NO_FILE
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
{
}
protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
{
return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
for (int i = 0; i < database.Files.Count; i++)
{
var file = database.Files[i];
if (!file.IsDir)
{
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file));
}
}
}
private void LoadFactory(Stream stream)
{
if (database == null)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream);
database = reader.ReadDatabase(null);
}
}
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
{
return new SevenZipReader(this);
}
//public override bool IsSolid { get { return Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; } }
private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;
internal SevenZipReader(SevenZipArchive archive)
: base(new ReaderOptions(), ArchiveType.SevenZip)
{
this.archive = archive;
}
public override SevenZipVolume Volume { get { return archive.Volumes.Single(); } }
internal override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
List<SevenZipArchiveEntry> entries = archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
yield return dir;
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
currentFolder = group.Key;
if (group.Key == null)
{
currentStream = Stream.Null;
}
else
{
currentStream = archive.database.GetFolderStream(stream, currentFolder, null);
}
foreach (var entry in group)
{
currentItem = entry.FilePart.Header;
yield return entry;
}
}
}
protected override EntryStream GetEntryStream()
{
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
}
}
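
Note (not part of this commit): the factory dispatch relies on IsSevenZipFile, which simply compares the first six bytes against the 7z signature ('7', 'z', 0xBC, 0xAF, 0x27, 0x1C). A small sketch of calling it directly; the file name is made up:

using System.IO;
using SharpCompress.Archives.SevenZip;

static class SignatureCheck // illustrative only
{
    static bool LooksLike7z(string path)
    {
        using (FileStream fs = File.OpenRead(path))
        {
            // The check reads from the stream, so rewind (or reopen) the file
            // before actually opening it as an archive.
            return SevenZipArchive.IsSevenZipFile(fs);
        }
    }
}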

View file

@ -1,18 +0,0 @@
using System.IO;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archives.SevenZip
{
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
: base(part)
{
Archive = archive;
}
public IArchive Archive { get; private set; }
public bool IsComplete { get { return true; } }
}
}

View file

@ -1,14 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class ArchiveExtractionEventArgs<T> : EventArgs
{
internal ArchiveExtractionEventArgs(T entry)
{
Item = entry;
}
public T Item { get; private set; }
}
}

View file

@ -1,11 +0,0 @@
namespace SharpCompress.Common
{
public enum ArchiveType
{
Rar,
Zip,
Tar,
SevenZip,
GZip
}
}

View file

@ -1,16 +0,0 @@
namespace SharpCompress.Common
{
public enum CompressionType
{
None,
GZip,
BZip2,
PPMd,
Deflate,
Rar,
LZMA,
BCJ,
BCJ2,
Unknown
}
}

View file

@ -1,80 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common
{
public abstract class Entry : IEntry
{
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public abstract long Crc { get; }
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string Key { get; }
/// <summary>
/// The compressed file size
/// </summary>
public abstract long CompressedSize { get; }
/// <summary>
/// The compression type
/// </summary>
public abstract CompressionType CompressionType { get; }
/// <summary>
/// The uncompressed file size
/// </summary>
public abstract long Size { get; }
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>
public abstract DateTime? LastModifiedTime { get; }
/// <summary>
/// The entry create time in the archive, if recorded
/// </summary>
public abstract DateTime? CreatedTime { get; }
/// <summary>
/// The entry last accessed time in the archive, if recorded
/// </summary>
public abstract DateTime? LastAccessedTime { get; }
/// <summary>
/// The entry time when archived, if recorded
/// </summary>
public abstract DateTime? ArchivedTime { get; }
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// </summary>
public abstract bool IsEncrypted { get; }
/// <summary>
/// Entry is a directory.
/// </summary>
public abstract bool IsDirectory { get; }
/// <summary>
/// Entry is split among multiple volumes
/// </summary>
public abstract bool IsSplit { get; }
internal abstract IEnumerable<FilePart> Parts { get; }
internal bool IsSolid { get; set; }
internal virtual void Close()
{
}
/// <summary>
/// Entry file attribute.
/// </summary>
public virtual int? Attrib { get { throw new NotImplementedException(); } }
}
}

View file

@ -1,87 +0,0 @@
using System;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class EntryStream : Stream
{
public IReader Reader { get; private set; }
private readonly Stream stream;
private bool completed;
private bool isDisposed;
internal EntryStream(IReader reader, Stream stream)
{
Reader = reader;
this.stream = stream;
}
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be fully consumed; use this method to finish reading the entire entry.
/// </summary>
public void SkipEntry()
{
var buffer = new byte[4096];
while (Read(buffer, 0, buffer.Length) > 0)
{
}
completed = true;
}
protected override void Dispose(bool disposing)
{
if (!(completed || Reader.Cancelled))
{
SkipEntry();
}
if (isDisposed)
{
return;
}
isDisposed = true;
base.Dispose(disposing);
stream.Dispose();
}
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
int read = stream.Read(buffer, offset, count);
if (read <= 0)
{
completed = true;
}
return read;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View file

@ -1,12 +0,0 @@
using System.IO;
namespace SharpCompress.Common
{
public abstract class FilePart
{
//internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
}
}

View file

@ -1,22 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class FilePartExtractionBeginEventArgs : EventArgs
{
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; internal set; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; internal set; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; internal set; }
}
}

View file

@ -1,51 +0,0 @@

#if !NO_FILE
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
internal static class IEntryExtensions
{
internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName,
ExtractionOptions options)
{
if (options.PreserveFileTime || options.PreserveAttributes)
{
FileInfo nf = new FileInfo(destinationFileName);
if (!nf.Exists)
{
return;
}
// update file time to original packed time
if (options.PreserveFileTime)
{
if (entry.CreatedTime.HasValue)
{
nf.CreationTime = entry.CreatedTime.Value;
}
if (entry.LastModifiedTime.HasValue)
{
nf.LastWriteTime = entry.LastModifiedTime.Value;
}
if (entry.LastAccessedTime.HasValue)
{
nf.LastAccessTime = entry.LastAccessedTime.Value;
}
}
if (options.PreserveAttributes)
{
if (entry.Attrib.HasValue)
{
nf.Attributes = (FileAttributes)System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
}
}
}
}
}
}
#endif

View file

@ -1,21 +0,0 @@
using System;
namespace SharpCompress.Common
{
public interface IEntry
{
//CompressionType CompressionType { get; }
//DateTime? ArchivedTime { get; }
long CompressedSize { get; }
//long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
bool IsDirectory { get; }
//bool IsEncrypted { get; }
//bool IsSplit { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
}
}

View file

@ -1,12 +0,0 @@
using System;
#if !NO_FILE
using System.IO;
#endif
namespace SharpCompress.Common
{
public interface IVolume : IDisposable
{
}
}

View file

@ -1,14 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry)
{
Item = entry;
}
public T Item { get; private set; }
}
}

View file

@ -1,148 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip
{
internal class ArchiveDatabase
{
internal byte MajorVersion;
internal byte MinorVersion;
internal long StartPositionAfterHeader;
internal long DataStartPosition;
internal List<long> PackSizes = new List<long>();
internal List<uint?> PackCRCs = new List<uint?>();
internal List<CFolder> Folders = new List<CFolder>();
internal List<int> NumUnpackStreamsVector;
internal List<CFileItem> Files = new List<CFileItem>();
internal List<long> PackStreamStartPositions = new List<long>();
internal List<int> FolderStartFileIndex = new List<int>();
internal List<int> FileIndexToFolderIndexMap = new List<int>();
internal void Clear()
{
PackSizes.Clear();
PackCRCs.Clear();
Folders.Clear();
NumUnpackStreamsVector = null;
Files.Clear();
PackStreamStartPositions.Clear();
FolderStartFileIndex.Clear();
FileIndexToFolderIndexMap.Clear();
}
private void FillStartPos()
{
PackStreamStartPositions.Clear();
long startPos = 0;
for (int i = 0; i < PackSizes.Count; i++)
{
PackStreamStartPositions.Add(startPos);
startPos += PackSizes[i];
}
}
private void FillFolderStartFileIndex()
{
FolderStartFileIndex.Clear();
FileIndexToFolderIndexMap.Clear();
int folderIndex = 0;
int indexInFolder = 0;
for (int i = 0; i < Files.Count; i++)
{
CFileItem file = Files[i];
bool emptyStream = !file.HasStream;
if (emptyStream && indexInFolder == 0)
{
FileIndexToFolderIndexMap.Add(-1);
continue;
}
if (indexInFolder == 0)
{
// v3.13 incorrectly worked with empty folders
// v4.07: Loop for skipping empty folders
for (;;)
{
if (folderIndex >= Folders.Count)
{
throw new InvalidOperationException();
}
FolderStartFileIndex.Add(i); // check it
if (NumUnpackStreamsVector[folderIndex] != 0)
{
break;
}
folderIndex++;
}
}
FileIndexToFolderIndexMap.Add(folderIndex);
if (emptyStream)
{
continue;
}
indexInFolder++;
if (indexInFolder >= NumUnpackStreamsVector[folderIndex])
{
folderIndex++;
indexInFolder = 0;
}
}
}
public void Fill()
{
FillStartPos();
FillFolderStartFileIndex();
}
internal long GetFolderStreamPos(CFolder folder, int indexInFolder)
{
int index = folder.FirstPackStreamId + indexInFolder;
return DataStartPosition + PackStreamStartPositions[index];
}
internal long GetFolderFullPackSize(int folderIndex)
{
int packStreamIndex = Folders[folderIndex].FirstPackStreamId;
CFolder folder = Folders[folderIndex];
long size = 0;
for (int i = 0; i < folder.PackStreams.Count; i++)
{
size += PackSizes[packStreamIndex + i];
}
return size;
}
internal Stream GetFolderStream(Stream stream, CFolder folder, IPasswordProvider pw)
{
int packStreamIndex = folder.FirstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder.PackStreams.Count; j++)
{
packSizes.Add(PackSizes[packStreamIndex + j]);
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
}
}
}

View file

@ -1,8 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
internal class CBindPair
{
internal int InIndex;
internal int OutIndex;
}
}

View file

@ -1,10 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
internal class CCoderInfo
{
internal CMethodId MethodId;
internal byte[] Props;
internal int NumInStreams;
internal int NumOutStreams;
}
}

View file

@ -1,27 +0,0 @@
using System;
namespace SharpCompress.Common.SevenZip
{
internal class CFileItem
{
public long Size { get; internal set; }
public uint? Attrib { get; internal set; }
public uint? Crc { get; internal set; }
public string Name { get; internal set; }
public bool HasStream { get; internal set; }
public bool IsDir { get; internal set; }
public DateTime? CTime { get; internal set; }
public DateTime? ATime { get; internal set; }
public DateTime? MTime { get; internal set; }
public long? StartPos { get; internal set; }
public bool IsAnti { get; internal set; }
internal CFileItem()
{
HasStream = true;
}
}
}

View file

@ -1,175 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
internal class CFolder
{
internal List<CCoderInfo> Coders = new List<CCoderInfo>();
internal List<CBindPair> BindPairs = new List<CBindPair>();
internal List<int> PackStreams = new List<int>();
internal int FirstPackStreamId;
internal List<long> UnpackSizes = new List<long>();
internal uint? UnpackCRC;
internal bool UnpackCRCDefined { get { return UnpackCRC != null; } }
public long GetUnpackSize()
{
if (UnpackSizes.Count == 0)
{
return 0;
}
for (int i = UnpackSizes.Count - 1; i >= 0; i--)
{
if (FindBindPairForOutStream(i) < 0)
{
return UnpackSizes[i];
}
}
throw new Exception();
}
public int GetNumOutStreams()
{
int count = 0;
for (int i = 0; i < Coders.Count; i++)
{
count += Coders[i].NumOutStreams;
}
return count;
}
public int FindBindPairForInStream(int inStreamIndex)
{
for (int i = 0; i < BindPairs.Count; i++)
{
if (BindPairs[i].InIndex == inStreamIndex)
{
return i;
}
}
return -1;
}
public int FindBindPairForOutStream(int outStreamIndex)
{
for (int i = 0; i < BindPairs.Count; i++)
{
if (BindPairs[i].OutIndex == outStreamIndex)
{
return i;
}
}
return -1;
}
public int FindPackStreamArrayIndex(int inStreamIndex)
{
for (int i = 0; i < PackStreams.Count; i++)
{
if (PackStreams[i] == inStreamIndex)
{
return i;
}
}
return -1;
}
public bool CheckStructure()
{
const int kNumCodersMax = 32; // don't change it
const int kMaskSize = 32; // it must be >= kNumCodersMax
const int kNumBindsMax = 32;
if (Coders.Count > kNumCodersMax || BindPairs.Count > kNumBindsMax)
{
return false;
}
{
var v = new BitVector(BindPairs.Count + PackStreams.Count);
for (int i = 0; i < BindPairs.Count; i++)
{
if (v.GetAndSet(BindPairs[i].InIndex))
{
return false;
}
}
for (int i = 0; i < PackStreams.Count; i++)
{
if (v.GetAndSet(PackStreams[i]))
{
return false;
}
}
}
{
var v = new BitVector(UnpackSizes.Count);
for (int i = 0; i < BindPairs.Count; i++)
{
if (v.GetAndSet(BindPairs[i].OutIndex))
{
return false;
}
}
}
uint[] mask = new uint[kMaskSize];
{
List<int> inStreamToCoder = new List<int>();
List<int> outStreamToCoder = new List<int>();
for (int i = 0; i < Coders.Count; i++)
{
CCoderInfo coder = Coders[i];
for (int j = 0; j < coder.NumInStreams; j++)
{
inStreamToCoder.Add(i);
}
for (int j = 0; j < coder.NumOutStreams; j++)
{
outStreamToCoder.Add(i);
}
}
for (int i = 0; i < BindPairs.Count; i++)
{
CBindPair bp = BindPairs[i];
mask[inStreamToCoder[bp.InIndex]] |= (1u << outStreamToCoder[bp.OutIndex]);
}
}
for (int i = 0; i < kMaskSize; i++)
{
for (int j = 0; j < kMaskSize; j++)
{
if (((1u << j) & mask[i]) != 0)
{
mask[i] |= mask[j];
}
}
}
for (int i = 0; i < kMaskSize; i++)
{
if (((1u << i) & mask[i]) != 0)
{
return false;
}
}
return true;
}
}
}

View file

@ -1,57 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
internal struct CMethodId
{
public const ulong kCopyId = 0;
public const ulong kLzmaId = 0x030101;
public const ulong kLzma2Id = 0x21;
public const ulong kAESId = 0x06F10701;
/*public static readonly CMethodId kCopy = new CMethodId(kCopyId);
public static readonly CMethodId kLzma = new CMethodId(kLzmaId);
public static readonly CMethodId kLzma2 = new CMethodId(kLzma2Id);
public static readonly CMethodId kAES = new CMethodId(kAESId);*/
public readonly ulong Id;
public CMethodId(ulong id)
{
Id = id;
}
public override int GetHashCode()
{
return Id.GetHashCode();
}
public override bool Equals(object obj)
{
return obj is CMethodId && (CMethodId)obj == this;
}
public bool Equals(CMethodId other)
{
return Id == other.Id;
}
public static bool operator ==(CMethodId left, CMethodId right)
{
return left.Id == right.Id;
}
public static bool operator !=(CMethodId left, CMethodId right)
{
return left.Id != right.Id;
}
public int GetLength()
{
int bytes = 0;
for (ulong value = Id; value != 0; value >>= 8)
{
bytes++;
}
return bytes;
}
}
}

View file

@ -1,69 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
internal struct CStreamSwitch : IDisposable
{
private ArchiveReader _archive;
private bool _needRemove;
private bool _active;
public void Dispose()
{
if (_active)
{
_active = false;
#if DEBUG
//Log.WriteLine("[end of switch]");
#endif
}
if (_needRemove)
{
_needRemove = false;
_archive.DeleteByteStream();
}
}
public void Set(ArchiveReader archive, byte[] dataVector)
{
Dispose();
_archive = archive;
_archive.AddByteStream(dataVector, 0, dataVector.Length);
_needRemove = true;
_active = true;
}
public void Set(ArchiveReader archive, List<byte[]> dataVector)
{
Dispose();
_active = true;
byte external = archive.ReadByte();
if (external != 0)
{
int dataIndex = archive.ReadNum();
if (dataIndex < 0 || dataIndex >= dataVector.Count)
{
throw new InvalidOperationException();
}
#if DEBUG
//Log.WriteLine("[switch to stream {0}]", dataIndex);
#endif
_archive = archive;
_archive.AddByteStream(dataVector[dataIndex], 0, dataVector[dataIndex].Length);
_needRemove = true;
_active = true;
}
else
{
#if DEBUG
//Log.WriteLine("[inline data]");
#endif
}
}
}
}

View file

@ -1,186 +0,0 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
internal class DataReader
{
#region Static Methods
public static uint Get32(byte[] buffer, int offset)
{
return buffer[offset]
+ ((uint)buffer[offset + 1] << 8)
+ ((uint)buffer[offset + 2] << 16)
+ ((uint)buffer[offset + 3] << 24);
}
public static ulong Get64(byte[] buffer, int offset)
{
return buffer[offset]
+ ((ulong)buffer[offset + 1] << 8)
+ ((ulong)buffer[offset + 2] << 16)
+ ((ulong)buffer[offset + 3] << 24)
+ ((ulong)buffer[offset + 4] << 32)
+ ((ulong)buffer[offset + 5] << 40)
+ ((ulong)buffer[offset + 6] << 48)
+ ((ulong)buffer[offset + 7] << 56);
}
#endregion
#region Variables
private readonly byte[] _buffer;
private readonly int _ending;
#endregion
#region Public Methods
public DataReader(byte[] buffer, int offset, int length)
{
_buffer = buffer;
Offset = offset;
_ending = offset + length;
}
public int Offset { get; private set; }
public Byte ReadByte()
{
if (Offset >= _ending)
{
throw new EndOfStreamException();
}
return _buffer[Offset++];
}
public void ReadBytes(byte[] buffer, int offset, int length)
{
if (length > _ending - Offset)
{
throw new EndOfStreamException();
}
while (length-- > 0)
{
buffer[offset++] = _buffer[Offset++];
}
}
public void SkipData(long size)
{
if (size > _ending - Offset)
{
throw new EndOfStreamException();
}
Offset += (int)size;
#if DEBUG
////Log.WriteLine("SkipData {0}", size);
#endif
}
public void SkipData()
{
SkipData(checked((long)ReadNumber()));
}
public ulong ReadNumber()
{
if (Offset >= _ending)
{
throw new EndOfStreamException();
}
byte firstByte = _buffer[Offset++];
byte mask = 0x80;
ulong value = 0;
for (int i = 0; i < 8; i++)
{
if ((firstByte & mask) == 0)
{
ulong highPart = firstByte & (mask - 1u);
value += highPart << (i * 8);
return value;
}
if (Offset >= _ending)
{
throw new EndOfStreamException();
}
value |= (ulong)_buffer[Offset++] << (8 * i);
mask >>= 1;
}
return value;
}
public int ReadNum()
{
ulong value = ReadNumber();
if (value > Int32.MaxValue)
{
throw new NotSupportedException();
}
return (int)value;
}
public uint ReadUInt32()
{
if (Offset + 4 > _ending)
{
throw new EndOfStreamException();
}
uint res = Get32(_buffer, Offset);
Offset += 4;
return res;
}
public ulong ReadUInt64()
{
if (Offset + 8 > _ending)
{
throw new EndOfStreamException();
}
ulong res = Get64(_buffer, Offset);
Offset += 8;
return res;
}
public string ReadString()
{
int ending = Offset;
for (;;)
{
if (ending + 2 > _ending)
{
throw new EndOfStreamException();
}
if (_buffer[ending] == 0 && _buffer[ending + 1] == 0)
{
break;
}
ending += 2;
}
string str = Encoding.Unicode.GetString(_buffer, Offset, ending - Offset);
Offset = ending + 2;
return str;
}
#endregion
}
}
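
Note (not part of this commit): ReadNumber above implements 7-Zip's variable-length integer encoding; the leading 1-bits of the first byte say how many extra little-endian bytes follow, and the remaining low bits of that byte become the most significant part of the value. A worked example (DataReader is internal, so this only runs inside the library):

// 0x90 = 1001 0000b: bit 7 set -> one extra byte follows; the remaining
// low bits (0x10) become the high part of the result.
var reader = new DataReader(new byte[] { 0x90, 0x34 }, 0, 2);
ulong value = reader.ReadNumber(); // 0x34 | (0x10 << 8) = 0x1034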

View file

@ -1,43 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipEntry : Entry
{
internal SevenZipEntry(SevenZipFilePart filePart)
{
FilePart = filePart;
}
internal SevenZipFilePart FilePart { get; private set; }
public override CompressionType CompressionType { get { return FilePart.CompressionType; } }
public override long Crc { get { return FilePart.Header.Crc ?? 0; } }
public override string Key { get { return FilePart.Header.Name; } }
public override long CompressedSize { get { return 0; } }
public override long Size { get { return FilePart.Header.Size; } }
public override DateTime? LastModifiedTime { get { return FilePart.Header.MTime; } }
public override DateTime? CreatedTime { get { return null; } }
public override DateTime? LastAccessedTime { get { return null; } }
public override DateTime? ArchivedTime { get { return null; } }
public override bool IsEncrypted { get { return false; } }
public override bool IsDirectory { get { return FilePart.Header.IsDir; } }
public override bool IsSplit { get { return false; } }
public override int? Attrib { get { return (int)FilePart.Header.Attrib; } }
internal override IEnumerable<FilePart> Parts { get { return FilePart.AsEnumerable<FilePart>(); } }
}
}

View file

@ -1,106 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
{
internal class SevenZipFilePart : FilePart
{
private CompressionType? type;
private readonly Stream stream;
private readonly ArchiveDatabase database;
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
{
this.stream = stream;
this.database = database;
Index = index;
Header = fileEntry;
if (Header.HasStream)
{
Folder = database.Folders[database.FileIndexToFolderIndexMap[index]];
}
}
//internal Stream BaseStream { get; private set; }
internal CFileItem Header { get; private set; }
internal CFolder Folder { get; private set; }
internal int Index { get; private set; }
//internal string FilePartName { get { return Header.Name; } }
internal override Stream GetRawStream()
{
return null;
}
internal override Stream GetCompressedStream()
{
if (!Header.HasStream)
{
return null;
}
var folderStream = database.GetFolderStream(stream, Folder, null);
int firstFileIndex = database.FolderStartFileIndex[database.Folders.IndexOf(Folder)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
{
skipSize += database.Files[firstFileIndex + i].Size;
}
if (skipSize > 0)
{
folderStream.Skip(skipSize);
}
return new ReadOnlySubStream(folderStream, Header.Size);
}
public CompressionType CompressionType
{
get
{
if (type == null)
{
type = GetCompression();
}
return type.Value;
}
}
//copied from DecoderRegistry
private const uint k_Copy = 0x0;
private const uint k_Delta = 3;
private const uint k_LZMA2 = 0x21;
private const uint k_LZMA = 0x030101;
private const uint k_PPMD = 0x030401;
private const uint k_BCJ = 0x03030103;
private const uint k_BCJ2 = 0x0303011B;
private const uint k_Deflate = 0x040108;
private const uint k_BZip2 = 0x040202;
internal CompressionType GetCompression()
{
var coder = Folder.Coders.First();
switch (coder.MethodId.Id)
{
case k_LZMA:
case k_LZMA2:
{
return CompressionType.LZMA;
}
case k_PPMD:
{
return CompressionType.PPMd;
}
case k_BZip2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}
}
}

View file

@ -1,13 +0,0 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip
{
public class SevenZipVolume : Volume
{
public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions)
: base(stream, readerFactoryOptions)
{
}
}
}

View file

@ -1,43 +0,0 @@
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public abstract class Volume : IVolume
{
private readonly Stream actualStream;
internal Volume(Stream stream, ReaderOptions readerOptions)
{
actualStream = stream;
ReaderOptions = readerOptions;
}
internal Stream Stream { get { return new NonDisposingStream(actualStream); } }
protected ReaderOptions ReaderOptions { get; private set; }
/// <summary>
/// RarArchive is the first volume of a multi-part archive.
/// Only Rar 3.0 format and higher
/// </summary>
//public virtual bool IsFirstVolume { get { return true; } }
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
//public virtual bool IsMultiVolume { get { return true; } }
private bool disposed;
public void Dispose()
{
if (!ReaderOptions.LeaveStreamOpen && !disposed)
{
actualStream.Dispose();
disposed = true;
}
}
}
}

View file

@ -1,113 +0,0 @@
using System.IO;
namespace SharpCompress.Compressors.Filters
{
internal class BCJFilter : Filter
{
private static readonly bool[] MASK_TO_ALLOWED_STATUS = {true, true, true, false, true, false, false, false};
private static readonly int[] MASK_TO_BIT_NUMBER = {0, 1, 2, 2, 3, 3, 3, 3};
private int pos;
private int prevMask;
public BCJFilter(bool isEncoder, Stream baseStream)
: base(isEncoder, baseStream, 5)
{
pos = 5;
}
private static bool test86MSByte(byte b)
{
return b == 0x00 || b == 0xFF;
}
protected override int Transform(byte[] buffer, int offset, int count)
{
int prevPos = offset - 1;
int end = offset + count - 5;
int i;
for (i = offset; i <= end; ++i)
{
if ((buffer[i] & 0xFE) != 0xE8)
{
continue;
}
prevPos = i - prevPos;
if ((prevPos & ~3) != 0)
{
// (unsigned)prevPos > 3
prevMask = 0;
}
else
{
prevMask = (prevMask << (prevPos - 1)) & 7;
if (prevMask != 0)
{
if (!MASK_TO_ALLOWED_STATUS[prevMask] || test86MSByte(
buffer[i + 4 - MASK_TO_BIT_NUMBER[prevMask]]))
{
prevPos = i;
prevMask = (prevMask << 1) | 1;
continue;
}
}
}
prevPos = i;
if (test86MSByte(buffer[i + 4]))
{
int src = buffer[i + 1]
| (buffer[i + 2] << 8)
| (buffer[i + 3] << 16)
| (buffer[i + 4] << 24);
int dest;
while (true)
{
if (isEncoder)
{
dest = src + (pos + i - offset);
}
else
{
dest = src - (pos + i - offset);
}
if (prevMask == 0)
{
break;
}
int index = MASK_TO_BIT_NUMBER[prevMask] * 8;
if (!test86MSByte((byte)(dest >> (24 - index))))
{
break;
}
src = dest ^ ((1 << (32 - index)) - 1);
}
buffer[i + 1] = (byte)dest;
buffer[i + 2] = (byte)(dest >> 8);
buffer[i + 3] = (byte)(dest >> 16);
buffer[i + 4] = (byte)(~(((dest >> 24) & 1) - 1));
i += 4;
}
else
{
prevMask = (prevMask << 1) | 1;
}
}
prevPos = i - prevPos;
prevMask = ((prevPos & ~3) != 0) ? 0 : prevMask << (prevPos - 1);
i -= offset;
pos += i;
return i;
}
}
}

View file

@ -1,154 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Filters
{
internal abstract class Filter : Stream
{
protected bool isEncoder;
protected Stream baseStream;
private readonly byte[] tail;
private readonly byte[] window;
private int transformed;
private int read;
private bool endReached;
private bool isDisposed;
protected Filter(bool isEncoder, Stream baseStream, int lookahead)
{
this.isEncoder = isEncoder;
this.baseStream = baseStream;
tail = new byte[lookahead - 1];
window = new byte[tail.Length * 2];
}
protected override void Dispose(bool disposing)
{
if (isDisposed)
{
return;
}
isDisposed = true;
base.Dispose(disposing);
baseStream.Dispose();
}
public override bool CanRead { get { return !isEncoder; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return isEncoder; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { return baseStream.Length; } }
public override long Position { get { return baseStream.Position; } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
int size = 0;
if (transformed > 0)
{
int copySize = transformed;
if (copySize > count)
{
copySize = count;
}
Buffer.BlockCopy(tail, 0, buffer, offset, copySize);
transformed -= copySize;
read -= copySize;
offset += copySize;
count -= copySize;
size += copySize;
Buffer.BlockCopy(tail, copySize, tail, 0, read);
}
if (count == 0)
{
return size;
}
int inSize = read;
if (inSize > count)
{
inSize = count;
}
Buffer.BlockCopy(tail, 0, buffer, offset, inSize);
read -= inSize;
Buffer.BlockCopy(tail, inSize, tail, 0, read);
while (!endReached && inSize < count)
{
int baseRead = baseStream.Read(buffer, offset + inSize, count - inSize);
inSize += baseRead;
if (baseRead == 0)
{
endReached = true;
}
}
while (!endReached && read < tail.Length)
{
int baseRead = baseStream.Read(tail, read, tail.Length - read);
read += baseRead;
if (baseRead == 0)
{
endReached = true;
}
}
if (inSize > tail.Length)
{
transformed = Transform(buffer, offset, inSize);
offset += transformed;
count -= transformed;
size += transformed;
inSize -= transformed;
transformed = 0;
}
if (count == 0)
{
return size;
}
Buffer.BlockCopy(buffer, offset, window, 0, inSize);
Buffer.BlockCopy(tail, 0, window, inSize, read);
if (inSize + read > tail.Length)
{
transformed = Transform(window, 0, inSize + read);
}
else
{
transformed = inSize + read;
}
Buffer.BlockCopy(window, 0, buffer, offset, inSize);
Buffer.BlockCopy(window, inSize, tail, 0, read);
size += inSize;
transformed -= inSize;
return size;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
Transform(buffer, offset, count);
baseStream.Write(buffer, offset, count);
}
protected abstract int Transform(byte[] buffer, int offset, int count);
}
}

View file

@ -1,101 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace SharpCompress.Compressors.LZMA
{
internal class BitVector
{
private readonly uint[] mBits;
public BitVector(int length)
{
Length = length;
mBits = new uint[(length + 31) >> 5];
}
public BitVector(int length, bool initValue)
{
Length = length;
mBits = new uint[(length + 31) >> 5];
if (initValue)
{
for (int i = 0; i < mBits.Length; i++)
{
mBits[i] = ~0u;
}
}
}
public BitVector(List<bool> bits)
: this(bits.Count)
{
for (int i = 0; i < bits.Count; i++)
{
if (bits[i])
{
SetBit(i);
}
}
}
public bool[] ToArray()
{
bool[] bits = new bool[Length];
for (int i = 0; i < bits.Length; i++)
{
bits[i] = this[i];
}
return bits;
}
public int Length { get; private set; }
public bool this[int index]
{
get
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException("index");
}
return (mBits[index >> 5] & (1u << (index & 31))) != 0;
}
}
public void SetBit(int index)
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException("index");
}
mBits[index >> 5] |= 1u << (index & 31);
}
internal bool GetAndSet(int index)
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException("index");
}
uint bits = mBits[index >> 5];
uint mask = 1u << (index & 31);
mBits[index >> 5] |= mask;
return (bits & mask) != 0;
}
public override string ToString()
{
StringBuilder sb = new StringBuilder(Length);
for (int i = 0; i < Length; i++)
{
sb.Append(this[i] ? 'x' : '.');
}
return sb.ToString();
}
}
}

View file

@ -1,89 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.LZMA
{
internal static class CRC
{
public const uint kInitCRC = 0xFFFFFFFF;
private static readonly uint[] kTable = new uint[4 * 256];
static CRC()
{
const uint kCrcPoly = 0xEDB88320;
for (uint i = 0; i < 256; i++)
{
uint r = i;
for (int j = 0; j < 8; j++)
{
r = (r >> 1) ^ (kCrcPoly & ~((r & 1) - 1));
}
kTable[i] = r;
}
for (uint i = 256; i < kTable.Length; i++)
{
uint r = kTable[i - 256];
kTable[i] = kTable[r & 0xFF] ^ (r >> 8);
}
}
public static uint From(Stream stream, long length)
{
uint crc = kInitCRC;
byte[] buffer = new byte[Math.Min(length, 4 << 10)];
while (length > 0)
{
int delta = stream.Read(buffer, 0, (int)Math.Min(length, buffer.Length));
if (delta == 0)
{
throw new EndOfStreamException();
}
crc = Update(crc, buffer, 0, delta);
length -= delta;
}
return Finish(crc);
}
public static uint Finish(uint crc)
{
return ~crc;
}
public static uint Update(uint crc, byte bt)
{
return kTable[(crc & 0xFF) ^ bt] ^ (crc >> 8);
}
public static uint Update(uint crc, uint value)
{
crc ^= value;
return kTable[0x300 + (crc & 0xFF)]
^ kTable[0x200 + ((crc >> 8) & 0xFF)]
^ kTable[0x100 + ((crc >> 16) & 0xFF)]
^ kTable[0x000 + (crc >> 24)];
}
public static uint Update(uint crc, ulong value)
{
return Update(Update(crc, (uint)value), (uint)(value >> 32));
}
public static uint Update(uint crc, long value)
{
return Update(crc, (ulong)value);
}
public static uint Update(uint crc, byte[] buffer, int offset, int length)
{
for (int i = 0; i < length; i++)
{
crc = Update(crc, buffer[offset + i]);
}
return crc;
}
}
}
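
Note (not part of this commit): the table-driven code above is the standard reflected CRC-32 (polynomial 0xEDB88320). A quick sanity check; the class is internal, so this only compiles inside the library:

// The well-known CRC-32 check value: CRC of the ASCII string "123456789".
byte[] data = System.Text.Encoding.ASCII.GetBytes("123456789");
uint crc = CRC.Update(CRC.kInitCRC, data, 0, data.Length);
uint digest = CRC.Finish(crc); // 0xCBF43926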

View file

@ -1,182 +0,0 @@
using System;
using System.IO;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
namespace SharpCompress.Compressors.LZMA
{
internal abstract class DecoderStream2 : Stream
{
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
internal static class DecoderStreamHelper
{
private static int FindCoderIndexForOutStreamIndex(CFolder folderInfo, int outStreamIndex)
{
for (int coderIndex = 0; coderIndex < folderInfo.Coders.Count; coderIndex++)
{
var coderInfo = folderInfo.Coders[coderIndex];
outStreamIndex -= coderInfo.NumOutStreams;
if (outStreamIndex < 0)
{
return coderIndex;
}
}
throw new InvalidOperationException("Could not link output stream to coder.");
}
private static void FindPrimaryOutStreamIndex(CFolder folderInfo, out int primaryCoderIndex,
out int primaryOutStreamIndex)
{
bool foundPrimaryOutStream = false;
primaryCoderIndex = -1;
primaryOutStreamIndex = -1;
for (int outStreamIndex = 0, coderIndex = 0;
coderIndex < folderInfo.Coders.Count;
coderIndex++)
{
for (int coderOutStreamIndex = 0;
coderOutStreamIndex < folderInfo.Coders[coderIndex].NumOutStreams;
coderOutStreamIndex++, outStreamIndex++)
{
if (folderInfo.FindBindPairForOutStream(outStreamIndex) < 0)
{
if (foundPrimaryOutStream)
{
throw new NotSupportedException("Multiple output streams.");
}
foundPrimaryOutStream = true;
primaryCoderIndex = coderIndex;
primaryOutStreamIndex = outStreamIndex;
}
}
}
if (!foundPrimaryOutStream)
{
throw new NotSupportedException("No output stream.");
}
}
private static Stream CreateDecoderStream(Stream[] packStreams, long[] packSizes, Stream[] outStreams,
CFolder folderInfo, int coderIndex, IPasswordProvider pass)
{
var coderInfo = folderInfo.Coders[coderIndex];
if (coderInfo.NumOutStreams != 1)
{
throw new NotSupportedException("Multiple output streams are not supported.");
}
int inStreamId = 0;
for (int i = 0; i < coderIndex; i++)
{
inStreamId += folderInfo.Coders[i].NumInStreams;
}
int outStreamId = 0;
for (int i = 0; i < coderIndex; i++)
{
outStreamId += folderInfo.Coders[i].NumOutStreams;
}
Stream[] inStreams = new Stream[coderInfo.NumInStreams];
for (int i = 0; i < inStreams.Length; i++, inStreamId++)
{
int bindPairIndex = folderInfo.FindBindPairForInStream(inStreamId);
if (bindPairIndex >= 0)
{
int pairedOutIndex = folderInfo.BindPairs[bindPairIndex].OutIndex;
if (outStreams[pairedOutIndex] != null)
{
throw new NotSupportedException("Overlapping stream bindings are not supported.");
}
int otherCoderIndex = FindCoderIndexForOutStreamIndex(folderInfo, pairedOutIndex);
inStreams[i] = CreateDecoderStream(packStreams, packSizes, outStreams, folderInfo, otherCoderIndex,
pass);
//inStreamSizes[i] = folderInfo.UnpackSizes[pairedOutIndex];
if (outStreams[pairedOutIndex] != null)
{
throw new NotSupportedException("Overlapping stream bindings are not supported.");
}
outStreams[pairedOutIndex] = inStreams[i];
}
else
{
int index = folderInfo.FindPackStreamArrayIndex(inStreamId);
if (index < 0)
{
throw new NotSupportedException("Could not find input stream binding.");
}
inStreams[i] = packStreams[index];
//inStreamSizes[i] = packSizes[index];
}
}
long unpackSize = folderInfo.UnpackSizes[outStreamId];
return DecoderRegistry.CreateDecoderStream(coderInfo.MethodId, inStreams, coderInfo.Props, pass, unpackSize);
}
internal static Stream CreateDecoderStream(Stream inStream, long startPos, long[] packSizes, CFolder folderInfo,
IPasswordProvider pass)
{
if (!folderInfo.CheckStructure())
{
throw new NotSupportedException("Unsupported stream binding structure.");
}
Stream[] inStreams = new Stream[folderInfo.PackStreams.Count];
for (int j = 0; j < folderInfo.PackStreams.Count; j++)
{
inStreams[j] = new BufferedSubStream(inStream, startPos, packSizes[j]);
startPos += packSizes[j];
}
Stream[] outStreams = new Stream[folderInfo.UnpackSizes.Count];
int primaryCoderIndex, primaryOutStreamIndex;
FindPrimaryOutStreamIndex(folderInfo, out primaryCoderIndex, out primaryOutStreamIndex);
return CreateDecoderStream(inStreams, packSizes, outStreams, folderInfo, primaryCoderIndex, pass);
}
}
}

View file

@ -1,161 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.LZMA
{
/// <summary>
/// The exception that is thrown when an error in the input stream occurs during decoding.
/// </summary>
internal class DataErrorException : Exception
{
public DataErrorException()
: base("Data Error")
{
}
}
/// <summary>
/// The exception that is thrown when the value of an argument is outside the allowable range.
/// </summary>
internal class InvalidParamException : Exception
{
public InvalidParamException()
: base("Invalid Parameter")
{
}
}
internal interface ICodeProgress
{
/// <summary>
/// Callback progress.
/// </summary>
/// <param name="inSize">
/// input size. -1 if unknown.
/// </param>
/// <param name="outSize">
/// output size. -1 if unknown.
/// </param>
void SetProgress(Int64 inSize, Int64 outSize);
}
internal interface ICoder
{
/// <summary>
/// Codes streams.
/// </summary>
/// <param name="inStream">
/// input Stream.
/// </param>
/// <param name="outStream">
/// output Stream.
/// </param>
/// <param name="inSize">
/// input Size. -1 if unknown.
/// </param>
/// <param name="outSize">
/// output Size. -1 if unknown.
/// </param>
/// <param name="progress">
/// callback progress reference.
/// </param>
void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress);
}
/// <summary>
/// Provides the fields that represent property identifiers for compressing.
/// </summary>
internal enum CoderPropID
{
/// <summary>
/// Specifies default property.
/// </summary>
DefaultProp = 0,
/// <summary>
/// Specifies size of dictionary.
/// </summary>
DictionarySize,
/// <summary>
/// Specifies size of memory for PPM*.
/// </summary>
UsedMemorySize,
/// <summary>
/// Specifies order for PPM methods.
/// </summary>
Order,
/// <summary>
/// Specifies Block Size.
/// </summary>
BlockSize,
/// <summary>
/// Specifies number of position state bits for LZMA (0 - x - 4).
/// </summary>
PosStateBits,
/// <summary>
/// Specifies number of literal context bits for LZMA (0 - x - 8).
/// </summary>
LitContextBits,
/// <summary>
/// Specifies number of literal position bits for LZMA (0 - x - 4).
/// </summary>
LitPosBits,
/// <summary>
/// Specifies number of fast bytes for LZ*.
/// </summary>
NumFastBytes,
/// <summary>
/// Specifies match finder. LZMA: "BT2", "BT4" or "BT4B".
/// </summary>
MatchFinder,
/// <summary>
/// Specifies the number of match finder cycles.
/// </summary>
MatchFinderCycles,
/// <summary>
/// Specifies number of passes.
/// </summary>
NumPasses,
/// <summary>
/// Specifies the algorithm to use.
/// </summary>
Algorithm,
/// <summary>
/// Specifies the number of threads.
/// </summary>
NumThreads,
/// <summary>
/// Specifies mode with end marker.
/// </summary>
EndMarker
}
internal interface ISetCoderProperties
{
void SetCoderProperties(CoderPropID[] propIDs, object[] properties);
}
internal interface IWriteCoderProperties
{
void WriteCoderProperties(Stream outStream);
}
internal interface ISetDecoderProperties
{
void SetDecoderProperties(byte[] properties);
}
}

View file

@ -1,68 +0,0 @@
namespace SharpCompress.Compressors.LZMA.LZ
{
internal class CRC
{
public static readonly uint[] Table;
static CRC()
{
Table = new uint[256];
const uint kPoly = 0xEDB88320;
for (uint i = 0; i < 256; i++)
{
uint r = i;
for (int j = 0; j < 8; j++)
{
if ((r & 1) != 0)
{
r = (r >> 1) ^ kPoly;
}
else
{
r >>= 1;
}
}
Table[i] = r;
}
}
private uint _value = 0xFFFFFFFF;
public void Init()
{
_value = 0xFFFFFFFF;
}
public void UpdateByte(byte b)
{
_value = Table[(((byte)(_value)) ^ b)] ^ (_value >> 8);
}
public void Update(byte[] data, uint offset, uint size)
{
for (uint i = 0; i < size; i++)
{
_value = Table[(((byte)(_value)) ^ data[offset + i])] ^ (_value >> 8);
}
}
public uint GetDigest()
{
return _value ^ 0xFFFFFFFF;
}
private static uint CalculateDigest(byte[] data, uint offset, uint size)
{
CRC crc = new CRC();
// crc.Init();
crc.Update(data, offset, size);
return crc.GetDigest();
}
private static bool VerifyDigest(uint digest, byte[] data, uint offset, uint size)
{
return (CalculateDigest(data, offset, size) == digest);
}
}
}

View file

@ -1,424 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.LZMA.LZ
{
internal class BinTree : InWindow
{
private UInt32 _cyclicBufferPos;
private UInt32 _cyclicBufferSize;
private UInt32 _matchMaxLen;
private UInt32[] _son;
private UInt32[] _hash;
private UInt32 _cutValue = 0xFF;
private UInt32 _hashMask;
private UInt32 _hashSizeSum;
private bool HASH_ARRAY = true;
private const UInt32 kHash2Size = 1 << 10;
private const UInt32 kHash3Size = 1 << 16;
private const UInt32 kBT2HashSize = 1 << 16;
private const UInt32 kStartMaxLen = 1;
private const UInt32 kHash3Offset = kHash2Size;
private const UInt32 kEmptyHashValue = 0;
private const UInt32 kMaxValForNormalize = ((UInt32)1 << 31) - 1;
private UInt32 kNumHashDirectBytes;
private UInt32 kMinMatchCheck = 4;
private UInt32 kFixHashSize = kHash2Size + kHash3Size;
public void SetType(int numHashBytes)
{
HASH_ARRAY = (numHashBytes > 2);
if (HASH_ARRAY)
{
kNumHashDirectBytes = 0;
kMinMatchCheck = 4;
kFixHashSize = kHash2Size + kHash3Size;
}
else
{
kNumHashDirectBytes = 2;
kMinMatchCheck = 2 + 1;
kFixHashSize = 0;
}
}
public new void SetStream(Stream stream)
{
base.SetStream(stream);
}
public new void ReleaseStream()
{
base.ReleaseStream();
}
public new void Init()
{
base.Init();
for (UInt32 i = 0; i < _hashSizeSum; i++)
{
_hash[i] = kEmptyHashValue;
}
_cyclicBufferPos = 0;
ReduceOffsets(-1);
}
public new void MovePos()
{
if (++_cyclicBufferPos >= _cyclicBufferSize)
{
_cyclicBufferPos = 0;
}
base.MovePos();
if (_pos == kMaxValForNormalize)
{
Normalize();
}
}
public new Byte GetIndexByte(Int32 index)
{
return base.GetIndexByte(index);
}
public new UInt32 GetMatchLen(Int32 index, UInt32 distance, UInt32 limit)
{
return base.GetMatchLen(index, distance, limit);
}
public new UInt32 GetNumAvailableBytes()
{
return base.GetNumAvailableBytes();
}
public void Create(UInt32 historySize, UInt32 keepAddBufferBefore,
UInt32 matchMaxLen, UInt32 keepAddBufferAfter)
{
if (historySize > kMaxValForNormalize - 256)
{
throw new Exception();
}
_cutValue = 16 + (matchMaxLen >> 1);
UInt32 windowReservSize = (historySize + keepAddBufferBefore +
matchMaxLen + keepAddBufferAfter) / 2 + 256;
base.Create(historySize + keepAddBufferBefore, matchMaxLen + keepAddBufferAfter, windowReservSize);
_matchMaxLen = matchMaxLen;
UInt32 cyclicBufferSize = historySize + 1;
if (_cyclicBufferSize != cyclicBufferSize)
{
_son = new UInt32[(_cyclicBufferSize = cyclicBufferSize) * 2];
}
UInt32 hs = kBT2HashSize;
if (HASH_ARRAY)
{
hs = historySize - 1;
hs |= (hs >> 1);
hs |= (hs >> 2);
hs |= (hs >> 4);
hs |= (hs >> 8);
hs >>= 1;
hs |= 0xFFFF;
if (hs > (1 << 24))
{
hs >>= 1;
}
_hashMask = hs;
hs++;
hs += kFixHashSize;
}
if (hs != _hashSizeSum)
{
_hash = new UInt32[_hashSizeSum = hs];
}
}
public UInt32 GetMatches(UInt32[] distances)
{
UInt32 lenLimit;
if (_pos + _matchMaxLen <= _streamPos)
{
lenLimit = _matchMaxLen;
}
else
{
lenLimit = _streamPos - _pos;
if (lenLimit < kMinMatchCheck)
{
MovePos();
return 0;
}
}
UInt32 offset = 0;
UInt32 matchMinPos = (_pos > _cyclicBufferSize) ? (_pos - _cyclicBufferSize) : 0;
UInt32 cur = _bufferOffset + _pos;
UInt32 maxLen = kStartMaxLen; // to avoid items for len < hashSize;
UInt32 hashValue, hash2Value = 0, hash3Value = 0;
if (HASH_ARRAY)
{
UInt32 temp = CRC.Table[_bufferBase[cur]] ^ _bufferBase[cur + 1];
hash2Value = temp & (kHash2Size - 1);
temp ^= ((UInt32)(_bufferBase[cur + 2]) << 8);
hash3Value = temp & (kHash3Size - 1);
hashValue = (temp ^ (CRC.Table[_bufferBase[cur + 3]] << 5)) & _hashMask;
}
else
{
hashValue = _bufferBase[cur] ^ ((UInt32)(_bufferBase[cur + 1]) << 8);
}
UInt32 curMatch = _hash[kFixHashSize + hashValue];
if (HASH_ARRAY)
{
UInt32 curMatch2 = _hash[hash2Value];
UInt32 curMatch3 = _hash[kHash3Offset + hash3Value];
_hash[hash2Value] = _pos;
_hash[kHash3Offset + hash3Value] = _pos;
if (curMatch2 > matchMinPos)
{
if (_bufferBase[_bufferOffset + curMatch2] == _bufferBase[cur])
{
distances[offset++] = maxLen = 2;
distances[offset++] = _pos - curMatch2 - 1;
}
}
if (curMatch3 > matchMinPos)
{
if (_bufferBase[_bufferOffset + curMatch3] == _bufferBase[cur])
{
if (curMatch3 == curMatch2)
{
offset -= 2;
}
distances[offset++] = maxLen = 3;
distances[offset++] = _pos - curMatch3 - 1;
curMatch2 = curMatch3;
}
}
if (offset != 0 && curMatch2 == curMatch)
{
offset -= 2;
maxLen = kStartMaxLen;
}
}
_hash[kFixHashSize + hashValue] = _pos;
UInt32 ptr0 = (_cyclicBufferPos << 1) + 1;
UInt32 ptr1 = (_cyclicBufferPos << 1);
UInt32 len0, len1;
len0 = len1 = kNumHashDirectBytes;
if (kNumHashDirectBytes != 0)
{
if (curMatch > matchMinPos)
{
if (_bufferBase[_bufferOffset + curMatch + kNumHashDirectBytes] !=
_bufferBase[cur + kNumHashDirectBytes])
{
distances[offset++] = maxLen = kNumHashDirectBytes;
distances[offset++] = _pos - curMatch - 1;
}
}
}
UInt32 count = _cutValue;
while (true)
{
if (curMatch <= matchMinPos || count-- == 0)
{
_son[ptr0] = _son[ptr1] = kEmptyHashValue;
break;
}
UInt32 delta = _pos - curMatch;
UInt32 cyclicPos = ((delta <= _cyclicBufferPos)
? (_cyclicBufferPos - delta)
: (_cyclicBufferPos - delta + _cyclicBufferSize)) << 1;
UInt32 pby1 = _bufferOffset + curMatch;
UInt32 len = Math.Min(len0, len1);
if (_bufferBase[pby1 + len] == _bufferBase[cur + len])
{
while (++len != lenLimit)
{
if (_bufferBase[pby1 + len] != _bufferBase[cur + len])
{
break;
}
}
if (maxLen < len)
{
distances[offset++] = maxLen = len;
distances[offset++] = delta - 1;
if (len == lenLimit)
{
_son[ptr1] = _son[cyclicPos];
_son[ptr0] = _son[cyclicPos + 1];
break;
}
}
}
if (_bufferBase[pby1 + len] < _bufferBase[cur + len])
{
_son[ptr1] = curMatch;
ptr1 = cyclicPos + 1;
curMatch = _son[ptr1];
len1 = len;
}
else
{
_son[ptr0] = curMatch;
ptr0 = cyclicPos;
curMatch = _son[ptr0];
len0 = len;
}
}
MovePos();
return offset;
}
public void Skip(UInt32 num)
{
do
{
UInt32 lenLimit;
if (_pos + _matchMaxLen <= _streamPos)
{
lenLimit = _matchMaxLen;
}
else
{
lenLimit = _streamPos - _pos;
if (lenLimit < kMinMatchCheck)
{
MovePos();
continue;
}
}
UInt32 matchMinPos = (_pos > _cyclicBufferSize) ? (_pos - _cyclicBufferSize) : 0;
UInt32 cur = _bufferOffset + _pos;
UInt32 hashValue;
if (HASH_ARRAY)
{
UInt32 temp = CRC.Table[_bufferBase[cur]] ^ _bufferBase[cur + 1];
UInt32 hash2Value = temp & (kHash2Size - 1);
_hash[hash2Value] = _pos;
temp ^= ((UInt32)(_bufferBase[cur + 2]) << 8);
UInt32 hash3Value = temp & (kHash3Size - 1);
_hash[kHash3Offset + hash3Value] = _pos;
hashValue = (temp ^ (CRC.Table[_bufferBase[cur + 3]] << 5)) & _hashMask;
}
else
{
hashValue = _bufferBase[cur] ^ ((UInt32)(_bufferBase[cur + 1]) << 8);
}
UInt32 curMatch = _hash[kFixHashSize + hashValue];
_hash[kFixHashSize + hashValue] = _pos;
UInt32 ptr0 = (_cyclicBufferPos << 1) + 1;
UInt32 ptr1 = (_cyclicBufferPos << 1);
UInt32 len0, len1;
len0 = len1 = kNumHashDirectBytes;
UInt32 count = _cutValue;
while (true)
{
if (curMatch <= matchMinPos || count-- == 0)
{
_son[ptr0] = _son[ptr1] = kEmptyHashValue;
break;
}
UInt32 delta = _pos - curMatch;
UInt32 cyclicPos = ((delta <= _cyclicBufferPos)
? (_cyclicBufferPos - delta)
: (_cyclicBufferPos - delta + _cyclicBufferSize)) << 1;
UInt32 pby1 = _bufferOffset + curMatch;
UInt32 len = Math.Min(len0, len1);
if (_bufferBase[pby1 + len] == _bufferBase[cur + len])
{
while (++len != lenLimit)
{
if (_bufferBase[pby1 + len] != _bufferBase[cur + len])
{
break;
}
}
if (len == lenLimit)
{
_son[ptr1] = _son[cyclicPos];
_son[ptr0] = _son[cyclicPos + 1];
break;
}
}
if (_bufferBase[pby1 + len] < _bufferBase[cur + len])
{
_son[ptr1] = curMatch;
ptr1 = cyclicPos + 1;
curMatch = _son[ptr1];
len1 = len;
}
else
{
_son[ptr0] = curMatch;
ptr0 = cyclicPos;
curMatch = _son[ptr0];
len0 = len;
}
}
MovePos();
}
while (--num != 0);
}
private void NormalizeLinks(UInt32[] items, UInt32 numItems, UInt32 subValue)
{
for (UInt32 i = 0; i < numItems; i++)
{
UInt32 value = items[i];
if (value <= subValue)
{
value = kEmptyHashValue;
}
else
{
value -= subValue;
}
items[i] = value;
}
}
private void Normalize()
{
UInt32 subValue = _pos - _cyclicBufferSize;
NormalizeLinks(_son, _cyclicBufferSize * 2, subValue);
NormalizeLinks(_hash, _hashSizeSum, subValue);
ReduceOffsets((Int32)subValue);
}
public void SetCutValue(UInt32 cutValue)
{
_cutValue = cutValue;
}
}
}

View file

@ -1,183 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.LZMA.LZ
{
internal class InWindow
{
public Byte[] _bufferBase; // pointer to buffer with data
private Stream _stream;
private UInt32 _posLimit; // offset (from _buffer) of the first byte at which a new block must be read
private bool _streamEndWasReached; // if (true) then _streamPos shows real end of stream
private UInt32 _pointerToLastSafePosition;
public UInt32 _bufferOffset;
public UInt32 _blockSize; // size of the allocated memory block
public UInt32 _pos; // offset (from _buffer) of the current byte
private UInt32 _keepSizeBefore; // how many bytes must be kept in the buffer before _pos
private UInt32 _keepSizeAfter; // how many bytes must be kept in the buffer after _pos
public UInt32 _streamPos; // offset (from _buffer) of the first byte not yet read from the Stream
public void MoveBlock()
{
UInt32 offset = _bufferOffset + _pos - _keepSizeBefore;
// we need one additional byte, since MovePos advances by 1 byte.
if (offset > 0)
{
offset--;
}
UInt32 numBytes = _bufferOffset + _streamPos - offset;
// check negative offset ????
for (UInt32 i = 0; i < numBytes; i++)
{
_bufferBase[i] = _bufferBase[offset + i];
}
_bufferOffset -= offset;
}
public virtual void ReadBlock()
{
if (_streamEndWasReached)
{
return;
}
while (true)
{
int size = (int)((0 - _bufferOffset) + _blockSize - _streamPos);
if (size == 0)
{
return;
}
int numReadBytes = _stream != null
? _stream.Read(_bufferBase, (int)(_bufferOffset + _streamPos), size)
: 0;
if (numReadBytes == 0)
{
_posLimit = _streamPos;
UInt32 pointerToPosition = _bufferOffset + _posLimit;
if (pointerToPosition > _pointerToLastSafePosition)
{
_posLimit = _pointerToLastSafePosition - _bufferOffset;
}
_streamEndWasReached = true;
return;
}
_streamPos += (UInt32)numReadBytes;
if (_streamPos >= _pos + _keepSizeAfter)
{
_posLimit = _streamPos - _keepSizeAfter;
}
}
}
private void Free()
{
_bufferBase = null;
}
public void Create(UInt32 keepSizeBefore, UInt32 keepSizeAfter, UInt32 keepSizeReserv)
{
_keepSizeBefore = keepSizeBefore;
_keepSizeAfter = keepSizeAfter;
UInt32 blockSize = keepSizeBefore + keepSizeAfter + keepSizeReserv;
if (_bufferBase == null || _blockSize != blockSize)
{
Free();
_blockSize = blockSize;
_bufferBase = new Byte[_blockSize];
}
_pointerToLastSafePosition = _blockSize - keepSizeAfter;
_streamEndWasReached = false;
}
public void SetStream(Stream stream)
{
_stream = stream;
if (_streamEndWasReached)
{
_streamEndWasReached = false;
if (IsDataStarved)
{
ReadBlock();
}
}
}
public void ReleaseStream()
{
_stream = null;
}
public void Init()
{
_bufferOffset = 0;
_pos = 0;
_streamPos = 0;
_streamEndWasReached = false;
ReadBlock();
}
public void MovePos()
{
_pos++;
if (_pos > _posLimit)
{
UInt32 pointerToPosition = _bufferOffset + _pos;
if (pointerToPosition > _pointerToLastSafePosition)
{
MoveBlock();
}
ReadBlock();
}
}
public Byte GetIndexByte(Int32 index)
{
return _bufferBase[_bufferOffset + _pos + index];
}
// index + limit must not exceed _keepSizeAfter;
public UInt32 GetMatchLen(Int32 index, UInt32 distance, UInt32 limit)
{
if (_streamEndWasReached)
{
if ((_pos + index) + limit > _streamPos)
{
limit = _streamPos - (UInt32)(_pos + index);
}
}
distance++;
// Byte *pby = _buffer + (size_t)_pos + index;
UInt32 pby = _bufferOffset + _pos + (UInt32)index;
UInt32 i;
for (i = 0; i < limit && _bufferBase[pby + i] == _bufferBase[pby + i - distance]; i++)
{
;
}
return i;
}
public UInt32 GetNumAvailableBytes()
{
return _streamPos - _pos;
}
public void ReduceOffsets(Int32 subValue)
{
_bufferOffset += (UInt32)subValue;
_posLimit -= (UInt32)subValue;
_pos -= (UInt32)subValue;
_streamPos -= (UInt32)subValue;
}
public bool IsDataStarved { get { return _streamPos - _pos < _keepSizeAfter; } }
}
}

View file

@ -1,205 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.LZMA.LZ
{
internal class OutWindow
{
private byte[] _buffer;
private int _windowSize;
private int _pos;
private int _streamPos;
private int _pendingLen;
private int _pendingDist;
private Stream _stream;
public long Total;
public long Limit;
public void Create(int windowSize)
{
if (_windowSize != windowSize)
{
_buffer = new byte[windowSize];
}
else
{
_buffer[windowSize - 1] = 0;
}
_windowSize = windowSize;
_pos = 0;
_streamPos = 0;
_pendingLen = 0;
Total = 0;
Limit = 0;
}
public void Reset()
{
Create(_windowSize);
}
public void Init(Stream stream)
{
ReleaseStream();
_stream = stream;
}
public void Train(Stream stream)
{
long len = stream.Length;
int size = (len < _windowSize) ? (int)len : _windowSize;
stream.Position = len - size;
Total = 0;
Limit = size;
_pos = _windowSize - size;
CopyStream(stream, size);
if (_pos == _windowSize)
{
_pos = 0;
}
_streamPos = _pos;
}
public void ReleaseStream()
{
Flush();
_stream = null;
}
public void Flush()
{
if (_stream == null)
{
return;
}
int size = _pos - _streamPos;
if (size == 0)
{
return;
}
_stream.Write(_buffer, _streamPos, size);
if (_pos >= _windowSize)
{
_pos = 0;
}
_streamPos = _pos;
}
public void CopyBlock(int distance, int len)
{
int size = len;
int pos = _pos - distance - 1;
if (pos < 0)
{
pos += _windowSize;
}
for (; size > 0 && _pos < _windowSize && Total < Limit; size--)
{
if (pos >= _windowSize)
{
pos = 0;
}
_buffer[_pos++] = _buffer[pos++];
Total++;
if (_pos >= _windowSize)
{
Flush();
}
}
_pendingLen = size;
_pendingDist = distance;
}
public void PutByte(byte b)
{
_buffer[_pos++] = b;
Total++;
if (_pos >= _windowSize)
{
Flush();
}
}
public byte GetByte(int distance)
{
int pos = _pos - distance - 1;
if (pos < 0)
{
pos += _windowSize;
}
return _buffer[pos];
}
public int CopyStream(Stream stream, int len)
{
int size = len;
while (size > 0 && _pos < _windowSize && Total < Limit)
{
int curSize = _windowSize - _pos;
if (curSize > Limit - Total)
{
curSize = (int)(Limit - Total);
}
if (curSize > size)
{
curSize = size;
}
int numReadBytes = stream.Read(_buffer, _pos, curSize);
if (numReadBytes == 0)
{
throw new DataErrorException();
}
size -= numReadBytes;
_pos += numReadBytes;
Total += numReadBytes;
if (_pos >= _windowSize)
{
Flush();
}
}
return len - size;
}
public void SetLimit(long size)
{
Limit = Total + size;
}
public bool HasSpace { get { return _pos < _windowSize && Total < Limit; } }
public bool HasPending { get { return _pendingLen > 0; } }
public int Read(byte[] buffer, int offset, int count)
{
if (_streamPos >= _pos)
{
return 0;
}
int size = _pos - _streamPos;
if (size > count)
{
size = count;
}
Buffer.BlockCopy(_buffer, _streamPos, buffer, offset, size);
_streamPos += size;
if (_streamPos >= _windowSize)
{
_pos = 0;
_streamPos = 0;
}
return size;
}
public void CopyPending()
{
if (_pendingLen > 0)
{
CopyBlock(_pendingDist, _pendingLen);
}
}
public int AvailableBytes { get { return _pos - _streamPos; } }
}
}
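CopyBlock above implements the LZ77 back-reference copy: distance 0 refers to the most recently written byte, and the copy is allowed to overlap the bytes it produces, which is how short patterns are repeated run-length style. A tiny illustrative sketch of that semantics using a plain list instead of the circular window (names are illustrative only):

using System;
using System.Collections.Generic;
using System.Text;

class Lz77CopyDemo
{
    // Same copy rule as OutWindow.CopyBlock: the source position is (written - distance - 1),
    // and the source may run into bytes appended by this very copy.
    static void CopyBlock(List<byte> output, int distance, int len)
    {
        int pos = output.Count - distance - 1;
        for (int i = 0; i < len; i++)
        {
            output.Add(output[pos + i]);
        }
    }

    static void Main()
    {
        var output = new List<byte> { (byte)'a', (byte)'b' };
        CopyBlock(output, 0, 4); // repeat the last byte four times
        Console.WriteLine(Encoding.ASCII.GetString(output.ToArray())); // prints "abbbbb"
    }
}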

View file

@ -1,109 +0,0 @@
namespace SharpCompress.Compressors.LZMA
{
internal abstract class Base
{
public const uint kNumRepDistances = 4;
public const uint kNumStates = 12;
// static byte []kLiteralNextStates = {0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5};
// static byte []kMatchNextStates = {7, 7, 7, 7, 7, 7, 7, 10, 10, 10, 10, 10};
// static byte []kRepNextStates = {8, 8, 8, 8, 8, 8, 8, 11, 11, 11, 11, 11};
// static byte []kShortRepNextStates = {9, 9, 9, 9, 9, 9, 9, 11, 11, 11, 11, 11};
public struct State
{
public uint Index;
public void Init()
{
Index = 0;
}
public void UpdateChar()
{
if (Index < 4)
{
Index = 0;
}
else if (Index < 10)
{
Index -= 3;
}
else
{
Index -= 6;
}
}
public void UpdateMatch()
{
Index = (uint)(Index < 7 ? 7 : 10);
}
public void UpdateRep()
{
Index = (uint)(Index < 7 ? 8 : 11);
}
public void UpdateShortRep()
{
Index = (uint)(Index < 7 ? 9 : 11);
}
public bool IsCharState()
{
return Index < 7;
}
}
public const int kNumPosSlotBits = 6;
public const int kDicLogSizeMin = 0;
// public const int kDicLogSizeMax = 30;
// public const uint kDistTableSizeMax = kDicLogSizeMax * 2;
public const int kNumLenToPosStatesBits = 2; // it's for speed optimization
public const uint kNumLenToPosStates = 1 << kNumLenToPosStatesBits;
public const uint kMatchMinLen = 2;
public static uint GetLenToPosState(uint len)
{
len -= kMatchMinLen;
if (len < kNumLenToPosStates)
{
return len;
}
return kNumLenToPosStates - 1;
}
public const int kNumAlignBits = 4;
public const uint kAlignTableSize = 1 << kNumAlignBits;
public const uint kAlignMask = (kAlignTableSize - 1);
public const uint kStartPosModelIndex = 4;
public const uint kEndPosModelIndex = 14;
public const uint kNumPosModels = kEndPosModelIndex - kStartPosModelIndex;
public const uint kNumFullDistances = 1 << ((int)kEndPosModelIndex / 2);
public const uint kNumLitPosStatesBitsEncodingMax = 4;
public const uint kNumLitContextBitsMax = 8;
public const int kNumPosStatesBitsMax = 4;
public const uint kNumPosStatesMax = (1 << kNumPosStatesBitsMax);
public const int kNumPosStatesBitsEncodingMax = 4;
public const uint kNumPosStatesEncodingMax = (1 << kNumPosStatesBitsEncodingMax);
public const int kNumLowLenBits = 3;
public const int kNumMidLenBits = 3;
public const int kNumHighLenBits = 8;
public const uint kNumLowLenSymbols = 1 << kNumLowLenBits;
public const uint kNumMidLenSymbols = 1 << kNumMidLenBits;
public const uint kNumLenSymbols = kNumLowLenSymbols + kNumMidLenSymbols +
(1 << kNumHighLenBits);
public const uint kMatchMaxLen = kMatchMinLen + kNumLenSymbols - 1;
}
}

View file

@ -1,456 +0,0 @@
using System;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
namespace SharpCompress.Compressors.LZMA
{
internal class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
private class LenDecoder
{
private BitDecoder m_Choice = new BitDecoder();
private BitDecoder m_Choice2 = new BitDecoder();
private readonly BitTreeDecoder[] m_LowCoder = new BitTreeDecoder[Base.kNumPosStatesMax];
private readonly BitTreeDecoder[] m_MidCoder = new BitTreeDecoder[Base.kNumPosStatesMax];
private BitTreeDecoder m_HighCoder = new BitTreeDecoder(Base.kNumHighLenBits);
private uint m_NumPosStates;
public void Create(uint numPosStates)
{
for (uint posState = m_NumPosStates; posState < numPosStates; posState++)
{
m_LowCoder[posState] = new BitTreeDecoder(Base.kNumLowLenBits);
m_MidCoder[posState] = new BitTreeDecoder(Base.kNumMidLenBits);
}
m_NumPosStates = numPosStates;
}
public void Init()
{
m_Choice.Init();
for (uint posState = 0; posState < m_NumPosStates; posState++)
{
m_LowCoder[posState].Init();
m_MidCoder[posState].Init();
}
m_Choice2.Init();
m_HighCoder.Init();
}
public uint Decode(RangeCoder.Decoder rangeDecoder, uint posState)
{
if (m_Choice.Decode(rangeDecoder) == 0)
{
return m_LowCoder[posState].Decode(rangeDecoder);
}
uint symbol = Base.kNumLowLenSymbols;
if (m_Choice2.Decode(rangeDecoder) == 0)
{
symbol += m_MidCoder[posState].Decode(rangeDecoder);
}
else
{
symbol += Base.kNumMidLenSymbols;
symbol += m_HighCoder.Decode(rangeDecoder);
}
return symbol;
}
}
private class LiteralDecoder
{
private struct Decoder2
{
private BitDecoder[] m_Decoders;
public void Create()
{
m_Decoders = new BitDecoder[0x300];
}
public void Init()
{
for (int i = 0; i < 0x300; i++)
{
m_Decoders[i].Init();
}
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder)
{
uint symbol = 1;
do
{
symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder);
}
while (symbol < 0x100);
return (byte)symbol;
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, byte matchByte)
{
uint symbol = 1;
do
{
uint matchBit = (uint)(matchByte >> 7) & 1;
matchByte <<= 1;
uint bit = m_Decoders[((1 + matchBit) << 8) + symbol].Decode(rangeDecoder);
symbol = (symbol << 1) | bit;
if (matchBit != bit)
{
while (symbol < 0x100)
{
symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder);
}
break;
}
}
while (symbol < 0x100);
return (byte)symbol;
}
}
private Decoder2[] m_Coders;
private int m_NumPrevBits;
private int m_NumPosBits;
private uint m_PosMask;
public void Create(int numPosBits, int numPrevBits)
{
if (m_Coders != null && m_NumPrevBits == numPrevBits &&
m_NumPosBits == numPosBits)
{
return;
}
m_NumPosBits = numPosBits;
m_PosMask = ((uint)1 << numPosBits) - 1;
m_NumPrevBits = numPrevBits;
uint numStates = (uint)1 << (m_NumPrevBits + m_NumPosBits);
m_Coders = new Decoder2[numStates];
for (uint i = 0; i < numStates; i++)
{
m_Coders[i].Create();
}
}
public void Init()
{
uint numStates = (uint)1 << (m_NumPrevBits + m_NumPosBits);
for (uint i = 0; i < numStates; i++)
{
m_Coders[i].Init();
}
}
private uint GetState(uint pos, byte prevByte)
{
return ((pos & m_PosMask) << m_NumPrevBits) + (uint)(prevByte >> (8 - m_NumPrevBits));
}
public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte)
{
return m_Coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder);
}
public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte)
{
return m_Coders[GetState(pos, prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte);
}
}
private OutWindow m_OutWindow;
private readonly BitDecoder[] m_IsMatchDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax];
private readonly BitDecoder[] m_IsRepDecoders = new BitDecoder[Base.kNumStates];
private readonly BitDecoder[] m_IsRepG0Decoders = new BitDecoder[Base.kNumStates];
private readonly BitDecoder[] m_IsRepG1Decoders = new BitDecoder[Base.kNumStates];
private readonly BitDecoder[] m_IsRepG2Decoders = new BitDecoder[Base.kNumStates];
private readonly BitDecoder[] m_IsRep0LongDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax];
private readonly BitTreeDecoder[] m_PosSlotDecoder = new BitTreeDecoder[Base.kNumLenToPosStates];
private readonly BitDecoder[] m_PosDecoders = new BitDecoder[Base.kNumFullDistances - Base.kEndPosModelIndex];
private BitTreeDecoder m_PosAlignDecoder = new BitTreeDecoder(Base.kNumAlignBits);
private readonly LenDecoder m_LenDecoder = new LenDecoder();
private readonly LenDecoder m_RepLenDecoder = new LenDecoder();
private readonly LiteralDecoder m_LiteralDecoder = new LiteralDecoder();
private int m_DictionarySize;
private uint m_PosStateMask;
private Base.State state = new Base.State();
private uint rep0, rep1, rep2, rep3;
public Decoder()
{
m_DictionarySize = -1;
for (int i = 0; i < Base.kNumLenToPosStates; i++)
{
m_PosSlotDecoder[i] = new BitTreeDecoder(Base.kNumPosSlotBits);
}
}
private void CreateDictionary()
{
if (m_DictionarySize < 0)
{
throw new InvalidParamException();
}
m_OutWindow = new OutWindow();
int blockSize = Math.Max(m_DictionarySize, (1 << 12));
m_OutWindow.Create(blockSize);
}
private void SetLiteralProperties(int lp, int lc)
{
if (lp > 8)
{
throw new InvalidParamException();
}
if (lc > 8)
{
throw new InvalidParamException();
}
m_LiteralDecoder.Create(lp, lc);
}
private void SetPosBitsProperties(int pb)
{
if (pb > Base.kNumPosStatesBitsMax)
{
throw new InvalidParamException();
}
uint numPosStates = (uint)1 << pb;
m_LenDecoder.Create(numPosStates);
m_RepLenDecoder.Create(numPosStates);
m_PosStateMask = numPosStates - 1;
}
private void Init()
{
uint i;
for (i = 0; i < Base.kNumStates; i++)
{
for (uint j = 0; j <= m_PosStateMask; j++)
{
uint index = (i << Base.kNumPosStatesBitsMax) + j;
m_IsMatchDecoders[index].Init();
m_IsRep0LongDecoders[index].Init();
}
m_IsRepDecoders[i].Init();
m_IsRepG0Decoders[i].Init();
m_IsRepG1Decoders[i].Init();
m_IsRepG2Decoders[i].Init();
}
m_LiteralDecoder.Init();
for (i = 0; i < Base.kNumLenToPosStates; i++)
{
m_PosSlotDecoder[i].Init();
}
// m_PosSpecDecoder.Init();
for (i = 0; i < Base.kNumFullDistances - Base.kEndPosModelIndex; i++)
{
m_PosDecoders[i].Init();
}
m_LenDecoder.Init();
m_RepLenDecoder.Init();
m_PosAlignDecoder.Init();
state.Init();
rep0 = 0;
rep1 = 0;
rep2 = 0;
rep3 = 0;
}
public void Code(Stream inStream, Stream outStream,
Int64 inSize, Int64 outSize, ICodeProgress progress)
{
if (m_OutWindow == null)
{
CreateDictionary();
}
m_OutWindow.Init(outStream);
if (outSize > 0)
{
m_OutWindow.SetLimit(outSize);
}
else
{
m_OutWindow.SetLimit(Int64.MaxValue - m_OutWindow.Total);
}
RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
rangeDecoder.Init(inStream);
Code(m_DictionarySize, m_OutWindow, rangeDecoder);
m_OutWindow.ReleaseStream();
rangeDecoder.ReleaseStream();
if (!rangeDecoder.IsFinished || (inSize > 0 && rangeDecoder.Total != inSize))
{
throw new DataErrorException();
}
if (m_OutWindow.HasPending)
{
throw new DataErrorException();
}
m_OutWindow = null;
}
internal bool Code(int dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder)
{
int dictionarySizeCheck = Math.Max(dictionarySize, 1);
outWindow.CopyPending();
while (outWindow.HasSpace)
{
uint posState = (uint)outWindow.Total & m_PosStateMask;
if (m_IsMatchDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(rangeDecoder) == 0)
{
byte b;
byte prevByte = outWindow.GetByte(0);
if (!state.IsCharState())
{
b = m_LiteralDecoder.DecodeWithMatchByte(rangeDecoder,
(uint)outWindow.Total, prevByte,
outWindow.GetByte((int)rep0));
}
else
{
b = m_LiteralDecoder.DecodeNormal(rangeDecoder, (uint)outWindow.Total, prevByte);
}
outWindow.PutByte(b);
state.UpdateChar();
}
else
{
uint len;
if (m_IsRepDecoders[state.Index].Decode(rangeDecoder) == 1)
{
if (m_IsRepG0Decoders[state.Index].Decode(rangeDecoder) == 0)
{
if (
m_IsRep0LongDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(
rangeDecoder) == 0)
{
state.UpdateShortRep();
outWindow.PutByte(outWindow.GetByte((int)rep0));
continue;
}
}
else
{
UInt32 distance;
if (m_IsRepG1Decoders[state.Index].Decode(rangeDecoder) == 0)
{
distance = rep1;
}
else
{
if (m_IsRepG2Decoders[state.Index].Decode(rangeDecoder) == 0)
{
distance = rep2;
}
else
{
distance = rep3;
rep3 = rep2;
}
rep2 = rep1;
}
rep1 = rep0;
rep0 = distance;
}
len = m_RepLenDecoder.Decode(rangeDecoder, posState) + Base.kMatchMinLen;
state.UpdateRep();
}
else
{
rep3 = rep2;
rep2 = rep1;
rep1 = rep0;
len = Base.kMatchMinLen + m_LenDecoder.Decode(rangeDecoder, posState);
state.UpdateMatch();
uint posSlot = m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder);
if (posSlot >= Base.kStartPosModelIndex)
{
int numDirectBits = (int)((posSlot >> 1) - 1);
rep0 = ((2 | (posSlot & 1)) << numDirectBits);
if (posSlot < Base.kEndPosModelIndex)
{
rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders,
rep0 - posSlot - 1, rangeDecoder, numDirectBits);
}
else
{
rep0 += (rangeDecoder.DecodeDirectBits(
numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits);
rep0 += m_PosAlignDecoder.ReverseDecode(rangeDecoder);
}
}
else
{
rep0 = posSlot;
}
}
if (rep0 >= outWindow.Total || rep0 >= dictionarySizeCheck)
{
if (rep0 == 0xFFFFFFFF)
{
return true;
}
throw new DataErrorException();
}
outWindow.CopyBlock((int)rep0, (int)len);
}
}
return false;
}
public void SetDecoderProperties(byte[] properties)
{
if (properties.Length < 1)
{
throw new InvalidParamException();
}
int lc = properties[0] % 9;
int remainder = properties[0] / 9;
int lp = remainder % 5;
int pb = remainder / 5;
if (pb > Base.kNumPosStatesBitsMax)
{
throw new InvalidParamException();
}
SetLiteralProperties(lp, lc);
SetPosBitsProperties(pb);
Init();
if (properties.Length >= 5)
{
m_DictionarySize = 0;
for (int i = 0; i < 4; i++)
{
m_DictionarySize += properties[1 + i] << (i * 8);
}
}
}
public void Train(Stream stream)
{
if (m_OutWindow == null)
{
CreateDictionary();
}
m_OutWindow.Train(stream);
}
}
}
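SetDecoderProperties above unpacks the single LZMA properties byte as lc + 9 * (lp + 5 * pb), optionally followed by a 32-bit little-endian dictionary size. A small worked sketch of that decoding for the common header byte 0x5D (illustrative code, not part of the library):

using System;

class LzmaPropsDemo
{
    static void Main()
    {
        // 0x5D is the default LZMA properties byte; the next four bytes here encode a 1 MiB dictionary.
        byte[] props = { 0x5D, 0x00, 0x00, 0x10, 0x00 };

        int lc = props[0] % 9;          // literal context bits  -> 3
        int remainder = props[0] / 9;
        int lp = remainder % 5;         // literal position bits -> 0
        int pb = remainder / 5;         // position state bits   -> 2

        int dictionarySize = 0;
        for (int i = 0; i < 4; i++)
        {
            dictionarySize += props[1 + i] << (i * 8);  // little-endian -> 1048576
        }

        Console.WriteLine($"lc={lc}, lp={lp}, pb={pb}, dictionary={dictionarySize}");
    }
}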

View file

@ -1,55 +0,0 @@
namespace SharpCompress.Compressors.LZMA
{
public class LzmaEncoderProperties
{
internal CoderPropID[] propIDs;
internal object[] properties;
public LzmaEncoderProperties()
: this(false)
{
}
public LzmaEncoderProperties(bool eos)
: this(eos, 1 << 20)
{
}
public LzmaEncoderProperties(bool eos, int dictionary)
: this(eos, dictionary, 32)
{
}
public LzmaEncoderProperties(bool eos, int dictionary, int numFastBytes)
{
int posStateBits = 2;
int litContextBits = 3;
int litPosBits = 0;
int algorithm = 2;
string mf = "bt4";
propIDs = new[]
{
CoderPropID.DictionarySize,
CoderPropID.PosStateBits,
CoderPropID.LitContextBits,
CoderPropID.LitPosBits,
CoderPropID.Algorithm,
CoderPropID.NumFastBytes,
CoderPropID.MatchFinder,
CoderPropID.EndMarker
};
properties = new object[]
{
dictionary,
posStateBits,
litContextBits,
litPosBits,
algorithm,
numFastBytes,
mf,
eos
};
}
}
}

View file

@ -1,321 +0,0 @@
using System;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.LZMA
{
public class LzmaStream : Stream
{
private readonly Stream inputStream;
private readonly long inputSize;
private readonly long outputSize;
private readonly int dictionarySize;
private readonly OutWindow outWindow = new OutWindow();
private readonly RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder();
private Decoder decoder;
private long position;
private bool endReached;
private long availableBytes;
private long rangeDecoderLimit;
private long inputPosition;
// LZMA2
private readonly bool isLZMA2;
private bool uncompressedChunk;
private bool needDictReset = true;
private bool needProps = true;
private readonly Encoder encoder;
private bool isDisposed;
public LzmaStream(byte[] properties, Stream inputStream)
: this(properties, inputStream, -1, -1, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize)
: this(properties, inputStream, inputSize, outputSize, null, properties.Length < 5)
{
}
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize,
Stream presetDictionary, bool isLZMA2)
{
this.inputStream = inputStream;
this.inputSize = inputSize;
this.outputSize = outputSize;
this.isLZMA2 = isLZMA2;
if (!isLZMA2)
{
dictionarySize = DataConverter.LittleEndian.GetInt32(properties, 1);
outWindow.Create(dictionarySize);
if (presetDictionary != null)
{
outWindow.Train(presetDictionary);
}
rangeDecoder.Init(inputStream);
decoder = new Decoder();
decoder.SetDecoderProperties(properties);
Properties = properties;
availableBytes = outputSize < 0 ? long.MaxValue : outputSize;
rangeDecoderLimit = inputSize;
}
else
{
dictionarySize = 2 | (properties[0] & 1);
dictionarySize <<= (properties[0] >> 1) + 11;
outWindow.Create(dictionarySize);
if (presetDictionary != null)
{
outWindow.Train(presetDictionary);
needDictReset = false;
}
Properties = new byte[1];
availableBytes = 0;
}
}
public LzmaStream(LzmaEncoderProperties properties, bool isLZMA2, Stream outputStream)
: this(properties, isLZMA2, null, outputStream)
{
}
public LzmaStream(LzmaEncoderProperties properties, bool isLZMA2, Stream presetDictionary, Stream outputStream)
{
this.isLZMA2 = isLZMA2;
availableBytes = 0;
endReached = true;
if (isLZMA2)
{
throw new NotImplementedException();
}
encoder = new Encoder();
encoder.SetCoderProperties(properties.propIDs, properties.properties);
MemoryStream propStream = new MemoryStream(5);
encoder.WriteCoderProperties(propStream);
Properties = propStream.ToArray();
encoder.SetStreams(null, outputStream, -1, -1);
if (presetDictionary != null)
{
encoder.Train(presetDictionary);
}
}
public override bool CanRead { get { return encoder == null; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return encoder != null; } }
public override void Flush()
{
}
protected override void Dispose(bool disposing)
{
if (isDisposed)
{
return;
}
isDisposed = true;
if (disposing)
{
if (encoder != null)
{
position = encoder.Code(null, true);
}
if (inputStream != null)
{
inputStream.Dispose();
}
}
base.Dispose(disposing);
}
public override long Length { get { return position + availableBytes; } }
public override long Position { get { return position; } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
if (endReached)
{
return 0;
}
int total = 0;
while (total < count)
{
if (availableBytes == 0)
{
if (isLZMA2)
{
decodeChunkHeader();
}
else
{
endReached = true;
}
if (endReached)
{
break;
}
}
int toProcess = count - total;
if (toProcess > availableBytes)
{
toProcess = (int)availableBytes;
}
outWindow.SetLimit(toProcess);
if (uncompressedChunk)
{
inputPosition += outWindow.CopyStream(inputStream, toProcess);
}
else if (decoder.Code(dictionarySize, outWindow, rangeDecoder)
&& outputSize < 0)
{
availableBytes = outWindow.AvailableBytes;
}
int read = outWindow.Read(buffer, offset, toProcess);
total += read;
offset += read;
position += read;
availableBytes -= read;
if (availableBytes == 0 && !uncompressedChunk)
{
rangeDecoder.ReleaseStream();
if (!rangeDecoder.IsFinished || (rangeDecoderLimit >= 0 && rangeDecoder.Total != rangeDecoderLimit))
{
throw new DataErrorException();
}
inputPosition += rangeDecoder.Total;
if (outWindow.HasPending)
{
throw new DataErrorException();
}
}
}
if (endReached)
{
if (inputSize >= 0 && inputPosition != inputSize)
{
throw new DataErrorException();
}
if (outputSize >= 0 && position != outputSize)
{
throw new DataErrorException();
}
}
return total;
}
private void decodeChunkHeader()
{
int control = inputStream.ReadByte();
inputPosition++;
if (control == 0x00)
{
endReached = true;
return;
}
if (control >= 0xE0 || control == 0x01)
{
needProps = true;
needDictReset = false;
outWindow.Reset();
}
else if (needDictReset)
{
throw new DataErrorException();
}
if (control >= 0x80)
{
uncompressedChunk = false;
availableBytes = (control & 0x1F) << 16;
availableBytes += (inputStream.ReadByte() << 8) + inputStream.ReadByte() + 1;
inputPosition += 2;
rangeDecoderLimit = (inputStream.ReadByte() << 8) + inputStream.ReadByte() + 1;
inputPosition += 2;
if (control >= 0xC0)
{
needProps = false;
Properties[0] = (byte)inputStream.ReadByte();
inputPosition++;
decoder = new Decoder();
decoder.SetDecoderProperties(Properties);
}
else if (needProps)
{
throw new DataErrorException();
}
else if (control >= 0xA0)
{
decoder = new Decoder();
decoder.SetDecoderProperties(Properties);
}
rangeDecoder.Init(inputStream);
}
else if (control > 0x02)
{
throw new DataErrorException();
}
else
{
uncompressedChunk = true;
availableBytes = (inputStream.ReadByte() << 8) + inputStream.ReadByte() + 1;
inputPosition += 2;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
if (encoder != null)
{
position = encoder.Code(new MemoryStream(buffer, offset, count), false);
}
}
public byte[] Properties = new byte[5];
}
}
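A hedged usage sketch for the LzmaStream wrapper above, decompressing a standalone .lzma payload whose header is the 5-byte properties block followed by an 8-byte uncompressed size. File names are placeholders, and it assumes .NET 4+ for Stream.CopyTo and that the stripped build exposes the same public constructor shown above:

using System;
using System.IO;
using SharpCompress.Compressors.LZMA;

class LzmaStreamDemo
{
    static void Main()
    {
        using (FileStream input = File.OpenRead("payload.lzma"))   // placeholder path
        using (FileStream output = File.Create("payload.bin"))     // placeholder path
        {
            byte[] props = new byte[5];
            if (input.Read(props, 0, 5) != 5)
                throw new EndOfStreamException("missing LZMA properties header");

            byte[] sizeBytes = new byte[8];
            if (input.Read(sizeBytes, 0, 8) != 8)
                throw new EndOfStreamException("missing uncompressed size field");
            long outSize = BitConverter.ToInt64(sizeBytes, 0);      // -1 means "unknown, rely on the end marker"

            using (var lzma = new LzmaStream(props, input, -1, outSize))
            {
                lzma.CopyTo(output);
            }
        }
    }
}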

View file

@ -1,252 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
internal class Encoder
{
public const uint kTopValue = (1 << 24);
private Stream Stream;
public UInt64 Low;
public uint Range;
private uint _cacheSize;
private byte _cache;
//long StartPosition;
public void SetStream(Stream stream)
{
Stream = stream;
}
public void ReleaseStream()
{
Stream = null;
}
public void Init()
{
//StartPosition = Stream.Position;
Low = 0;
Range = 0xFFFFFFFF;
_cacheSize = 1;
_cache = 0;
}
public void FlushData()
{
for (int i = 0; i < 5; i++)
{
ShiftLow();
}
}
public void FlushStream()
{
Stream.Flush();
}
public void CloseStream()
{
Stream.Dispose();
}
public void Encode(uint start, uint size, uint total)
{
Low += start * (Range /= total);
Range *= size;
while (Range < kTopValue)
{
Range <<= 8;
ShiftLow();
}
}
public void ShiftLow()
{
if ((uint)Low < 0xFF000000 || (uint)(Low >> 32) == 1)
{
byte temp = _cache;
do
{
Stream.WriteByte((byte)(temp + (Low >> 32)));
temp = 0xFF;
}
while (--_cacheSize != 0);
_cache = (byte)(((uint)Low) >> 24);
}
_cacheSize++;
Low = ((uint)Low) << 8;
}
public void EncodeDirectBits(uint v, int numTotalBits)
{
for (int i = numTotalBits - 1; i >= 0; i--)
{
Range >>= 1;
if (((v >> i) & 1) == 1)
{
Low += Range;
}
if (Range < kTopValue)
{
Range <<= 8;
ShiftLow();
}
}
}
public void EncodeBit(uint size0, int numTotalBits, uint symbol)
{
uint newBound = (Range >> numTotalBits) * size0;
if (symbol == 0)
{
Range = newBound;
}
else
{
Low += newBound;
Range -= newBound;
}
while (Range < kTopValue)
{
Range <<= 8;
ShiftLow();
}
}
public long GetProcessedSizeAdd()
{
return -1;
//return _cacheSize + Stream.Position - StartPosition + 4;
// (long)Stream.GetProcessedSize();
}
}
internal class Decoder
{
public const uint kTopValue = (1 << 24);
public uint Range;
public uint Code;
// public Buffer.InBuffer Stream = new Buffer.InBuffer(1 << 16);
public Stream Stream;
public long Total;
public void Init(Stream stream)
{
// Stream.Init(stream);
Stream = stream;
Code = 0;
Range = 0xFFFFFFFF;
for (int i = 0; i < 5; i++)
{
Code = (Code << 8) | (byte)Stream.ReadByte();
}
Total = 5;
}
public void ReleaseStream()
{
// Stream.ReleaseStream();
Stream = null;
}
public void CloseStream()
{
Stream.Dispose();
}
public void Normalize()
{
while (Range < kTopValue)
{
Code = (Code << 8) | (byte)Stream.ReadByte();
Range <<= 8;
Total++;
}
}
public void Normalize2()
{
if (Range < kTopValue)
{
Code = (Code << 8) | (byte)Stream.ReadByte();
Range <<= 8;
Total++;
}
}
public uint GetThreshold(uint total)
{
return Code / (Range /= total);
}
public void Decode(uint start, uint size)
{
Code -= start * Range;
Range *= size;
Normalize();
}
public uint DecodeDirectBits(int numTotalBits)
{
uint range = Range;
uint code = Code;
uint result = 0;
for (int i = numTotalBits; i > 0; i--)
{
range >>= 1;
/*
result <<= 1;
if (code >= range)
{
code -= range;
result |= 1;
}
*/
uint t = (code - range) >> 31;
code -= range & (t - 1);
result = (result << 1) | (1 - t);
if (range < kTopValue)
{
code = (code << 8) | (byte)Stream.ReadByte();
range <<= 8;
Total++;
}
}
Range = range;
Code = code;
return result;
}
public uint DecodeBit(uint size0, int numTotalBits)
{
uint newBound = (Range >> numTotalBits) * size0;
uint symbol;
if (Code < newBound)
{
symbol = 0;
Range = newBound;
}
else
{
symbol = 1;
Code -= newBound;
Range -= newBound;
}
Normalize();
return symbol;
}
public bool IsFinished { get { return Code == 0; } }
// ulong GetProcessedSize() {return Stream.GetProcessedSize(); }
}
}

View file

@ -1,140 +0,0 @@
using System;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
internal struct BitEncoder
{
public const int kNumBitModelTotalBits = 11;
public const uint kBitModelTotal = (1 << kNumBitModelTotalBits);
private const int kNumMoveBits = 5;
private const int kNumMoveReducingBits = 2;
public const int kNumBitPriceShiftBits = 6;
private uint Prob;
public void Init()
{
Prob = kBitModelTotal >> 1;
}
public void UpdateModel(uint symbol)
{
if (symbol == 0)
{
Prob += (kBitModelTotal - Prob) >> kNumMoveBits;
}
else
{
Prob -= (Prob) >> kNumMoveBits;
}
}
public void Encode(Encoder encoder, uint symbol)
{
// encoder.EncodeBit(Prob, kNumBitModelTotalBits, symbol);
// UpdateModel(symbol);
uint newBound = (encoder.Range >> kNumBitModelTotalBits) * Prob;
if (symbol == 0)
{
encoder.Range = newBound;
Prob += (kBitModelTotal - Prob) >> kNumMoveBits;
}
else
{
encoder.Low += newBound;
encoder.Range -= newBound;
Prob -= (Prob) >> kNumMoveBits;
}
if (encoder.Range < Encoder.kTopValue)
{
encoder.Range <<= 8;
encoder.ShiftLow();
}
}
private static readonly UInt32[] ProbPrices = new UInt32[kBitModelTotal >> kNumMoveReducingBits];
static BitEncoder()
{
const int kNumBits = (kNumBitModelTotalBits - kNumMoveReducingBits);
for (int i = kNumBits - 1; i >= 0; i--)
{
UInt32 start = (UInt32)1 << (kNumBits - i - 1);
UInt32 end = (UInt32)1 << (kNumBits - i);
for (UInt32 j = start; j < end; j++)
{
ProbPrices[j] = ((UInt32)i << kNumBitPriceShiftBits) +
(((end - j) << kNumBitPriceShiftBits) >> (kNumBits - i - 1));
}
}
}
public uint GetPrice(uint symbol)
{
return ProbPrices[(((Prob - symbol) ^ ((-(int)symbol))) & (kBitModelTotal - 1)) >> kNumMoveReducingBits];
}
public uint GetPrice0()
{
return ProbPrices[Prob >> kNumMoveReducingBits];
}
public uint GetPrice1()
{
return ProbPrices[(kBitModelTotal - Prob) >> kNumMoveReducingBits];
}
}
internal struct BitDecoder
{
public const int kNumBitModelTotalBits = 11;
public const uint kBitModelTotal = (1 << kNumBitModelTotalBits);
private const int kNumMoveBits = 5;
private uint Prob;
public void UpdateModel(int numMoveBits, uint symbol)
{
if (symbol == 0)
{
Prob += (kBitModelTotal - Prob) >> numMoveBits;
}
else
{
Prob -= (Prob) >> numMoveBits;
}
}
public void Init()
{
Prob = kBitModelTotal >> 1;
}
public uint Decode(Decoder rangeDecoder)
{
uint newBound = (rangeDecoder.Range >> kNumBitModelTotalBits) * Prob;
if (rangeDecoder.Code < newBound)
{
rangeDecoder.Range = newBound;
Prob += (kBitModelTotal - Prob) >> kNumMoveBits;
if (rangeDecoder.Range < Decoder.kTopValue)
{
rangeDecoder.Code = (rangeDecoder.Code << 8) | (byte)rangeDecoder.Stream.ReadByte();
rangeDecoder.Range <<= 8;
rangeDecoder.Total++;
}
return 0;
}
rangeDecoder.Range -= newBound;
rangeDecoder.Code -= newBound;
Prob -= (Prob) >> kNumMoveBits;
if (rangeDecoder.Range < Decoder.kTopValue)
{
rangeDecoder.Code = (rangeDecoder.Code << 8) | (byte)rangeDecoder.Stream.ReadByte();
rangeDecoder.Range <<= 8;
rangeDecoder.Total++;
}
return 1;
}
}
}
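The adaptive bit model above keeps an 11-bit probability (kBitModelTotal = 2048) and nudges it toward the bit just coded by 1/32 of the remaining range (kNumMoveBits = 5). A tiny worked sketch of the update rule on its own:

using System;

class BitModelDemo
{
    static void Main()
    {
        const uint kBitModelTotal = 1u << 11;  // 2048
        const int kNumMoveBits = 5;

        uint prob = kBitModelTotal >> 1;       // start at 1024, i.e. p(bit = 0) = 0.5

        prob += (kBitModelTotal - prob) >> kNumMoveBits;  // after coding a 0-bit: 1024 + 32
        Console.WriteLine(prob);                          // 1056

        prob -= prob >> kNumMoveBits;                     // after coding a 1-bit: 1056 - 33
        Console.WriteLine(prob);                          // 1023
    }
}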

View file

@ -1,163 +0,0 @@
using System;
namespace SharpCompress.Compressors.LZMA.RangeCoder
{
internal struct BitTreeEncoder
{
private readonly BitEncoder[] Models;
private readonly int NumBitLevels;
public BitTreeEncoder(int numBitLevels)
{
NumBitLevels = numBitLevels;
Models = new BitEncoder[1 << numBitLevels];
}
public void Init()
{
for (uint i = 1; i < (1 << NumBitLevels); i++)
{
Models[i].Init();
}
}
public void Encode(Encoder rangeEncoder, UInt32 symbol)
{
UInt32 m = 1;
for (int bitIndex = NumBitLevels; bitIndex > 0;)
{
bitIndex--;
UInt32 bit = (symbol >> bitIndex) & 1;
Models[m].Encode(rangeEncoder, bit);
m = (m << 1) | bit;
}
}
public void ReverseEncode(Encoder rangeEncoder, UInt32 symbol)
{
UInt32 m = 1;
for (UInt32 i = 0; i < NumBitLevels; i++)
{
UInt32 bit = symbol & 1;
Models[m].Encode(rangeEncoder, bit);
m = (m << 1) | bit;
symbol >>= 1;
}
}
public UInt32 GetPrice(UInt32 symbol)
{
UInt32 price = 0;
UInt32 m = 1;
for (int bitIndex = NumBitLevels; bitIndex > 0;)
{
bitIndex--;
UInt32 bit = (symbol >> bitIndex) & 1;
price += Models[m].GetPrice(bit);
m = (m << 1) + bit;
}
return price;
}
public UInt32 ReverseGetPrice(UInt32 symbol)
{
UInt32 price = 0;
UInt32 m = 1;
for (int i = NumBitLevels; i > 0; i--)
{
UInt32 bit = symbol & 1;
symbol >>= 1;
price += Models[m].GetPrice(bit);
m = (m << 1) | bit;
}
return price;
}
public static UInt32 ReverseGetPrice(BitEncoder[] Models, UInt32 startIndex,
int NumBitLevels, UInt32 symbol)
{
UInt32 price = 0;
UInt32 m = 1;
for (int i = NumBitLevels; i > 0; i--)
{
UInt32 bit = symbol & 1;
symbol >>= 1;
price += Models[startIndex + m].GetPrice(bit);
m = (m << 1) | bit;
}
return price;
}
public static void ReverseEncode(BitEncoder[] Models, UInt32 startIndex,
Encoder rangeEncoder, int NumBitLevels, UInt32 symbol)
{
UInt32 m = 1;
for (int i = 0; i < NumBitLevels; i++)
{
UInt32 bit = symbol & 1;
Models[startIndex + m].Encode(rangeEncoder, bit);
m = (m << 1) | bit;
symbol >>= 1;
}
}
}
internal struct BitTreeDecoder
{
private readonly BitDecoder[] Models;
private readonly int NumBitLevels;
public BitTreeDecoder(int numBitLevels)
{
NumBitLevels = numBitLevels;
Models = new BitDecoder[1 << numBitLevels];
}
public void Init()
{
for (uint i = 1; i < (1 << NumBitLevels); i++)
{
Models[i].Init();
}
}
public uint Decode(Decoder rangeDecoder)
{
uint m = 1;
for (int bitIndex = NumBitLevels; bitIndex > 0; bitIndex--)
{
m = (m << 1) + Models[m].Decode(rangeDecoder);
}
return m - ((uint)1 << NumBitLevels);
}
public uint ReverseDecode(Decoder rangeDecoder)
{
uint m = 1;
uint symbol = 0;
for (int bitIndex = 0; bitIndex < NumBitLevels; bitIndex++)
{
uint bit = Models[m].Decode(rangeDecoder);
m <<= 1;
m += bit;
symbol |= (bit << bitIndex);
}
return symbol;
}
public static uint ReverseDecode(BitDecoder[] Models, UInt32 startIndex,
Decoder rangeDecoder, int NumBitLevels)
{
uint m = 1;
uint symbol = 0;
for (int bitIndex = 0; bitIndex < NumBitLevels; bitIndex++)
{
uint bit = Models[startIndex + m].Decode(rangeDecoder);
m <<= 1;
m += bit;
symbol |= (bit << bitIndex);
}
return symbol;
}
}
}

View file

@ -1,58 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.Common.SevenZip;
//using SharpCompress.Compressors.BZip2;
//using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Filters;
using SharpCompress.Compressors.LZMA.Utilites;
//using SharpCompress.Compressors.PPMd;
namespace SharpCompress.Compressors.LZMA
{
internal static class DecoderRegistry
{
private const uint k_Copy = 0x0;
private const uint k_Delta = 3;
private const uint k_LZMA2 = 0x21;
private const uint k_LZMA = 0x030101;
private const uint k_PPMD = 0x030401;
private const uint k_BCJ = 0x03030103;
private const uint k_BCJ2 = 0x0303011B;
private const uint k_Deflate = 0x040108;
private const uint k_BZip2 = 0x040202;
internal static Stream CreateDecoderStream(CMethodId id, Stream[] inStreams, byte[] info, IPasswordProvider pass,
long limit)
{
switch (id.Id)
{
case k_Copy:
if (info != null)
{
throw new NotSupportedException();
}
return inStreams.Single();
case k_LZMA:
case k_LZMA2:
return new LzmaStream(info, inStreams.Single(), -1, limit);
/*#if !NO_CRYPTO
case CMethodId.kAESId:
return new AesDecoderStream(inStreams.Single(), info, pass, limit);
#endif*/
case k_BCJ:
return new BCJFilter(false, inStreams.Single());
/*case k_BCJ2:
return new Bcj2DecoderStream(inStreams, info, limit);
case k_BZip2:
return new BZip2Stream(inStreams.Single(), CompressionMode.Decompress, true);
case k_PPMD:
return new PpmdStream(new PpmdProperties(info), inStreams.Single(), false);
case k_Deflate:
return new DeflateStream(inStreams.Single(), CompressionMode.Decompress);*/
default:
throw new NotSupportedException();
}
}
}
}

View file

@ -1,105 +0,0 @@
using System;
using System.Diagnostics;
using System.IO;
namespace SharpCompress.Compressors.LZMA.Utilites
{
internal class CrcCheckStream : Stream
{
private readonly uint mExpectedCRC;
private uint mCurrentCRC;
private bool mClosed;
private readonly long[] mBytes = new long[256];
private long mLength;
public CrcCheckStream(uint crc)
{
mExpectedCRC = crc;
mCurrentCRC = CRC.kInitCRC;
}
protected override void Dispose(bool disposing)
{
if (mCurrentCRC != mExpectedCRC)
{
throw new InvalidOperationException();
}
try
{
if (disposing && !mClosed)
{
mClosed = true;
mCurrentCRC = CRC.Finish(mCurrentCRC);
#if DEBUG
if (mCurrentCRC == mExpectedCRC)
{
Debug.WriteLine("CRC ok: " + mExpectedCRC.ToString("x8"));
}
else
{
Debugger.Break();
Debug.WriteLine("bad CRC");
}
double lengthInv = 1.0 / mLength;
double entropy = 0;
for (int i = 0; i < 256; i++)
{
if (mBytes[i] != 0)
{
double p = lengthInv * mBytes[i];
entropy -= p * Math.Log(p, 256);
}
}
Debug.WriteLine("entropy: " + (int)(entropy * 100) + "%");
#endif
}
}
finally
{
base.Dispose(disposing);
}
}
public override bool CanRead { get { return false; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return true; } }
public override void Flush()
{
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
throw new InvalidOperationException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
mLength += count;
for (int i = 0; i < count; i++)
{
mBytes[buffer[offset + i]]++;
}
mCurrentCRC = CRC.Update(mCurrentCRC, buffer, offset, count);
}
}
}

View file

@ -1,7 +0,0 @@
namespace SharpCompress.Compressors.LZMA.Utilites
{
internal interface IPasswordProvider
{
string CryptoGetTextPassword();
}
}

View file

@ -1,92 +0,0 @@
using System;
using System.Diagnostics;
using System.IO;
namespace SharpCompress.Compressors.LZMA.Utilites
{
internal enum BlockType : byte
{
#region Constants
End = 0,
Header = 1,
ArchiveProperties = 2,
AdditionalStreamsInfo = 3,
MainStreamsInfo = 4,
FilesInfo = 5,
PackInfo = 6,
UnpackInfo = 7,
SubStreamsInfo = 8,
Size = 9,
CRC = 10,
Folder = 11,
CodersUnpackSize = 12,
NumUnpackStream = 13,
EmptyStream = 14,
EmptyFile = 15,
Anti = 16,
Name = 17,
CTime = 18,
ATime = 19,
MTime = 20,
WinAttributes = 21,
Comment = 22,
EncodedHeader = 23,
StartPos = 24,
Dummy = 25
#endregion
}
internal static class Utils
{
[Conditional("DEBUG")]
public static void Assert(bool expression)
{
if (!expression)
{
if (Debugger.IsAttached)
{
Debugger.Break();
}
throw new Exception("Assertion failed.");
}
}
public static void ReadExact(this Stream stream, byte[] buffer, int offset, int length)
{
if (stream == null)
{
throw new ArgumentNullException("stream");
}
if (buffer == null)
{
throw new ArgumentNullException("buffer");
}
if (offset < 0 || offset > buffer.Length)
{
throw new ArgumentOutOfRangeException("offset");
}
if (length < 0 || length > buffer.Length - offset)
{
throw new ArgumentOutOfRangeException("length");
}
while (length > 0)
{
int fetched = stream.Read(buffer, offset, length);
if (fetched <= 0)
{
throw new EndOfStreamException();
}
offset += fetched;
length -= fetched;
}
}
}
}

File diff suppressed because it is too large

View file

@ -1,94 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.IO
{
internal class BufferedSubStream : Stream
{
private long position;
private int cacheOffset;
private int cacheLength;
private readonly byte[] cache;
public BufferedSubStream(Stream stream, long origin, long bytesToRead)
{
Stream = stream;
position = origin;
BytesLeftToRead = bytesToRead;
cache = new byte[32 << 10];
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
//Stream.Dispose();
}
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; private set; }
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
if (count > BytesLeftToRead)
{
count = (int)BytesLeftToRead;
}
if (count > 0)
{
if (cacheLength == 0)
{
cacheOffset = 0;
Stream.Position = position;
cacheLength = Stream.Read(cache, 0, cache.Length);
position += cacheLength;
}
if (count > cacheLength)
{
count = cacheLength;
}
Buffer.BlockCopy(cache, cacheOffset, buffer, offset, count);
cacheOffset += count;
cacheLength -= count;
BytesLeftToRead -= count;
}
return count;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View file

@ -1,54 +0,0 @@
using System.IO;
namespace SharpCompress.IO
{
internal class NonDisposingStream : Stream
{
public NonDisposingStream(Stream stream)
{
Stream = stream;
}
protected override void Dispose(bool disposing)
{
//don't dispose anything
}
public Stream Stream { get; private set; }
public override bool CanRead { get { return Stream.CanRead; } }
public override bool CanSeek { get { return Stream.CanSeek; } }
public override bool CanWrite { get { return Stream.CanWrite; } }
public override void Flush()
{
Stream.Flush();
}
public override long Length { get { return Stream.Length; } }
public override long Position { get { return Stream.Position; } set { Stream.Position = value; } }
public override int Read(byte[] buffer, int offset, int count)
{
return Stream.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
{
return Stream.Seek(offset, origin);
}
public override void SetLength(long value)
{
Stream.SetLength(value);
}
public override void Write(byte[] buffer, int offset, int count)
{
Stream.Write(buffer, offset, count);
}
}
}

View file

@ -1,79 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.IO
{
internal class ReadOnlySubStream : Stream
{
public ReadOnlySubStream(Stream stream, long bytesToRead)
: this(stream, null, bytesToRead)
{
}
public ReadOnlySubStream(Stream stream, long? origin, long bytesToRead)
{
Stream = stream;
if (origin != null)
{
stream.Position = origin.Value;
}
BytesLeftToRead = bytesToRead;
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
//Stream.Dispose();
}
}
private long BytesLeftToRead { get; set; }
public Stream Stream { get; private set; }
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return false; } }
public override void Flush()
{
throw new NotSupportedException();
}
public override long Length { get { throw new NotSupportedException(); } }
public override long Position { get { throw new NotSupportedException(); } set { throw new NotSupportedException(); } }
public override int Read(byte[] buffer, int offset, int count)
{
if (BytesLeftToRead < count)
{
count = (int)BytesLeftToRead;
}
int read = Stream.Read(buffer, offset, count);
if (read > 0)
{
BytesLeftToRead -= read;
}
return read;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
}
}

View file

@ -1,151 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress
{
internal class LazyReadOnlyCollection<T> : ICollection<T>
{
private readonly List<T> backing = new List<T>();
private readonly IEnumerator<T> source;
private bool fullyLoaded;
public LazyReadOnlyCollection(IEnumerable<T> source)
{
this.source = source.GetEnumerator();
}
private class LazyLoader : IEnumerator<T>
{
private readonly LazyReadOnlyCollection<T> lazyReadOnlyCollection;
private bool disposed;
private int index = -1;
internal LazyLoader(LazyReadOnlyCollection<T> lazyReadOnlyCollection)
{
this.lazyReadOnlyCollection = lazyReadOnlyCollection;
}
#region IEnumerator<T> Members
public T Current { get { return lazyReadOnlyCollection.backing[index]; } }
#endregion
#region IDisposable Members
public void Dispose()
{
if (!disposed)
{
disposed = true;
}
}
#endregion
#region IEnumerator Members
object IEnumerator.Current { get { return Current; } }
public bool MoveNext()
{
if (index + 1 < lazyReadOnlyCollection.backing.Count)
{
index++;
return true;
}
if (!lazyReadOnlyCollection.fullyLoaded && lazyReadOnlyCollection.source.MoveNext())
{
lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.source.Current);
index++;
return true;
}
lazyReadOnlyCollection.fullyLoaded = true;
return false;
}
public void Reset()
{
throw new NotSupportedException();
}
#endregion
}
internal void EnsureFullyLoaded()
{
if (!fullyLoaded)
{
this.ForEach(x => { });
fullyLoaded = true;
}
}
internal IEnumerable<T> GetLoaded()
{
return backing;
}
#region ICollection<T> Members
public void Add(T item)
{
throw new NotSupportedException();
}
public void Clear()
{
throw new NotSupportedException();
}
public bool Contains(T item)
{
EnsureFullyLoaded();
return backing.Contains(item);
}
public void CopyTo(T[] array, int arrayIndex)
{
EnsureFullyLoaded();
backing.CopyTo(array, arrayIndex);
}
public int Count
{
get
{
EnsureFullyLoaded();
return backing.Count;
}
}
public bool IsReadOnly { get { return true; } }
public bool Remove(T item)
{
throw new NotSupportedException();
}
#endregion
#region IEnumerable<T> Members
//TODO check for concurrent access
public IEnumerator<T> GetEnumerator()
{
return new LazyLoader(this);
}
#endregion
#region IEnumerable Members
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
#endregion
}
}
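LazyReadOnlyCollection above pulls items from the source enumerator only as they are first requested and caches them in the backing list; Count and Contains force a full load. A small illustrative sketch of that behaviour (the type is internal, so this assumes the caller can see it, e.g. code inside the same assembly):

using System;
using System.Collections.Generic;
using SharpCompress;

class LazyCollectionDemo
{
    static IEnumerable<int> Source()
    {
        for (int i = 0; i < 3; i++)
        {
            Console.WriteLine($"producing {i}");   // printed lazily, on first enumeration
            yield return i;
        }
    }

    static void Main()
    {
        var lazy = new LazyReadOnlyCollection<int>(Source());

        foreach (int value in lazy)                // items are produced on demand here
            Console.WriteLine($"consumed {value}");

        Console.WriteLine(lazy.Count);             // already fully loaded; prints 3
    }
}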

View file

@ -1,226 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
namespace SharpCompress.Readers
{
/// <summary>
/// A generic push reader that reads unseekable compressed streams.
/// </summary>
public abstract class AbstractReader<TEntry, TVolume> : IReader//, IReaderExtractionListener
where TEntry : Entry
where TVolume : Volume
{
private bool completed;
private IEnumerator<TEntry> entriesForCurrentReadStream;
private bool wroteCurrentEntry;
//public event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionBegin;
// public event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionEnd;
//public event EventHandler<EventArgs> CompressedBytesRead;
// public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
internal AbstractReader(ReaderOptions options, ArchiveType archiveType)
{
ArchiveType = archiveType;
Options = options;
}
internal ReaderOptions Options { get; private set; }
public ArchiveType ArchiveType { get; private set; }
/// <summary>
/// Current volume that the current entry resides in
/// </summary>
public abstract TVolume Volume { get; }
/// <summary>
/// Current file entry
/// </summary>
public TEntry Entry { get { return entriesForCurrentReadStream.Current; } }
#region IDisposable Members
public void Dispose()
{
if (entriesForCurrentReadStream != null)
{
entriesForCurrentReadStream.Dispose();
}
Volume.Dispose();
}
#endregion
public bool Cancelled { get; private set; }
/// <summary>
/// Indicates that the remaining entries are not required.
/// On dispose of an EntryStream, the stream will not skip to the end of the entry.
/// An attempt to move to the next entry will throw an exception, as the compressed stream is not positioned at an entry boundary.
/// </summary>
public void Cancel()
{
if (!completed)
{
Cancelled = true;
}
}
public bool MoveToNextEntry()
{
if (completed)
{
return false;
}
if (Cancelled)
{
throw new InvalidOperationException("Reader has been cancelled.");
}
if (entriesForCurrentReadStream == null)
{
return LoadStreamForReading(RequestInitialStream());
}
if (!wroteCurrentEntry)
{
SkipEntry();
}
wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
{
return true;
}
completed = true;
return false;
}
internal bool LoadStreamForReading(Stream stream)
{
if (entriesForCurrentReadStream != null)
{
entriesForCurrentReadStream.Dispose();
}
if ((stream == null) || (!stream.CanRead))
{
throw new Exception("File is split into multiple archives: '"
+ Entry.Key +
"'. A new readable stream is required. Use Cancel if it was intended.");
}
entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
if (entriesForCurrentReadStream.MoveNext())
{
return true;
}
return false;
}
internal virtual Stream RequestInitialStream()
{
return Volume.Stream;
}
internal virtual bool NextEntryForCurrentStream()
{
return entriesForCurrentReadStream.MoveNext();
}
internal abstract IEnumerable<TEntry> GetEntries(Stream stream);
#region Entry Skip/Write
private void SkipEntry()
{
if (!Entry.IsDirectory)
{
Skip();
}
}
private readonly byte[] skipBuffer = new byte[4096];
private void Skip()
{
if (!Entry.IsSolid)
{
var rawStream = Entry.Parts.First().GetRawStream();
if (rawStream != null)
{
var bytesToAdvance = Entry.CompressedSize;
for (var i = 0; i < bytesToAdvance / skipBuffer.Length; i++)
{
rawStream.Read(skipBuffer, 0, skipBuffer.Length);
}
rawStream.Read(skipBuffer, 0, (int)(bytesToAdvance % skipBuffer.Length));
return;
}
}
using (var s = OpenEntryStream())
{
while (s.Read(skipBuffer, 0, skipBuffer.Length) > 0)
{
}
}
}
public void WriteEntryTo(Stream writableStream)
{
if (wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
if ((writableStream == null) || (!writableStream.CanWrite))
{
throw new ArgumentNullException(
"A writable Stream was required. Use Cancel if that was intended.");
}
//var streamListener = this as IReaderExtractionListener;
//streamListener.FireEntryExtractionBegin(Entry);
Write(writableStream);
//streamListener.FireEntryExtractionEnd(Entry);
wroteCurrentEntry = true;
}
internal void Write(Stream writeStream)
{
using (Stream s = OpenEntryStream())
{
s.TransferTo(writeStream);
}
}
public EntryStream OpenEntryStream()
{
if (wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = GetEntryStream();
wroteCurrentEntry = true;
return stream;
}
/// <summary>
/// Retains a reference to the entry stream, so we can check whether it completed later.
/// </summary>
protected EntryStream CreateEntryStream(Stream decompressed)
{
return new EntryStream(this, decompressed);
}
protected virtual EntryStream GetEntryStream()
{
return CreateEntryStream(Entry.Parts.First().GetCompressedStream());
}
#endregion
IEntry IReader.Entry { get { return Entry; } }
}
}
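
Cancel, as documented above, is the way to stop early without draining the rest of the archive; MoveToNextEntry must not be called afterwards. A hedged fragment, assuming a reader over this hunk's AbstractReader obtained elsewhere (no factory appears in this diff):

// Sketch only, not the library's documented sample code.
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.Key.EndsWith(".txt")) continue;
    using (var entryStream = reader.OpenEntryStream())
    {
        // consume only the entry we care about
    }
    reader.Cancel();   // remaining entries are not required; do not call MoveToNextEntry again
    break;
}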

View file

@@ -1,25 +0,0 @@
namespace SharpCompress.Readers
{
public class ExtractionOptions
{
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure
/// </summary>
public bool ExtractFullPath { get; set; }
/// <summary>
/// preserve file time
/// </summary>
public bool PreserveFileTime { get; set; }
/// <summary>
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
}
}

View file

@@ -1,40 +0,0 @@
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Readers
{
public interface IReader : IDisposable
{
//event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionBegin;
//event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionEnd;
//event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
//event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
//ArchiveType ArchiveType { get; }
IEntry Entry { get; }
/// <summary>
/// Decompresses the current entry to the stream. This cannot be called twice for the current entry.
/// </summary>
/// <param name="writableStream"></param>
void WriteEntryTo(Stream writableStream);
bool Cancelled { get; }
void Cancel();
/// <summary>
/// Moves to the next entry by reading more data from the underlying stream. The current entry's data is skipped if it has not been read.
/// </summary>
/// <returns></returns>
bool MoveToNextEntry();
/// <summary>
/// Opens the current entry as a stream that will decompress as it is read.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
EntryStream OpenEntryStream();
}
}
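
Taken together, the contract above is a simple forward-only loop: MoveToNextEntry skips whatever was not read, and WriteEntryTo/OpenEntryStream may each be used once per entry. A minimal sketch, assuming an IReader obtained from elsewhere (no factory appears in this hunk):

// Hedged fragment; OpenReaderSomehow is a hypothetical helper.
using (IReader reader = OpenReaderSomehow())
{
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.IsDirectory) continue;
        string target = Path.GetFileName(reader.Entry.Key);
        using (Stream output = File.Create(target))
        {
            reader.WriteEntryTo(output);            // once per entry
        }
    }
}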

View file

@@ -1,100 +0,0 @@
#if !NO_FILE
using System.IO;
using SharpCompress.Common;
#endif
namespace SharpCompress.Readers
{
public static class IReaderExtensions
{
#if !NO_FILE
public static void WriteEntryTo(this IReader reader, string filePath)
{
using (Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write))
{
reader.WriteEntryTo(stream);
}
}
public static void WriteEntryTo(this IReader reader, FileInfo filePath)
{
using (Stream stream = filePath.Open(FileMode.Create))
{
reader.WriteEntryTo(stream);
}
}
/// <summary>
/// Extract all remaining unread entries to the specified directory, retaining filenames
/// </summary>
public static void WriteAllToDirectory(this IReader reader, string destinationDirectory,
ExtractionOptions options)
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(destinationDirectory, options);
}
}
/// <summary>
/// Extract to the specified directory, retaining the filename
/// </summary>
public static void WriteEntryToDirectory(this IReader reader, string destinationDirectory,
ExtractionOptions options)
{
string destinationFileName = string.Empty;
string file = Path.GetFileName(reader.Entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(reader.Entry.Key);
string destdir = Path.Combine(destinationDirectory, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(destinationDirectory, file);
}
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToFile(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
/// <summary>
/// Extract to the specified file
/// </summary>
public static void WriteEntryToFile(this IReader reader, string destinationFileName,
ExtractionOptions options)
{
FileMode fm = FileMode.Create;
options = options ?? new ExtractionOptions()
{
Overwrite = true
};
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
using (FileStream fs = File.Open(destinationFileName, fm))
{
reader.WriteEntryTo(fs);
//using (Stream s = reader.OpenEntryStream())
//{
// s.TransferTo(fs);
//}
}
reader.Entry.PreserveExtractionOptions(destinationFileName, options);
}
#endif
}
}
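
For whole-archive extraction the extension methods above are the intended entry point; the same hedged reader as before can be unpacked in one call:

// Recreate the archive's folder structure under a target directory, overwriting existing files.
reader.WriteAllToDirectory(@"C:\Temp\extracted", new ExtractionOptions
{
    ExtractFullPath = true,
    Overwrite = true
});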

View file

@@ -1,12 +0,0 @@
namespace SharpCompress.Readers
{
public class ReaderOptions
{
/// <summary>
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
public bool LookForHeader { get; set; }
public string Password { get; set; }
public bool LeaveStreamOpen = true;
}
}

View file

@@ -1,281 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace SharpCompress
{
internal static class Utility
{
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
/// <param name="number">Number to operate on</param>
/// <param name="bits">Ammount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static int URShift(int number, int bits)
{
if (number >= 0)
{
return number >> bits;
}
return (number >> bits) + (2 << ~bits);
}
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
/// <param name="number">Number to operate on</param>
/// <param name="bits">Ammount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static long URShift(long number, int bits)
{
if (number >= 0)
{
return number >> bits;
}
return (number >> bits) + (2L << ~bits);
}
/// <summary>
/// Fills the array with a specific value from a start index to an end index.
/// </summary>
/// <param name="array">The array to be filled.</param>
/// <param name="fromindex">The first index to be filled.</param>
/// <param name="toindex">The last index to be filled.</param>
/// <param name="val">The value to fill the array with.</param>
public static void Fill<T>(T[] array, int fromindex, int toindex, T val) where T : struct
{
if (array.Length == 0)
{
throw new NullReferenceException();
}
if (fromindex > toindex)
{
throw new ArgumentException();
}
if ((fromindex < 0) || array.Length < toindex)
{
throw new IndexOutOfRangeException();
}
for (int index = (fromindex > 0) ? fromindex-- : fromindex; index < toindex; index++)
{
array[index] = val;
}
}
/// <summary>
/// Fills the array with a specific value.
/// </summary>
/// <param name="array">The array to be filled.</param>
/// <param name="val">The value to fill the array with.</param>
public static void Fill<T>(T[] array, T val) where T : struct
{
Fill(array, 0, array.Length, val);
}
public static void SetSize(this List<byte> list, int count)
{
if (count > list.Count)
{
for (int i = list.Count; i < count; i++)
{
list.Add(0x0);
}
}
else
{
byte[] temp = new byte[count];
list.CopyTo(temp, 0);
list.Clear();
list.AddRange(temp);
}
}
public static void AddRange<T>(this ICollection<T> destination, IEnumerable<T> source)
{
foreach (T item in source)
{
destination.Add(item);
}
}
public static void ForEach<T>(this IEnumerable<T> items, Action<T> action)
{
foreach (T item in items)
{
action(item);
}
}
public static IEnumerable<T> AsEnumerable<T>(this T item)
{
yield return item;
}
public static void CheckNotNull(this object obj, string name)
{
if (obj == null)
{
throw new ArgumentNullException(name);
}
}
public static void CheckNotNullOrEmpty(this string obj, string name)
{
obj.CheckNotNull(name);
if (obj.Length == 0)
{
throw new ArgumentException("String is empty.");
}
}
public static void Skip(this Stream source, long advanceAmount)
{
byte[] buffer = new byte[32 * 1024];
int read = 0;
int readCount = 0;
do
{
readCount = buffer.Length;
if (readCount > advanceAmount)
{
readCount = (int)advanceAmount;
}
read = source.Read(buffer, 0, readCount);
if (read <= 0)
{
break;
}
advanceAmount -= read;
if (advanceAmount == 0)
{
break;
}
}
while (true);
}
public static void SkipAll(this Stream source)
{
byte[] buffer = new byte[32 * 1024];
do
{
}
while (source.Read(buffer, 0, buffer.Length) == buffer.Length);
}
public static DateTime DosDateToDateTime(UInt16 iDate, UInt16 iTime)
{
int year = iDate / 512 + 1980;
int month = iDate % 512 / 32;
int day = iDate % 512 % 32;
int hour = iTime / 2048;
int minute = iTime % 2048 / 32;
int second = iTime % 2048 % 32 * 2;
if (iDate == UInt16.MaxValue || month == 0 || day == 0)
{
year = 1980;
month = 1;
day = 1;
}
if (iTime == UInt16.MaxValue)
{
hour = minute = second = 0;
}
DateTime dt;
try
{
dt = new DateTime(year, month, day, hour, minute, second, DateTimeKind.Local);
}
catch
{
dt = new DateTime();
}
return dt;
}
public static uint DateTimeToDosTime(this DateTime? dateTime)
{
if (dateTime == null)
{
return 0;
}
var localDateTime = dateTime.Value.ToLocalTime();
return (uint)(
(localDateTime.Second / 2) | (localDateTime.Minute << 5) | (localDateTime.Hour << 11) |
(localDateTime.Day << 16) | (localDateTime.Month << 21) |
((localDateTime.Year - 1980) << 25));
}
public static DateTime DosDateToDateTime(UInt32 iTime)
{
return DosDateToDateTime((UInt16)(iTime / 65536),
(UInt16)(iTime % 65536));
}
public static DateTime DosDateToDateTime(Int32 iTime)
{
return DosDateToDateTime((UInt32)iTime);
}
public static long TransferTo(this Stream source, Stream destination)
{
byte[] array = new byte[81920];
int count;
long total = 0;
while ((count = source.Read(array, 0, array.Length)) != 0)
{
total += count;
destination.Write(array, 0, count);
}
return total;
}
public static bool ReadFully(this Stream stream, byte[] buffer)
{
int total = 0;
int read;
while ((read = stream.Read(buffer, total, buffer.Length - total)) > 0)
{
total += read;
if (total >= buffer.Length)
{
return true;
}
}
return (total >= buffer.Length);
}
public static string TrimNulls(this string source)
{
return source.Replace('\0', ' ').Trim();
}
public static bool BinaryEquals(this byte[] source, byte[] target)
{
if (source.Length != target.Length)
{
return false;
}
for (int i = 0; i < source.Length; ++i)
{
if (source[i] != target[i])
{
return false;
}
}
return true;
}
public static void CopyTo(this byte[] array, byte[] destination, int index)
{
Array.Copy(array, 0, destination, index, array.Length);
}
}
}
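
Two of the helpers above benefit from a worked example. URShift emulates an unsigned right shift on signed integers, and DosDateToDateTime unpacks the packed DOS date/time fields (year-since-1980/month/day and hour/minute/second-halved). The values below were checked against the arithmetic in this file; the call sites are hypothetical, since Utility is internal to the library:

// URShift(-8, 1) == 0x7FFFFFFC (2147483644), i.e. the bit pattern of (uint)-8 shifted right by one.
int shifted = Utility.URShift(-8, 1);

// DOS date 18766 = (2016-1980)<<9 | 10<<5 | 14, DOS time 36925 = 18<<11 | 1<<5 | 29.
// Decodes to 2016-10-14 18:01:58 (local time); note that seconds are stored halved.
DateTime stamp = Utility.DosDateToDateTime((UInt16)18766, (UInt16)36925);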

Binary file not shown.

View file

@@ -56,6 +56,11 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<Reference Include="SharpCompressStripped, Version=0.0.0.0, Culture=neutral, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>.\SharpCompressStripped.dll</HintPath>
<Private>False</Private>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
@@ -64,6 +69,7 @@
<Reference Include="System.Windows.Forms" />
</ItemGroup>
<ItemGroup>
<Compile Include="Helpers\EmbeddedAssembly.cs" />
<Compile Include="MainForm.cs">
<SubType>Form</SubType>
</Compile>
@@ -91,70 +97,6 @@
</None>
<Compile Include="Helpers\TaskbarProgress.cs" />
<Compile Include="Helpers\Webdata.cs" />
<Compile Include="SharpCompress\Archives\AbstractArchive.cs" />
<Compile Include="SharpCompress\Archives\ArchiveFactory.cs" />
<Compile Include="SharpCompress\Archives\IArchive.cs" />
<Compile Include="SharpCompress\Archives\IArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\IArchiveExtractionListener.cs" />
<Compile Include="SharpCompress\Archives\SevenZip\SevenZipArchive.cs" />
<Compile Include="SharpCompress\Archives\SevenZip\SevenZipArchiveEntry.cs" />
<Compile Include="SharpCompress\Common\ArchiveExtractionEventArgs.cs" />
<Compile Include="SharpCompress\Common\ArchiveType.cs" />
<Compile Include="SharpCompress\Common\CompressionType.cs" />
<Compile Include="SharpCompress\Common\Entry.cs" />
<Compile Include="SharpCompress\Common\EntryStream.cs" />
<Compile Include="SharpCompress\Common\FilePart.cs" />
<Compile Include="SharpCompress\Common\FilePartExtractionBeginEventArgs.cs" />
<Compile Include="SharpCompress\Common\IEntry.cs" />
<Compile Include="SharpCompress\Common\IEntry.Extensions.cs" />
<Compile Include="SharpCompress\Common\IVolume.cs" />
<Compile Include="SharpCompress\Common\ReaderExtractionEventArgs.cs" />
<Compile Include="SharpCompress\Common\SevenZip\ArchiveDatabase.cs" />
<Compile Include="SharpCompress\Common\SevenZip\ArchiveReader.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CBindPair.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CCoderInfo.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CFileItem.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CFolder.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CMethodId.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CStreamSwitch.cs" />
<Compile Include="SharpCompress\Common\SevenZip\DataReader.cs" />
<Compile Include="SharpCompress\Common\SevenZip\SevenZipEntry.cs" />
<Compile Include="SharpCompress\Common\SevenZip\SevenZipFilePart.cs" />
<Compile Include="SharpCompress\Common\SevenZip\SevenZipVolume.cs" />
<Compile Include="SharpCompress\Common\Volume.cs" />
<Compile Include="SharpCompress\Compressors\Filters\BCJFilter.cs" />
<Compile Include="SharpCompress\Compressors\Filters\Filter.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\BitVector.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\CRC.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\DecoderStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\ICoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaBase.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaDecoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaEncoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaEncoderProperties.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\CRC.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\LzBinTree.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\LzInWindow.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\LzOutWindow.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\RangeCoder\RangeCoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\RangeCoder\RangeCoderBit.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\RangeCoder\RangeCoderBitTree.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Registry.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilities\CrcCheckStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilities\IPasswordProvider.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilities\Utils.cs" />
<Compile Include="SharpCompress\Converters\DataConverter.cs" />
<Compile Include="SharpCompress\IO\BufferedSubStream.cs" />
<Compile Include="SharpCompress\IO\NonDisposingStream.cs" />
<Compile Include="SharpCompress\IO\ReadOnlySubStream.cs" />
<Compile Include="SharpCompress\LazyReadOnlyCollection.cs" />
<Compile Include="SharpCompress\Readers\AbstractReader.cs" />
<Compile Include="SharpCompress\Readers\ExtractionOptions.cs" />
<Compile Include="SharpCompress\Readers\IReader.cs" />
<Compile Include="SharpCompress\Readers\IReaderExtensions.cs" />
<Compile Include="SharpCompress\Readers\ReaderOptions.cs" />
<Compile Include="SharpCompress\Utility.cs" />
</ItemGroup>
<ItemGroup>
<BootstrapperPackage Include="Microsoft.Net.Client.3.5">
@@ -186,6 +128,9 @@
<ItemGroup>
<None Include="Updater.ico" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="SharpCompressStripped.dll" />
</ItemGroup>
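
Because the SharpCompressStripped reference is marked <Private>False</Private> and the DLL only ships as an embedded resource, the runtime cannot probe for it on disk; resolution has to go through the AssemblyResolve hook the updater registers at startup. The handler body is not part of this diff, but a plausible sketch using the EmbeddedAssembly helper added in this commit looks like this:

// Assumed handler shape; only EmbeddedAssembly.Get is taken from this commit.
private static Assembly AssemblyResolve(object sender, ResolveEventArgs args)
{
    // Return the assembly loaded from the embedded resource, or null to let default probing continue.
    return EmbeddedAssembly.Get(args.Name);
}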
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.