diff --git a/ArchiveDiag-Worker/.gitignore b/ArchiveDiag-Worker/.gitignore
new file mode 100644
index 000000000..ff5b00c50
--- /dev/null
+++ b/ArchiveDiag-Worker/.gitignore
@@ -0,0 +1,264 @@
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+
+# Azure Functions localsettings file
+local.settings.json
+
+# User-specific files
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+
+# Visual Studio 2015 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUNIT
+*.VisualState.xml
+TestResult.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# DNX
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+*_i.c
+*_p.c
+*_i.h
+*.ilk
+*.meta
+*.obj
+*.pch
+*.pdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# TODO: Comment the next line if you want to checkin your web deploy settings
+# but database connection strings (with potential passwords) will be unencrypted
+#*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# The packages folder can be ignored because of Package Restore
+**/packages/*
+# except build/, which is used as an MSBuild target.
+!**/packages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/packages/repositories.config
+# NuGet v3's project.json files produces more ignoreable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+node_modules/
+orleans.codegen.cs
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+
+# SQL Server files
+*.mdf
+*.ldf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush
+.cr/
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
\ No newline at end of file
diff --git a/ArchiveDiag-Worker/ArchiveDiag-Worker.csproj b/ArchiveDiag-Worker/ArchiveDiag-Worker.csproj
new file mode 100644
index 000000000..ab2d7fd8d
--- /dev/null
+++ b/ArchiveDiag-Worker/ArchiveDiag-Worker.csproj
@@ -0,0 +1,25 @@
+
+
+ netcoreapp3.1
+ v3
+ ArchiveDiag_Worker
+ 611792f1-7d4a-46ba-85b5-873da1fe5642
+
+
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+ Never
+
+
+
\ No newline at end of file
diff --git a/ArchiveDiag-Worker/ProcessArchive.cs b/ArchiveDiag-Worker/ProcessArchive.cs
new file mode 100644
index 000000000..eab89db51
--- /dev/null
+++ b/ArchiveDiag-Worker/ProcessArchive.cs
@@ -0,0 +1,76 @@
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Threading.Tasks;
+using ArchiveDiagJson;
+using Azure.Storage.Blobs;
+using Microsoft.Azure.WebJobs;
+using Microsoft.Azure.WebJobs.Host;
+using Microsoft.Extensions.Logging;
+using Microsoft.WindowsAzure.Storage;
+using Microsoft.WindowsAzure.Storage.Blob;
+using ArchiveDiagApp = ICSharpCode.SharpZipLib.ArchiveDiag.Program;
+
+namespace SharpZipLib.ArchiveDiag.Worker
+{
+ public static class ArchiveDiagWorker
+ {
+ [FunctionName("ProcessArchive")]
+ public static async Task Run([BlobTrigger("uploads/{name}", Connection = "AzureWebJobsStorage")]Stream blobStream, string name, ILogger log)
+ {
+ log.LogInformation($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {blobStream.Length} Bytes");
+
+
+ //
+
+ log.LogInformation("Creating reports storage client");
+ var reports = new BlobContainerClient(Environment.GetEnvironmentVariable("AzureWebJobsStorage"), "reports");
+
+ var jobs = new BlobContainerClient(Environment.GetEnvironmentVariable("AzureWebJobsStorage"), "jobs");
+
+ var jobClient = jobs.GetBlobClient(name);
+
+ var job = await jobClient.ExistsAsync()
+					? await JsonSerializer.DeserializeAsync<JobStatus>((await jobClient.DownloadAsync()).Value.Content)
+ : new JobStatus() {FileName = "Unknown", Status = ""};
+
+
+ await using (var ms = new MemoryStream())
+ {
+ await JsonSerializer.SerializeAsync(ms, job.WithStatus("processing"));
+ ms.Seek(0, SeekOrigin.Begin);
+ await jobClient.UploadAsync(ms, overwrite:true);
+ }
+
+ var reportTempFile = new FileInfo(Path.GetTempFileName());
+
+ await using (var reportStream = reportTempFile.Create())
+ {
+ log.LogInformation("Analyzing file...");
+ ArchiveDiagApp.Run(blobStream, job.FileName, reportStream);
+ }
+
+ await using (var reportStream = reportTempFile.OpenRead())
+ {
+ log.LogInformation("Uploading results...");
+ await reports.UploadBlobAsync($"{name}.html", reportStream);
+ }
+
+
+ blobStream.Close();
+
+ log.LogInformation("Deleting upload blob...");
+ var uploads = new BlobContainerClient(Environment.GetEnvironmentVariable("AzureWebJobsStorage"), "uploads");
+ var deleteResult = await uploads.DeleteBlobAsync(name);
+ log.LogInformation("Upload deletion result: {Status}, {Reason}", deleteResult.Status, deleteResult.ReasonPhrase);
+
+ await using (var ms = new MemoryStream())
+ {
+ await JsonSerializer.SerializeAsync(ms, job.WithStatus("done"));
+ ms.Seek(0, SeekOrigin.Begin);
+ await jobClient.UploadAsync(ms, overwrite: true);
+ }
+
+ }
+ }
+}
diff --git a/ArchiveDiag-Worker/Properties/serviceDependencies.archivediag-worker - Zip Deploy.json b/ArchiveDiag-Worker/Properties/serviceDependencies.archivediag-worker - Zip Deploy.json
new file mode 100644
index 000000000..63816d610
--- /dev/null
+++ b/ArchiveDiag-Worker/Properties/serviceDependencies.archivediag-worker - Zip Deploy.json
@@ -0,0 +1,10 @@
+{
+ "dependencies": {
+ "storage1": {
+ "resourceId": "/subscriptions/[parameters('subscriptionId')]/resourceGroups/[parameters('resourceGroup')]/providers/Microsoft.Storage/storageAccounts/archivediagstore",
+ "type": "storage.azure",
+ "connectionId": "AzureWebJobsStorage",
+ "secretStore": "AzureAppSettings"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiag-Worker/Properties/serviceDependencies.json b/ArchiveDiag-Worker/Properties/serviceDependencies.json
new file mode 100644
index 000000000..fcc92d112
--- /dev/null
+++ b/ArchiveDiag-Worker/Properties/serviceDependencies.json
@@ -0,0 +1,8 @@
+{
+ "dependencies": {
+ "storage1": {
+ "type": "storage",
+ "connectionId": "AzureWebJobsStorage"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiag-Worker/Properties/serviceDependencies.local.json b/ArchiveDiag-Worker/Properties/serviceDependencies.local.json
new file mode 100644
index 000000000..b550bbba9
--- /dev/null
+++ b/ArchiveDiag-Worker/Properties/serviceDependencies.local.json
@@ -0,0 +1,9 @@
+{
+ "dependencies": {
+ "storage1": {
+ "resourceId": "/subscriptions/[parameters('subscriptionId')]/resourceGroups/[parameters('resourceGroup')]/providers/Microsoft.Storage/storageAccounts/archivediagstore",
+ "type": "storage.azure",
+ "connectionId": "AzureWebJobsStorage"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiag-Worker/host.json b/ArchiveDiag-Worker/host.json
new file mode 100644
index 000000000..bb3b8dadd
--- /dev/null
+++ b/ArchiveDiag-Worker/host.json
@@ -0,0 +1,11 @@
+{
+ "version": "2.0",
+ "logging": {
+ "applicationInsights": {
+ "samplingExcludedTypes": "Request",
+ "samplingSettings": {
+ "isEnabled": true
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiag/ArchiveDiag.csproj b/ArchiveDiag/ArchiveDiag.csproj
new file mode 100644
index 000000000..729075a2f
--- /dev/null
+++ b/ArchiveDiag/ArchiveDiag.csproj
@@ -0,0 +1,40 @@
+
+
+
+ Exe
+ netcoreapp3.1
+ 0.6.23
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ArchiveDiag/ArchiveDiagRunner.cs b/ArchiveDiag/ArchiveDiagRunner.cs
new file mode 100644
index 000000000..b819a12bf
--- /dev/null
+++ b/ArchiveDiag/ArchiveDiagRunner.cs
@@ -0,0 +1,198 @@
+using System;
+using System.Runtime.InteropServices;
+using System.Collections.Generic;
+using System.IO;
+using System.Runtime.CompilerServices;
+using System.Text;
+using ConLib;
+using ConLib.Console;
+using ConLib.HTML;
+using ICSharpCode.SharpZipLib.ArchiveDiag;
+using static ConLib.PrettyConsole;
+
+namespace ArchiveDiag
+{
+ abstract class ArchiveDiagRunner : IDisposable
+ {
+ private readonly HTMLWriter? html = null;
+ private readonly ColorFormatter? fmt = null;
+ private Stream archiveStream;
+
+ public int MaxNameHex { get; set; } = 32;
+ public int MaxNameString { get; set; } = 128;
+ public int MaxCommentHex { get; set; } = 32;
+ public int MaxCommentString { get; set; } = 128;
+ public int MaxExtraDataHex { get; set; } = 64;
+
+ public bool WarnUnknownSigns { get; set; } = false;
+
+ public string? FileName { get; }
+
+ public ArchiveDiagRunner(Stream archiveStream, string? fileName = null)
+ {
+ this.archiveStream = archiveStream;
+ FileName = fileName;
+ }
+
+ public void Run()
+ {
+ Run(new ConsoleWriter());
+ }
+
+ public void Run(params ColorWriter[] writers)
+ {
+ PrettyFormatters.Clear();
+
+ foreach (var writer in writers)
+ {
+ var fmt = new ColorFormatter(writer);
+ PrettyFormatters.Add(fmt);
+ }
+
+ PrettyConsole.ChoreOptions.StartedFormat = "{0} {1}\n";
+ PrettyConsole.ChoreOptions.NameColor = ConCol.White;
+ PrettyConsole.ChoreOptions.EndedFormat = "{0} {1} in ";
+
+ var now = DateTime.UtcNow;
+
+ WriteColor($"ArchiveDiag ", ConCol.White);
+ WriteVersion();
+ WriteLine($"\n");
+
+
+ WriteLine($"Runtime: {RuntimeInformation.FrameworkDescription} ({RuntimeInformation.OSDescription}/{RuntimeInformation.OSArchitecture})");
+ WriteLine($"File: {FileName ?? ""}");
+ Write($"Size: {archiveStream.Length}");
+ WriteColor(" byte(s)\n", ConCol.DarkGray);
+ Write($"Generated at: {now:yyyy-MM-dd}");
+ WriteColor("T", ConCol.DarkGray);
+ Write($"{now:HH:mm:ss}");
+ WriteColor("Z\n", ConCol.DarkGray);
+ WriteLine($"Parser: {Parser}");
+ WriteLine($"Tester: {Tester}");
+
+ WriteLine();
+
+ // Stream archiveStream = null;
+ DoTask("Open File", async () =>
+ {
+ // archiveStream = File.OpenRead(file);
+ }, continueOnFail: false);
+
+ DoChore("Parse Archive", () => { ParseArchive(archiveStream); }, continueOnFail: true);
+
+ // DoTask("Rewind File Stream", async () => { archiveStream.Seek(0, SeekOrigin.Begin); },
+ // continueOnFail: false);
+
+
+ DoChore("Test Archive", () =>
+ {
+ var result = TestArchive(archiveStream);
+
+
+ WriteLine($"Test result {(result ? "PASSED" : "FAILED")}", result ? ConCol.Green : ConCol.Red);
+ }, continueOnFail: true);
+
+ foreach (var writer in writers)
+ {
+ //writer.Flush();
+ // writer.Close();
+ }
+
+#if DEBUG
+ // Console.Read();
+#endif
+ }
+
+		protected static void WriteHexBytes(IEnumerable<byte> data, int maxNumber, bool endLine = true)
+ {
+ int bytesWritten = 0;
+ foreach (var b in data)
+ {
+ Write($"{b:x2} ");
+ if (++bytesWritten >= maxNumber)
+ {
+ WriteEllipsis(endLine);
+ return;
+ }
+ }
+
+ if (endLine) WriteLine();
+ }
+
+
+ protected static void WriteEllipsis(bool endLine = true) => WriteColor($" [...]{(endLine ? "\n" : "")}", ConCol.DarkGray);
+
+ protected static void WriteBool(bool value) => WriteColor(value ? "Yes" : "No", value ? ConCol.Green : ConCol.Red);
+ protected static void WriteBool(string prefix, bool value, bool endLine = true)
+ {
+ Write(FormattableStringFactory.Create(prefix));
+ WriteBool(value);
+ if (endLine) WriteLine();
+ }
+
+ protected static void WriteString(byte[] rawBytes, int maxLength, bool endLine = true)
+ {
+ var chars = Encoding.UTF8.GetChars(rawBytes);
+ var length = Math.Min(maxLength, chars.Length);
+
+ var firstPrintableIndex = -1;
+ for (var i = 0; i < length; i++)
+ {
+ if (char.IsControl(chars[i]) || chars[i] == '�')
+ {
+ if (firstPrintableIndex != -1)
+ {
+ // Write the string up until this position
+ Write($"{new string(chars, firstPrintableIndex, i - firstPrintableIndex)}");
+ firstPrintableIndex = -1;
+ }
+
+ if (chars[i] == '�')
+ {
+ WriteColor("�", ConCol.Red);
+ continue;
+ }
+
+ var charRaw = (short)chars[i];
+ WriteColor(charRaw > 255 ? $"\\u{charRaw:x4}" : $"\\x{charRaw:x2}", ConCol.Cyan);
+
+ }
+ else
+ {
+ if (firstPrintableIndex == -1)
+ {
+ firstPrintableIndex = i;
+ }
+
+ }
+ }
+
+ if (firstPrintableIndex != -1)
+ {
+ Write($"{new string(chars, firstPrintableIndex, length - firstPrintableIndex)}");
+ }
+
+ if (chars.Length > maxLength)
+ {
+ WriteEllipsis(endLine);
+ }
+ else if (endLine)
+ {
+ WriteLine();
+ }
+ }
+
+ public void Dispose()
+ {
+ html?.Dispose();
+ fmt?.Dispose();
+ }
+
+ protected abstract bool TestArchive(Stream archiveStream);
+ protected abstract void ParseArchive(Stream archiveStream);
+
+ protected virtual string Parser => "None";
+ protected virtual string Tester => "None";
+ }
+}
diff --git a/ArchiveDiag/ExtraDataType.cs b/ArchiveDiag/ExtraDataType.cs
new file mode 100644
index 000000000..fe968ef9c
--- /dev/null
+++ b/ArchiveDiag/ExtraDataType.cs
@@ -0,0 +1,151 @@
+namespace ICSharpCode.SharpZipLib.ArchiveDiag
+{
+ enum ExtraDataType
+ {
+ ///Zip64 extended information extra field
+ Zip64 = 0x0001,
+
+ ///AV Info
+ AVInfo = 0x0007,
+
+ ///Reserved for extended language encoding data (PFS) (see APPENDIX D)
+ ExtendedLanguageEncoding = 0x0008,
+
+ ///OS/2
+ OS2 = 0x0009,
+
+ ///NTFS
+ NTFS = 0x000a,
+
+ ///OpenVMS
+ OpenVMS = 0x000c,
+
+ ///UNIX
+ UNIX = 0x000d,
+
+ ///Reserved for file stream and fork descriptors
+ FileStreamFork = 0x000e,
+
+ ///Patch Descriptor
+ Patch = 0x000f,
+
+ ///PKCS#7 Store for X.509 Certificates
+ PKCS7Certs = 0x0014,
+
+ ///X.509 Certificate ID and Signature for individual file
+ X509FileCert = 0x0015,
+
+ ///X.509 Certificate ID for Central Directory
+ X509CentralDirCert = 0x0016,
+
+ ///Strong Encryption Header
+ Strong = 0x0017,
+
+ ///Record Management Controls
+ Record = 0x0018,
+
+ ///PKCS#7 Encryption Recipient Certificate List
+ PKCS7RecipCerts = 0x0019,
+
+ ///Reserved for Timestamp record
+ Timestamp = 0x0020,
+
+ ///Policy Decryption Key Record
+ PolicyDecryptionKey = 0x0021,
+
+ ///Smartcrypt Key Provider Record
+ SmartcryptKeyProvider = 0x0022,
+
+ ///Smartcrypt Policy Key Data Record
+ SmartcryptPolicyKey = 0x0023,
+
+ ///IBM S/390 (Z390), AS/400 (I400) attributes - uncompressed
+ IBMUncompressed = 0x0065,
+
+ ///Reserved for IBM S/390 (Z390), AS/400 (I400) attributes - compressed
+ IBMCompressed = 0x0066,
+
+ ///POSZIP 4690 (reserved)
+ POSZIP = 0x4690,
+
+ /// Info-ZIP Macintosh (old, J. Lee)
+ InfoZipMacOld = 0x07c8,
+
+ /// ZipIt Macintosh (first version)
+ ZipItMacShort = 0x2605,
+
+ /// ZipIt Macintosh v 1.3.5 and newer (w/o full filename)
+ ZipItMacLong = 0x2705,
+
+ /// Info-ZIP Macintosh (new, D. Haase's 'Mac3' field )
+ InfoZipMacNew = 0x334d,
+
+ /// Tandem NSK
+ TandemNSK = 0x4154,
+
+ /// Acorn/SparkFS (David Pilling)
+ AcornSparkFS = 0x4341,
+
+ /// Windows NT security descriptor (binary ACL)
+ WindowsACL = 0x4453,
+
+ /// VM/CMS
+ VMCMS = 0x4704,
+
+ /// MVS
+ MVS = 0x470f,
+
+ /// Theos, old inofficial port
+ TheosOld = 0x4854,
+
+ /// FWKCS MD5 (see below)
+ FWKCSMD5 = 0x4b46,
+
+ /// OS/2 access control list (text ACL)
+ OS2ACL = 0x4c41,
+
+ /// Info-ZIP OpenVMS (obsolete)
+ InfoZipOpenVMS = 0x4d49,
+
+ /// Macintosh SmartZIP, by Macro Bambini
+ SmartZipMac = 0x4d63,
+
+ /// Xceed original location extra field
+ XceedOriginalLocation = 0x4f4c,
+
+ /// AOS/VS (binary ACL)
+ AOSVSACL = 0x5356,
+
+ /// extended timestamp
+ UnixExtendedTime = 0x5455,
+
+ /// Info-ZIP Unix (original; also OS/2, NT, etc.)
+ InfoZipUNIXOld = 0x5855,
+
+ /// Xceed unicode extra field
+ XceedUnicode = 0x554e,
+
+ /// BeOS (BeBox, PowerMac, etc.)
+ BeOS = 0x6542,
+
+ /// Theos
+ Theos = 0x6854,
+
+ /// ASi Unix
+ ASiUnix = 0x756e,
+
+ /// Info-ZIP Unix (new)
+ InfoZipUNIXNew = 0x7855,
+
+ /// SMS/QDOS
+ SMSQDOS = 0xfb4a,
+
+ WinZipAES = 0x9901,
+
+ UnicodeName = 0x7075,
+ UnicodeComment = 0x6375,
+ CustomCP = 0x5A4C,
+
+ Unknown = 0xffff,
+ }
+}
diff --git a/ArchiveDiag/JobStatus.cs b/ArchiveDiag/JobStatus.cs
new file mode 100644
index 000000000..5ee2774a1
--- /dev/null
+++ b/ArchiveDiag/JobStatus.cs
@@ -0,0 +1,11 @@
+namespace ArchiveDiagJson
+{
+ public class JobStatus
+ {
+ public string FileName { get; set; }
+
+ public string Status { get; set; }
+
+ public JobStatus WithStatus(string status) => new JobStatus() {FileName = FileName, Status = status};
+ }
+}
diff --git a/ArchiveDiag/Program.cs b/ArchiveDiag/Program.cs
new file mode 100644
index 000000000..9121821c6
--- /dev/null
+++ b/ArchiveDiag/Program.cs
@@ -0,0 +1,177 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using ArchiveDiag;
+using CommandLine;
+using ConLib.Console;
+using ConLib.HTML;
+using ICSharpCode.SharpZipLib.Zip;
+using Microsoft.CodeAnalysis.CSharp.Scripting;
+using Microsoft.CodeAnalysis.Scripting;
+
+namespace ICSharpCode.SharpZipLib.ArchiveDiag
+{
+ public class Program
+ {
+ public class Options
+ {
+ [Value(0, HelpText = "Input filename", Required = true)]
+ public string Filename { get; set; }
+
+ [Option('v', "verbose", Required = false, HelpText = "Set output to verbose messages")]
+ public bool Verbose { get; set; }
+
+ [Option('q', "quiet")]
+ public bool Quiet { get; set; }
+
+ [Option('h', "no-html-report")]
+ public bool SkipHtmlReport { get; set; }
+
+ [Option('e', "eval", HelpText = "Run the input file as a C# script and create a report from the resulting stream")]
+ public bool Evaluate { get; set; }
+
+ [Option('t', "tar")]
+ public bool Tar { get; set; }
+ }
+
+
+ static int Main(string[] args)
+ {
+
+			Parser.Default.ParseArguments<Options>(args)
+ .WithParsed(o =>
+ {
+ Stream inputStream;
+ var outputFile = $"{o.Filename}.html";
+ var inputFile = Path.GetFileName(o.Filename);
+
+ if (o.Evaluate)
+ {
+ inputFile = $"script:{inputFile}";
+ try
+ {
+ using var fs = File.OpenRead(o.Filename);
+ using var sr = new StreamReader(fs);
+
+ var opts = ScriptOptions.Default
+ .WithFilePath(o.Filename)
+ .WithImports(
+ "System",
+ "System.IO",
+ "System.Text",
+ "System.Collections.Generic",
+ "ICSharpCode.SharpZipLib",
+ "ICSharpCode.SharpZipLib.Core",
+ "ICSharpCode.SharpZipLib.Zip")
+ .WithReferences(typeof(ZipOutputStream).Assembly);
+
+ var task =
+							CSharpScript.EvaluateAsync<byte[]>(sr.ReadToEnd(), opts);
+ if (task.Wait(TimeSpan.FromSeconds(30)))
+ {
+ inputStream = new MemoryStream(task.Result);
+ }
+ else throw new TimeoutException("Script evaluation timed out");
+ }
+ catch (Exception x)
+ {
+ Console.WriteLine($"Failed to evaluate input script: {x}");
+ return;
+ }
+
+ }
+ else
+ {
+ inputStream = File.OpenRead(o.Filename);
+
+ }
+
+ using var outputStream = File.Open(outputFile, FileMode.Create);
+ using var htmlWriter = new HTMLWriter(outputStream);
+
+ if(o.Tar) {
+ new TarArchiveDiagRunner(inputStream, inputFile).Run(new ConsoleWriter(), htmlWriter);
+ } else {
+ new ZipArchiveDiagRunner(inputStream, inputFile)
+ {
+ WarnUnknownSigns = false,
+ }.Run(new ConsoleWriter(), htmlWriter);
+ }
+
+
+
+ });
+
+ return 0;
+ }
+
+ static void Lala()
+ {
+ var dataBytes = new byte[] { 0x34, 0x68, 0xf2, 0x8d };
+
+ using var ms = new MemoryStream(dataBytes);
+ using var fs = File.Create("output.zip");
+ using var zip = new ZipOutputStream(fs);
+ zip.PutNextEntry(new ZipEntry("content-file.bin"));
+ ms.WriteTo(zip);
+
+ }
+
+ public void UseCreateZipFileFromData()
+ {
+ var dataBytes = new byte[] { 0x49, 0xe2, 0x9d, 0xa4, 0x5a, 0x49, 0x50 };
+
+ CreateZipFileFromData(File.Create("output.zip"), dataBytes);
+
+ using (var ms = new MemoryStream())
+ {
+ CreateZipFileFromData(ms, dataBytes, closeStream: false, zipEntryName: "data.bin");
+ var outputBytes = ms.ToArray();
+ }
+ }
+
+ public void CreateZipFileFromData(Stream outputStream, byte[] inputData, bool closeStream = true, string zipEntryName = "-")
+ {
+ using (var zipStream = new ZipOutputStream(outputStream))
+ {
+ // Stop ZipStream.Dispose() from also Closing the underlying stream.
+ zipStream.IsStreamOwner = closeStream;
+
+ zipStream.PutNextEntry(new ZipEntry(zipEntryName));
+ zipStream.Write(inputData);
+ }
+ }
+
+ }
+
+
+
+ static class ZipExtraDataExtensions
+ {
+ public static IEnumerable<(ExtraDataType, Range)> EnumerateTags(this ZipExtraData zed)
+ {
+ var index = 0;
+
+ var data = zed.GetEntryData();
+
+ while (index < data.Length - 3)
+ {
+ var tag = data[index++] + (data[index++] << 8);
+ var length = data[index++] + (data[index++] << 8);
+ yield return ((ExtraDataType)tag, new Range(index, index+length));
+ index += length;
+ }
+
+ }
+ }
+
+
+ internal static class StringExtensions
+ {
+ internal static string Ellipsis(this string source, int maxLength)
+ => source.Length > maxLength - 3
+ ? source.Substring(0, maxLength - 3) + "..."
+ : source;
+ }
+
+}
diff --git a/ArchiveDiag/Properties/launchSettings.json b/ArchiveDiag/Properties/launchSettings.json
new file mode 100644
index 000000000..fdc7896d5
--- /dev/null
+++ b/ArchiveDiag/Properties/launchSettings.json
@@ -0,0 +1,8 @@
+{
+ "profiles": {
+ "ArchiveDiag": {
+ "commandName": "Project",
+ "commandLineArgs": "C:\\Users\\nils\\Downloads\\tartest\\IOSFile.tar"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiag/Tar/TypeFlag.cs b/ArchiveDiag/Tar/TypeFlag.cs
new file mode 100644
index 000000000..00c695edd
--- /dev/null
+++ b/ArchiveDiag/Tar/TypeFlag.cs
@@ -0,0 +1,120 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace ArchiveDiag.Tar
+{
+ public enum TypeFlag: byte
+ {
+ ///
+ /// The "old way" of indicating a normal file.
+ ///
+ OldNorm = 0,
+
+ ///
+ /// Normal file type.
+ ///
+ Normal = (byte)'0',
+
+ ///
+ /// Link file type.
+ ///
+ Link = (byte)'1',
+
+ ///
+ /// Symbolic link file type.
+ ///
+ SymbolicLink = (byte)'2',
+
+ ///
+ /// Character device file type.
+ ///
+ CharacterDevice = (byte)'3',
+
+ ///
+ /// Block device file type.
+ ///
+ BlockDevice = (byte)'4',
+
+ ///
+ /// Directory file type.
+ ///
+ Directory = (byte)'5',
+
+ ///
+ /// FIFO (pipe) file type.
+ ///
+ FIFOPipe = (byte)'6',
+
+ ///
+ /// Contiguous file type.
+ ///
+ Contiguous = (byte)'7',
+
+ ///
+ /// Posix.1 2001 global extended header
+ ///
+ GlobalExtendedHeader = (byte)'g',
+
+ ///
+ /// Posix.1 2001 extended header
+ ///
+ ExtendedHeader = (byte)'x',
+
+ // POSIX allows for upper case ascii type as extensions
+
+ ///
+ /// Solaris access control list file type
+ ///
+ AccessControlList = (byte)'A',
+
+ ///
+ /// GNU dir dump file type
+ /// This is a dir entry that contains the names of files that were in the
+ /// dir at the time the dump was made
+ ///
+ GNU_DumpDir = (byte)'D',
+
+ ///
+ /// Solaris Extended Attribute File
+ ///
+ ExtendedAttribute = (byte)'E',
+
+ ///
+ /// Inode (metadata only) no file content
+ ///
+ Inode = (byte)'I',
+
+ ///
+ /// Identifies the next file on the tape as having a long link name
+ ///
+ GNU_LongLink = (byte)'K',
+
+ ///
+ /// Identifies the next file on the tape as having a long name
+ ///
+ GNU_LongName = (byte)'L',
+
+ ///
+ /// Continuation of a file that began on another volume
+ ///
+ GNU_MultiVol = (byte)'M',
+
+ ///
+ /// For storing filenames that dont fit in the main header (old GNU)
+ ///
+ GNU_Names = (byte)'N',
+
+ ///
+ /// GNU Sparse file
+ ///
+ GNU_Sparse = (byte)'S',
+
+ ///
+ /// GNU Tape/volume header ignore on extraction
+ ///
+ GNU_VolumeHeader = (byte)'V',
+
+
+ }
+}
diff --git a/ArchiveDiag/TarArchiveDiagRunner.cs b/ArchiveDiag/TarArchiveDiagRunner.cs
new file mode 100644
index 000000000..cd973d5a3
--- /dev/null
+++ b/ArchiveDiag/TarArchiveDiagRunner.cs
@@ -0,0 +1,174 @@
+using System;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Text;
+using ArchiveDiag.Tar;
+using ConLib;
+using static ConLib.PrettyConsole;
+
+namespace ArchiveDiag
+{
+ class TarArchiveDiagRunner: ArchiveDiagRunner
+ {
+ protected override string Parser => "ArchiveDiag.TarParser";
+
+ public TarArchiveDiagRunner(Stream archiveStream, string? fileName = null) : base(archiveStream, fileName)
+ {
+ }
+
+ protected override bool TestArchive(Stream archiveStream)
+ {
+ return true;
+ }
+
+ protected override void ParseArchive(Stream archiveStream)
+ {
+ var block = new byte[512];
+ while(archiveStream.Length > archiveStream.Position)
+ {
+ var readCount = 0;
+
+ DoTask("Read Block", async () => readCount = await archiveStream.ReadAsync(block));
+
+ var isEndBlock = block.All(b => b == 0);
+ WriteBool("Block is end block? ", isEndBlock);
+
+ if(isEndBlock) continue;
+
+ long fileSize = 0;
+ DoChore("Parse entry header", () => fileSize = ParseHeader(block), true);
+
+ DoChore("Parse UStar header", () => ParseUstar(block), true);
+
+ if (fileSize >= 0)
+ {
+ DoTask("Skip Content", async () => archiveStream.Seek(fileSize, SeekOrigin.Current));
+ }
+ else
+ {
+ throw new Exception("Cannot seek to next entry, since the previous entry didn't contain a valid size");
+ }
+ }
+ }
+
+		private long ParseHeader(ReadOnlySpan<byte> block)
+ {
+
+ DumpStr("File name: ", block[0..100]);
+ // DumpHex("File name (raw): ", block[0..100]);
+ DumpDec("File mode: ", block[100..108]);
+ DumpDec("Owner ID: ", block[108..116]);
+ DumpDec("Group ID: ", block[116..124]);
+
+ var fileSize = DumpDec("File size: ", block[124..136]);
+
+ DumpTim("Modification time: ", block[136..148]);
+
+ DumpStr("Checksum: ", block[148..156]);
+
+ var typeflag = block[156];
+ var identifiedFlag = Enum.IsDefined(typeof(TypeFlag), typeflag) ? ((TypeFlag)typeflag).ToString("F") : "unknown";
+
+			WriteLine($"File type: {identifiedFlag} (0x{typeflag:x2} '{(char)typeflag}')");
+ DumpStr("Linked file name: ", block[157..257]);
+
+ return fileSize;
+ }
+
+		private void ParseUstar(ReadOnlySpan<byte> block)
+ {
+ var magic = ReadStr(block.Slice(257, 6));
+
+ WriteLine($"UStar indicator: {magic}");
+
+ if (!magic.StartsWith("ustar"))
+ {
+ WriteLine($"UStar indicator not found, skipping!");
+ return;
+ }
+
+ DumpDec("Version: ", block[263..265]);
+ DumpStr("Owner user name: ", block[265..297]);
+ DumpStr("Owner group name: ", block[297..329]);
+ DumpDec($"Device major: ", block[329..337]);
+ DumpDec($"Device minor: ", block[337..345]);
+ DumpStr($"Filename prefix: ", block[345..500]);
+
+
+ }
+
+		private string ReadStr(ReadOnlySpan<byte> span) => Encoding.ASCII.GetString(span).Trim('\0', ' ');
+
+		private void DumpStr(string prefix, ReadOnlySpan<byte> span)
+ {
+ Write(FormattableStringFactory.Create(prefix));
+ WriteString(span.ToArray().Where(b => b != 0).ToArray(), 48);
+ }
+
+		private void DumpHex(string prefix, ReadOnlySpan<byte> span)
+ {
+ Write(FormattableStringFactory.Create(prefix));
+ WriteHexBytes(span.ToArray(), 100);
+ }
+
+		private long DumpDec(string prefix, ReadOnlySpan<byte> span)
+ {
+ var dec = -1L;
+
+ Write(FormattableStringFactory.Create(prefix));
+
+ var str = ReadStr(span);
+ if (string.IsNullOrWhiteSpace(str))
+ {
+ WriteColor("\n", ConCol.DarkGray);
+ return -1;
+ }
+ try
+ {
+ dec = Convert.ToInt64(str, 8);
+ WriteLine($"{dec}");
+ }
+ catch (Exception x)
+ {
+ WriteColor($"Invalid! ", ConCol.Red);
+ WriteHexBytes(span.ToArray(), 16);
+ WriteColor($" {x.GetType().Name}: {x.Message}\n", ConCol.DarkRed);
+ return -1;
+ }
+
+ return dec;
+ }
+
+		private DateTimeOffset DumpTim(string prefix, ReadOnlySpan<byte> span)
+ {
+ var dto = DateTimeOffset.MinValue;
+
+ Write(FormattableStringFactory.Create(prefix));
+
+ var str = ReadStr(span);
+ if (string.IsNullOrWhiteSpace(str))
+ {
+ WriteColor("\n", ConCol.DarkGray);
+ return dto;
+ }
+ try
+ {
+ dto = DateTimeOffset.FromUnixTimeSeconds(Convert.ToInt64(str, 8));
+
+ WriteLine($"{dto:s}");
+ }
+ catch (Exception x)
+ {
+ WriteColor($"Invalid! ", ConCol.Red);
+ WriteHexBytes(span.ToArray(), 16);
+ WriteColor($" {x.GetType().Name}: {x.Message}\n", ConCol.DarkRed);
+ }
+
+ return dto;
+ }
+
+
+
+ }
+}
diff --git a/ArchiveDiag/ZipArchiveDiagRunner.cs b/ArchiveDiag/ZipArchiveDiagRunner.cs
new file mode 100644
index 000000000..c55308066
--- /dev/null
+++ b/ArchiveDiag/ZipArchiveDiagRunner.cs
@@ -0,0 +1,530 @@
+using System;
+using System.IO;
+using System.Linq;
+using System.Text;
+using ConLib;
+using ICSharpCode.SharpZipLib.ArchiveDiag;
+using ICSharpCode.SharpZipLib.Zip;
+
+namespace ArchiveDiag
+{
+ class ZipArchiveDiagRunner: ArchiveDiagRunner
+ {
+ protected override string Parser => "ArchiveDiag.ZipParser";
+ protected override string Tester => "ICSharpCode.SharpZipLib.ZipFile+ZipInputStream";
+
+ public int MaxNameHex {get; set;} = 32;
+ public int MaxNameString {get; set;} = 128;
+ public int MaxCommentHex {get; set;} = 32;
+ public int MaxCommentString {get; set;} = 128;
+ public int MaxExtraDataHex {get; set;} = 64;
+
+ public bool WarnUnknownSigns {get; set;} = false;
+
+ public ZipArchiveDiagRunner(Stream archiveStream, string? fileName = null): base(archiveStream, fileName)
+ {
+ }
+
+ protected override bool TestArchive(Stream archiveStream)
+ {
+ var zipFileTest = false;
+ PrettyConsole.DoChore("[TEST] ZipFile.Test", () => {
+ zipFileTest = new ZipFile(archiveStream).TestArchive(true, TestStrategy.FindAllErrors, (status, message) =>
+ {
+ if (status.Operation == TestOperation.EntryData) return;
+ var pad = "";//.PadLeft(18 - status.Operation.ToString().Length);
+ if (status.Entry is { } entry)
+ {
+ PrettyConsole.WriteLine($"[{status.Operation}{pad}] #{entry.ZipFileIndex} {entry.Name}");
+ }
+ else
+ {
+ PrettyConsole.WriteLine($"[{status.Operation}{pad}]");
+ }
+
+ if (!string.IsNullOrEmpty(message))
+ {
+ PrettyConsole.WriteLine($"[{status.Operation}{pad}] {message}");
+ }
+ });
+ });
+
+ var zisIterTest = false;
+ zisIterTest = PrettyConsole.DoChore("[TEST] ZipInputStream.Iterate", () => {
+ archiveStream.Seek(0, SeekOrigin.Begin);
+ using(var zis = new ZipInputStream(archiveStream){IsStreamOwner = false}) {
+ ZipEntry entry;
+ var startEntry = 0L;
+ var entryNum = 0;
+ while ((entry = zis.GetNextEntry()) != null) {
+ entryNum++;
+ var startData = archiveStream.Position + (zis.inputBuffer.RawLength - zis.inputBuffer.Available);
+ var entryName = entry.Name;
+
+ // PrettyConsole.WriteLine($"- Entry #{entryNum,3} @ {startEntry,8} // {zis.inputBuffer.RawLength} of {zis.inputBuffer.Available}");
+
+ // while(zis.ReadByte() >= 0) {
+ // Console.Write(".");
+ // }
+
+ try {
+ zis.CloseEntry();
+ } catch {
+
+ PrettyConsole.WriteLine($"Error position: {(archiveStream.CanRead ? archiveStream.Position : -1)} // {zis.inputBuffer.RawLength} of {zis.inputBuffer.Available}");
+ throw;
+ }
+ var endEntry = archiveStream.Position + (zis.inputBuffer.RawLength - zis.inputBuffer.Available);
+ var headSize = startData - startEntry;
+ var dataSize = endEntry - startData;
+ PrettyConsole.WriteLine($"- Entry #{entryNum,3} @ {startEntry,8}: {entryName} ({headSize} + {dataSize} byte(s))");
+ startEntry = endEntry;
+ }
+ }
+ }, continueOnFail: true);
+ return zisIterTest && zipFileTest;
+ }
+
+ protected override void ParseArchive(Stream transportStream)
+ {
+
+ transportStream.Seek(0, SeekOrigin.Begin);
+
+ var br = new BinaryReader(transportStream);
+
+ int readVal;
+ while ((readVal = transportStream.ReadByte()) != -1)
+ {
+ if (readVal != 'P') continue;
+ readVal = transportStream.ReadByte();
+
+ if (readVal != 'K') continue;
+ var b1 = (byte)transportStream.ReadByte();
+ var b2 = (byte)transportStream.ReadByte();
+
+ if (b1 > 0x08 || b2 > 0x08)
+ {
+ transportStream.Seek(-2, SeekOrigin.Current);
+ continue;
+ }
+
+ var sign = 'P' | ('K' << 8) | (b1 << 16) | (b2 << 24);
+
+ var signId = sign switch
+ {
+ ZipConstants.DataDescriptorSignature
+ => nameof(ZipConstants.DataDescriptorSignature),
+ ZipConstants.CentralHeaderSignature
+ => nameof(ZipConstants.CentralHeaderSignature),
+ ZipConstants.CentralHeaderDigitalSignature
+ => nameof(ZipConstants.CentralHeaderDigitalSignature),
+ ZipConstants.EndOfCentralDirectorySignature
+ => nameof(ZipConstants.EndOfCentralDirectorySignature),
+ ZipConstants.LocalHeaderSignature
+ => nameof(ZipConstants.LocalHeaderSignature),
+ ZipConstants.SpanningTempSignature
+ => nameof(ZipConstants.SpanningTempSignature),
+ ZipConstants.Zip64CentralDirLocatorSignature
+ => nameof(ZipConstants.Zip64CentralDirLocatorSignature),
+ ZipConstants.Zip64CentralFileHeaderSignature
+ => nameof(ZipConstants.Zip64CentralFileHeaderSignature),
+ _ => $"Unknown (0x{sign:x8})",
+ };
+
+ //if (warnUnknownSigns || signId.StartsWith("Unknown"))
+ //{
+ PrettyConsole.WriteLine($"{transportStream.Position:x8} PK: {b1:x2} {b2:x2} {signId}");
+ //}
+
+ switch (sign)
+ {
+ case ZipConstants.LocalHeaderSignature:
+ PrettyConsole.DoChore("Parse Local File Header", ()
+ => ParseLocalHeader(br), true);
+ break;
+ case ZipConstants.DataDescriptorSignature:
+ PrettyConsole.DoChore("Parse Data Descriptor", ()
+ => ParseDataDescriptor(br), true);
+ break;
+ case ZipConstants.CentralHeaderSignature:
+ PrettyConsole.DoChore("Parse Central Directory Header", ()
+ => ParseCentralHeader(br), true);
+ break;
+ case ZipConstants.Zip64CentralFileHeaderSignature:
+ PrettyConsole.DoChore("Parse Zip64 End Of Central Directory Record", ()
+ => ParseZip64EndOfCentralDirectory(br), true);
+ break;
+ case ZipConstants.Zip64CentralDirLocatorSignature:
+ PrettyConsole.DoChore("Parse Zip64 Central Directory Locator", ()
+ => ParseZip64CentralDirLocator(br), true);
+ break;
+ case ZipConstants.EndOfCentralDirectorySignature:
+ PrettyConsole.DoChore("Parse End of Central Directory Record", ()
+ => ParseCentralDirectory(br), true);
+ break;
+ default:
+ if (WarnUnknownSigns) PrettyConsole.WriteLine($"{"Skipping unrecognized signature!"}\n");
+ break;
+ }
+
+ }
+
+ }
+
+ private void ParseArchiveExtraData(BinaryReader br)
+ {
+ var extraLen = br.ReadUInt16();
+ var extraDataBytes = new byte[extraLen];
+ br.Read(extraDataBytes);
+ var extraData = new ZipExtraData(extraDataBytes);
+
+ PrettyConsole.WriteLine($"Extra data:");
+ ParseExtraData(extraData);
+ }
+
+ private void ParseZip64CentralDirLocator(BinaryReader br)
+ {
+ var diskNumber = br.ReadUInt32();
+ var relativeOffset = br.ReadUInt64();
+ var diskTotal = br.ReadUInt64();
+
+ PrettyConsole.WriteLine($"Zip64 End Record Disk Number: {diskNumber}");
+ PrettyConsole.WriteLine($"Zip64 End Record Relative Offset: {relativeOffset} (0x{relativeOffset:x8})");
+ PrettyConsole.WriteLine($"Total number of disks: {diskTotal}");
+ }
+
+ private void ParseZip64EndOfCentralDirectory(BinaryReader br)
+ {
+ var recordSize = br.ReadUInt64();
+ var versionMadeBy = ZipVersion.From(br.ReadUInt16());
+ var versionToExtract = ZipVersion.From(br.ReadUInt16());
+ var diskNumber = br.ReadUInt32();
+ var startCentralDirDisk = br.ReadUInt32();
+
+ var entriesForDisk = br.ReadUInt64();
+ var entriesForWholeCentralDir = br.ReadUInt64();
+ var centralDirSize = br.ReadUInt64();
+ var offsetOfCentralDir = br.ReadUInt64();
+
+ var extDataSize = recordSize - 44;
+
+ PrettyConsole.WriteLine($"Size of record: {recordSize} (0x{recordSize:x16})");
+ PrettyConsole.WriteLine($"Version Made By: {versionMadeBy} (0x{versionMadeBy.OSRaw:x2})");
+ PrettyConsole.WriteLine($"Version Needed: {versionToExtract} (0x{versionToExtract.OSRaw:x2})");
+ PrettyConsole.WriteLine($"Disk Number: {diskNumber}");
+ PrettyConsole.WriteLine($"Start Central Directory Disk: {startCentralDirDisk:x8}");
+
+ PrettyConsole.WriteLine($"Entries For Disk: {entriesForDisk}");
+ PrettyConsole.WriteLine($"Entries For Central Directory: {entriesForWholeCentralDir}");
+ PrettyConsole.WriteLine($"Central Directory Size: {centralDirSize} (0x{centralDirSize:x16})");
+ PrettyConsole.WriteLine($"Central Directory Offset: {offsetOfCentralDir} (0x{offsetOfCentralDir:x16})");
+
+
+ PrettyConsole.WriteLine($"Extensible data size: {extDataSize}");
+ if (extDataSize <= 0) return;
+
+ PrettyConsole.WriteLine($"Parsing of extensible data is not supported! Skipping.", ConCol.Yellow);
+ // Ignore skipping the data for now, since it is more likely to cause issues than it is to prevent them
+ //br.BaseStream.Seek((long)extDataSize, SeekOrigin.Current);
+ }
+
+ private static DateTime DateTimeFromDosTime(uint dosTime)
+ {
+ var sec = Math.Min(59, 2 * (dosTime & 0x1f));
+ var min = Math.Min(59, (dosTime >> 5) & 0x3f);
+ var hrs = Math.Min(23, (dosTime >> 11) & 0x1f);
+ var mon = Math.Max(1, Math.Min(12, ((dosTime >> 21) & 0xf)));
+ var year = ((dosTime >> 25) & 0x7f) + 1980;
+ var day = Math.Max(1, Math.Min(DateTime.DaysInMonth((int)year, (int)mon), (int)((dosTime >> 16) & 0x1f)));
+ return new DateTime((int)year, (int)mon, day, (int)hrs, (int)min, (int)sec);
+ }
+
+ private void ParseCentralHeader(BinaryReader br)
+ {
+ var versionMadeBy = ZipVersion.From(br.ReadUInt16());
+ var versionToExtract = ZipVersion.From(br.ReadUInt16());
+ var bitFlags = (GeneralBitFlags)br.ReadUInt16();
+ var method = (CompressionMethod)br.ReadUInt16();
+ var dosTime = br.ReadUInt32();
+ var dateTime = DateTimeFromDosTime(dosTime);
+ var crc = br.ReadUInt32();
+ var compressedSize = (long)br.ReadUInt32();
+ var size = (long)br.ReadUInt32();
+ var nameLen = br.ReadUInt16();
+ var extraLen = br.ReadUInt16();
+ var commentLen = br.ReadUInt16();
+ var diskStartNo = br.ReadUInt16(); // Not currently used
+ var internalAttributes = br.ReadUInt16(); // Not currently used
+
+ var externalAttributes = (FileAttributes)br.ReadUInt32();
+ var offset = br.ReadUInt32();
+
+ var nameData = new byte[nameLen];
+ br.Read(nameData);
+
+ var extraDataBytes = new byte[extraLen];
+ br.Read(extraDataBytes);
+ var extraData = new ZipExtraData(extraDataBytes);
+
+ var commentData = new byte[commentLen];
+ br.Read(commentData);
+
+
+ PrettyConsole.WriteLine($"Version Made By: {versionMadeBy} (0x{versionMadeBy.OSRaw:x2})");
+ PrettyConsole.WriteLine($"Version Needed: {versionToExtract} (0x{versionToExtract.OSRaw:x2})");
+ PrettyConsole.WriteLine($"Bit flags: {bitFlags:F}");
+ PrettyConsole.WriteLine($"Compression Method: {method:G}");
+ PrettyConsole.WriteLine($"DOS Date: 0x{dosTime:x8}");
+ PrettyConsole.WriteLine($"File Date: {dateTime:yyyy-MM-dd}");
+ PrettyConsole.WriteLine($"File Time: {dateTime:HH:mm:ss}");
+ PrettyConsole.WriteLine($"CRC: 0x{crc:x8}");
+ WriteSize($"Compressed Size:", compressedSize);
+ WriteSize($"Uncompressed Size:", size);
+ PrettyConsole.WriteLine($"Name Length: {nameLen}");
+ PrettyConsole.WriteLine($"Extra Data Length: {extraLen}");
+ PrettyConsole.WriteLine($"Comment Length: {commentLen}");
+ PrettyConsole.WriteLine($"Disk number start: {diskStartNo}");
+ PrettyConsole.WriteLine($"Internal file attributes: {internalAttributes:x4}");
+ PrettyConsole.WriteLine($"External file attributes: {externalAttributes:F}");
+ PrettyConsole.WriteLine($"Relative offset of local header: {offset}");
+
+ PrettyConsole.Write($"Name (raw): "); WriteHexBytes(nameData, MaxNameHex);
+ PrettyConsole.Write($"Name: "); WriteString(nameData, MaxNameString);
+
+ if (commentLen > 0)
+ {
+ PrettyConsole.Write($"Comment (raw): "); WriteHexBytes(commentData, MaxCommentHex);
+ PrettyConsole.Write($"Comment: "); WriteString(commentData, MaxCommentString);
+ }
+
+ PrettyConsole.WriteLine($"Extra data:");
+ ParseExtraData(extraData);
+
+ }
+
+ private static void WriteSize(FormattableString fs, in long size)
+ {
+ PrettyConsole.Write(fs);
+
+ if (size == uint.MaxValue)
+ {
+ PrettyConsole.WriteLine($" {size} ({"Zip64 Indicator"})", ConCol.DarkGray, ConCol.Blue);
+ }
+ else
+ {
+ PrettyConsole.WriteLine($" {size}");
+ }
+ }
+
+ private void ParseExtraData(ZipExtraData extraData)
+ {
+ PrettyConsole.PushGroup("extra-data");
+ var extraDataBytes = extraData.GetEntryData();
+
+ foreach (var (tag, range) in extraData.EnumerateTags().ToList())
+ {
+ var length = range.End.Value - range.Start.Value;
+
+ var knownId = Enum.IsDefined(typeof(ExtraDataType), tag);
+
+ PrettyConsole.WriteLine($" - Type: {(knownId ? tag : ExtraDataType.Unknown):G} ({(uint)tag:x4}), Length: {length}");
+ PrettyConsole.PushGroup("extra-raw");
+ if (extraDataBytes.Length < range.End.Value)
+ {
+ PrettyConsole.WriteLine($" Invalid length! Skipping parse attempt!", ConCol.Red);
+ PrettyConsole.PopGroup(); // extra-raw
+ PrettyConsole.PopGroup(); // extra-data
+ return;
+ }
+
+ PrettyConsole.Write($" Raw: ");
+ WriteHexBytes(extraDataBytes[range], MaxExtraDataHex);
+
+ PrettyConsole.PopGroup();
+
+
+ switch (tag)
+ {
+ case ExtraDataType.NTFS:
+ {
+ PrettyConsole.PushGroup("extra-ntfs");
+ var tagNtDate = extraData.GetData();
+
+ PrettyConsole.WriteLine($" Created: {tagNtDate.CreateTime:yyyy-MM-dd HH:mm:ss}");
+ PrettyConsole.WriteLine($" Accessed: {tagNtDate.LastAccessTime:yyyy-MM-dd HH:mm:ss}");
+ PrettyConsole.WriteLine($" Modified: {tagNtDate.LastModificationTime:yyyy-MM-dd HH:mm:ss}");
+ PrettyConsole.PopGroup();
+
+ break;
+ }
+ case ExtraDataType.UnixExtendedTime:
+ {
+ PrettyConsole.PushGroup("extra-unix-xtime");
+
+ var tagUnixDate = extraData.GetData();
+
+ PrettyConsole.WriteLine($" Created: {tagUnixDate.CreateTime:yyyy-MM-dd HH:mm:ss}");
+ PrettyConsole.WriteLine($" Accessed: {tagUnixDate.AccessTime:yyyy-MM-dd HH:mm:ss}");
+ PrettyConsole.WriteLine($" Modified: {tagUnixDate.ModificationTime:yyyy-MM-dd HH:mm:ss}");
+ PrettyConsole.PopGroup();
+ break;
+ }
+ case ExtraDataType.UnicodeName:
+ {
+ PrettyConsole.PushGroup("extra-unicode-name");
+
+ var nameRange = new Range(range.Start.Value + 5, range.End.Value);
+ var unicodeName = Encoding.UTF8.GetString(extraDataBytes[nameRange]);
+ File.WriteAllText("unicodetext.txt", unicodeName);
+ PrettyConsole.WriteLine($" Unicode name: {unicodeName}");
+ PrettyConsole.PopGroup();
+ break;
+ }
+ case ExtraDataType.Zip64:
+ {
+ PrettyConsole.PushGroup("extra-zip64");
+ using (var br = new BinaryReader(extraData.GetStreamForTag((int)tag)))
+ {
+ if (length < 16)
+ {
+ PrettyConsole.WriteLine($" Extra data length too small!", ConCol.Red);
+ PrettyConsole.WriteLine($" Zip64 should be {16} bytes, but {length} was indicated!");
+ }
+
+ if (length < 8) break;
+ var usize64 = br.ReadUInt64();
+ PrettyConsole.WriteLine($" Uncompressed Size: {usize64} (0x{usize64:x16})");
+
+ if (length < 16) break;
+ var csize64 = br.ReadUInt64();
+ PrettyConsole.WriteLine($" Compressed Size: {csize64} (0x{csize64:x16})");
+ }
+
+ PrettyConsole.PopGroup();
+ break;
+ }
+ }
+ }
+
+ PrettyConsole.PopGroup();
+ }
+
+ private void ParseCentralDirectory(BinaryReader br)
+ {
+ var diskNumber = br.ReadUInt16();
+ var startCentralDirDisk = br.ReadUInt16();
+ var entriesForDisk = br.ReadUInt16();
+ var entriesForWholeCentralDir = br.ReadUInt16();
+ var centralDirSize = br.ReadUInt32();
+ var offsetOfCentralDir = br.ReadUInt32();
+ var commentSize = br.ReadUInt16();
+
+ byte[] commentData = new byte[commentSize];
+ br.Read(commentData);
+
+ PrettyConsole.WriteLine($"Disk Number: {diskNumber:x4}");
+ PrettyConsole.WriteLine($"Start Central Directory Disk: {startCentralDirDisk:x4}");
+ PrettyConsole.WriteLine($"Entries For Central Directory: {entriesForWholeCentralDir}");
+ PrettyConsole.WriteLine($"Entries For Disk: {entriesForDisk}");
+ PrettyConsole.WriteLine($"Central Directory Size: {centralDirSize}");
+ PrettyConsole.WriteLine($"Central Directory Offset: {offsetOfCentralDir:x8}");
+ PrettyConsole.WriteLine($"Comment Size: {commentSize}");
+
+ if (commentData.Length > 0)
+ {
+ PrettyConsole.Write($"Comment (raw): "); WriteHexBytes(commentData, MaxCommentHex);
+ PrettyConsole.Write($"Comment: "); WriteString(commentData, MaxCommentString);
+ }
+
+ var zip64 = diskNumber == 0xffff
+ || startCentralDirDisk == 0xffff
+ || entriesForDisk == 0xffff
+ || entriesForWholeCentralDir == 0xffff
+ || centralDirSize == 0xffffffff
+ || offsetOfCentralDir == 0xffffffff;
+
+ WriteBool("Zip64 Indication: ", zip64);
+ }
+
+ private static void ParseDataDescriptor(BinaryReader br)
+ {
+ var crc = br.ReadInt32();
+
+ var pos = br.BaseStream.Position;
+ var csize = br.ReadInt32();
+ var usize = br.ReadInt32();
+
+ br.BaseStream.Seek(-8, SeekOrigin.Current);
+
+ // var crc64 = br.ReadUInt32();
+ var csize64 = br.ReadInt64();
+ var usize64 = br.ReadInt64();
+
+ // Revert stream back to non-zip64 descriptor to not risk skipping next entry
+ br.BaseStream.Seek(-8, SeekOrigin.Current);
+
+ PrettyConsole.WriteLine($"CRC: 0x{crc:x8}");
+ PrettyConsole.WriteLine($"32-bit sizes:");
+ PrettyConsole.PushGroup("descriptor32");
+ PrettyConsole.WriteLine($"Compressed Size: {csize} (0x{csize:x8})");
+ PrettyConsole.WriteLine($"Uncompressed Size: {usize} (0x{usize:x8})");
+ PrettyConsole.PopGroup();
+
+ PrettyConsole.WriteLine($"64-bit sizes (Zip64):");
+ PrettyConsole.PushGroup("descriptor64");
+ PrettyConsole.WriteLine($"Compressed Size: {csize64} (0x{csize64:x16})");
+ PrettyConsole.WriteLine($"Uncompressed Size: {usize64} (0x{usize64:x16})");
+ // PrettyConsole.WriteLine($"CRC: 0x{crc64:x8}");
+ PrettyConsole.PopGroup();
+
+ }
+
+ private void ParseLocalHeader(BinaryReader br)
+ {
+
+
+
+ var extractVersion = ZipVersion.From(br.ReadUInt16());
+ var flags = (GeneralBitFlags)br.ReadUInt16();
+ var method = (CompressionMethod)br.ReadUInt16();
+ var dosTime = br.ReadUInt32();
+ var dateTime = DateTimeFromDosTime(dosTime);
+ var crc = br.ReadUInt32();
+ long compressedSize = br.ReadUInt32();
+ long size = br.ReadUInt32();
+ int storedNameLength = br.ReadUInt16();
+ int extraDataLength = br.ReadUInt16();
+
+ var nameData = new byte[storedNameLength];
+ br.Read(nameData);
+
+ var extraDataBytes = new byte[extraDataLength];
+ br.Read(extraDataBytes);
+
+ var extraData = new ZipExtraData(extraDataBytes);
+
+
+ PrettyConsole.WriteLine($"Version Needed: {extractVersion} (0x{extractVersion.OperatingSystem:x})");
+ PrettyConsole.WriteLine($"Local Flags: {flags:F}");
+ PrettyConsole.WriteLine($"Compression Method: {method:G}");
+ PrettyConsole.WriteLine($"DOS Date: 0x{dosTime:x8}");
+ PrettyConsole.WriteLine($"File Date: {dateTime:yyyy-MM-dd}");
+ PrettyConsole.WriteLine($"File Time: {dateTime:HH:mm:ss}");
+ PrettyConsole.WriteLine($"CRC: 0x{crc:x8}");
+ WriteSize($"Compressed Size:", compressedSize);
+ WriteSize($"Uncompressed Size:", size);
+ PrettyConsole.WriteLine($"Name Length: {storedNameLength}");
+ PrettyConsole.WriteLine($"Extra Data Length: {extraDataLength}");
+ PrettyConsole.WriteLine($"Name (raw): "); WriteHexBytes(nameData, MaxNameHex);
+ PrettyConsole.Write($"Name: "); WriteString(nameData, MaxNameString);
+ PrettyConsole.WriteLine($"Extra data:");
+ ParseExtraData(extraData);
+
+ }
+
+
+ // Utils
+
+
+ }
+}
diff --git a/ArchiveDiag/ZipVersion.cs b/ArchiveDiag/ZipVersion.cs
new file mode 100644
index 000000000..64deb5c5c
--- /dev/null
+++ b/ArchiveDiag/ZipVersion.cs
@@ -0,0 +1,51 @@
+namespace ICSharpCode.SharpZipLib.ArchiveDiag
+{
+ struct ZipVersion
+ {
+ public enum ZipVersionOS: byte
+ {
+ MsDOS = 0,
+ Amiga = 1,
+ OpenVMS = 2,
+ UNIX = 3,
+ VMCMS = 4,
+ AtariST = 5,
+ OS2 = 6,
+ Macintosh = 7,
+ ZSystem = 8,
+ CPM = 9,
+ Windows = 10,
+ MVS = 11,
+ VSE = 12,
+ AcornRisc = 13,
+ VFAT = 14,
+ AlternateMVS = 15,
+ BeOS = 16,
+ Tandem = 17,
+ OS400 = 18,
+ MacOS = 19
+ }
+
+ public ZipVersionOS OperatingSystem;
+ public byte Major;
+ public byte Minor;
+
+ public byte OSRaw => (byte) OperatingSystem;
+
+ public ZipVersion(byte major, byte minor, ZipVersionOS os)
+ {
+ Major = major;
+ Minor = minor;
+ OperatingSystem = os;
+ }
+
+ public static ZipVersion From(ushort versionBytes)
+ => new ZipVersion(
+ (byte)((versionBytes & 0x00ff) / 10),
+ (byte)((versionBytes & 0x00ff) % 10),
+ (ZipVersionOS) (versionBytes >> 8));
+
+ public override string ToString()
+ => $"v{Major}.{Minor} / {OperatingSystem:G}";
+ }
+}
diff --git a/ArchiveDiagWeb/.config/dotnet-tools.json b/ArchiveDiagWeb/.config/dotnet-tools.json
new file mode 100644
index 000000000..d6051bacf
--- /dev/null
+++ b/ArchiveDiagWeb/.config/dotnet-tools.json
@@ -0,0 +1,12 @@
+{
+ "version": 1,
+ "isRoot": true,
+ "tools": {
+ "dotnet-ef": {
+ "version": "3.1.5",
+ "commands": [
+ "dotnet-ef"
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiagWeb/ArchiveDiagWeb.bak.csproj b/ArchiveDiagWeb/ArchiveDiagWeb.bak.csproj
new file mode 100644
index 000000000..1bbd0c596
--- /dev/null
+++ b/ArchiveDiagWeb/ArchiveDiagWeb.bak.csproj
@@ -0,0 +1,25 @@
+
+
+
+ netcoreapp3.1
+ 1fbbc60f-1148-475d-8c7d-6bf0d1807e44
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ArchiveDiagWeb/ArchiveDiagWeb.csproj b/ArchiveDiagWeb/ArchiveDiagWeb.csproj
new file mode 100644
index 000000000..49b56c2f6
--- /dev/null
+++ b/ArchiveDiagWeb/ArchiveDiagWeb.csproj
@@ -0,0 +1,26 @@
+
+
+
+ netcoreapp3.1
+ 1fbbc60f-1148-475d-8c7d-6bf0d1807e44
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ArchiveDiagWeb/Pages/Index.cshtml b/ArchiveDiagWeb/Pages/Index.cshtml
new file mode 100644
index 000000000..9c5e23c4b
--- /dev/null
+++ b/ArchiveDiagWeb/Pages/Index.cshtml
@@ -0,0 +1,59 @@
+@page
+@model ArchiveDiagWeb.DefaultModel
+@{
+ ViewData["Title"] = "ArchiveDiag";
+}
+
+
+
+
+
+
+
+
diff --git a/ArchiveDiagWeb/Pages/Index.cshtml.cs b/ArchiveDiagWeb/Pages/Index.cshtml.cs
new file mode 100644
index 000000000..a24625017
--- /dev/null
+++ b/ArchiveDiagWeb/Pages/Index.cshtml.cs
@@ -0,0 +1,99 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text.Json;
+using System.Threading.Tasks;
+using System.Web;
+using ArchiveDiagJson;
+using Azure.Storage.Blobs;
+using Microsoft.AspNetCore.Hosting;
+using Microsoft.AspNetCore.Http;
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.AspNetCore.Mvc.RazorPages;
+using ArchiveDiag = ICSharpCode.SharpZipLib.ArchiveDiag.Program;
+
+namespace ArchiveDiagWeb
+{
+ public class DefaultModel : PageModel
+ {
+ private IWebHostEnvironment _environment;
+ private BlobServiceClient _blobClient;
+
+ public DefaultModel(IWebHostEnvironment environment, BlobServiceClient blobClient)
+ {
+ _environment = environment;
+ _blobClient = blobClient;
+ }
+
+ [BindProperty]
+ public IFormFile Upload { get; set; }
+
+ public async Task OnPostAsync()
+ {
+
+ var guk = Convert.ToBase64String(Guid.NewGuid().ToByteArray())
+ .TrimEnd('=').Replace('+', '-').Replace('/', '_');
+
+
+ try
+ {
+ var jobsContainer = _blobClient.GetBlobContainerClient("jobs");
+ var jobClient = jobsContainer.GetBlobClient(guk);
+ await using (var ms = new MemoryStream())
+ {
+ await JsonSerializer.SerializeAsync(ms, new JobStatus()
+ {
+ FileName = Upload.FileName,
+ Status = "uploaded",
+ });
+
+ ms.Seek(0, SeekOrigin.Begin);
+
+ await jobClient.UploadAsync(ms, overwrite:true);
+ }
+ }
+ catch (Exception x)
+ {
+
+ }
+ try
+ {
+ var uploadContainer = _blobClient.GetBlobContainerClient("uploads");
+
+ await uploadContainer.UploadBlobAsync(guk, Upload.OpenReadStream());
+
+ return new JsonResult(new
+ {
+ result = "uploaded",
+ id = guk,
+ });
+ }
+ catch (Exception x)
+ {
+ return new JsonResult(new
+ {
+ result = "error",
+ error = $"{x}",
+ });
+ }
+
+ /*
+ var reports = new DirectoryInfo(Path.Combine(_environment.WebRootPath, "reports"));
+ reports.Create();
+
+
+
+ var file = new FileInfo(Path.Combine(reports.FullName, $"{guk}_{Upload.FileName}.html"));
+ await using (var fileStream = file.Create())
+ {
+ ArchiveDiag.Run(Upload.OpenReadStream(), Upload.FileName, fileStream);
+
+ }
+
+ var urlFileName = HttpUtility.UrlEncode(file.Name).Replace('+', '_');
+ Response.Redirect(Url.Content($"~/reports/{urlFileName}"));
+ */
+ }
+ }
+}
diff --git a/ArchiveDiagWeb/Pages/_ViewImports.cshtml b/ArchiveDiagWeb/Pages/_ViewImports.cshtml
new file mode 100644
index 000000000..afa82bbf2
--- /dev/null
+++ b/ArchiveDiagWeb/Pages/_ViewImports.cshtml
@@ -0,0 +1 @@
+@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
diff --git a/ArchiveDiagWeb/Program.cs b/ArchiveDiagWeb/Program.cs
new file mode 100644
index 000000000..53fe25435
--- /dev/null
+++ b/ArchiveDiagWeb/Program.cs
@@ -0,0 +1,26 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.AspNetCore.Hosting;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Logging;
+
+namespace ArchiveDiagWeb
+{
+ public class Program
+ {
+ public static void Main(string[] args)
+ {
+ CreateHostBuilder(args).Build().Run();
+ }
+
+ public static IHostBuilder CreateHostBuilder(string[] args) =>
+ Host.CreateDefaultBuilder(args)
+ .ConfigureWebHostDefaults(webBuilder =>
+ {
+ webBuilder.UseStartup();
+ });
+ }
+}
diff --git a/ArchiveDiagWeb/Properties/launchSettings.json b/ArchiveDiagWeb/Properties/launchSettings.json
new file mode 100644
index 000000000..b9f0146a6
--- /dev/null
+++ b/ArchiveDiagWeb/Properties/launchSettings.json
@@ -0,0 +1,33 @@
+{
+ "iisSettings": {
+ "windowsAuthentication": false,
+ "anonymousAuthentication": true,
+ "iisExpress": {
+ "applicationUrl": "http://localhost:50728",
+ "sslPort": 44324
+ }
+ },
+ "profiles": {
+ "IIS Express": {
+ "commandName": "IISExpress",
+ "launchBrowser": true,
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Development",
+ "ASPNETCORE_HOSTINGSTARTUP__KEYVAULT__CONFIGURATIONENABLED": "true",
+ "ASPNETCORE_HOSTINGSTARTUP__KEYVAULT__CONFIGURATIONVAULT": "https://archivediagwebvault.vault.azure.net/",
+ "AZURE_USERNAME": "piksel-nils@outlook.com"
+ }
+ },
+ "ArchiveDiagWeb": {
+ "commandName": "Project",
+ "launchBrowser": true,
+ "applicationUrl": "https://localhost:5001;http://localhost:5000",
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Development",
+ "ASPNETCORE_HOSTINGSTARTUP__KEYVAULT__CONFIGURATIONENABLED": "true",
+ "ASPNETCORE_HOSTINGSTARTUP__KEYVAULT__CONFIGURATIONVAULT": "https://archivediagwebvault.vault.azure.net/",
+ "AZURE_USERNAME": "piksel-nils@outlook.com"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiagWeb/Properties/serviceDependencies.archivediag - Web Deploy2.json b/ArchiveDiagWeb/Properties/serviceDependencies.archivediag - Web Deploy2.json
new file mode 100644
index 000000000..7628f2af8
--- /dev/null
+++ b/ArchiveDiagWeb/Properties/serviceDependencies.archivediag - Web Deploy2.json
@@ -0,0 +1,10 @@
+{
+ "dependencies": {
+ "storage1": {
+ "resourceId": "/subscriptions/[parameters('subscriptionId')]/resourceGroups/[parameters('resourceGroup')]/providers/Microsoft.Storage/storageAccounts/archivediagstore",
+ "type": "storage.azure",
+ "connectionId": "ArchiveDiagStorage",
+ "secretStore": "AzureAppSettings"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiagWeb/Properties/serviceDependencies.json b/ArchiveDiagWeb/Properties/serviceDependencies.json
new file mode 100644
index 000000000..1a578f8e8
--- /dev/null
+++ b/ArchiveDiagWeb/Properties/serviceDependencies.json
@@ -0,0 +1,13 @@
+{
+ "dependencies": {
+ "secrets1": {
+ "type": "secrets",
+ "suggestion": "true",
+ "ignored": "true"
+ },
+ "storage1": {
+ "type": "storage",
+ "connectionId": "ArchiveDiagStorage"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiagWeb/Properties/serviceDependencies.local.json b/ArchiveDiagWeb/Properties/serviceDependencies.local.json
new file mode 100644
index 000000000..d3b42a30c
--- /dev/null
+++ b/ArchiveDiagWeb/Properties/serviceDependencies.local.json
@@ -0,0 +1,10 @@
+{
+ "dependencies": {
+ "storage1": {
+ "resourceId": "/subscriptions/[parameters('subscriptionId')]/resourceGroups/[parameters('resourceGroup')]/providers/Microsoft.Storage/storageAccounts/archivediagstore",
+ "type": "storage.azure",
+ "connectionId": "ArchiveDiagStorage",
+ "secretStore": "LocalSecretsFile"
+ }
+ }
+}
\ No newline at end of file
diff --git a/ArchiveDiagWeb/Startup.cs b/ArchiveDiagWeb/Startup.cs
new file mode 100644
index 000000000..2a7f4e0f9
--- /dev/null
+++ b/ArchiveDiagWeb/Startup.cs
@@ -0,0 +1,89 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Hosting;
+using Microsoft.AspNetCore.Http;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.Azure;
+using Azure.Storage.Queues;
+using Azure.Storage.Blobs;
+using Azure.Core.Extensions;
+
+namespace ArchiveDiagWeb
+{
+ public class Startup
+ {
+ public Startup(IConfiguration configuration)
+ {
+ Configuration = configuration;
+ }
+ public IConfiguration Configuration { get; }
+ // This method gets called by the runtime. Use this method to add services to the container.
+ // For more information on how to configure your application, visit https://go.microsoft.com/fwlink/?LinkID=398940
+ public void ConfigureServices(IServiceCollection services)
+ {
+ services.AddRazorPages();
+ services.AddAzureClients(builder =>
+ {
+ builder.AddBlobServiceClient(Configuration["ConnectionStrings:ArchiveDiagStorage:blob"], preferMsi: true);
+ builder.AddQueueServiceClient(Configuration["ConnectionStrings:ArchiveDiagStorage:queue"], preferMsi: true);
+ });
+
+ }
+
+ // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
+ public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+ {
+ if (env.IsDevelopment())
+ {
+ app.UseDeveloperExceptionPage();
+
+ }
+
+ app.UseStaticFiles();
+
+ app.UseRouting();
+
+ app.UseEndpoints(endpoints =>
+ {
+ endpoints.MapGet("/status/{jobid}", async context =>
+ {
+ var name = context.Request.RouteValues["jobid"];
+ await context.Response.WriteAsync("Hello World!");
+ });
+
+ endpoints.MapRazorPages();
+ });
+ }
+
+ }
+ internal static class StartupExtensions
+ {
+ public static IAzureClientBuilder AddBlobServiceClient(this AzureClientFactoryBuilder builder, string serviceUriOrConnectionString, bool preferMsi)
+ {
+ if (preferMsi && Uri.TryCreate(serviceUriOrConnectionString, UriKind.Absolute, out Uri serviceUri))
+ {
+ return builder.AddBlobServiceClient(serviceUri);
+ }
+ else
+ {
+ return builder.AddBlobServiceClient(serviceUriOrConnectionString);
+ }
+ }
+ public static IAzureClientBuilder AddQueueServiceClient(this AzureClientFactoryBuilder builder, string serviceUriOrConnectionString, bool preferMsi)
+ {
+ if (preferMsi && Uri.TryCreate(serviceUriOrConnectionString, UriKind.Absolute, out Uri serviceUri))
+ {
+ return builder.AddQueueServiceClient(serviceUri);
+ }
+ else
+ {
+ return builder.AddQueueServiceClient(serviceUriOrConnectionString);
+ }
+ }
+ }
+}
diff --git a/ArchiveDiagWeb/appsettings.Development.json b/ArchiveDiagWeb/appsettings.Development.json
new file mode 100644
index 000000000..8983e0fc1
--- /dev/null
+++ b/ArchiveDiagWeb/appsettings.Development.json
@@ -0,0 +1,9 @@
+{
+ "Logging": {
+ "LogLevel": {
+ "Default": "Information",
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information"
+ }
+ }
+}
diff --git a/ArchiveDiagWeb/appsettings.json b/ArchiveDiagWeb/appsettings.json
new file mode 100644
index 000000000..d9d9a9bff
--- /dev/null
+++ b/ArchiveDiagWeb/appsettings.json
@@ -0,0 +1,10 @@
+{
+ "Logging": {
+ "LogLevel": {
+ "Default": "Information",
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information"
+ }
+ },
+ "AllowedHosts": "*"
+}
diff --git a/ICSharpCode.SharpZipLib.sln b/ICSharpCode.SharpZipLib.sln
index cab9675b5..48f01b2ed 100644
--- a/ICSharpCode.SharpZipLib.sln
+++ b/ICSharpCode.SharpZipLib.sln
@@ -19,6 +19,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ICSharpCode.SharpZipLib.Tes
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ICSharpCode.SharpZipLib.Benchmark", "benchmark\ICSharpCode.SharpZipLib.Benchmark\ICSharpCode.SharpZipLib.Benchmark.csproj", "{C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ArchiveDiag", "ArchiveDiag\ArchiveDiag.csproj", "{6F1432BF-4792-4AEB-BB84-3470B1119515}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ArchiveDiagWeb", "ArchiveDiagWeb\ArchiveDiagWeb.csproj", "{06CBE3B2-4B88-42DA-A4B4-F0A665D5E267}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ArchiveDiag-Worker", "ArchiveDiag-Worker\ArchiveDiag-Worker.csproj", "{1270950A-1EC0-4F83-AE03-CF0250F7F2C7}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -41,6 +47,18 @@ Global
{C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {6F1432BF-4792-4AEB-BB84-3470B1119515}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {6F1432BF-4792-4AEB-BB84-3470B1119515}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {6F1432BF-4792-4AEB-BB84-3470B1119515}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {6F1432BF-4792-4AEB-BB84-3470B1119515}.Release|Any CPU.Build.0 = Release|Any CPU
+ {06CBE3B2-4B88-42DA-A4B4-F0A665D5E267}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {06CBE3B2-4B88-42DA-A4B4-F0A665D5E267}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {06CBE3B2-4B88-42DA-A4B4-F0A665D5E267}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {06CBE3B2-4B88-42DA-A4B4-F0A665D5E267}.Release|Any CPU.Build.0 = Release|Any CPU
+ {1270950A-1EC0-4F83-AE03-CF0250F7F2C7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {1270950A-1EC0-4F83-AE03-CF0250F7F2C7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {1270950A-1EC0-4F83-AE03-CF0250F7F2C7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {1270950A-1EC0-4F83-AE03-CF0250F7F2C7}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs b/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs
index 3fb257906..5f4afdf71 100644
--- a/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs
+++ b/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs
@@ -691,7 +691,7 @@ public override int Read(byte[] buffer, int offset, int count)
///
/// Input buffer for this stream.
///
- protected InflaterInputBuffer inputBuffer;
+ public InflaterInputBuffer inputBuffer;
///
/// Base stream the inflater reads from.
diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs
index 147d4043d..4f77c31e6 100644
--- a/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs
+++ b/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs
@@ -3,6 +3,7 @@
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
+using System.Diagnostics;
using System.IO;
namespace ICSharpCode.SharpZipLib.Zip
@@ -163,31 +164,12 @@ public ZipEntry GetNextEntry()
CloseEntry();
}
- int header = inputBuffer.ReadLeInt();
-
- if (header == ZipConstants.CentralHeaderSignature ||
- header == ZipConstants.EndOfCentralDirectorySignature ||
- header == ZipConstants.CentralHeaderDigitalSignature ||
- header == ZipConstants.ArchiveExtraDataSignature ||
- header == ZipConstants.Zip64CentralFileHeaderSignature)
+ if (!SkipUntilNextEntry())
{
- // No more individual entries exist
Dispose();
return null;
}
- // -jr- 07-Dec-2003 Ignore spanning temporary signatures if found
- // Spanning signature is same as descriptor signature and is untested as yet.
- if ((header == ZipConstants.SpanningTempSignature) || (header == ZipConstants.SpanningSignature))
- {
- header = inputBuffer.ReadLeInt();
- }
-
- if (header != ZipConstants.LocalHeaderSignature)
- {
- throw new ZipException("Wrong Local header signature: 0x" + String.Format("{0:X}", header));
- }
-
var versionRequiredToExtract = (short)inputBuffer.ReadLeShort();
flags = inputBuffer.ReadLeShort();
@@ -283,14 +265,63 @@ public ZipEntry GetNextEntry()
return entry;
}
+ ///
+ /// Reads bytes from the input stream until either a local file header signature, or another signature
+ /// indicating that no more entries should be present, is found.
+ ///
+ /// Thrown if the end of the input stream is reached without any signatures found
+ /// Returns whether the found signature is for a local entry header
+ private bool SkipUntilNextEntry()
+ {
+ // First, skip any run of null bytes: zero padding is the sensible filler left behind when an entry is rewritten with a smaller size
+ var paddingSkipped = 0;
+ while(inputBuffer.ReadLeByte() == 0) {
+ paddingSkipped++;
+ }
+
+ // Last byte read was not actually consumed, restore the offset
+ inputBuffer.Available += 1;
+ if(paddingSkipped > 0) {
+ Debug.WriteLine("Skipped {0} null byte(s) before reading signature", paddingSkipped);
+ }
+
+ var offset = 0;
+ // Read initial header quad directly after the last entry
+ var header = (uint)inputBuffer.ReadLeInt();
+ do
+ {
+ switch (header)
+ {
+ case ZipConstants.CentralHeaderSignature:
+ case ZipConstants.EndOfCentralDirectorySignature:
+ case ZipConstants.CentralHeaderDigitalSignature:
+ case ZipConstants.ArchiveExtraDataSignature:
+ case ZipConstants.Zip64CentralFileHeaderSignature:
+ Debug.WriteLine("Non-entry signature found at offset {0,2}: 0x{1:x8}", offset, header);
+ // No more individual entries exist
+ return false;
+
+ case ZipConstants.LocalHeaderSignature:
+ Debug.WriteLine("Entry local header signature found at offset {0,2}: 0x{1:x8}", offset, header);
+ return true;
+ default:
+ // Current header quad did not match any signature, shift in another byte
+ header = (uint) (inputBuffer.ReadLeByte() << 24) | (header >> 8);
+ offset++;
+ break;
+ }
+ } while (true); // Loop until we either get an EOF exception or we find the next signature
+ }
+
///
/// Read data descriptor at the end of compressed data.
///
private void ReadDataDescriptor()
{
- if (inputBuffer.ReadLeInt() != ZipConstants.DataDescriptorSignature)
+ var descriptor = inputBuffer.ReadLeInt();
+ if (descriptor != ZipConstants.DataDescriptorSignature)
{
- throw new ZipException("Data descriptor signature not found");
+ throw new ZipException($"Data descriptor signature not found, got: 0x{descriptor:x8} instead of 0x{ZipConstants.DataDescriptorSignature:x8}");
}
entry.Crc = inputBuffer.ReadLeInt() & 0xFFFFFFFFL;
@@ -380,6 +411,7 @@ public void CloseEntry()
if ((inputBuffer.Available > csize) && (csize >= 0))
{
+ // The input buffer already holds the entire remaining entry data; skip it by adjusting the buffer's internal offset
inputBuffer.Available = (int)((long)inputBuffer.Available - csize);
}
else