starting work on backup verification

2.x
Szum123321 2022-11-21 23:31:48 +01:00
parent ef4c69b4d1
commit 2774ebd2b4
8 changed files with 171 additions and 37 deletions

net/szum123321/textile_backup/core/CompressionStatus.java - View File

@@ -0,0 +1,16 @@
package net.szum123321.textile_backup.core;

import java.io.Serializable;
import java.nio.file.Path;
import java.time.LocalDateTime;

public record CompressionStatus(long[] treeHash, LocalDateTime date, long startTimestamp, long finishTimestamp, boolean ok, Path[] brokenFiles) implements Serializable {
    public static class Builder {
        public synchronized void update(Path path, long hash, Exception error) { throw new RuntimeException("UNIMPLEMENTED!"); }
        public synchronized void update(Path path, Exception error) { throw new RuntimeException("UNIMPLEMENTED!"); }
        public synchronized void update(Path path, long hash) { throw new RuntimeException("UNIMPLEMENTED!"); }

        public CompressionStatus build() { throw new RuntimeException("UNIMPLEMENTED!"); }
    }
}
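All four Builder methods are stubbed out in this commit. A minimal sketch of one way the Builder could collect per-file results into the record (hypothetical, assuming plain lists rather than the author's eventual design):

    // Hypothetical sketch only; the commit leaves these methods unimplemented.
    // Assumes java.util.List and java.util.ArrayList in addition to the imports above.
    public static class Builder {
        private final List<Long> hashes = new ArrayList<>();
        private final List<Path> broken = new ArrayList<>();
        private final long start = System.currentTimeMillis();

        public synchronized void update(Path path, long hash) { hashes.add(hash); }
        public synchronized void update(Path path, Exception error) { broken.add(path); }
        public synchronized void update(Path path, long hash, Exception error) { hashes.add(hash); broken.add(path); }

        public CompressionStatus build() {
            return new CompressionStatus(
                    hashes.stream().mapToLong(Long::longValue).toArray(),
                    LocalDateTime.now(), start, System.currentTimeMillis(),
                    broken.isEmpty(), broken.toArray(new Path[0]));
        }
    }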

net/szum123321/textile_backup/core/create/FileInputStreamSupplier.java - View File

@@ -0,0 +1,47 @@
package net.szum123321.textile_backup.core.create;

import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.CompressionStatus;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

public record FileInputStreamSupplier(Path path, String name, CompressionStatus.Builder builder) implements InputSupplier {
    private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);

    @Override
    public InputStream getInputStream() throws IOException {
        try {
            //TODO: put in hasher
            return new HashingInputStream(Files.newInputStream(path), path, null, builder);
        } catch (IOException e) {
            builder.update(path, e);
            throw e;
        }
    }

    @Override
    public Path getPath() {
        return path;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public InputStream get() {
        try {
            return getInputStream();
        } catch (IOException e) {
            log.error("An exception occurred while trying to create an input stream from file: {}!", path.toString(), e);
        }
        return null;
    }
}
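The TODO above passes null for the hasher, so HashingInputStream would currently throw a NullPointerException on read. One way the TODO could be filled in, assuming a plain java.util.zip.CRC32 (which implements Checksum) is an acceptable per-file hash; this is a guess, not the commit's final choice:

    // Hypothetical: CRC32 satisfies the Checksum parameter expected by HashingInputStream.
    return new HashingInputStream(Files.newInputStream(path), path, new java.util.zip.CRC32(), builder);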

net/szum123321/textile_backup/core/create/HashingInputStream.java - View File

@@ -0,0 +1,43 @@
package net.szum123321.textile_backup.core.create;

import net.szum123321.textile_backup.core.CompressionStatus;
import org.jetbrains.annotations.NotNull;

import java.io.*;
import java.nio.file.Path;
import java.util.zip.Checksum;

public class HashingInputStream extends FilterInputStream {
    private final Path path;
    private final Checksum hasher;
    private final CompressionStatus.Builder statusBuilder;

    public HashingInputStream(InputStream in, Path path, Checksum hasher, CompressionStatus.Builder statusBuilder) {
        super(in);
        this.hasher = hasher;
        this.statusBuilder = statusBuilder;
        this.path = path;
    }

    @Override
    public int read(byte @NotNull [] b, int off, int len) throws IOException {
        int i = in.read(b, off, len);
        if(i > -1) hasher.update(b, off, i);
        return i;
    }

    @Override
    public int read() throws IOException {
        int i = in.read();
        if(i > -1) hasher.update(i);
        return i;
    }

    @Override
    public void close() throws IOException {
        if(in.available() == 0) statusBuilder.update(path, hasher.getValue());
        else statusBuilder.update(path, hasher.getValue(), new RuntimeException("AAAaa"));
        super.close();
    }
}
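A short usage sketch of the wrapper: the stream is drained completely so that available() == 0 at close() and the success branch of the builder is taken. The file name and builder are placeholders, and note that Builder.update still throws in this commit, so this only illustrates the intended flow:

    // Assumes java.io.*, java.nio.file.* and java.util.zip.CRC32 are imported.
    CompressionStatus.Builder builder = new CompressionStatus.Builder();
    Path file = Path.of("level.dat");   // placeholder path
    try (InputStream in = new HashingInputStream(Files.newInputStream(file), file, new CRC32(), builder)) {
        in.transferTo(OutputStream.nullOutputStream());   // consume everything so close() reports success
    }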

net/szum123321/textile_backup/core/create/InputSupplier.java - View File

@@ -0,0 +1,14 @@
package net.szum123321.textile_backup.core.create;

import org.apache.commons.compress.parallel.InputStreamSupplier;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;

public interface InputSupplier extends InputStreamSupplier {
    InputStream getInputStream() throws IOException;
    Path getPath();
    String getName();
}

net/szum123321/textile_backup/core/create/compressors/AbstractCompressor.java - View File

@@ -21,9 +21,12 @@ package net.szum123321.textile_backup.core.create.compressors;
 import net.szum123321.textile_backup.TextileBackup;
 import net.szum123321.textile_backup.TextileLogger;
 import net.szum123321.textile_backup.core.ActionInitiator;
+import net.szum123321.textile_backup.core.CompressionStatus;
 import net.szum123321.textile_backup.core.NoSpaceLeftOnDeviceException;
 import net.szum123321.textile_backup.core.Utilities;
 import net.szum123321.textile_backup.core.create.BackupContext;
+import net.szum123321.textile_backup.core.create.FileInputStreamSupplier;
+import net.szum123321.textile_backup.core.create.InputSupplier;

 import java.io.*;
 import java.nio.file.Files;
@@ -47,12 +50,13 @@ public abstract class AbstractCompressor {
                 OutputStream arc = createArchiveOutputStream(bufferedOutputStream, ctx, coreLimit);
                 Stream<Path> fileStream = Files.walk(inputFile)) {

+            CompressionStatus.Builder statusBuilder = new CompressionStatus.Builder();
+
             fileStream
                     .filter(path -> !Utilities.isBlacklisted(inputFile.relativize(path)))
                     .filter(Files::isRegularFile).forEach(file -> {
                         try {
-                            //hopefully one broken file won't spoil the whole archive
-                            addEntry(file, inputFile.relativize(file).toString(), arc);
+                            addEntry(new FileInputStreamSupplier(file, inputFile.relativize(file).toString(), statusBuilder), arc);
                         } catch (IOException e) {
                             log.error("An exception occurred while trying to compress: {}", inputFile.relativize(file).toString(), e);
@@ -61,6 +65,13 @@ public abstract class AbstractCompressor {
                         }
                     });

+            //Serialize using gson?
+            ByteArrayOutputStream bo = new ByteArrayOutputStream();
+            ObjectOutputStream o = new ObjectOutputStream(bo);
+            o.writeObject(statusBuilder.build());
+
+            addEntry(new StatusFileInputSupplier(bo.toByteArray(), bo.size()), arc);
+
             finish(arc);
         } catch(NoSpaceLeftOnDeviceException e) {
             log.error("""
@@ -88,7 +99,7 @@ public abstract class AbstractCompressor {
     }

     protected abstract OutputStream createArchiveOutputStream(OutputStream stream, BackupContext ctx, int coreLimit) throws IOException;
-    protected abstract void addEntry(Path file, String entryName, OutputStream arc) throws IOException;
+    protected abstract void addEntry(InputSupplier inputSupplier, OutputStream arc) throws IOException;

     protected void finish(OutputStream arc) throws InterruptedException, ExecutionException, IOException {
         //This function is only needed for the ParallelZipCompressor to write out ParallelScatterZipCreator
@@ -97,4 +108,20 @@ public abstract class AbstractCompressor {
     protected void close() {
         //Same as above, just for ParallelGzipCompressor to shut down ExecutorService
     }
+
+    private record StatusFileInputSupplier(byte[] data, int len) implements InputSupplier {
+        private final static String NAME = "textile_status.data";
+
+        @Override
+        public InputStream getInputStream() { return new ByteArrayInputStream(data, 0, len); }
+
+        @Override
+        public Path getPath() { return Path.of(NAME); }
+
+        @Override
+        public String getName() { return NAME; }
+
+        @Override
+        public InputStream get() { return new ByteArrayInputStream(data, 0, len); }
+    }
 }
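The status record is serialized with ObjectOutputStream and stored in the archive as textile_status.data. The reading side, the actual verification, is not part of this commit; a minimal counterpart could look like the following, assuming the entry's stream has already been located inside the archive:

    // Hypothetical verification-side helper; not present in this commit.
    private static CompressionStatus readStatus(InputStream entryStream) throws IOException, ClassNotFoundException {
        try (ObjectInputStream in = new ObjectInputStream(entryStream)) {
            return (CompressionStatus) in.readObject();
        }
    }

The in-code comment also floats Gson as an alternative; this sketch assumes the ObjectOutputStream path used above.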

net/szum123321/textile_backup/core/create/compressors/ParallelZipCompressor.java - View File

@@ -22,12 +22,11 @@ import net.szum123321.textile_backup.TextileBackup;
 import net.szum123321.textile_backup.TextileLogger;
 import net.szum123321.textile_backup.core.NoSpaceLeftOnDeviceException;
 import net.szum123321.textile_backup.core.create.BackupContext;
+import net.szum123321.textile_backup.core.create.InputSupplier;
 import org.apache.commons.compress.archivers.zip.*;
-import org.apache.commons.compress.parallel.InputStreamSupplier;

 import java.io.*;
 import java.nio.file.Files;
-import java.nio.file.Path;
 import java.util.Objects;
 import java.util.concurrent.*;
 import java.util.zip.ZipEntry;
@@ -67,19 +66,19 @@ public class ParallelZipCompressor extends ZipCompressor {
     }

     @Override
-    protected void addEntry(Path file, String entryName, OutputStream arc) throws IOException {
-        ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(file, entryName);
+    protected void addEntry(InputSupplier input, OutputStream arc) throws IOException {
+        ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(input.getPath(), input.getName());

-        if(ZipCompressor.isDotDat(file.getFileName().toString())) {
+        if(ZipCompressor.isDotDat(input.getPath().getFileName().toString())) {
             entry.setMethod(ZipEntry.STORED);
-            entry.setSize(Files.size(file));
-            entry.setCompressedSize(Files.size(file));
-            entry.setCrc(getCRC(file));
+            entry.setSize(Files.size(input.getPath()));
+            entry.setCompressedSize(Files.size(input.getPath()));
+            entry.setCrc(getCRC(input.getPath()));
         } else entry.setMethod(ZipEntry.DEFLATED);

         entry.setTime(System.currentTimeMillis());

-        scatterZipCreator.addArchiveEntry(entry, new FileInputStreamSupplier(file));
+        scatterZipCreator.addArchiveEntry(entry, input);
     }

     @Override
@@ -127,16 +126,4 @@ public class ParallelZipCompressor extends ZipCompressor {
             return isNative == that.isNative && Objects.equals(className, that.className) && Objects.equals(methodName, that.methodName);
         }
     }
-
-    record FileInputStreamSupplier(Path sourceFile) implements InputStreamSupplier {
-        public InputStream get() {
-            try {
-                return Files.newInputStream(sourceFile);
-            } catch (IOException e) {
-                log.error("An exception occurred while trying to create an input stream from file: {}!", sourceFile.toString(), e);
-            }
-            return null;
-        }
-    }
-
 }
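Since InputSupplier extends commons-compress's InputStreamSupplier, the supplier built in AbstractCompressor can be handed to ParallelScatterZipCreator directly, which is why the adapter record above could be removed. For reference, the basic commons-compress flow (simplified; the method name, entry name and target path here are placeholders):

    // Assumes the imports already present in this file plus java.nio.file.Path and Files.
    static void writeParallelZip(Path target) throws IOException, InterruptedException, ExecutionException {
        ParallelScatterZipCreator creator = new ParallelScatterZipCreator();
        ZipArchiveEntry entry = new ZipArchiveEntry("placeholder.txt");   // placeholder entry name
        entry.setMethod(ZipEntry.DEFLATED);
        creator.addArchiveEntry(entry, InputStream::nullInputStream);     // any InputStreamSupplier works here
        try (ZipArchiveOutputStream out = new ZipArchiveOutputStream(Files.newOutputStream(target))) {
            creator.writeTo(out);   // blocks until all entries are compressed, then writes them in order
        }
    }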

net/szum123321/textile_backup/core/create/compressors/ZipCompressor.java - View File

@@ -21,6 +21,7 @@ package net.szum123321.textile_backup.core.create.compressors;
 import net.szum123321.textile_backup.config.ConfigHelper;
 import net.szum123321.textile_backup.core.Utilities;
 import net.szum123321.textile_backup.core.create.BackupContext;
+import net.szum123321.textile_backup.core.create.InputSupplier;
 import org.apache.commons.compress.archivers.zip.Zip64Mode;
 import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
 import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
@@ -54,15 +55,15 @@ public class ZipCompressor extends AbstractCompressor {
     }

     @Override
-    protected void addEntry(Path file, String entryName, OutputStream arc) throws IOException {
-        try (InputStream fileInputStream = Files.newInputStream(file)){
-            ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(file, entryName);
+    protected void addEntry(InputSupplier input, OutputStream arc) throws IOException {
+        try (InputStream fileInputStream = input.getInputStream()) {
+            ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(input.getPath(), input.getName());

-            if(isDotDat(file.getFileName().toString())) {
+            if(isDotDat(input.getPath().getFileName().toString())) {
                 entry.setMethod(ZipEntry.STORED);
-                entry.setSize(Files.size(file));
-                entry.setCompressedSize(Files.size(file));
-                entry.setCrc(getCRC(file));
+                entry.setSize(Files.size(input.getPath()));
+                entry.setCompressedSize(Files.size(input.getPath()));
+                entry.setCrc(getCRC(input.getPath()));
             }

             ((ZipArchiveOutputStream)arc).putArchiveEntry(entry);

net/szum123321/textile_backup/core/create/compressors/tar/AbstractTarArchiver.java - View File

@@ -20,13 +20,12 @@ package net.szum123321.textile_backup.core.create.compressors.tar;
 import net.szum123321.textile_backup.core.create.BackupContext;
 import net.szum123321.textile_backup.core.create.compressors.AbstractCompressor;
+import net.szum123321.textile_backup.core.create.InputSupplier;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
 import org.apache.commons.compress.utils.IOUtils;

 import java.io.*;
-import java.nio.file.Files;
-import java.nio.file.Path;

 public class AbstractTarArchiver extends AbstractCompressor {
     protected OutputStream getCompressorOutputStream(OutputStream stream, BackupContext ctx, int coreLimit) throws IOException {
@@ -43,9 +42,9 @@ public class AbstractTarArchiver extends AbstractCompressor {
     }

     @Override
-    protected void addEntry(Path file, String entryName, OutputStream arc) throws IOException {
-        try (InputStream fileInputStream = Files.newInputStream(file)){
-            TarArchiveEntry entry = (TarArchiveEntry)((TarArchiveOutputStream) arc).createArchiveEntry(file, entryName);
+    protected void addEntry(InputSupplier in, OutputStream arc) throws IOException {
+        try (InputStream fileInputStream = in.getInputStream()) {
+            TarArchiveEntry entry = (TarArchiveEntry)((TarArchiveOutputStream) arc).createArchiveEntry(in.getPath(), in.getName());
             ((TarArchiveOutputStream)arc).putArchiveEntry(entry);

             IOUtils.copy(fileInputStream, arc);