Backup integrity is now checked

2.x
Szum123321 2022-11-25 14:00:18 +01:00
parent dbb9a71749
commit 9c37affacd
10 changed files with 101 additions and 95 deletions
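
In outline: every file is hashed as it is written or read, the per-file hashes are folded into a single tree hash, the value computed at backup time is stored inside the archive (textile_status.data), and the restore path recomputes and compares it. Below is a minimal, self-contained sketch of that idea; the CRC32-per-file choice matches the streams used in this commit, but the XOR combine and all names are assumptions, since FileTreeHashBuilder itself is not part of this diff.

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.CRC32;

public class TreeHashSketch {
    // hash one file's contents
    static long hashFile(Path file) throws Exception {
        CRC32 crc = new CRC32();
        crc.update(Files.readAllBytes(file));
        return crc.getValue();
    }

    public static void main(String[] args) throws Exception {
        long tree = 0;
        try (var paths = Files.walk(Path.of(args[0]))) {
            // XOR is order-independent, so tar and zip iteration orders agree
            for (Path p : paths.filter(Files::isRegularFile).toList())
                tree ^= hashFile(p);
        }
        System.out.printf("tree hash: %016x%n", tree);
    }
}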

View File

@ -68,6 +68,7 @@ public class TextileBackup implements ModInitializer {
Globals.INSTANCE.shutdownQueueExecutor(60000);
if (config.get().shutdownBackup && Globals.INSTANCE.globalShutdownBackupFlag.get()) {
+ try {
MakeBackupRunnableFactory.create(
BackupContext.Builder
.newBackupContextBuilder()
@ -75,7 +76,8 @@ public class TextileBackup implements ModInitializer {
.setInitiator(ActionInitiator.Shutdown)
.setComment("shutdown")
.build()
- ).run();
+ ).call();
+ } catch (Exception ignored) {}
}
});
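
The shutdown hook is the first caller converted from Runnable.run() to Callable.call(). The practical difference: call() may throw checked exceptions, so a failed final backup surfaces at the call site (and is deliberately ignored there) instead of needing an unchecked wrapper inside the task. A minimal illustration with placeholder names:

import java.util.concurrent.Callable;

public class ShutdownCallDemo {
    public static void main(String[] args) {
        // a Callable body may throw checked exceptions; a Runnable body may not
        Callable<Void> backup = () -> {
            throw new java.io.IOException("disk full");
        };
        try {
            backup.call();
        } catch (Exception ignored) {} // mirrors the shutdown path above
    }
}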

View File

@ -41,7 +41,6 @@ public class StartBackupCommand {
}
private static int execute(ServerCommandSource source, @Nullable String comment) {
- try {
Globals.INSTANCE.getQueueExecutor().submit(
MakeBackupRunnableFactory.create(
BackupContext.Builder
@ -53,10 +52,6 @@ public class StartBackupCommand {
.build()
)
);
- } catch (Exception e) {
- log.error("Something went wrong while executing command!", e);
- throw e;
- }
return 1;
}
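
The command's own try/catch could be removed because ExecutorService.submit() never throws the task's exception at the submission site: it is captured in the returned Future and resurfaces from Future.get() as an ExecutionException. A small self-contained demonstration:

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class SubmitDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<Void> f = pool.submit(() -> {
            throw new java.io.IOException("backup failed"); // never thrown here
        });
        try {
            f.get(); // the failure only becomes visible here
        } catch (ExecutionException e) {
            System.out.println("task failed: " + e.getCause());
        } finally {
            pool.shutdown();
        }
    }
}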

View File

@ -191,6 +191,8 @@ public class ConfigPOJO implements ConfigData {
VERY_PERMISSIBLE;
+ public boolean isStrict() { return this == STRICT; }
+ public boolean verify() { return this != VERY_PERMISSIBLE; }
}
public enum ArchiveFormat {
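
The two new predicates above gate the verification step during restore: verify() turns the tree-hash comparison on, and VERY_PERMISSIBLE turns it off. A sketch of how the restore code consumes the switch; only STRICT and VERY_PERMISSIBLE are visible in this hunk, so the middle constant is a guess:

enum ErrorHandlingMode {
    STRICT, PERMISSIBLE, VERY_PERMISSIBLE; // PERMISSIBLE is assumed
    boolean isStrict() { return this == STRICT; }
    boolean verify()   { return this != VERY_PERMISSIBLE; }
}

public class VerifyGateDemo {
    public static void main(String[] args) {
        ErrorHandlingMode mode = ErrorHandlingMode.VERY_PERMISSIBLE;
        boolean hashesMatch = false; // pretend the tree-hash check failed
        if (hashesMatch || !mode.verify())
            System.out.println("restoring (hash ok, or verification disabled)");
        else
            System.out.println("aborting: hash mismatch");
    }
}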

View File

@ -28,8 +28,8 @@ public record CompressionStatus(long treeHash, LocalDateTime date, long startTim
public static final String DATA_FILENAME = "textile_status.data";
public boolean isValid(long decompressedHash) { return true; }
- public static CompressionStatus readFromFile(Path f) throws IOException, ClassNotFoundException {
- try(InputStream i = Files.newInputStream(f);
+ public static CompressionStatus readFromFile(Path folder) throws IOException, ClassNotFoundException {
+ try(InputStream i = Files.newInputStream(folder.resolve(DATA_FILENAME));
ObjectInputStream obj = new ObjectInputStream(i)) {
return (CompressionStatus) obj.readObject();
}
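
readFromFile() now takes the extracted folder and resolves DATA_FILENAME itself. The status object travels as plain Java serialization inside the archive. A self-contained round trip, with the record trimmed to the one field this diff actually compares:

import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;

public class StatusRoundTrip {
    record Status(long treeHash) implements Serializable {}

    public static void main(String[] args) throws Exception {
        Path folder = Files.createTempDirectory("demo");
        Path data = folder.resolve("textile_status.data");
        try (var out = new ObjectOutputStream(Files.newOutputStream(data))) {
            out.writeObject(new Status(0xCAFEBABEL)); // written at backup time
        }
        try (var in = new ObjectInputStream(Files.newInputStream(data))) {
            Status s = (Status) in.readObject();      // read back during restore
            System.out.printf("treeHash = %x%n", s.treeHash());
        }
    }
}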

View File

@ -36,11 +36,13 @@ import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
+ import java.util.concurrent.Callable;
+ import java.util.concurrent.ExecutionException;
/**
* The actual object responsible for creating the backup
*/
- public class MakeBackupRunnable implements Runnable {
+ public class MakeBackupRunnable implements Callable<Void> {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final static ConfigHelper config = ConfigHelper.INSTANCE;
@ -49,9 +51,8 @@ public class MakeBackupRunnable implements Runnable {
public MakeBackupRunnable(BackupContext context) {
this.context = context;
}
@Override
- public void run() {
+ public Void call() throws IOException, ExecutionException, InterruptedException {
Path outFile = Utilities
.getBackupRootPath(Utilities.getLevelName(context.server()))
.resolve(getFileName());
@ -114,7 +115,7 @@ public class MakeBackupRunnable implements Runnable {
} else {
log.sendInfoAL(context, "Done!");
}
- } catch (Throwable e) {
+ } catch (InterruptedException | ExecutionException | IOException e) {
//ExecutorService swallows exceptions, so I need to catch everything
log.error("An exception occurred when trying to create new backup file!", e);
@ -128,10 +129,14 @@ public class MakeBackupRunnable implements Runnable {
if(context.initiator() == ActionInitiator.Player)
log.sendError(context, "An exception occurred when trying to create new backup file!");
+ throw e;
} finally {
Utilities.enableWorldSaving(context.server());
Globals.INSTANCE.disableWatchdog = false;
}
+ return null;
}
private String getFileName(){
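
The backup task's new shape in brief: catch the specific checked exceptions instead of Throwable, rethrow after logging so the executor's Future reports the failure, and keep the cleanup in finally. A skeleton of the pattern with the mod-specific calls replaced by placeholders:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.Callable;

public class BackupTaskSkeleton implements Callable<Void> {
    @Override
    public Void call() throws IOException {
        try {
            // stand-in for the real work: create the output file's directory
            Files.createDirectories(Path.of("backup"));
            return null;             // Callable<Void> must return a value
        } catch (IOException e) {
            System.err.println("backup failed: " + e);
            throw e;                 // rethrow so the executor's Future reports it
        } finally {
            // stand-in for: re-enable world saving, reset the watchdog flag
        }
    }
}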

View File

@ -25,12 +25,13 @@ import net.szum123321.textile_backup.config.ConfigHelper;
import net.szum123321.textile_backup.core.Utilities;
import java.time.LocalDateTime;
+ import java.util.concurrent.Callable;
public class MakeBackupRunnableFactory {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final static ConfigHelper config = ConfigHelper.INSTANCE;
- public static Runnable create(BackupContext ctx) {
+ public static Callable<Void> create(BackupContext ctx) {
if(config.get().broadcastBackupStart) {
Utilities.notifyPlayers(ctx.server(),
"Warning! Server backup will begin shortly. You may experience some lag."
@ -58,13 +59,7 @@ public class MakeBackupRunnableFactory {
if (ctx.shouldSave()) {
log.sendInfoAL(ctx, "Saving server...");
ctx.server().getPlayerManager().saveAllPlayerData();
- try {
- ctx.server().save(false, true, true);
- } catch (Exception e) {
- log.sendErrorAL(ctx,"An exception occurred when trying to save the world!");
- }
+ ctx.server().saveAll(true, true, false);
}
return new MakeBackupRunnable(ctx);

View File

@ -18,6 +18,7 @@
package net.szum123321.textile_backup.core.restore;
+ import net.szum123321.textile_backup.core.FileTreeHashBuilder;
import org.jetbrains.annotations.NotNull;
import java.io.FilterOutputStream;
@ -30,12 +31,16 @@ public class HashingOutputStream extends FilterOutputStream {
private final Path path;
private final Checksum hasher;
- public HashingOutputStream(OutputStream out, Path path, Checksum hasher) {
+ private final FileTreeHashBuilder hashBuilder;
+ public HashingOutputStream(OutputStream out, Path path, Checksum hasher, FileTreeHashBuilder hashBuilder) {
super(out);
this.path = path;
this.hasher = hasher;
+ this.hashBuilder = hashBuilder;
}
@Override
public void write(int b) throws IOException {
super.write(b);
@ -51,5 +56,6 @@ public class HashingOutputStream extends FilterOutputStream {
@Override
public void close() throws IOException {
super.close();
+ hashBuilder.update(path, hasher.getValue());
}
}
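
HashingOutputStream is where the per-file hashes are produced: every byte that passes through also feeds a Checksum, and close() reports the final value to the shared FileTreeHashBuilder. The same pattern in a self-contained form that just prints the digest:

import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

public class HashingStreamDemo extends FilterOutputStream {
    private final Checksum hasher;

    public HashingStreamDemo(OutputStream out, Checksum hasher) {
        super(out);
        this.hasher = hasher;
    }

    @Override
    public void write(int b) throws IOException {
        out.write(b);
        hasher.update(b);            // hash every byte that goes out
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        out.write(b, off, len);
        hasher.update(b, off, len);
    }

    @Override
    public void close() throws IOException {
        super.close();
        // the mod hands this value to FileTreeHashBuilder; we just print it
        System.out.printf("crc32 = %x%n", hasher.getValue());
    }

    public static void main(String[] args) throws IOException {
        try (var s = new HashingStreamDemo(OutputStream.nullOutputStream(), new CRC32())) {
            s.write("hello".getBytes());
        }
    }
}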

View File

@ -25,7 +25,6 @@ import net.szum123321.textile_backup.config.ConfigHelper;
import net.szum123321.textile_backup.config.ConfigPOJO;
import net.szum123321.textile_backup.core.ActionInitiator;
import net.szum123321.textile_backup.core.CompressionStatus;
- import net.szum123321.textile_backup.core.LivingServer;
import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.create.BackupContext;
import net.szum123321.textile_backup.core.create.MakeBackupRunnableFactory;
@ -33,12 +32,11 @@ import net.szum123321.textile_backup.core.restore.decompressors.GenericTarDecomp
import net.szum123321.textile_backup.core.restore.decompressors.ZipDecompressor;
import java.io.IOException;
- import java.io.InputStream;
- import java.io.ObjectInputStream;
import java.nio.file.Files;
import java.nio.file.Path;
+ import java.util.concurrent.ExecutionException;
+ import java.util.concurrent.FutureTask;
- //TODO: Verify backup's validity?
public class RestoreBackupRunnable implements Runnable {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final static ConfigHelper config = ConfigHelper.INSTANCE;
@ -56,71 +54,73 @@ public class RestoreBackupRunnable implements Runnable {
log.info("Shutting down server...");
ctx.server().stop(false);
- awaitServerShutdown();
- if(config.get().backupOldWorlds) {
- MakeBackupRunnableFactory.create(
- BackupContext.Builder
- .newBackupContextBuilder()
- .setServer(ctx.server())
- .setInitiator(ActionInitiator.Restore)
- .setComment("Old_World" + (ctx.comment() != null ? "_" + ctx.comment() : ""))
- .build()
- ).run();
- }
- Path worldFile = Utilities.getWorldFolder(ctx.server()), tmp = null;
+ Path worldFile = Utilities.getWorldFolder(ctx.server()), tmp;
try {
tmp = Files.createTempDirectory(
ctx.server().getRunDirectory().toPath(),
ctx.restoreableFile().getFile().getFileName().toString());
} catch (IOException e) {
- throw new RuntimeException(e);
- }
- if(tmp == null) {
- //TODO: log error!
+ log.error("An exception occurred while unpacking backup", e);
return;
}
+ FutureTask<Void> waitForShutdown = new FutureTask<>(() -> {
+ ctx.server().getThread().join(); //wait for server to die and save all its state
+ if(config.get().backupOldWorlds) {
+ return MakeBackupRunnableFactory.create(
+ BackupContext.Builder
+ .newBackupContextBuilder()
+ .setServer(ctx.server())
+ .setInitiator(ActionInitiator.Restore)
+ .setComment("Old_World" + (ctx.comment() != null ? "_" + ctx.comment() : ""))
+ .build()
+ ).call();
+ }
+ return null;
+ });
+ new Thread(waitForShutdown).start();
try {
log.info("Starting decompression...");
+ long hash;
if (ctx.restoreableFile().getArchiveFormat() == ConfigPOJO.ArchiveFormat.ZIP)
- ZipDecompressor.decompress(ctx.restoreableFile().getFile(), tmp);
+ hash = ZipDecompressor.decompress(ctx.restoreableFile().getFile(), tmp);
else
- GenericTarDecompressor.decompress(ctx.restoreableFile().getFile(), tmp);
+ hash = GenericTarDecompressor.decompress(ctx.restoreableFile().getFile(), tmp);
- CompressionStatus status = null;
+ CompressionStatus status = CompressionStatus.readFromFile(tmp);
+ Files.delete(tmp.resolve(CompressionStatus.DATA_FILENAME));
- try (InputStream in = Files.newInputStream(tmp.resolve(CompressionStatus.DATA_FILENAME))) {
- ObjectInputStream objectInputStream = new ObjectInputStream(in);
- status = (CompressionStatus)objectInputStream.readObject();
- } catch (ClassNotFoundException e) {
- throw new RuntimeException(e);
- }
+ //locks until the backup is finished
+ waitForShutdown.get();
- if(status.isValid(0)) {
- log.info("Deleting old world...");
+ if(status.isValid(hash) || !config.get().errorErrorHandlingMode.verify()) {
+ if(status.isValid(hash)) log.info("Backup valid. Restoring");
+ else log.info("Backup is damaged, but verification is disabled. Restoring");
Utilities.deleteDirectory(worldFile);
Files.move(tmp, worldFile);
if (config.get().deleteOldBackupAfterRestore) {
log.info("Deleting old backup");
log.info("Deleting restored backup file");
Files.delete(ctx.restoreableFile().getFile());
}
+ } else {
+ log.error("File tree hash mismatch! Got: {}, Expected {}. Aborting", hash, status.treeHash());
+ }
- } catch (IOException e) {
+ } catch (ExecutionException | InterruptedException | ClassNotFoundException | IOException e) {
log.error("An exception occurred while trying to restore a backup!", e);
} finally {
+ //Regardless of what happened, we should still clean up
if(Files.exists(tmp)) {
try {
Utilities.deleteDirectory(tmp);
- } catch (IOException e) {
- //TODO: Log error!
- }
+ } catch (IOException ignored) {}
}
}
@ -129,14 +129,4 @@ public class RestoreBackupRunnable implements Runnable {
log.info("Done!");
}
- private void awaitServerShutdown() {
- while(((LivingServer)ctx.server()).isAlive()) {
- try {
- Thread.sleep(100);
- } catch (InterruptedException e) {
- log.error("Exception occurred!", e);
- }
- }
- }
}
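
The old sleep-poll loop (awaitServerShutdown) is replaced with a FutureTask: a helper thread join()s the server thread, runs the old-world backup once the server has fully stopped, and waitForShutdown.get() later blocks until that whole chain completes, rethrowing any failure as an ExecutionException. The structure in isolation:

import java.util.concurrent.FutureTask;

public class AwaitShutdownDemo {
    public static void main(String[] args) throws Exception {
        Thread server = new Thread(() -> {
            try { Thread.sleep(500); } catch (InterruptedException ignored) {}
            System.out.println("server thread exiting");
        });
        server.start();

        FutureTask<Void> waitForShutdown = new FutureTask<>(() -> {
            server.join();                      // wait for the server to die
            System.out.println("old-world backup would run here");
            return null;
        });
        new Thread(waitForShutdown).start();

        // decompression happens in parallel with the shutdown/backup above
        waitForShutdown.get();                  // blocks until the chain is done
        System.out.println("safe to swap the world folder");
    }
}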

View File

@ -20,7 +20,9 @@ package net.szum123321.textile_backup.core.restore.decompressors;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
+ import net.szum123321.textile_backup.core.FileTreeHashBuilder;
import net.szum123321.textile_backup.core.Utilities;
+ import net.szum123321.textile_backup.core.restore.HashingOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.CompressorException;
@ -36,8 +38,9 @@ import java.time.Instant;
public class GenericTarDecompressor {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
- public static void decompress(Path input, Path target) throws IOException {
+ public static long decompress(Path input, Path target) throws IOException {
Instant start = Instant.now();
+ FileTreeHashBuilder treeBuilder = new FileTreeHashBuilder(() -> null);
try (InputStream fileInputStream = Files.newInputStream(input);
InputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
@ -46,10 +49,8 @@ public class GenericTarDecompressor {
TarArchiveEntry entry;
while ((entry = archiveInputStream.getNextTarEntry()) != null) {
- if(!archiveInputStream.canReadEntryData(entry)) {
- log.error("Something when wrong while trying to decompress {}", entry.getName());
- continue;
- }
+ if(!archiveInputStream.canReadEntryData(entry))
+ throw new IOException("Couldn't read archive entry! " + entry.getName());
Path file = target.resolve(entry.getName());
@ -58,7 +59,8 @@ public class GenericTarDecompressor {
} else {
Files.createDirectories(file.getParent());
try (OutputStream outputStream = Files.newOutputStream(file);
- BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream)) {
+ HashingOutputStream hashingStream = new HashingOutputStream(outputStream, file, null, treeBuilder);
+ BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(hashingStream)) {
IOUtils.copy(archiveInputStream, bufferedOutputStream);
}
}
@ -68,6 +70,8 @@ public class GenericTarDecompressor {
}
log.info("Decompression took {} seconds.", Utilities.formatDuration(Duration.between(start, Instant.now())));
+ return treeBuilder.getValue();
}
private static InputStream getCompressorInputStream(InputStream inputStream) throws CompressorException {
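
FileTreeHashBuilder does not appear in this diff, so its contract has to be inferred from the call sites: a constructor taking a supplier (passed (() -> null) here), update(path, hash) invoked once per extracted file, and getValue() folding everything into one long. A speculative sketch of such a class, using an order-independent XOR combine because tar and zip iterate entries in different orders (the supplier argument is omitted):

import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicLong;

public class FileTreeHashBuilderSketch {
    private final AtomicLong acc = new AtomicLong();

    // called once per file by HashingOutputStream.close()
    public void update(Path file, long perFileHash) {
        // mix the path in so a rename changes the tree hash (assumption)
        long mixed = perFileHash * 31 + file.getFileName().toString().hashCode();
        acc.getAndUpdate(v -> v ^ mixed);
    }

    public long getValue() {
        return acc.get();
    }
}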

View File

@ -20,7 +20,9 @@ package net.szum123321.textile_backup.core.restore.decompressors;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
+ import net.szum123321.textile_backup.core.FileTreeHashBuilder;
import net.szum123321.textile_backup.core.Utilities;
+ import net.szum123321.textile_backup.core.restore.HashingOutputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.utils.IOUtils;
@ -35,9 +37,11 @@ import java.util.Iterator;
public class ZipDecompressor {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
- public static void decompress(Path inputFile, Path target) throws IOException {
+ public static long decompress(Path inputFile, Path target) throws IOException {
Instant start = Instant.now();
+ FileTreeHashBuilder hashBuilder = new FileTreeHashBuilder(() -> null);
try(ZipFile zipFile = new ZipFile(inputFile.toFile())) {
for (Iterator<ZipArchiveEntry> it = zipFile.getEntries().asIterator(); it.hasNext(); ) {
ZipArchiveEntry entry = it.next();
@ -48,7 +52,8 @@ public class ZipDecompressor {
} else {
Files.createDirectories(file.getParent());
try (OutputStream outputStream = Files.newOutputStream(file);
- BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream)) {
+ HashingOutputStream hashingStream = new HashingOutputStream(outputStream, file, null, hashBuilder);
+ BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(hashingStream)) {
IOUtils.copy(zipFile.getInputStream(entry), bufferedOutputStream);
}
}
@ -56,5 +61,7 @@ public class ZipDecompressor {
}
log.info("Decompression took: {} seconds.", Utilities.formatDuration(Duration.between(start, Instant.now())));
+ return hashBuilder.getValue();
}
}
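
Taken together, restore-side usage of the new pieces looks roughly like this; the method names match the diff, but the paths and surrounding handling are invented for the example, and it assumes the mod's classes on the classpath:

import net.szum123321.textile_backup.core.CompressionStatus;
import net.szum123321.textile_backup.core.restore.decompressors.ZipDecompressor;

import java.nio.file.Path;

public class RestoreVerifySketch {
    public static void main(String[] args) throws Exception {
        Path backup = Path.of("backup/world_2022-11-25.zip"); // invented path
        Path tmp = Path.of("tmp_restore");

        long hash = ZipDecompressor.decompress(backup, tmp);  // recomputed tree hash
        CompressionStatus status = CompressionStatus.readFromFile(tmp);

        if (status.isValid(hash))
            System.out.println("backup intact, proceeding with restore");
        else
            System.out.printf("hash mismatch: got %x, expected %x%n",
                    hash, status.treeHash());
    }
}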