Backup integrity is now checked

2.x
Szum123321 2022-11-25 14:00:18 +01:00
parent dbb9a71749
commit 9c37affacd
10 changed files with 101 additions and 95 deletions

View File

@ -68,14 +68,16 @@ public class TextileBackup implements ModInitializer {
Globals.INSTANCE.shutdownQueueExecutor(60000); Globals.INSTANCE.shutdownQueueExecutor(60000);
if (config.get().shutdownBackup && Globals.INSTANCE.globalShutdownBackupFlag.get()) { if (config.get().shutdownBackup && Globals.INSTANCE.globalShutdownBackupFlag.get()) {
MakeBackupRunnableFactory.create( try {
BackupContext.Builder MakeBackupRunnableFactory.create(
.newBackupContextBuilder() BackupContext.Builder
.setServer(server) .newBackupContextBuilder()
.setInitiator(ActionInitiator.Shutdown) .setServer(server)
.setComment("shutdown") .setInitiator(ActionInitiator.Shutdown)
.build() .setComment("shutdown")
).run(); .build()
).call();
} catch (Exception ignored) {}
} }
}); });

View File

@ -41,22 +41,17 @@ public class StartBackupCommand {
} }
private static int execute(ServerCommandSource source, @Nullable String comment) { private static int execute(ServerCommandSource source, @Nullable String comment) {
try { Globals.INSTANCE.getQueueExecutor().submit(
Globals.INSTANCE.getQueueExecutor().submit( MakeBackupRunnableFactory.create(
MakeBackupRunnableFactory.create( BackupContext.Builder
BackupContext.Builder .newBackupContextBuilder()
.newBackupContextBuilder() .setCommandSource(source)
.setCommandSource(source) .setComment(comment)
.setComment(comment) .guessInitiator()
.guessInitiator() .saveServer()
.saveServer() .build()
.build() )
) );
);
} catch (Exception e) {
log.error("Something went wrong while executing command!", e);
throw e;
}
return 1; return 1;
} }

View File

@ -191,6 +191,8 @@ public class ConfigPOJO implements ConfigData {
VERY_PERMISSIBLE; VERY_PERMISSIBLE;
public boolean isStrict() { return this == STRICT; } public boolean isStrict() { return this == STRICT; }
public boolean verify() { return this != VERY_PERMISSIBLE; }
} }
public enum ArchiveFormat { public enum ArchiveFormat {

View File

@ -28,8 +28,8 @@ public record CompressionStatus(long treeHash, LocalDateTime date, long startTim
public static final String DATA_FILENAME = "textile_status.data"; public static final String DATA_FILENAME = "textile_status.data";
public boolean isValid(long decompressedHash) { return true; } public boolean isValid(long decompressedHash) { return true; }
public static CompressionStatus readFromFile(Path f) throws IOException, ClassNotFoundException { public static CompressionStatus readFromFile(Path folder) throws IOException, ClassNotFoundException {
try(InputStream i = Files.newInputStream(f); try(InputStream i = Files.newInputStream(folder.resolve(DATA_FILENAME));
ObjectInputStream obj = new ObjectInputStream(i)) { ObjectInputStream obj = new ObjectInputStream(i)) {
return (CompressionStatus) obj.readObject(); return (CompressionStatus) obj.readObject();
} }

View File

@ -36,11 +36,13 @@ import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
/** /**
* The actual object responsible for creating the backup * The actual object responsible for creating the backup
*/ */
public class MakeBackupRunnable implements Runnable { public class MakeBackupRunnable implements Callable<Void> {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME); private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final static ConfigHelper config = ConfigHelper.INSTANCE; private final static ConfigHelper config = ConfigHelper.INSTANCE;
@ -49,9 +51,8 @@ public class MakeBackupRunnable implements Runnable {
public MakeBackupRunnable(BackupContext context) { public MakeBackupRunnable(BackupContext context) {
this.context = context; this.context = context;
} }
@Override @Override
public void run() { public Void call() throws IOException, ExecutionException, InterruptedException {
Path outFile = Utilities Path outFile = Utilities
.getBackupRootPath(Utilities.getLevelName(context.server())) .getBackupRootPath(Utilities.getLevelName(context.server()))
.resolve(getFileName()); .resolve(getFileName());
@ -114,7 +115,7 @@ public class MakeBackupRunnable implements Runnable {
} else { } else {
log.sendInfoAL(context, "Done!"); log.sendInfoAL(context, "Done!");
} }
} catch (Throwable e) { } catch (InterruptedException | ExecutionException | IOException e) {
//ExecutorService swallows exception, so I need to catch everything //ExecutorService swallows exception, so I need to catch everything
log.error("An exception occurred when trying to create new backup file!", e); log.error("An exception occurred when trying to create new backup file!", e);
@ -128,10 +129,14 @@ public class MakeBackupRunnable implements Runnable {
if(context.initiator() == ActionInitiator.Player) if(context.initiator() == ActionInitiator.Player)
log.sendError(context, "An exception occurred when trying to create new backup file!"); log.sendError(context, "An exception occurred when trying to create new backup file!");
throw e;
} finally { } finally {
Utilities.enableWorldSaving(context.server()); Utilities.enableWorldSaving(context.server());
Globals.INSTANCE.disableWatchdog = false; Globals.INSTANCE.disableWatchdog = false;
} }
return null;
} }
private String getFileName(){ private String getFileName(){

View File

@ -25,12 +25,13 @@ import net.szum123321.textile_backup.config.ConfigHelper;
import net.szum123321.textile_backup.core.Utilities; import net.szum123321.textile_backup.core.Utilities;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.concurrent.Callable;
public class MakeBackupRunnableFactory { public class MakeBackupRunnableFactory {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME); private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final static ConfigHelper config = ConfigHelper.INSTANCE; private final static ConfigHelper config = ConfigHelper.INSTANCE;
public static Runnable create(BackupContext ctx) { public static Callable<Void> create(BackupContext ctx) {
if(config.get().broadcastBackupStart) { if(config.get().broadcastBackupStart) {
Utilities.notifyPlayers(ctx.server(), Utilities.notifyPlayers(ctx.server(),
"Warning! Server backup will begin shortly. You may experience some lag." "Warning! Server backup will begin shortly. You may experience some lag."
@ -58,13 +59,7 @@ public class MakeBackupRunnableFactory {
if (ctx.shouldSave()) { if (ctx.shouldSave()) {
log.sendInfoAL(ctx, "Saving server..."); log.sendInfoAL(ctx, "Saving server...");
ctx.server().getPlayerManager().saveAllPlayerData(); ctx.server().saveAll(true, true, false);
try {
ctx.server().save(false, true, true);
} catch (Exception e) {
log.sendErrorAL(ctx,"An exception occurred when trying to save the world!");
}
} }
return new MakeBackupRunnable(ctx); return new MakeBackupRunnable(ctx);

View File

@ -18,6 +18,7 @@
package net.szum123321.textile_backup.core.restore; package net.szum123321.textile_backup.core.restore;
import net.szum123321.textile_backup.core.FileTreeHashBuilder;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import java.io.FilterOutputStream; import java.io.FilterOutputStream;
@ -30,12 +31,16 @@ public class HashingOutputStream extends FilterOutputStream {
private final Path path; private final Path path;
private final Checksum hasher; private final Checksum hasher;
public HashingOutputStream(OutputStream out, Path path, Checksum hasher) { private final FileTreeHashBuilder hashBuilder;
public HashingOutputStream(OutputStream out, Path path, Checksum hasher, FileTreeHashBuilder hashBuilder) {
super(out); super(out);
this.path = path; this.path = path;
this.hasher = hasher; this.hasher = hasher;
this.hashBuilder = hashBuilder;
} }
@Override @Override
public void write(int b) throws IOException { public void write(int b) throws IOException {
super.write(b); super.write(b);
@ -51,5 +56,6 @@ public class HashingOutputStream extends FilterOutputStream {
@Override @Override
public void close() throws IOException { public void close() throws IOException {
super.close(); super.close();
hashBuilder.update(path, hasher.getValue());
} }
} }

View File

@ -25,7 +25,6 @@ import net.szum123321.textile_backup.config.ConfigHelper;
import net.szum123321.textile_backup.config.ConfigPOJO; import net.szum123321.textile_backup.config.ConfigPOJO;
import net.szum123321.textile_backup.core.ActionInitiator; import net.szum123321.textile_backup.core.ActionInitiator;
import net.szum123321.textile_backup.core.CompressionStatus; import net.szum123321.textile_backup.core.CompressionStatus;
import net.szum123321.textile_backup.core.LivingServer;
import net.szum123321.textile_backup.core.Utilities; import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.create.BackupContext; import net.szum123321.textile_backup.core.create.BackupContext;
import net.szum123321.textile_backup.core.create.MakeBackupRunnableFactory; import net.szum123321.textile_backup.core.create.MakeBackupRunnableFactory;
@ -33,12 +32,11 @@ import net.szum123321.textile_backup.core.restore.decompressors.GenericTarDecomp
import net.szum123321.textile_backup.core.restore.decompressors.ZipDecompressor; import net.szum123321.textile_backup.core.restore.decompressors.ZipDecompressor;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
//TODO: Verify backup's validity?
public class RestoreBackupRunnable implements Runnable { public class RestoreBackupRunnable implements Runnable {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME); private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final static ConfigHelper config = ConfigHelper.INSTANCE; private final static ConfigHelper config = ConfigHelper.INSTANCE;
@ -56,71 +54,73 @@ public class RestoreBackupRunnable implements Runnable {
log.info("Shutting down server..."); log.info("Shutting down server...");
ctx.server().stop(false); ctx.server().stop(false);
awaitServerShutdown();
if(config.get().backupOldWorlds) { Path worldFile = Utilities.getWorldFolder(ctx.server()), tmp;
MakeBackupRunnableFactory.create(
BackupContext.Builder
.newBackupContextBuilder()
.setServer(ctx.server())
.setInitiator(ActionInitiator.Restore)
.setComment("Old_World" + (ctx.comment() != null ? "_" + ctx.comment() : ""))
.build()
).run();
}
Path worldFile = Utilities.getWorldFolder(ctx.server()), tmp = null;
try { try {
tmp = Files.createTempDirectory( tmp = Files.createTempDirectory(
ctx.server().getRunDirectory().toPath(), ctx.server().getRunDirectory().toPath(),
ctx.restoreableFile().getFile().getFileName().toString()); ctx.restoreableFile().getFile().getFileName().toString());
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); log.error("An exception occurred while unpacking backup", e);
}
if(tmp == null) {
//TODO: log error!
return; return;
} }
FutureTask<Void> waitForShutdown = new FutureTask<>(() -> {
ctx.server().getThread().join(); //wait for server to die and save all its state
if(config.get().backupOldWorlds) {
return MakeBackupRunnableFactory.create(
BackupContext.Builder
.newBackupContextBuilder()
.setServer(ctx.server())
.setInitiator(ActionInitiator.Restore)
.setComment("Old_World" + (ctx.comment() != null ? "_" + ctx.comment() : ""))
.build()
).call();
}
return null;
});
new Thread(waitForShutdown).start();
try { try {
log.info("Starting decompression..."); log.info("Starting decompression...");
long hash;
if (ctx.restoreableFile().getArchiveFormat() == ConfigPOJO.ArchiveFormat.ZIP) if (ctx.restoreableFile().getArchiveFormat() == ConfigPOJO.ArchiveFormat.ZIP)
ZipDecompressor.decompress(ctx.restoreableFile().getFile(), tmp); hash = ZipDecompressor.decompress(ctx.restoreableFile().getFile(), tmp);
else else
GenericTarDecompressor.decompress(ctx.restoreableFile().getFile(), tmp); hash = GenericTarDecompressor.decompress(ctx.restoreableFile().getFile(), tmp);
CompressionStatus status = null; CompressionStatus status = CompressionStatus.readFromFile(tmp);
Files.delete(tmp.resolve(CompressionStatus.DATA_FILENAME));
try (InputStream in = Files.newInputStream(tmp.resolve(CompressionStatus.DATA_FILENAME))) { //locks until the backup is finished
ObjectInputStream objectInputStream = new ObjectInputStream(in); waitForShutdown.get();
status = (CompressionStatus)objectInputStream.readObject();
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
if(status.isValid(0)) { if(status.isValid(hash) || !config.get().errorErrorHandlingMode.verify()) {
log.info("Deleting old world..."); if(status.isValid(hash)) log.info("Backup valid. Restoring");
else log.info("Backup is damaged, but verification is disabled. Restoring");
Utilities.deleteDirectory(worldFile); Utilities.deleteDirectory(worldFile);
Files.move(tmp, worldFile); Files.move(tmp, worldFile);
if (config.get().deleteOldBackupAfterRestore) { if (config.get().deleteOldBackupAfterRestore) {
log.info("Deleting old backup"); log.info("Deleting restored backup file");
Files.delete(ctx.restoreableFile().getFile()); Files.delete(ctx.restoreableFile().getFile());
} }
} else {
log.error("File tree hash mismatch! Got: {}, Expected {}. Aborting", hash, status.treeHash());
} }
} catch (IOException e) { } catch (ExecutionException | InterruptedException | ClassNotFoundException | IOException e) {
log.error("An exception occurred while trying to restore a backup!", e); log.error("An exception occurred while trying to restore a backup!", e);
} finally { } finally {
//Regardless of what happened, we should still clean up
if(Files.exists(tmp)) { if(Files.exists(tmp)) {
try { try {
Utilities.deleteDirectory(tmp); Utilities.deleteDirectory(tmp);
} catch (IOException e) { } catch (IOException ignored) {}
//TODO: Log error!
}
} }
} }
@ -129,14 +129,4 @@ public class RestoreBackupRunnable implements Runnable {
log.info("Done!"); log.info("Done!");
} }
private void awaitServerShutdown() {
while(((LivingServer)ctx.server()).isAlive()) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
log.error("Exception occurred!", e);
}
}
}
} }

View File

@ -20,7 +20,9 @@ package net.szum123321.textile_backup.core.restore.decompressors;
import net.szum123321.textile_backup.TextileBackup; import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger; import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.FileTreeHashBuilder;
import net.szum123321.textile_backup.core.Utilities; import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.restore.HashingOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorException;
@ -36,8 +38,9 @@ import java.time.Instant;
public class GenericTarDecompressor { public class GenericTarDecompressor {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME); private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
public static void decompress(Path input, Path target) throws IOException { public static long decompress(Path input, Path target) throws IOException {
Instant start = Instant.now(); Instant start = Instant.now();
FileTreeHashBuilder treeBuilder = new FileTreeHashBuilder(() -> null);
try (InputStream fileInputStream = Files.newInputStream(input); try (InputStream fileInputStream = Files.newInputStream(input);
InputStream bufferedInputStream = new BufferedInputStream(fileInputStream); InputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
@ -46,10 +49,8 @@ public class GenericTarDecompressor {
TarArchiveEntry entry; TarArchiveEntry entry;
while ((entry = archiveInputStream.getNextTarEntry()) != null) { while ((entry = archiveInputStream.getNextTarEntry()) != null) {
if(!archiveInputStream.canReadEntryData(entry)) { if(!archiveInputStream.canReadEntryData(entry))
log.error("Something when wrong while trying to decompress {}", entry.getName()); throw new IOException("Couldn't read archive entry! " + entry.getName());
continue;
}
Path file = target.resolve(entry.getName()); Path file = target.resolve(entry.getName());
@ -58,7 +59,8 @@ public class GenericTarDecompressor {
} else { } else {
Files.createDirectories(file.getParent()); Files.createDirectories(file.getParent());
try (OutputStream outputStream = Files.newOutputStream(file); try (OutputStream outputStream = Files.newOutputStream(file);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream)) { HashingOutputStream hashingStream = new HashingOutputStream(outputStream, file, null, treeBuilder);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(hashingStream)) {
IOUtils.copy(archiveInputStream, bufferedOutputStream); IOUtils.copy(archiveInputStream, bufferedOutputStream);
} }
} }
@ -68,6 +70,8 @@ public class GenericTarDecompressor {
} }
log.info("Decompression took {} seconds.", Utilities.formatDuration(Duration.between(start, Instant.now()))); log.info("Decompression took {} seconds.", Utilities.formatDuration(Duration.between(start, Instant.now())));
return treeBuilder.getValue();
} }
private static InputStream getCompressorInputStream(InputStream inputStream) throws CompressorException { private static InputStream getCompressorInputStream(InputStream inputStream) throws CompressorException {

View File

@ -20,7 +20,9 @@ package net.szum123321.textile_backup.core.restore.decompressors;
import net.szum123321.textile_backup.TextileBackup; import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger; import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.FileTreeHashBuilder;
import net.szum123321.textile_backup.core.Utilities; import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.restore.HashingOutputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipFile; import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.IOUtils;
@ -35,9 +37,11 @@ import java.util.Iterator;
public class ZipDecompressor { public class ZipDecompressor {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME); private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
public static void decompress(Path inputFile, Path target) throws IOException { public static long decompress(Path inputFile, Path target) throws IOException {
Instant start = Instant.now(); Instant start = Instant.now();
FileTreeHashBuilder hashBuilder = new FileTreeHashBuilder(() -> null);
try(ZipFile zipFile = new ZipFile(inputFile.toFile())) { try(ZipFile zipFile = new ZipFile(inputFile.toFile())) {
for (Iterator<ZipArchiveEntry> it = zipFile.getEntries().asIterator(); it.hasNext(); ) { for (Iterator<ZipArchiveEntry> it = zipFile.getEntries().asIterator(); it.hasNext(); ) {
ZipArchiveEntry entry = it.next(); ZipArchiveEntry entry = it.next();
@ -48,7 +52,8 @@ public class ZipDecompressor {
} else { } else {
Files.createDirectories(file.getParent()); Files.createDirectories(file.getParent());
try (OutputStream outputStream = Files.newOutputStream(file); try (OutputStream outputStream = Files.newOutputStream(file);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream)) { HashingOutputStream hashingStream = new HashingOutputStream(outputStream, file, null, hashBuilder);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(hashingStream)) {
IOUtils.copy(zipFile.getInputStream(entry), bufferedOutputStream); IOUtils.copy(zipFile.getInputStream(entry), bufferedOutputStream);
} }
} }
@ -56,5 +61,7 @@ public class ZipDecompressor {
} }
log.info("Decompression took: {} seconds.", Utilities.formatDuration(Duration.between(start, Instant.now()))); log.info("Decompression took: {} seconds.", Utilities.formatDuration(Duration.between(start, Instant.now())));
return hashBuilder.getValue();
} }
} }