All works now*. More debugging still needed.

2.x
Szum123321 2022-11-29 15:05:51 +01:00
parent 86ae95b02e
commit 4622f3fd0d
4 changed files with 54 additions and 57 deletions

View File

@@ -18,8 +18,6 @@
 package net.szum123321.textile_backup.core;
-import net.fabricmc.loader.api.Version;
 import java.io.*;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -27,10 +25,10 @@ import java.time.LocalDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.Map;
-public record CompressionStatus(long treeHash, Map<Path, Exception> brokenFiles, LocalDateTime date, long startTimestamp, long finishTimestamp, Version modVersion) implements Serializable {
+public record CompressionStatus(long treeHash, Map<Path, Exception> brokenFiles, LocalDateTime date, long startTimestamp, long finishTimestamp) implements Serializable {
     public static final String DATA_FILENAME = "textile_status.data";
     public boolean isValid(long decompressedHash) {
-        return decompressedHash == treeHash;
+        return decompressedHash == treeHash && brokenFiles.isEmpty();
     }
     public static CompressionStatus readFromFile(Path folder) throws IOException, ClassNotFoundException {
@@ -57,7 +55,7 @@ public record CompressionStatus(long treeHash, Map<Path, Exception> brokenFiles,
                 .append(date.format(DateTimeFormatter.ISO_DATE_TIME))
                 .append(", start time stamp: ").append(startTimestamp)
                 .append(", finish time stamp: ").append(finishTimestamp)
-                .append(", Mod Version:").append(modVersion.getFriendlyString());
+                ;//.append(", Mod Version: ").append(modVersion.getFriendlyString());
         builder.append(", broken files: ");
         if(brokenFiles.isEmpty()) builder.append("[]");
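
Note: with this change a backup only passes validation when the recomputed tree hash matches and no files were recorded as broken during compression. A minimal restore-side sketch of how the check might be used (hypothetical; only CompressionStatus, readFromFile and isValid come from this file, everything else is an assumption):

// Hypothetical caller, not part of this commit.
static void verifyBackup(Path extractedRoot, long recomputedTreeHash)
        throws IOException, ClassNotFoundException {
    // readFromFile() loads textile_status.data from the extracted folder.
    CompressionStatus status = CompressionStatus.readFromFile(extractedRoot);
    if (!status.isValid(recomputedTreeHash)) {
        // Fails on a hash mismatch or, after this commit, on any broken files.
        throw new IOException("Backup failed verification: " + status);
    }
}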

View File

@@ -97,8 +97,8 @@ public abstract class AbstractCompressor {
             CompressionStatus status = new CompressionStatus (
                     fileHashBuilder.getValue(),
                     brokenFileHandler.get(),
-                    ctx.startDate(), start.toEpochMilli(), now.toEpochMilli(),
-                    TextileBackup.VERSION
+                    ctx.startDate(), start.toEpochMilli(), now.toEpochMilli()//,
+                    //TextileBackup.VERSION
             );
             addEntry(new StatusFileInputSupplier(status.serialize()), arc);
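
Note: serialize() itself is not shown in this commit. Since CompressionStatus implements Serializable and readFromFile declares ClassNotFoundException, it most likely uses plain Java object streams; the following is only a sketch of that assumption, not the project's confirmed code:

// Assumed shape of the (de)serialization pair; details may differ.
public byte[] serialize() throws IOException {
    var bytes = new ByteArrayOutputStream();
    try (var out = new ObjectOutputStream(bytes)) {
        out.writeObject(this);          // write the whole record
    }
    return bytes.toByteArray();         // handed to StatusFileInputSupplier
}

public static CompressionStatus readFromFile(Path folder) throws IOException, ClassNotFoundException {
    try (var in = new ObjectInputStream(Files.newInputStream(folder.resolve(DATA_FILENAME)))) {
        return (CompressionStatus) in.readObject();
    }
}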

View File

@@ -37,7 +37,6 @@ public class BalticHash implements Hash {
     protected final byte[] _byte_buffer = new byte[(state.length + 1) * Long.BYTES];
     //Enforce endianness
     protected final ByteBuffer buffer = ByteBuffer.wrap(_byte_buffer).order(ByteOrder.LITTLE_ENDIAN);
-    protected long hashed_data_length = 0;
     public void update(byte b) {
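
Note: the body of update(byte) is outside this hunk. Given the buffer/buffer_limit/round() pattern visible in BalticHashSIMD below, it plausibly looks like the following sketch (an assumption, not the actual source):

public void update(byte b) {
    buffer.put(b);                                   // stage one byte
    if (buffer.position() == buffer_limit) round();  // mix once a block is full
}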

View File

@@ -33,15 +33,15 @@ It's still probably far from being the slowest part of code, so I don't expect a
 I will keep this code here for future work perhaps
 */
-public class BalticHashSIMD extends BalticHash {
+public class BalticHashSIMD extends BalticHash {/*
     public BalticHashSIMD() { throw new UnsupportedOperationException(); } //For safety
-/*  private LongVector state = LongVector.fromArray(LongVector.SPECIES_256, IV, 0);
+    private LongVector state = LongVector.fromArray(LongVector.SPECIES_256, IV, 0);
     @Override
     public long getValue() {
-        if(buffer.position() != 0) {
-            while(buffer.position() < buffer_limit) buffer.put((byte)0);
+        if (buffer.position() != 0) {
+            while (buffer.position() < buffer_limit) buffer.put((byte) 0);
             round();
         }
@@ -54,16 +54,16 @@ public class BalticHashSIMD extends BalticHash {
     @Override
     public void update(byte[] data, int off, int len) {
         int pos = off;
-        while(pos < len) {
+        while (pos < len) {
             int n = Math.min(len - pos, buffer_limit - buffer.position());
-            if(n == 32) {
+            if (n == 32) {
                 var v = ByteVector.fromArray(ByteVector.SPECIES_256, data, pos).reinterpretAsLongs();
                 state = state.lanewise(VectorOperators.XOR, v);
                 state = xorshift64star(state);
             } else {
                 System.arraycopy(data, pos, _byte_buffer, buffer.position(), n);
                 buffer.position(buffer.position() + n);
-                if(buffer.position() == buffer_limit) round();
+                if (buffer.position() == buffer_limit) round();
             }
             pos += n;
         }
@@ -79,7 +79,7 @@ public class BalticHashSIMD extends BalticHash {
         int p = buffer.position();
-        if(p > buffer_limit) {
+        if (p > buffer_limit) {
             System.arraycopy(_byte_buffer, buffer_limit, _byte_buffer, 0, buffer.limit() - p);
             buffer.position(buffer.limit() - p);
         } else buffer.rewind();
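
Note: the commented-out SIMD path XORs each 256-bit block into the lane state and then calls xorshift64star on it. For reference, the scalar xorshift64* step conventionally looks like the sketch below (standard constants; the shift amounts and multiplier actually used by BalticHash are not visible in this diff and may differ):

// Conventional xorshift64* mixing step (shown for context only).
static long xorshift64star(long x) {
    x ^= x >>> 12;
    x ^= x << 25;
    x ^= x >>> 27;
    return x * 0x2545F4914F6CDD1DL;
}

A vectorized variant would apply the same shifts and multiply lane-wise on the LongVector state instead of on a single long.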