Major upgrade of the arbitrary data functionality to support on-chain data for small payloads.

The maximum size for on-chain data is 239 bytes, due to the 16-byte IV. The resource must be a single file, without a .qortal folder.
CalDescent
2023-03-26 16:54:07 +01:00
parent 7deb9328fa
commit a83e332c11
17 changed files with 495 additions and 146 deletions
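
The 239-byte ceiling follows from the AES/CBC framing implied by the new AES.getEncryptedFileSize() helper (see the AES changes below): the plaintext is padded up to the next full 16-byte block, a 16-byte IV is prepended, and the result must fit within ArbitraryTransaction.MAX_DATA_SIZE (256 bytes). A worked check of the boundary:

239 bytes -> padded to 240, + 16-byte IV = 256 (fits exactly)
240 bytes -> padded to 256 (a full extra block), + 16-byte IV = 272 (exceeds 256)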

View File

@@ -4,6 +4,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
import org.qortal.crypto.Crypto;
import org.qortal.data.transaction.ArbitraryTransactionData;
import org.qortal.repository.DataException;
import org.qortal.settings.Settings;
import org.qortal.utils.Base58;
@@ -15,7 +16,6 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.stream.Stream;
import static java.util.Arrays.stream;
import static java.util.stream.Collectors.toMap;
@@ -85,6 +85,7 @@ public class ArbitraryDataFile {
return;
}
this.chunks = new ArrayList<>();
this.hash58 = Base58.encode(Crypto.digest(fileContent));
this.signature = signature;
LOGGER.trace(String.format("File digest: %s, size: %d bytes", this.hash58, fileContent.length));
@@ -111,6 +112,41 @@ public class ArbitraryDataFile {
return ArbitraryDataFile.fromHash58(Base58.encode(hash), signature);
}
public static ArbitraryDataFile fromRawData(byte[] data, byte[] signature) throws DataException {
if (data == null) {
return null;
}
return new ArbitraryDataFile(data, signature);
}
public static ArbitraryDataFile fromTransactionData(ArbitraryTransactionData transactionData) throws DataException {
ArbitraryDataFile arbitraryDataFile = null;
byte[] signature = transactionData.getSignature();
byte[] data = transactionData.getData();
if (data == null) {
return null;
}
// Create data file
switch (transactionData.getDataType()) {
case DATA_HASH:
arbitraryDataFile = ArbitraryDataFile.fromHash(data, signature);
break;
case RAW_DATA:
arbitraryDataFile = ArbitraryDataFile.fromRawData(data, signature);
break;
}
// Set metadata hash
if (arbitraryDataFile != null) {
arbitraryDataFile.setMetadataHash(transactionData.getMetadataHash());
}
return arbitraryDataFile;
}
public static ArbitraryDataFile fromPath(Path path, byte[] signature) {
if (path == null) {
return null;
@@ -260,6 +296,11 @@ public class ArbitraryDataFile {
this.chunks = new ArrayList<>();
if (file != null) {
if (file.exists() && file.length() <= chunkSize) {
// No need to split into chunks if we're already below the chunk size
return 0;
}
try (FileInputStream fileInputStream = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(fileInputStream)) {
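
The new ArbitraryDataFile.fromTransactionData() factory centralises a pattern that this commit replaces at many call sites. A minimal before/after sketch (the caller code is illustrative, not taken verbatim from the repository):

// Before: build from the data hash, then attach the metadata hash manually
ArbitraryDataFile dataFile = ArbitraryDataFile.fromHash(transactionData.getData(), transactionData.getSignature());
dataFile.setMetadataHash(transactionData.getMetadataHash());

// After: one call covers both DATA_HASH and RAW_DATA transactions
ArbitraryDataFile dataFile = ArbitraryDataFile.fromTransactionData(transactionData);

The old form only made sense for DATA_HASH transactions; the factory switches on transactionData.getDataType(), so RAW_DATA payloads are constructed via fromRawData() instead.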

View File

@@ -362,11 +362,6 @@ public class ArbitraryDataReader {
throw new DataException(String.format("Transaction data not found for signature %s", this.resourceId));
}
// Load hashes
byte[] digest = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
byte[] signature = transactionData.getSignature();
// Load secret
byte[] secret = transactionData.getSecret();
if (secret != null) {
@@ -374,16 +369,14 @@ public class ArbitraryDataReader {
}
// Load data file(s)
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
ArbitraryTransactionUtils.checkAndRelocateMiscFiles(transactionData);
arbitraryDataFile.setMetadataHash(metadataHash);
if (!arbitraryDataFile.allFilesExist()) {
if (ArbitraryDataStorageManager.getInstance().isNameBlocked(transactionData.getName())) {
throw new DataException(
String.format("Unable to request missing data for file %s because the name is blocked", arbitraryDataFile));
}
else {
} else {
// Ask the arbitrary data manager to fetch data for this transaction
String message;
if (this.canRequestMissingFiles) {
@@ -394,8 +387,7 @@ public class ArbitraryDataReader {
} else {
message = String.format("Unable to reissue request for missing file %s for signature %s due to rate limit. Please try again later.", arbitraryDataFile, Base58.encode(transactionData.getSignature()));
}
}
else {
} else {
message = String.format("Missing data for file %s", arbitraryDataFile);
}
@@ -405,21 +397,25 @@ public class ArbitraryDataReader {
}
}
if (arbitraryDataFile.allChunksExist() && !arbitraryDataFile.exists()) {
// We have all the chunks but not the complete file, so join them
arbitraryDataFile.join();
// Data hashes need some extra processing
if (transactionData.getDataType() == DataType.DATA_HASH) {
if (arbitraryDataFile.allChunksExist() && !arbitraryDataFile.exists()) {
// We have all the chunks but not the complete file, so join them
arbitraryDataFile.join();
}
// If the complete file still doesn't exist then something went wrong
if (!arbitraryDataFile.exists()) {
throw new IOException(String.format("File doesn't exist: %s", arbitraryDataFile));
}
// Ensure the complete hash matches the joined chunks
if (!Arrays.equals(arbitraryDataFile.digest(), transactionData.getData())) {
// Delete the invalid file
arbitraryDataFile.delete();
throw new DataException("Unable to validate complete file hash");
}
}
// If the complete file still doesn't exist then something went wrong
if (!arbitraryDataFile.exists()) {
throw new IOException(String.format("File doesn't exist: %s", arbitraryDataFile));
}
// Ensure the complete hash matches the joined chunks
if (!Arrays.equals(arbitraryDataFile.digest(), digest)) {
// Delete the invalid file
arbitraryDataFile.delete();
throw new DataException("Unable to validate complete file hash");
}
// Ensure the file's size matches the size reported by the transaction (throws a DataException if not)
arbitraryDataFile.validateFileSize(transactionData.getSize());

View File

@@ -150,11 +150,7 @@ public class ArbitraryDataResource {
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
for (ArbitraryTransactionData transactionData : transactionDataList) {
byte[] hash = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
byte[] signature = transactionData.getSignature();
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
// Delete any chunks or complete files from each transaction
arbitraryDataFile.deleteAll(deleteMetadata);

View File

@@ -9,6 +9,7 @@ import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
import org.qortal.arbitrary.misc.Category;
import org.qortal.arbitrary.misc.Service;
import org.qortal.crypto.AES;
import org.qortal.crypto.Crypto;
import org.qortal.data.PaymentData;
import org.qortal.data.transaction.ArbitraryTransactionData;
@@ -181,6 +182,7 @@ public class ArbitraryDataTransactionBuilder {
for (ModifiedPath path : metadata.getModifiedPaths()) {
if (path.getDiffType() != DiffType.COMPLETE_FILE) {
atLeastOnePatch = true;
break;
}
}
}
@@ -229,10 +231,12 @@ public class ArbitraryDataTransactionBuilder {
random.nextBytes(lastReference);
}
Compression compression = Compression.ZIP;
// Single file resources are handled differently, especially for very small data payloads, as these go on chain
final boolean isSingleFileResource = FilesystemUtils.isSingleFileResource(path, false);
final boolean shouldUseOnChainData = (isSingleFileResource && AES.getEncryptedFileSize(FilesystemUtils.getSingleFileContents(path).length) <= ArbitraryTransaction.MAX_DATA_SIZE);
// FUTURE? Use zip compression for directories, or no compression for single files
// Compression compression = (path.toFile().isDirectory()) ? Compression.ZIP : Compression.NONE;
// Use zip compression if data isn't going on chain
Compression compression = shouldUseOnChainData ? Compression.NONE : Compression.ZIP;
ArbitraryDataWriter arbitraryDataWriter = new ArbitraryDataWriter(path, name, service, identifier, method,
compression, title, description, tags, category);
@@ -250,16 +254,21 @@ public class ArbitraryDataTransactionBuilder {
throw new DataException("Arbitrary data file is null");
}
// Get chunks metadata file
// Get metadata file
ArbitraryDataFile metadataFile = arbitraryDataFile.getMetadataFile();
if (metadataFile == null && arbitraryDataFile.chunkCount() > 1) {
throw new DataException(String.format("Chunks metadata data file is null but there are %d chunks", arbitraryDataFile.chunkCount()));
}
String digest58 = arbitraryDataFile.digest58();
if (digest58 == null) {
LOGGER.error("Unable to calculate file digest");
throw new DataException("Unable to calculate file digest");
// Default to using a data hash, with data held off-chain
ArbitraryTransactionData.DataType dataType = ArbitraryTransactionData.DataType.DATA_HASH;
byte[] data = arbitraryDataFile.digest();
// For small, single-chunk resources, we can store the data directly on chain
if (shouldUseOnChainData && arbitraryDataFile.getBytes().length <= ArbitraryTransaction.MAX_DATA_SIZE && arbitraryDataFile.chunkCount() == 0) {
// Within allowed on-chain data size
dataType = DataType.RAW_DATA;
data = arbitraryDataFile.getBytes();
}
final BaseTransactionData baseTransactionData = new BaseTransactionData(now, Group.NO_GROUP,
@@ -268,22 +277,21 @@ public class ArbitraryDataTransactionBuilder {
final int version = 5;
final int nonce = 0;
byte[] secret = arbitraryDataFile.getSecret();
final ArbitraryTransactionData.DataType dataType = ArbitraryTransactionData.DataType.DATA_HASH;
final byte[] digest = arbitraryDataFile.digest();
final byte[] metadataHash = (metadataFile != null) ? metadataFile.getHash() : null;
final List<PaymentData> payments = new ArrayList<>();
ArbitraryTransactionData transactionData = new ArbitraryTransactionData(baseTransactionData,
version, service.value, nonce, size, name, identifier, method,
secret, compression, digest, dataType, metadataHash, payments);
secret, compression, data, dataType, metadataHash, payments);
this.arbitraryTransactionData = transactionData;
} catch (DataException e) {
} catch (DataException | IOException e) {
if (arbitraryDataFile != null) {
arbitraryDataFile.deleteAll(true);
}
throw(e);
throw new DataException(e);
}
}
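
The builder's on-chain decision can be illustrated with a small sketch. It assumes the AES.getEncryptedFileSize() helper and ArbitraryTransaction.MAX_DATA_SIZE introduced elsewhere in this commit; the sizes are illustrative:

// 200-byte single-file resource: pads to 13 blocks (208 bytes), + 16-byte IV = 224 bytes
boolean onChain = AES.getEncryptedFileSize(200) <= ArbitraryTransaction.MAX_DATA_SIZE; // 224 <= 256 -> RAW_DATA, Compression.NONE

// 300-byte file: pads to 19 blocks (304 bytes), + 16-byte IV = 320 bytes
onChain = AES.getEncryptedFileSize(300) <= ArbitraryTransaction.MAX_DATA_SIZE; // 320 > 256 -> DATA_HASH, Compression.ZIP

In the builder itself the payload written to the transaction is arbitraryDataFile.getBytes() (the encrypted file), and the RAW_DATA path additionally requires chunkCount() == 0, i.e. the encrypted file was never split into chunks.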

View File

@@ -107,10 +107,9 @@ public class ArbitraryDataWriter {
private void preExecute() throws DataException {
this.checkEnabled();
// Enforce compression when uploading a directory
File file = new File(this.filePath.toString());
if (file.isDirectory() && compression == Compression.NONE) {
throw new DataException("Unable to upload a directory without compression");
// Enforce compression when uploading multiple files
if (!FilesystemUtils.isSingleFileResource(this.filePath, false) && compression == Compression.NONE) {
throw new DataException("Unable to publish multiple files without compression");
}
// Create temporary working directory
@@ -168,6 +167,9 @@ public class ArbitraryDataWriter {
if (this.files.size() == 1) {
singleFilePath = Paths.get(this.filePath.toString(), this.files.get(0));
// Update filePath to point to the single file (instead of the directory containing the file)
this.filePath = singleFilePath;
}
}
}
@@ -314,9 +316,6 @@ public class ArbitraryDataWriter {
if (chunkCount > 0) {
LOGGER.info(String.format("Successfully split into %d chunk%s", chunkCount, (chunkCount == 1 ? "" : "s")));
}
else {
throw new DataException("Unable to split file into chunks");
}
}
private void createMetadataFile() throws IOException, DataException {

View File

@@ -258,8 +258,6 @@ public class ArbitraryDataFileListManager {
// Lookup file lists by signature (and optionally hashes)
public boolean fetchArbitraryDataFileList(ArbitraryTransactionData arbitraryTransactionData) {
byte[] digest = arbitraryTransactionData.getData();
byte[] metadataHash = arbitraryTransactionData.getMetadataHash();
byte[] signature = arbitraryTransactionData.getSignature();
String signature58 = Base58.encode(signature);
@@ -286,8 +284,7 @@ public class ArbitraryDataFileListManager {
// Find hashes that we are missing
try {
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
missingHashes = arbitraryDataFile.missingHashes();
} catch (DataException e) {
// Leave missingHashes as null, so that all hashes are requested
@@ -460,10 +457,9 @@ public class ArbitraryDataFileListManager {
arbitraryTransactionData = (ArbitraryTransactionData) transactionData;
// Load data file(s)
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(arbitraryTransactionData.getData(), signature);
arbitraryDataFile.setMetadataHash(arbitraryTransactionData.getMetadataHash());
// // Load data file(s)
// ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
//
// // Check all hashes exist
// for (byte[] hash : hashes) {
// //LOGGER.debug("Received hash {}", Base58.encode(hash));
@@ -594,12 +590,8 @@ public class ArbitraryDataFileListManager {
// Check if we're even allowed to serve data for this transaction
if (ArbitraryDataStorageManager.getInstance().canStoreData(transactionData)) {
byte[] hash = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
// Load file(s) and add any that exist to the list of hashes
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
// If the peer didn't supply a hash list, we need to return all hashes for this transaction
if (requestedHashes == null || requestedHashes.isEmpty()) {

View File

@@ -132,9 +132,7 @@ public class ArbitraryDataFileManager extends Thread {
List<byte[]> hashes) throws DataException {
// Load data file(s)
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(arbitraryTransactionData.getData(), signature);
byte[] metadataHash = arbitraryTransactionData.getMetadataHash();
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
boolean receivedAtLeastOneFile = false;
// Now fetch actual data from this peer

View File

@@ -202,4 +202,12 @@ public class AES {
.decode(cipherText)));
}
public static long getEncryptedFileSize(long inFileSize) {
// Resulting size is the plaintext padded up to the next full 16-byte block (a block-aligned input gains a full extra padding block), plus 16 bytes for the IV
final int ivSize = 16;
final int chunkSize = 16;
final int expectedSize = Math.round((inFileSize + ivSize) / chunkSize) * chunkSize + chunkSize;
return expectedSize;
}
}
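
Sample values for the new helper, assuming it behaves exactly as written above (note that a block-aligned input gains a full extra padding block):

getEncryptedFileSize(0)   = 32   // 16-byte IV + one 16-byte padding block
getEncryptedFileSize(15)  = 32
getEncryptedFileSize(16)  = 48
getEncryptedFileSize(239) = 256  // the largest plaintext that fits MAX_DATA_SIZE
getEncryptedFileSize(240) = 272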

View File

@@ -5,9 +5,7 @@ import org.apache.logging.log4j.Logger;
import org.bouncycastle.util.Longs;
import org.qortal.arbitrary.misc.Service;
import org.qortal.data.arbitrary.ArbitraryResourceInfo;
import org.qortal.crypto.Crypto;
import org.qortal.data.arbitrary.ArbitraryResourceNameInfo;
import org.qortal.data.network.ArbitraryPeerData;
import org.qortal.data.transaction.ArbitraryTransactionData;
import org.qortal.data.transaction.ArbitraryTransactionData.*;
import org.qortal.data.transaction.BaseTransactionData;
@@ -15,6 +13,7 @@ import org.qortal.data.transaction.TransactionData;
import org.qortal.repository.ArbitraryRepository;
import org.qortal.repository.DataException;
import org.qortal.arbitrary.ArbitraryDataFile;
import org.qortal.transaction.ArbitraryTransaction;
import org.qortal.transaction.Transaction.ApprovalStatus;
import org.qortal.utils.Base58;
@@ -27,8 +26,6 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
private static final Logger LOGGER = LogManager.getLogger(HSQLDBArbitraryRepository.class);
private static final int MAX_RAW_DATA_SIZE = 255; // size of VARBINARY
protected HSQLDBRepository repository;
public HSQLDBArbitraryRepository(HSQLDBRepository repository) {
@@ -55,13 +52,8 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
return true;
}
// Load hashes
byte[] hash = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
// Load data file(s)
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
// Check if we already have the complete data file or all chunks
if (arbitraryDataFile.allFilesExist()) {
@@ -84,13 +76,8 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
return transactionData.getData();
}
// Load hashes
byte[] digest = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
// Load data file(s)
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
// If we have the complete data file, return it
if (arbitraryDataFile.exists()) {
@@ -105,6 +92,7 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
arbitraryDataFile.join();
// Verify that the combined hash matches the expected hash
byte[] digest = transactionData.getData();
if (!digest.equals(arbitraryDataFile.digest())) {
LOGGER.info(String.format("Hash mismatch for transaction: %s", Base58.encode(signature)));
return null;
@@ -132,11 +120,11 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
}
// Trivial-sized payloads can remain in raw form
if (arbitraryTransactionData.getDataType() == DataType.RAW_DATA && arbitraryTransactionData.getData().length <= MAX_RAW_DATA_SIZE) {
if (arbitraryTransactionData.getDataType() == DataType.RAW_DATA && arbitraryTransactionData.getData().length <= ArbitraryTransaction.MAX_DATA_SIZE) {
return;
}
throw new IllegalStateException(String.format("Supplied data is larger than maximum size (%d bytes). Please use ArbitraryDataWriter.", MAX_RAW_DATA_SIZE));
throw new IllegalStateException(String.format("Supplied data is larger than maximum size (%d bytes). Please use ArbitraryDataWriter.", ArbitraryTransaction.MAX_DATA_SIZE));
}
@Override
@@ -146,14 +134,8 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
return;
}
// Load hashes
byte[] hash = arbitraryTransactionData.getData();
byte[] metadataHash = arbitraryTransactionData.getMetadataHash();
// Load data file(s)
byte[] signature = arbitraryTransactionData.getSignature();
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
// Delete file, chunks, and metadata
arbitraryDataFile.deleteAll(true);

View File

@@ -33,7 +33,7 @@ public class ArbitraryTransaction extends Transaction {
private ArbitraryTransactionData arbitraryTransactionData;
// Other useful constants
public static final int MAX_DATA_SIZE = 4000;
public static final int MAX_DATA_SIZE = 256;
public static final int MAX_METADATA_LENGTH = 32;
public static final int HASH_LENGTH = TransactionTransformer.SHA256_LENGTH;
public static final int MAX_IDENTIFIER_LENGTH = 64;

View File

@@ -110,13 +110,8 @@ public class ArbitraryTransactionUtils {
return false;
}
byte[] digest = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
byte[] signature = transactionData.getSignature();
// Load complete file and chunks
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
return arbitraryDataFile.allChunksExist();
}
@@ -126,18 +121,13 @@ public class ArbitraryTransactionUtils {
return false;
}
byte[] digest = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
byte[] signature = transactionData.getSignature();
if (metadataHash == null) {
if (transactionData.getMetadataHash() == null) {
// This file doesn't have any metadata/chunks, which means none exist
return false;
}
// Load complete file and chunks
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
return arbitraryDataFile.anyChunksExist();
}
@@ -147,12 +137,7 @@ public class ArbitraryTransactionUtils {
return 0;
}
byte[] digest = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
byte[] signature = transactionData.getSignature();
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
// Find the folder containing the files
Path parentPath = arbitraryDataFile.getFilePath().getParent();
@@ -180,18 +165,13 @@ public class ArbitraryTransactionUtils {
return 0;
}
byte[] digest = transactionData.getData();
byte[] metadataHash = transactionData.getMetadataHash();
byte[] signature = transactionData.getSignature();
if (metadataHash == null) {
if (transactionData.getMetadataHash() == null) {
// This file doesn't have any metadata, therefore it has a single (complete) chunk
return 1;
}
// Load complete file and chunks
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(transactionData);
return arbitraryDataFile.fileCount();
}
@@ -243,31 +223,24 @@ public class ArbitraryTransactionUtils {
}
public static void deleteCompleteFileAndChunks(ArbitraryTransactionData arbitraryTransactionData) throws DataException {
byte[] completeHash = arbitraryTransactionData.getData();
byte[] metadataHash = arbitraryTransactionData.getMetadataHash();
byte[] signature = arbitraryTransactionData.getSignature();
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(completeHash, signature);
arbitraryDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
arbitraryDataFile.deleteAll(true);
}
public static void convertFileToChunks(ArbitraryTransactionData arbitraryTransactionData, long now, long cleanupAfter) throws DataException {
byte[] completeHash = arbitraryTransactionData.getData();
byte[] metadataHash = arbitraryTransactionData.getMetadataHash();
byte[] signature = arbitraryTransactionData.getSignature();
// Find the expected chunk hashes
ArbitraryDataFile expectedDataFile = ArbitraryDataFile.fromHash(completeHash, signature);
expectedDataFile.setMetadataHash(metadataHash);
ArbitraryDataFile expectedDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
if (metadataHash == null || !expectedDataFile.getMetadataFile().exists()) {
if (arbitraryTransactionData.getMetadataHash() == null || !expectedDataFile.getMetadataFile().exists()) {
// We don't have the metadata file, or this transaction doesn't have one - nothing to do
return;
}
byte[] completeHash = arbitraryTransactionData.getData();
byte[] signature = arbitraryTransactionData.getSignature();
// Split the file into chunks
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(completeHash, signature);
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromTransactionData(arbitraryTransactionData);
int chunkCount = arbitraryDataFile.split(ArbitraryDataFile.CHUNK_SIZE);
if (chunkCount > 1) {
LOGGER.info(String.format("Successfully split %s into %d chunk%s",

View File

@@ -250,6 +250,39 @@ public class FilesystemUtils {
return data;
}
/**
* isSingleFileResource
* Returns true if the path points to a file, or a
* directory containing a single file only.
*
* @param path path to the file or directory
* @param excludeQortalDirectory if true, a directory containing a single file plus a .qortal directory is still considered a single file resource
* @return true if the path represents a single file resource, false otherwise
*/
public static boolean isSingleFileResource(Path path, boolean excludeQortalDirectory) {
// If the path is a file, read the contents directly
if (path.toFile().isFile()) {
return true;
}
// Or if it's a directory, only load file contents if there is a single file inside it
else if (path.toFile().isDirectory()) {
String[] files = path.toFile().list();
if (excludeQortalDirectory) {
files = ArrayUtils.removeElement(files, ".qortal");
}
if (files.length == 1) {
Path filePath = Paths.get(path.toString(), files[0]);
if (filePath.toFile().isFile()) {
return true;
}
}
}
return false;
}
public static byte[] readFromFile(String filePath, long position, int size) throws IOException {
RandomAccessFile file = new RandomAccessFile(filePath, "r");
file.seek(position);