Merge branch 'master' into EPC-fixes

This commit is contained in:
CalDescent
2022-04-30 15:32:44 +01:00
12 changed files with 51 additions and 52 deletions

View File

@@ -381,6 +381,10 @@ public class AdminResource {
) @QueryParam("limit") Integer limit, @Parameter(
ref = "offset"
) @QueryParam("offset") Integer offset, @Parameter(
name = "tail",
description = "Fetch most recent log lines",
schema = @Schema(type = "boolean")
) @QueryParam("tail") Boolean tail, @Parameter(
ref = "reverse"
) @QueryParam("reverse") Boolean reverse) {
LoggerContext loggerContext = (LoggerContext) LogManager.getContext();
@@ -396,6 +400,13 @@ public class AdminResource {
if (reverse != null && reverse)
logLines = Lists.reverse(logLines);
+// Tail mode - return the last X lines (where X = limit)
+if (tail != null && tail) {
+if (limit != null && limit > 0) {
+offset = logLines.size() - limit;
+}
+}
// offset out of bounds?
if (offset != null && (offset < 0 || offset >= logLines.size()))
return "";
@@ -416,7 +427,7 @@ public class AdminResource {
limit = Math.min(limit, logLines.size());
-logLines.subList(limit - 1, logLines.size()).clear();
+logLines.subList(limit, logLines.size()).clear();
return String.join("\n", logLines);
} catch (IOException e) {
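
The new tail parameter simply repositions offset so that the existing offset/limit trimming returns the end of the log. A minimal standalone sketch of the same calculation, assuming a plain List<String> of log lines and a hypothetical helper name (not the resource method itself):

import java.util.List;

// With limit = 3 and 10 log lines, offset becomes 7 and the last 3 lines are returned.
static List<String> tail(List<String> logLines, int limit) {
    int offset = Math.max(0, logLines.size() - limit); // clamp so small logs return everything
    return logLines.subList(offset, logLines.size());  // the last `limit` lines, or fewer
}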

View File

@@ -93,10 +93,12 @@ public class ArbitraryDataFile {
File outputFile = outputFilePath.toFile();
try (FileOutputStream outputStream = new FileOutputStream(outputFile)) {
outputStream.write(fileContent);
outputStream.close();
this.filePath = outputFilePath;
// Verify hash
-if (!this.hash58.equals(this.digest58())) {
-LOGGER.error("Hash {} does not match file digest {} for signature: {}", this.hash58, this.digest58(), Base58.encode(signature));
+String digest58 = this.digest58();
+if (!this.hash58.equals(digest58)) {
+LOGGER.error("Hash {} does not match file digest {} for signature: {}", this.hash58, digest58, Base58.encode(signature));
this.delete();
throw new DataException("Data file digest validation failed");
}
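
The change stores the digest in a local so the freshly written file is only hashed once, with the same value reused for both the comparison and the error log. A standalone sketch of the idea, assuming a plain SHA-256 digest (digest58() is presumed to be a Base58-encoded equivalent):

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.util.Arrays;

// Hash the file once, keep the result in a local, and reuse it for both the validity
// check and any error reporting - no second pass over the file.
static boolean verifyDigest(Path outputFile, byte[] expectedDigest) throws Exception {
    byte[] actualDigest = MessageDigest.getInstance("SHA-256").digest(Files.readAllBytes(outputFile));
    return Arrays.equals(expectedDigest, actualDigest);
}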

View File

@@ -425,9 +425,8 @@ public class BlockChain {
}
public long getNameRegistrationUnitFeeAtTimestamp(long ourTimestamp) {
// Scan through for the name registration fee in effect at our timestamp
-for (int i = 0; i < nameRegistrationUnitFees.size(); ++i)
-if (ourTimestamp >= nameRegistrationUnitFees.get(i).timestamp)
+for (int i = nameRegistrationUnitFees.size() - 1; i >= 0; --i)
+if (nameRegistrationUnitFees.get(i).timestamp <= ourTimestamp)
return nameRegistrationUnitFees.get(i).fee;
// Default to system-wide unit fee
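
Iterating from the end of the list is what makes multiple fee entries work: the method now returns the most recent fee that has already taken effect, whereas the old forward scan returned the first (oldest) matching entry. A minimal sketch, assuming entries are sorted by ascending timestamp; FeeEntry is a hypothetical stand-in for UnitFeesByTimestamp:

import java.util.List;

record FeeEntry(long timestamp, String fee) {}

// Walk backwards from the newest entry and return the first one whose timestamp has
// already passed; if none has taken effect yet, fall back to the default unit fee.
static String feeAt(List<FeeEntry> fees, long ourTimestamp, String defaultFee) {
    for (int i = fees.size() - 1; i >= 0; --i)
        if (fees.get(i).timestamp() <= ourTimestamp)
            return fees.get(i).fee();
    return defaultFee;
}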

View File

@@ -219,7 +219,7 @@ public class BlockMinter extends Thread {
// The last iteration found a higher weight block in the network, so sleep for a while
// to allow us to sync the higher weight chain. We are sleeping here rather than when
// detected as we don't want to hold the blockchain lock open.
LOGGER.info("Sleeping for 10 seconds...");
LOGGER.debug("Sleeping for 10 seconds...");
Thread.sleep(10 * 1000L);
}
@@ -328,13 +328,13 @@ public class BlockMinter extends Thread {
// If less than 30 seconds has passed since first detecting the higher weight chain,
// we should skip our block submission to give us the opportunity to sync to the better chain
if (NTP.getTime() - timeOfLastLowWeightBlock < 30*1000L) {
LOGGER.info("Higher weight chain found in peers, so not signing a block this round");
LOGGER.info("Time since detected: {}", NTP.getTime() - timeOfLastLowWeightBlock);
LOGGER.debug("Higher weight chain found in peers, so not signing a block this round");
LOGGER.debug("Time since detected: {}ms", NTP.getTime() - timeOfLastLowWeightBlock);
continue;
}
else {
// More than 30 seconds have passed, so we should submit our block candidate anyway.
LOGGER.info("More than 30 seconds passed, so proceeding to submit block candidate...");
LOGGER.debug("More than 30 seconds passed, so proceeding to submit block candidate...");
}
}
else {
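
The 30-second figure acts as a grace window: while a higher-weight chain was detected recently we hold off signing, and once the window has elapsed we submit our candidate anyway. A minimal sketch of that gate, assuming NTP-style millisecond timestamps and a hypothetical helper name:

// True while we are still inside the grace window and should skip this round's
// block submission, giving ourselves time to sync the higher weight chain.
static boolean withinGracePeriod(long timeOfLastLowWeightBlock, long nowMillis) {
    return nowMillis - timeOfLastLowWeightBlock < 30 * 1000L;
}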

View File

@@ -574,15 +574,20 @@ public class Controller extends Thread {
MessageType.INFO);
LOGGER.info("Starting scheduled repository maintenance. This can take a while...");
-try (final Repository repository = RepositoryManager.getRepository()) {
+int attempts = 0;
+while (attempts <= 5) {
+try (final Repository repository = RepositoryManager.getRepository()) {
+attempts++;
-// Timeout if the database isn't ready for maintenance after 60 seconds
-long timeout = 60 * 1000L;
-repository.performPeriodicMaintenance(timeout);
+// Timeout if the database isn't ready for maintenance after 60 seconds
+long timeout = 60 * 1000L;
+repository.performPeriodicMaintenance(timeout);
-LOGGER.info("Scheduled repository maintenance completed");
-} catch (DataException | TimeoutException e) {
-LOGGER.error("Scheduled repository maintenance failed", e);
+LOGGER.info("Scheduled repository maintenance completed");
+break;
+} catch (DataException | TimeoutException e) {
+LOGGER.info("Scheduled repository maintenance failed. Retrying up to 5 times...", e);
+}
+}
// Get a new random interval
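
The maintenance call is now wrapped in a bounded retry loop: each attempt opens a fresh repository session, breaks out on the first success, and otherwise logs and tries again. A stripped-down sketch of that shape, with doMaintenance() as a hypothetical stand-in for repository.performPeriodicMaintenance(timeout):

// Try the maintenance task a bounded number of times; stop as soon as it succeeds.
int attempts = 0;
while (attempts < 5) {
    attempts++;
    try {
        doMaintenance();   // hypothetical stand-in for the repository maintenance call
        break;             // success - stop retrying
    } catch (Exception e) {
        // failed - loop round for another attempt until the limit is reached
    }
}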
@@ -656,29 +661,6 @@ public class Controller extends Thread {
return lastMisbehaved != null && lastMisbehaved > NTP.getTime() - MISBEHAVIOUR_COOLOFF;
};
-/** True if peer has unknown height, lower height or same height and same block signature (unless we don't have their block signature). */
-public static Predicate<Peer> hasShorterBlockchain = peer -> {
-BlockData highestBlockData = getInstance().getChainTip();
-int ourHeight = highestBlockData.getHeight();
-final PeerChainTipData peerChainTipData = peer.getChainTipData();
-// Ensure we have chain tip data for this peer
-if (peerChainTipData == null)
-return true;
-// Remove if peer is at a lower height than us
-Integer peerHeight = peerChainTipData.getLastHeight();
-if (peerHeight == null || peerHeight < ourHeight)
-return true;
-// Don't remove if peer is on a greater height chain than us, or if we don't have their block signature
-if (peerHeight > ourHeight || peerChainTipData.getLastBlockSignature() == null)
-return false;
-// Remove if signatures match
-return Arrays.equals(peerChainTipData.getLastBlockSignature(), highestBlockData.getSignature());
-};
public static final Predicate<Peer> hasNoRecentBlock = peer -> {
final Long minLatestBlockTimestamp = getMinimumLatestBlockTimestamp();
final PeerChainTipData peerChainTipData = peer.getChainTipData();

View File

@@ -235,9 +235,6 @@ public class Synchronizer extends Thread {
// Disregard peers that are on the same block as last sync attempt and we didn't like their chain
peers.removeIf(Controller.hasInferiorChainTip);
-// Remove peers with unknown height, lower height or same height and same block signature (unless we don't have their block signature)
-peers.removeIf(Controller.hasShorterBlockchain);
final int peersBeforeComparison = peers.size();
// Request recent block summaries from the remaining peers, and locate our common block with each

View File

@@ -139,7 +139,7 @@ public class ArbitraryDataFileManager extends Thread {
Long startTime = NTP.getTime();
ArbitraryDataFileMessage receivedArbitraryDataFileMessage = fetchArbitraryDataFile(peer, null, signature, hash, null);
Long endTime = NTP.getTime();
-if (receivedArbitraryDataFileMessage != null) {
+if (receivedArbitraryDataFileMessage != null && receivedArbitraryDataFileMessage.getArbitraryDataFile() != null) {
LOGGER.debug("Received data file {} from peer {}. Time taken: {} ms", receivedArbitraryDataFileMessage.getArbitraryDataFile().getHash58(), peer, (endTime-startTime));
receivedAtLeastOneFile = true;

View File

@@ -39,12 +39,12 @@ public class ArbitraryTransactionTransformer extends TransactionTransformer {
private static final int IDENTIFIER_SIZE_LENGTH = INT_LENGTH;
private static final int COMPRESSION_LENGTH = INT_LENGTH;
private static final int METHOD_LENGTH = INT_LENGTH;
-private static final int SECRET_LENGTH = INT_LENGTH; // TODO: wtf?
+private static final int SECRET_SIZE_LENGTH = INT_LENGTH;
private static final int EXTRAS_LENGTH = SERVICE_LENGTH + DATA_TYPE_LENGTH + DATA_SIZE_LENGTH;
private static final int EXTRAS_V5_LENGTH = NONCE_LENGTH + NAME_SIZE_LENGTH + IDENTIFIER_SIZE_LENGTH +
-METHOD_LENGTH + SECRET_LENGTH + COMPRESSION_LENGTH + RAW_DATA_SIZE_LENGTH + METADATA_HASH_SIZE_LENGTH;
+METHOD_LENGTH + SECRET_SIZE_LENGTH + COMPRESSION_LENGTH + RAW_DATA_SIZE_LENGTH + METADATA_HASH_SIZE_LENGTH;
protected static final TransactionLayout layout;
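
The rename makes the constant's intent clearer: it is the size of the length prefix written before the (variable-length) secret, not the size of the secret itself. A hedged sketch of how such a size-prefixed field is typically serialized, illustrative only and not the transformer's actual code:

import java.nio.ByteBuffer;

// Write a 4-byte length prefix (the part a SECRET_SIZE_LENGTH-style constant accounts
// for in the layout), followed by the secret bytes, whose length varies per transaction.
static void writeSecret(ByteBuffer buffer, byte[] secret) {
    buffer.putInt(secret.length);  // fixed-size prefix counted in the layout constants
    buffer.put(secret);            // variable-size payload, counted separately
}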

View File

@@ -5,7 +5,8 @@
"maxBytesPerUnitFee": 1024,
"unitFee": "0.001",
"nameRegistrationUnitFees": [
{ "timestamp": 1645372800000, "fee": "5" }
{ "timestamp": 1645372800000, "fee": "5" },
{ "timestamp": 1651420800000, "fee": "1.25" }
],
"useBrokenMD160ForAddresses": false,
"requireGroupForApproval": false,

View File

@@ -356,8 +356,15 @@ public class MiscTests extends Common {
UnitFeesByTimestamp pastFeeIncrease = new UnitFeesByTimestamp();
pastFeeIncrease.timestamp = now - 1000L; // 1 second ago
pastFeeIncrease.fee = new AmountTypeAdapter().unmarshal("3");
-FieldUtils.writeField(BlockChain.getInstance(), "nameRegistrationUnitFees", Arrays.asList(pastFeeIncrease), true);
+// Set another increase in the future
+futureFeeIncrease = new UnitFeesByTimestamp();
+futureFeeIncrease.timestamp = now + (60 * 60 * 1000L); // 1 hour in the future
+futureFeeIncrease.fee = new AmountTypeAdapter().unmarshal("10");
+FieldUtils.writeField(BlockChain.getInstance(), "nameRegistrationUnitFees", Arrays.asList(pastFeeIncrease, futureFeeIncrease), true);
+assertEquals(pastFeeIncrease.fee, BlockChain.getInstance().getNameRegistrationUnitFeeAtTimestamp(pastFeeIncrease.timestamp));
+assertEquals(futureFeeIncrease.fee, BlockChain.getInstance().getNameRegistrationUnitFeeAtTimestamp(futureFeeIncrease.timestamp));
// Register a different name
// First try with the default unit fee