Clear out some more FindBugs warnings.

Mike Hearn
2012-04-04 23:52:02 +02:00
parent de1f5b8726
commit 6368862ffe
8 changed files with 54 additions and 34 deletions

View File

@@ -21,4 +21,8 @@
     <!-- BitCoinJ is not designed to run in an environment with malicious code loaded into the VM -->
     <Bug category="MALICIOUS_CODE"/>
+    <!-- This is flagging a valid issue but the real bug is in the JDK. See issue 173. -->
+    <Bug pattern="LG_LOST_LOGGER_DUE_TO_WEAK_REFERENCE"/>
 </FindBugsFilter>

View File

@@ -194,7 +194,7 @@ public class Block extends Message {
      * there are some interdependencies. For example altering a tx requires invalidating the Merkle root and therefore
      * the cached header bytes.
      */
-    private synchronized void maybeParseHeader() {
+    private void maybeParseHeader() {
         if (headerParsed || bytes == null)
             return;
         parseHeader();
@@ -202,7 +202,7 @@ public class Block extends Message {
         bytes = null;
     }

-    private synchronized void maybeParseTransactions() {
+    private void maybeParseTransactions() {
         if (transactionsParsed || bytes == null)
             return;
         try {
@@ -223,7 +223,7 @@ public class Block extends Message {
      * Ensure the object is parsed if needed. This should be called in every getter before returning a value. If the
      * lazy parse flag is not set this is a method returns immediately.
      */
-    protected synchronized void maybeParse() {
+    protected void maybeParse() {
         throw new LazyParseException(
                 "checkParse() should never be called on a Block. Instead use checkParseHeader() and checkParseTransactions()");
     }
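Note: the Javadoc above describes the lazy-parse convention, where every getter triggers parsing of the cached wire bytes before returning a value. A minimal, purely illustrative sketch of that convention (the field and getter names here are hypothetical, not actual Block members):

    // Illustrative only: a getter drives lazy parsing before touching a header field.
    public long getExampleNonce() {
        maybeParseHeader();   // parses the cached wire bytes on first access
        return exampleNonce;  // hypothetical field, stands in for any lazily parsed value
    }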

View File

@@ -65,9 +65,11 @@ public class BlockChain {
      * potentially invalidating transactions in our wallet.
      */
     protected StoredBlock chainHead;
-    // chainHead is accessed under this lock rather than the BlockChain lock. This is to try and keep accessors
-    // responsive whilst the chain is downloading, without free-threading the entire chain add process (which could
-    // get very confusing in the case of multiple blocks being added simultaneously).
+    // The chainHead field is read/written synchronized with this object rather than BlockChain. However writing is
+    // also guaranteed to happen whilst BlockChain is synchronized (see setChainHead). The goal of this is to let
+    // clients quickly access the chain head even whilst the block chain is downloading and thus the BlockChain is
+    // locked most of the time.
     protected final Object chainHeadLock = new Object();

     protected final NetworkParameters params;
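Note: the rewritten comment above describes a double-lock scheme: reads of chainHead take only chainHeadLock, while writes additionally happen whilst the BlockChain monitor is held. A minimal sketch of accessors following that scheme (simplified; persistence to the block store and other details are omitted):

    // Sketch of the locking scheme described in the comment above, not the full BlockChain code.
    public StoredBlock getChainHead() {
        // Readers take only the small chainHeadLock, so they stay responsive during chain download.
        synchronized (chainHeadLock) {
            return chainHead;
        }
    }

    protected synchronized void setChainHead(StoredBlock newHead) {
        // Writers hold the BlockChain monitor (synchronized method) as well as chainHeadLock.
        synchronized (chainHeadLock) {
            chainHead = newHead;
        }
    }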
@@ -160,7 +162,7 @@ public class BlockChain {
             statsBlocksAdded = 0;
         }
         // We check only the chain head for double adds here to avoid potentially expensive block chain misses.
-        if (block.equals(chainHead.getHeader())) {
+        if (block.equals(getChainHead().getHeader())) {
             // Duplicate add of the block at the top of the chain, can be a natural artifact of the download process.
             log.debug("Chain head added more than once: {}", block.getHash());
             return true;
@@ -222,10 +224,11 @@ public class BlockChain {
     private void connectBlock(StoredBlock newStoredBlock, StoredBlock storedPrev,
                               HashMap<Wallet, List<Transaction>> newTransactions)
             throws BlockStoreException, VerificationException {
-        if (storedPrev.equals(chainHead)) {
+        StoredBlock head = getChainHead();
+        if (storedPrev.equals(head)) {
             // This block connects to the best known block, it is a normal continuation of the system.
             setChainHead(newStoredBlock);
-            log.debug("Chain is now {} blocks high", chainHead.getHeight());
+            log.debug("Chain is now {} blocks high", newStoredBlock.getHeight());
             if (newTransactions != null)
                 sendTransactionsToWallet(newStoredBlock, NewBlockType.BEST_CHAIN, newTransactions);
         } else {
@@ -233,11 +236,11 @@ public class BlockChain {
             //
             // Note that we send the transactions to the wallet FIRST, even if we're about to re-organize this block
             // to become the new best chain head. This simplifies handling of the re-org in the Wallet class.
-            boolean haveNewBestChain = newStoredBlock.moreWorkThan(chainHead);
+            boolean haveNewBestChain = newStoredBlock.moreWorkThan(head);
             if (haveNewBestChain) {
                 log.info("Block is causing a re-organize");
             } else {
-                StoredBlock splitPoint = findSplit(newStoredBlock, chainHead);
+                StoredBlock splitPoint = findSplit(newStoredBlock, head);
                 if (splitPoint == newStoredBlock) {
                     // newStoredBlock is a part of the same chain, there's no fork. This happens when we receive a block
                     // that we already saw and linked into the chain previously, which isn't the chain head.
@@ -246,11 +249,14 @@ public class BlockChain {
                             newStoredBlock.getHeight(), newStoredBlock.getHeader().getHash());
                     return;
                 }
-                int splitPointHeight = splitPoint != null ? splitPoint.getHeight() : -1;
-                String splitPointHash =
-                        splitPoint != null ? splitPoint.getHeader().getHashAsString() : "?";
-                log.info("Block forks the chain at height {}/block {}, but it did not cause a reorganize:\n{}",
+                if (splitPoint == null) {
+                    log.error("Block forks the chain but splitPoint is null");
+                } else {
+                    int splitPointHeight = splitPoint.getHeight();
+                    String splitPointHash = splitPoint.getHeader().getHashAsString();
+                    log.info("Block forks the chain at height {}/block {}, but it did not cause a reorganize:\n{}",
                             new Object[]{splitPointHeight, splitPointHash, newStoredBlock});
+                }
             }

             // We may not have any transactions if we received only a header, which can happen during fast catchup.
@@ -273,13 +279,14 @@ public class BlockChain {
         //
         // Firstly, calculate the block at which the chain diverged. We only need to examine the
         // chain from beyond this block to find differences.
-        StoredBlock splitPoint = findSplit(newChainHead, chainHead);
+        StoredBlock head = getChainHead();
+        StoredBlock splitPoint = findSplit(newChainHead, head);
         log.info("Re-organize after split at height {}", splitPoint.getHeight());
-        log.info("Old chain head: {}", chainHead.getHeader().getHashAsString());
+        log.info("Old chain head: {}", head.getHeader().getHashAsString());
         log.info("New chain head: {}", newChainHead.getHeader().getHashAsString());
         log.info("Split at block: {}", splitPoint.getHeader().getHashAsString());
         // Then build a list of all blocks in the old part of the chain and the new part.
-        List<StoredBlock> oldBlocks = getPartialChain(chainHead, splitPoint);
+        List<StoredBlock> oldBlocks = getPartialChain(head, splitPoint);
         List<StoredBlock> newBlocks = getPartialChain(newChainHead, splitPoint);
         // Now inform the wallets. This is necessary so the set of currently active transactions (that we can spend)
         // can be updated to take into account the re-organize. We might also have received new coins we didn't have
@@ -311,15 +318,15 @@ public class BlockChain {
      * found (ie they are not part of the same chain). Returns newChainHead or chainHead if they don't actually diverge
      * but are part of the same chain.
      */
-    private StoredBlock findSplit(StoredBlock newChainHead, StoredBlock chainHead) throws BlockStoreException {
-        StoredBlock currentChainCursor = chainHead;
+    private StoredBlock findSplit(StoredBlock newChainHead, StoredBlock oldChainHead) throws BlockStoreException {
+        StoredBlock currentChainCursor = oldChainHead;
         StoredBlock newChainCursor = newChainHead;
         // Loop until we find the block both chains have in common. Example:
         //
         //    A -> B -> C -> D
         //         \--> E -> F -> G
         //
-        // findSplit will return block B. chainHead = D and newChainHead = G.
+        // findSplit will return block B. oldChainHead = D and newChainHead = G.
         while (!currentChainCursor.equals(newChainCursor)) {
             if (currentChainCursor.getHeight() > newChainCursor.getHeight()) {
                 currentChainCursor = currentChainCursor.getPrev(blockStore);
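Note: the hunk above ends mid-loop. For reference, a sketch of the complete split-finding walk that the A/B/C/D example in the comment describes, using only the calls visible above (getHeight(), getPrev(blockStore), equals()); handling of the "no common ancestor" case mentioned in the Javadoc is omitted, so this is not necessarily the exact remainder of the real method:

    private StoredBlock findSplitSketch(StoredBlock newChainHead, StoredBlock oldChainHead) throws BlockStoreException {
        StoredBlock currentChainCursor = oldChainHead;
        StoredBlock newChainCursor = newChainHead;
        // Walk whichever cursor is higher back towards the genesis block until both cursors
        // reach the same block: that block is the split point (B in the example above).
        while (!currentChainCursor.equals(newChainCursor)) {
            if (currentChainCursor.getHeight() > newChainCursor.getHeight()) {
                currentChainCursor = currentChainCursor.getPrev(blockStore);
            } else {
                newChainCursor = newChainCursor.getPrev(blockStore);
            }
        }
        return currentChainCursor;
    }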

View File

@@ -312,8 +312,6 @@ public abstract class Message implements Serializable {
             return buf;
         }

-        checkState(bytes == null, "Cached bytes present but failed to use them for serialization");
-
         // No cached array available so serialize parts by stream.
         ByteArrayOutputStream stream = new UnsafeByteArrayOutputStream(length < 32 ? 32 : length + 32);
         try {

View File

@@ -199,7 +199,6 @@ public class Transaction extends ChildMessage implements Serializable {
             return appearsInHashes;

         if (appearsIn != null) {
-            Preconditions.checkState(appearsInHashes == null);
             log.info("Migrating a tx to appearsInHashes");
             appearsInHashes = new HashSet<Sha256Hash>(appearsIn.size());
             for (StoredBlock block : appearsIn) {
@@ -741,7 +740,7 @@ public class Transaction extends ChildMessage implements Serializable {
     }

     /** Check if the transaction has a known confidence */
-    public boolean hasConfidence() {
+    public synchronized boolean hasConfidence() {
         return confidence != null && confidence.getConfidenceType() != TransactionConfidence.ConfidenceType.UNKNOWN;
     }

View File

@@ -72,7 +72,7 @@ public class TransactionConfidence implements Serializable {
      * <p>Adds an event listener that will be run when this confidence object is updated. The listener will be locked and
      * is likely to be invoked on a peer thread.</p>
      *
-     * <p>Note that this is NOT called when every block is arrived. Instead it is called when the transaction
+     * <p>Note that this is NOT called when every block arrives. Instead it is called when the transaction
      * transitions between confidence states, ie, from not being seen in the chain to being seen (not necessarily in
      * the best chain). If you want to know when the transaction gets buried under another block, listen for new block
      * events using {@link PeerEventListener#onBlocksDownloaded(Peer, Block, int)} and then use the getters on the
@@ -304,7 +304,7 @@ public class TransactionConfidence implements Serializable {
      * @throws IllegalStateException if confidence type is not BUILDING
      * @return estimated number of hashes needed to reverse the transaction.
      */
-    public BigInteger getWorkDone(BlockChain chain) throws BlockStoreException {
+    public synchronized BigInteger getWorkDone(BlockChain chain) throws BlockStoreException {
         int depth;
         synchronized (this) {
             if (getConfidenceType() != ConfidenceType.BUILDING)

View File

@@ -21,7 +21,10 @@ import java.io.StringWriter;
 import java.io.Writer;
 import java.text.MessageFormat;
 import java.util.Date;
-import java.util.logging.*;
+import java.util.logging.Formatter;
+import java.util.logging.Level;
+import java.util.logging.LogRecord;
+import java.util.logging.Logger;

 /**
  * A Java logging formatter that writes more compact output than the default.
@@ -29,15 +32,20 @@ import java.util.logging.*;
 public class BriefLogFormatter extends Formatter {
     private static final MessageFormat messageFormat = new MessageFormat("{3,date,hh:mm:ss} {0} {1}.{2}: {4}\n{5}");

+    // OpenJDK made a questionable, backwards incompatible change to the Logger implementation. It internally uses
+    // weak references now which means simply fetching the logger and changing its configuration won't work. We must
+    // keep a reference to our custom logger around.
+    private static Logger logger;
+
     /** Configures JDK logging to use this class for everything. */
     public static void init() {
-        LogManager.getLogManager().getLogger("").getHandlers()[0].setFormatter(new BriefLogFormatter());
+        logger = Logger.getLogger("");
+        logger.getHandlers()[0].setFormatter(new BriefLogFormatter());
     }

     public static void initVerbose() {
-        Logger logger = LogManager.getLogManager().getLogger("");
-        logger.getHandlers()[0].setFormatter(new BriefLogFormatter());
-        logger.setLevel(Level.FINEST);
+        init();
+        logger.setLevel(Level.ALL);
         logger.log(Level.FINE, "test");
     }
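Note: the comment above refers to the same JDK behaviour that the LG_LOST_LOGGER_DUE_TO_WEAK_REFERENCE pattern (suppressed in the FindBugs filter earlier in this commit) flags: the LogManager holds named loggers via weak references, so configuration applied to a logger that nothing else references can be lost after a garbage collection. A hedged illustration with a hypothetical logger name:

    // Fragile: no strong reference is kept, so the configured logger may be collected
    // and the level change lost the next time the logger is looked up.
    static void fragileInit() {
        Logger.getLogger("org.example.app").setLevel(Level.FINE);
    }

    // Robust (the approach taken in this commit): a static field keeps the logger alive.
    private static Logger configuredLogger;

    static void robustInit() {
        configuredLogger = Logger.getLogger("org.example.app");
        configuredLogger.setLevel(Level.FINE);
    }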

View File

@@ -77,6 +77,7 @@ public class WalletTool {
     private static NetworkParameters params;
     private static File walletFile;
     private static OptionSet options;
+    private static java.util.logging.Logger logger;

     public enum ActionEnum {
         DUMP,
@@ -130,7 +131,8 @@ public class WalletTool {
             log.info("Starting up ...");
         } else {
             // Disable logspam unless there is a flag.
-            LogManager.getLogManager().getLogger("").setLevel(Level.SEVERE);
+            logger = LogManager.getLogManager().getLogger("");
+            logger.setLevel(Level.SEVERE);
         }

         File chainFileName;
@@ -262,7 +264,9 @@ public class WalletTool {
             tmp = File.createTempFile("wallet", null, walletFile.getParentFile());
             tmp.deleteOnExit();
             wallet.saveToFile(tmp);
-            tmp.renameTo(walletFile);
+            if (!tmp.renameTo(walletFile)) {
+                throw new IOException("Failed to rename wallet");
+            }
         } catch (IOException e) {
             System.err.println("Failed to save wallet! Old wallet should be left untouched.");
             e.printStackTrace();