Compare commits

...

41 Commits

Author SHA1 Message Date
Ice
6f628be053 Update pom.xml - Deps
Corrections for NTP slipage at start up
2025-07-20 03:18:52 -04:00
Ice
eb07c45955 Merge pull request #255 from IceBurst/master
* Abstraction of AltCoinJ 
* Abstraction of CIYAM
* Update to BouncyCastle
2025-07-13 14:08:15 -04:00
Ice
8bea11bc52 Merge branch 'master' into master 2025-07-13 14:06:11 -04:00
Qortal-Auto-Update
415f594b25 Bump version to 5.0.2 2025-07-12 15:46:54 -07:00
crowetic
1e593cdf13 Merge pull request #263 from crowetic/master
updated minPeerVersion to 5.0.0 and removed duplicate entry in pom
2025-07-12 15:43:45 -07:00
71d2fbe0b6 updated minPeerVersion to 5.0.0 and removed duplicate entry in pom 2025-07-12 15:42:26 -07:00
crowetic
5a760db37d Merge pull request #262 from kennycud/master
Full Send 

Tested and ready
2025-07-12 15:30:11 -07:00
kennycud
05d629e717 removed logging spam 2025-07-12 14:03:35 -07:00
kennycud
cea63e7ec7 PeerSendManagement support for sending all messages through a queue 2025-07-12 14:02:19 -07:00
Qortal-Auto-Update
5fabc7792c Bump version to 5.0.1 2025-07-10 13:56:06 -07:00
crowetic
09d0af9b78 Merge pull request #260 from kennycud/master
Promising QDN Improvements
2025-07-10 13:51:35 -07:00
crowetic
698e616bc9 Merge pull request #261 from crowetic/master
added new auto-update scripts
2025-07-10 13:50:21 -07:00
6c0a9b3539 added new auto-update scripts 2025-07-10 13:47:10 -07:00
kennycud
60811f9f65 log spam reduction 2025-07-10 13:38:02 -07:00
kennycud
d91a777ffd Delete qortal.log 2025-07-10 13:32:30 -07:00
kennycud
c19cad020e Merge pull request #14 from Philreact/master-11
PeerSendManager
2025-07-10 13:18:14 -07:00
52519e3662 PeerSendManagement loose-ends 2025-07-10 23:16:42 +03:00
fd62e6156c increase request timeout 2025-07-10 17:38:32 +03:00
e5890b3b6f added cooling period in case of re-connections 2025-07-10 17:38:25 +03:00
256baeb1f4 reduce interval cleanup 2025-07-10 17:37:46 +03:00
05b83ade47 remove unused code 2025-07-10 17:37:39 +03:00
f7cb4ce264 PeerSendManger added 2025-07-10 17:37:25 +03:00
086ed6574f Merge remote-tracking branch 'kenny/master' into master-10 2025-07-09 22:38:00 +03:00
kennycud
4b56690118 qdn relay optimizations 2025-07-09 12:34:47 -07:00
kennycud
44d26b513a waiting and retrying clogged write channels 2025-07-08 13:42:49 -07:00
kennycud
dbd900f74a peer fetcher executor shutdown for inactivity, thanks to philreact research, peer fetcher thread naming added 2025-07-08 05:43:30 -07:00
kennycud
38463f6b1a follower compile error fix 2025-07-07 14:51:24 -07:00
kennycud
16e48aba04 follower initial implementation 2025-07-07 14:34:55 -07:00
kennycud
56d97457a1 Merge remote-tracking branch 'origin/master' 2025-07-07 14:32:25 -07:00
kennycud
2167d2f8fe reduced logging spam 2025-07-07 14:30:45 -07:00
kennycud
8425d62673 Merge pull request #13 from Philreact/bugfix/data-renderer-name-spaces
replace name spaces with encoded space
2025-07-05 05:06:15 -07:00
4995bee3e3 replace name spaces with encoded space 2025-07-05 07:03:55 +03:00
Ice
95e12395ae Merge pull request #1 from IceBurst/Abstract-and-Update-Deps
Abstract and update deps
2025-06-11 03:15:34 -04:00
Ice
47e5c473b3 Merge branch 'master' into Abstract-and-Update-Deps 2025-06-11 03:15:22 -04:00
MergeMerc
30c5136c44 Add Logging for failing to get a Repository Connection for Non-Required/Non-Blocking Tasks 2025-06-09 13:34:05 -04:00
Ice
618945620d Abstract CIYAM.AT out of Repo 2025-04-29 07:13:34 -04:00
Ice
b6d3e407c8 Updates to Dependencies - Test Improvements 2025-04-28 07:25:58 -04:00
Ice
2a97fba108 Merge remote-tracking branch 'origin/IceBurst-Unit-Tests-Updates' into Abstract-and-Update-Deps 2025-04-24 03:45:38 -04:00
Ice
2e7cd93716 Delete .github/workflows/pr-testomg 2025-04-16 15:07:52 -04:00
Ice
2cf0aeac22 Update pr-testing.yml 2025-04-16 14:30:10 -04:00
Ice
cc4056047e Create pr-testomg 2025-04-15 15:45:00 -04:00
42 changed files with 1559 additions and 543 deletions

View File

@@ -1,7 +1,7 @@
name: PR testing
on:
pull_request:
push:
branches: [ master ]
jobs:
@@ -21,11 +21,11 @@ jobs:
with:
java-version: '11'
distribution: 'adopt'
- name: Load custom deps
run: |
mvn install -DskipTests=true --file pom.xml
- name: Run all tests
run: |
mvn -B clean test -DskipTests=false --file pom.xml

Binary file not shown.

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.ciyam</groupId>
<artifactId>AT</artifactId>
<version>1.3.7</version>
<description>POM was created from install:install-file</description>
</project>

Binary file not shown.

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.ciyam</groupId>
<artifactId>AT</artifactId>
<version>1.3.8</version>
<description>POM was created from install:install-file</description>
</project>

Binary file not shown.

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.ciyam</groupId>
<artifactId>AT</artifactId>
<version>1.4.0</version>
<description>POM was created from install:install-file</description>
</project>

Binary file not shown.

View File

@@ -1,123 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.ciyam</groupId>
<artifactId>AT</artifactId>
<version>1.4.1</version>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<skipTests>false</skipTests>
<bouncycastle.version>1.69</bouncycastle.version>
<junit.version>4.13.2</junit.version>
<maven-compiler-plugin.version>3.11.0</maven-compiler-plugin.version>
<maven-jar-plugin.version>3.3.0</maven-jar-plugin.version>
<maven-javadoc-plugin.version>3.6.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>3.3.0</maven-source-plugin.version>
<maven-surefire-plugin.version>3.2.2</maven-surefire-plugin.version>
</properties>
<build>
<sourceDirectory>src/main/java</sourceDirectory>
<testSourceDirectory>src/test/java</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<skipTests>${skipTests}</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadoc</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<version>${bouncycastle.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

Binary file not shown.

View File

@@ -1,123 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.ciyam</groupId>
<artifactId>AT</artifactId>
<version>1.4.2</version>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<skipTests>false</skipTests>
<bouncycastle.version>1.70</bouncycastle.version>
<junit.version>4.13.2</junit.version>
<maven-compiler-plugin.version>3.13.0</maven-compiler-plugin.version>
<maven-source-plugin.version>3.3.0</maven-source-plugin.version>
<maven-javadoc-plugin.version>3.6.3</maven-javadoc-plugin.version>
<maven-surefire-plugin.version>3.2.5</maven-surefire-plugin.version>
<maven-jar-plugin.version>3.4.1</maven-jar-plugin.version>
</properties>
<build>
<sourceDirectory>src/main/java</sourceDirectory>
<testSourceDirectory>src/test/java</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<skipTests>${skipTests}</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadoc</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<version>${bouncycastle.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@@ -1,16 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>org.ciyam</groupId>
<artifactId>AT</artifactId>
<versioning>
<release>1.4.2</release>
<versions>
<version>1.3.7</version>
<version>1.3.8</version>
<version>1.4.0</version>
<version>1.4.1</version>
<version>1.4.2</version>
</versions>
<lastUpdated>20240426084210</lastUpdated>
</versioning>
</metadata>

59
pom.xml
View File

@@ -3,19 +3,19 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.qortal</groupId>
<artifactId>qortal</artifactId>
<version>5.0.0</version>
<version>5.1.0</version> <!-- Version must be <X.Y.Z> -->
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<skipTests>true</skipTests>
<altcoinj.version>7dc8c6f</altcoinj.version>
<altcoinj.version>7dc8c6f</altcoinj.version>
<bitcoinj.version>0.15.10</bitcoinj.version>
<bouncycastle.version>1.70</bouncycastle.version>
<skipJUnitTests>true</skipJUnitTests>
<altcoinj.version>d7cf6ac</altcoinj.version> <!-- BC v16 / Updated Abstract Classes / alertSigningKey -->
<bitcoinj.version>0.16.3</bitcoinj.version>
<bouncycastle.version>1.73</bouncycastle.version>
<build.timestamp>${maven.build.timestamp}</build.timestamp>
<ciyam-at.version>1.4.2</ciyam-at.version>
<ciyam-at.version>1b731d1</ciyam-at.version> <!-- This is the hash for v1.4.3 -->
<commons-net.version>3.8.0</commons-net.version>
<!-- <commons-net.version>3.9.0</commons-net.version> v5.2.0 coming soon -->
<commons-text.version>1.12.0</commons-text.version>
<commons-io.version>2.18.0</commons-io.version>
<commons-compress.version>1.27.1</commons-compress.version>
@@ -24,6 +24,7 @@
<extendedset.version>0.12.3</extendedset.version>
<git-commit-id-plugin.version>4.9.10</git-commit-id-plugin.version>
<grpc.version>1.68.1</grpc.version>
<!-- <grpc.version>1.68.3</grpc.version> v5.2.0 coming soon -->
<guava.version>33.3.1-jre</guava.version>
<hamcrest-library.version>2.2</hamcrest-library.version>
<homoglyph.version>1.2.1</homoglyph.version>
@@ -34,6 +35,7 @@
<jaxb-runtime.version>2.3.9</jaxb-runtime.version>
<jersey.version>2.42</jersey.version>
<jetty.version>9.4.56.v20240826</jetty.version>
<!-- <jetty.version>9.4.57.v20241219</jetty.version> v5.2.0 Coming Soon -->
<json-simple.version>1.1.1</json-simple.version>
<json.version>20240303</json.version>
<jsoup.version>1.18.1</jsoup.version>
@@ -52,10 +54,14 @@
<maven-shade-plugin.version>3.6.0</maven-shade-plugin.version>
<maven-install-plugin.version>3.1.3</maven-install-plugin.version>
<maven-surefire-plugin.version>3.5.2</maven-surefire-plugin.version>
<!-- <maven-surefire-plugin.version>3.5.3</maven-surefire-plugin.version> v5.2.0 Coming Soon -->
<protobuf.version>3.25.3</protobuf.version>
<!-- <protobuf.version>3.25.7</protobuf.version> v 5.1 -->
<replacer.version>1.5.3</replacer.version>
<simplemagic.version>1.17</simplemagic.version>
<slf4j.version>1.7.36</slf4j.version>
<!-- <swagger-api.version>2.2.30</swagger-api.version> need code upgrade Future Release -->
<!-- <swagger-api.version>2.1.13</swagger-api.version> need code upgrade Future Release -->
<swagger-api.version>2.0.10</swagger-api.version>
<swagger-ui.version>5.18.2</swagger-ui.version>
<upnp.version>1.2</upnp.version>
@@ -291,19 +297,23 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
<configuration>
<archive>
<manifest>
<addDefaultEntries>false</addDefaultEntries>
<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
</manifest>
<manifestEntries>
<Last-Commit-Id>${git.commit.id.full}</Last-Commit-Id>
<Last-Commit-Time>${git.commit.time}</Last-Commit-Time>
<Reproducible-Build>true</Reproducible-Build>
</manifestEntries>
</archive>
</configuration>
<executions>
<execution>
<configuration>
<archive>
<manifest>
<addDefaultEntries>false</addDefaultEntries>
<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
</manifest>
<manifestEntries>
<Last-Commit-Id>${git.commit.id.full}</Last-Commit-Id>
<Last-Commit-Time>${git.commit.time}</Last-Commit-Time>
<Reproducible-Build>true</Reproducible-Build>
</manifestEntries>
</archive>
</configuration>
</execution>
</executions>
</plugin>
<!-- Copy modified hsqldb.jar to install / modified MANIFEST.MF-->
<plugin>
@@ -378,6 +388,7 @@
</execution>
</executions>
</plugin>
<!-- Removed, now use Maven reproducible by default v4.0, IntelliJ v2025.1 and later -->
<plugin>
<groupId>io.github.zlika</groupId>
<artifactId>reproducible-build-maven-plugin</artifactId>
@@ -400,7 +411,7 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<skipTests>${skipTests}</skipTests>
<skipTests>${skipJUnitTests}</skipTests>
</configuration>
</plugin>
</plugins>
@@ -484,7 +495,7 @@
</dependency>
<!-- CIYAM AT (automated transactions) -->
<dependency>
<groupId>org.ciyam</groupId>
<groupId>com.github.iceburst</groupId>
<artifactId>AT</artifactId>
<version>${ciyam-at.version}</version>
</dependency>
@@ -502,7 +513,7 @@
</dependency>
<!-- For Litecoin, etc. support, requires bitcoinj -->
<dependency>
<groupId>com.github.qortal</groupId>
<groupId>com.github.iceburst</groupId>
<artifactId>altcoinj</artifactId>
<version>${altcoinj.version}</version>
</dependency>

View File

@@ -1,17 +1,41 @@
package org.qortal.account;
import org.bouncycastle.crypto.generators.Ed25519KeyPairGenerator;
import org.bouncycastle.crypto.params.Ed25519KeyGenerationParameters;
import org.bouncycastle.crypto.params.Ed25519PublicKeyParameters;
import org.qortal.crypto.Crypto;
import org.qortal.data.account.AccountData;
import org.qortal.repository.Repository;
import java.security.SecureRandom;
public class PublicKeyAccount extends Account {
protected final byte[] publicKey;
protected final Ed25519PublicKeyParameters edPublicKeyParams;
/** <p>Constructor for generating a PublicKeyAccount</p>
*
* @param repository Block Chain
* @param publicKey 32 byte Public Key
* @since v4.7.3
*/
public PublicKeyAccount(Repository repository, byte[] publicKey) {
this(repository, new Ed25519PublicKeyParameters(publicKey, 0));
super(repository, Crypto.toAddress(publicKey));
Ed25519PublicKeyParameters t = null;
try {
t = new Ed25519PublicKeyParameters(publicKey, 0);
} catch (Exception e) {
var gen = new Ed25519KeyPairGenerator();
gen.init(new Ed25519KeyGenerationParameters(new SecureRandom()));
var keyPair = gen.generateKeyPair();
t = (Ed25519PublicKeyParameters) keyPair.getPublic();
} finally {
this.edPublicKeyParams = t;
}
this.publicKey = publicKey;
}
protected PublicKeyAccount(Repository repository, Ed25519PublicKeyParameters edPublicKeyParams) {

View File

@@ -1,6 +1,7 @@
package org.qortal.arbitrary;
import com.google.common.io.Resources;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.LogManager;
@@ -15,11 +16,13 @@ import org.qortal.settings.Settings;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
@@ -167,7 +170,14 @@ public class ArbitraryDataRenderer {
if (HTMLParser.isHtmlFile(filename)) {
// HTML file - needs to be parsed
byte[] data = Files.readAllBytes(filePath); // TODO: limit file size that can be read into memory
HTMLParser htmlParser = new HTMLParser(resourceId, inPath, prefix, includeResourceIdInPrefix, data, qdnContext, service, identifier, theme, usingCustomRouting, lang);
String encodedResourceId;
if (resourceIdType == ResourceIdType.NAME) {
encodedResourceId = resourceId.replace(" ", "%20");
} else {
encodedResourceId = resourceId;
}
HTMLParser htmlParser = new HTMLParser(encodedResourceId, inPath, prefix, includeResourceIdInPrefix, data, qdnContext, service, identifier, theme, usingCustomRouting, lang);
htmlParser.addAdditionalHeaderTags();
response.addHeader("Content-Security-Policy", "default-src 'self' 'unsafe-inline' 'unsafe-eval'; font-src 'self' data:; media-src 'self' data: blob:; img-src 'self' data: blob:; connect-src 'self' wss: blob:;");
response.setContentType(context.getMimeType(filename));

View File

@@ -567,6 +567,9 @@ public class Controller extends Thread {
LOGGER.info("Starting foreign fees manager");
ForeignFeesManager.getInstance().start();
LOGGER.info("Starting follower");
Follower.getInstance().start();
LOGGER.info("Starting transaction importer");
TransactionImporter.getInstance().start();

View File

@@ -124,8 +124,8 @@ public class ArbitraryDataFileListManager {
if (timeSinceLastAttempt > 15 * 1000L) {
// We haven't tried for at least 15 seconds
if (networkBroadcastCount < 3) {
// We've made less than 3 total attempts
if (networkBroadcastCount < 12) {
// We've made less than 12 total attempts
return true;
}
}
@@ -134,8 +134,8 @@ public class ArbitraryDataFileListManager {
if (timeSinceLastAttempt > 60 * 1000L) {
// We haven't tried for at least 1 minute
if (networkBroadcastCount < 8) {
// We've made less than 8 total attempts
if (networkBroadcastCount < 40) {
// We've made less than 40 total attempts
return true;
}
}
@@ -402,8 +402,8 @@ public class ArbitraryDataFileListManager {
return true;
}
public void deleteFileListRequestsForSignature(byte[] signature) {
String signature58 = Base58.encode(signature);
public void deleteFileListRequestsForSignature(String signature58) {
for (Iterator<Map.Entry<Integer, Triple<String, Peer, Long>>> it = arbitraryDataFileListRequests.entrySet().iterator(); it.hasNext();) {
Map.Entry<Integer, Triple<String, Peer, Long>> entry = it.next();
if (entry == null || entry.getKey() == null || entry.getValue() == null) {
@@ -587,9 +587,7 @@ public class ArbitraryDataFileListManager {
// Forward to requesting peer
LOGGER.debug("Forwarding file list with {} hashes to requesting peer: {}", hashes.size(), requestingPeer);
if (!requestingPeer.sendMessage(forwardArbitraryDataFileListMessage)) {
requestingPeer.disconnect("failed to forward arbitrary data file list");
}
requestingPeer.sendMessage(forwardArbitraryDataFileListMessage);
}
}
}
@@ -787,7 +785,6 @@ public class ArbitraryDataFileListManager {
if (!peer.sendMessage(arbitraryDataFileListMessage)) {
LOGGER.debug("Couldn't send list of hashes");
peer.disconnect("failed to send list of hashes");
continue;
}

View File

@@ -1,6 +1,7 @@
package org.qortal.controller.arbitrary;
import com.google.common.net.InetAddresses;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.arbitrary.ArbitraryDataFile;
@@ -12,6 +13,7 @@ import org.qortal.data.network.PeerData;
import org.qortal.data.transaction.ArbitraryTransactionData;
import org.qortal.network.Network;
import org.qortal.network.Peer;
import org.qortal.network.PeerSendManagement;
import org.qortal.network.message.*;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
@@ -23,7 +25,8 @@ import org.qortal.utils.NTP;
import java.security.SecureRandom;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
@@ -31,6 +34,7 @@ import java.util.stream.Collectors;
public class ArbitraryDataFileManager extends Thread {
public static final int SEND_TIMEOUT_MS = 500;
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataFileManager.class);
private static ArbitraryDataFileManager instance;
@@ -67,9 +71,9 @@ public class ArbitraryDataFileManager extends Thread {
public static int MAX_FILE_HASH_RESPONSES = 1000;
private ArbitraryDataFileManager() {
this.arbitraryDataFileHashResponseScheduler.scheduleAtFixedRate( this::processResponses, 60, 1, TimeUnit.SECONDS);
this.arbitraryDataFileHashResponseScheduler.scheduleAtFixedRate(this::handleFileListRequestProcess, 60, 1, TimeUnit.SECONDS);
}
public static ArbitraryDataFileManager getInstance() {
@@ -79,6 +83,8 @@ public class ArbitraryDataFileManager extends Thread {
return instance;
}
@Override
public void run() {
Thread.currentThread().setName("Arbitrary Data File Manager");
@@ -140,7 +146,7 @@ public class ArbitraryDataFileManager extends Thread {
if (!arbitraryDataFileRequests.containsKey(Base58.encode(hash))) {
LOGGER.debug("Requesting data file {} from peer {}", hash58, peer);
Long startTime = NTP.getTime();
ArbitraryDataFile receivedArbitraryDataFile = fetchArbitraryDataFile(peer, null, arbitraryTransactionData, signature, hash, null);
ArbitraryDataFile receivedArbitraryDataFile = fetchArbitraryDataFile(peer, arbitraryTransactionData, signature, hash);
Long endTime = NTP.getTime();
if (receivedArbitraryDataFile != null) {
LOGGER.debug("Received data file {} from peer {}. Time taken: {} ms", receivedArbitraryDataFile.getHash58(), peer, (endTime-startTime));
@@ -207,14 +213,71 @@ public class ArbitraryDataFileManager extends Thread {
}
}
private ArbitraryDataFile fetchArbitraryDataFile(Peer peer, Peer requestingPeer, ArbitraryTransactionData arbitraryTransactionData, byte[] signature, byte[] hash, Message originalMessage) throws DataException {
ArbitraryDataFile existingFile = ArbitraryDataFile.fromHash(hash, signature);
boolean fileAlreadyExists = existingFile.exists();
String hash58 = Base58.encode(hash);
private ArbitraryDataFile fetchArbitraryDataFile(Peer peer, ArbitraryTransactionData arbitraryTransactionData, byte[] signature, byte[] hash) throws DataException {
ArbitraryDataFile arbitraryDataFile;
// Fetch the file if it doesn't exist locally
if (!fileAlreadyExists) {
try {
ArbitraryDataFile existingFile = ArbitraryDataFile.fromHash(hash, signature);
boolean fileAlreadyExists = existingFile.exists();
String hash58 = Base58.encode(hash);
// Fetch the file if it doesn't exist locally
if (!fileAlreadyExists) {
LOGGER.debug(String.format("Fetching data file %.8s from peer %s", hash58, peer));
arbitraryDataFileRequests.put(hash58, NTP.getTime());
Message getArbitraryDataFileMessage = new GetArbitraryDataFileMessage(signature, hash);
Message response = null;
try {
response = peer.getResponseWithTimeout(getArbitraryDataFileMessage, (int) ArbitraryDataManager.ARBITRARY_REQUEST_TIMEOUT);
} catch (InterruptedException e) {
// Will return below due to null response
}
arbitraryDataFileRequests.remove(hash58);
LOGGER.trace(String.format("Removed hash %.8s from arbitraryDataFileRequests", hash58));
if (response == null) {
LOGGER.debug("Received null response from peer {}", peer);
return null;
}
if (response.getType() != MessageType.ARBITRARY_DATA_FILE) {
LOGGER.debug("Received response with invalid type: {} from peer {}", response.getType(), peer);
return null;
}
ArbitraryDataFileMessage peersArbitraryDataFileMessage = (ArbitraryDataFileMessage) response;
arbitraryDataFile = peersArbitraryDataFileMessage.getArbitraryDataFile();
} else {
LOGGER.debug(String.format("File hash %s already exists, so skipping the request", hash58));
arbitraryDataFile = existingFile;
}
if (arbitraryDataFile != null) {
arbitraryDataFile.save();
// If this is a metadata file then we need to update the cache
if (arbitraryTransactionData != null && arbitraryTransactionData.getMetadataHash() != null) {
if (Arrays.equals(arbitraryTransactionData.getMetadataHash(), hash)) {
ArbitraryDataCacheManager.getInstance().addToUpdateQueue(arbitraryTransactionData);
}
}
// We may need to remove the file list request, if we have all the files for this transaction
this.handleFileListRequests(signature);
}
} catch (DataException e) {
LOGGER.error(e.getMessage(), e);
arbitraryDataFile = null;
}
return arbitraryDataFile;
}
private void fetchFileForRelay(Peer peer, Peer requestingPeer, byte[] signature, byte[] hash, Message originalMessage) throws DataException {
try {
String hash58 = Base58.encode(hash);
LOGGER.debug(String.format("Fetching data file %.8s from peer %s", hash58, peer));
arbitraryDataFileRequests.put(hash58, NTP.getTime());
Message getArbitraryDataFileMessage = new GetArbitraryDataFileMessage(signature, hash);
@@ -228,77 +291,73 @@ public class ArbitraryDataFileManager extends Thread {
arbitraryDataFileRequests.remove(hash58);
LOGGER.trace(String.format("Removed hash %.8s from arbitraryDataFileRequests", hash58));
if (response == null) {
LOGGER.debug("Received null response from peer {}", peer);
return null;
return;
}
if (response.getType() != MessageType.ARBITRARY_DATA_FILE) {
LOGGER.debug("Received response with invalid type: {} from peer {}", response.getType(), peer);
return null;
}
ArbitraryDataFileMessage peersArbitraryDataFileMessage = (ArbitraryDataFileMessage) response;
arbitraryDataFile = peersArbitraryDataFileMessage.getArbitraryDataFile();
} else {
LOGGER.debug(String.format("File hash %s already exists, so skipping the request", hash58));
arbitraryDataFile = existingFile;
}
if (arbitraryDataFile == null) {
// We don't have a file, so give up here
return null;
}
// We might want to forward the request to the peer that originally requested it
this.handleArbitraryDataFileForwarding(requestingPeer, new ArbitraryDataFileMessage(signature, arbitraryDataFile), originalMessage);
boolean isRelayRequest = (requestingPeer != null);
if (isRelayRequest) {
if (!fileAlreadyExists) {
// File didn't exist locally before the request, and it's a forwarding request, so delete it if it exists.
// It shouldn't exist on the filesystem yet, but leaving this here just in case.
arbitraryDataFile.delete(10);
}
}
else {
arbitraryDataFile.save();
}
// If this is a metadata file then we need to update the cache
if (arbitraryTransactionData != null && arbitraryTransactionData.getMetadataHash() != null) {
if (Arrays.equals(arbitraryTransactionData.getMetadataHash(), hash)) {
ArbitraryDataCacheManager.getInstance().addToUpdateQueue(arbitraryTransactionData);
}
}
// We may need to remove the file list request, if we have all the files for this transaction
this.handleFileListRequests(signature);
return arbitraryDataFile;
}
private void handleFileListRequests(byte[] signature) {
try (final Repository repository = RepositoryManager.getRepository()) {
// Fetch the transaction data
ArbitraryTransactionData arbitraryTransactionData = ArbitraryTransactionUtils.fetchTransactionData(repository, signature);
if (arbitraryTransactionData == null) {
return;
}
boolean completeFileExists = ArbitraryTransactionUtils.completeFileExists(arbitraryTransactionData);
ArbitraryDataFileMessage peersArbitraryDataFileMessage = (ArbitraryDataFileMessage) response;
ArbitraryDataFile arbitraryDataFile = peersArbitraryDataFileMessage.getArbitraryDataFile();
if (completeFileExists) {
String signature58 = Base58.encode(arbitraryTransactionData.getSignature());
LOGGER.info("All chunks or complete file exist for transaction {}", signature58);
ArbitraryDataFileListManager.getInstance().deleteFileListRequestsForSignature(signature);
if (arbitraryDataFile != null) {
// We might want to forward the request to the peer that originally requested it
this.handleArbitraryDataFileForwarding(requestingPeer, new ArbitraryDataFileMessage(signature, arbitraryDataFile), originalMessage);
}
} catch (Exception e) {
LOGGER.error(e.getMessage(), e);
}
}
// Pending file-list signatures keyed by their Base58 encoding; populated by
// handleFileListRequests(byte[]) and drained in bulk by handleFileListRequestProcess()
Map<String, byte[]> signatureBySignature58 = new HashMap<>();
// Lock guarding all access to signatureBySignature58
private final Object handleFileListRequestsLock = new Object();
// Single-threaded scheduler used to drain the pending map periodically
// NOTE(review): the schedule itself is configured elsewhere — confirm the "every second" cadence
private final ScheduledExecutorService handleFileListRequestsScheduler = Executors.newScheduledThreadPool(1);
/**
 * Record a signature for deferred file-list request handling.
 *
 * The signature is only remembered here; the scheduled processor drains the
 * pending map later and decides whether outstanding requests can be dropped.
 *
 * @param signature transaction signature whose file-list requests may be removable
 */
private void handleFileListRequests(byte[] signature) {
    // Encode outside the lock; only the map mutation needs to be guarded
    String signature58 = Base58.encode(signature);

    synchronized (handleFileListRequestsLock) {
        signatureBySignature58.put(signature58, signature);
    }
}
/**
 * Drain the pending-signature map and, for each transaction whose data is now
 * fully present locally, delete any outstanding file-list requests for it.
 */
private void handleFileListRequestProcess() {
    // Atomically snapshot-and-clear the pending map so new arrivals are not lost
    final Map<String, byte[]> pending;
    synchronized (handleFileListRequestsLock) {
        pending = new HashMap<>(signatureBySignature58);
        signatureBySignature58.clear();
    }

    if (pending.isEmpty()) {
        return;
    }

    try (final Repository repository = RepositoryManager.getRepository()) {
        // Fetch transaction data for all drained signatures in a single batch
        List<ArbitraryTransactionData> transactions
                = ArbitraryTransactionUtils.fetchTransactionDataList(repository, new ArrayList<>(pending.values()));

        for (ArbitraryTransactionData transactionData : transactions) {
            if (!ArbitraryTransactionUtils.completeFileExists(transactionData)) {
                continue;
            }

            String signature58 = Base58.encode(transactionData.getSignature());
            LOGGER.debug("All chunks or complete file exist for transaction {}", signature58);

            // Everything is held locally, so pending file-list requests are obsolete
            ArbitraryDataFileListManager.getInstance().deleteFileListRequestsForSignature(signature58);
        }
    } catch (DataException e) {
        LOGGER.debug("Unable to handle file list requests: {}", e.getMessage());
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
}
@@ -315,15 +374,14 @@ public class ArbitraryDataFileManager extends Thread {
LOGGER.debug("Received arbitrary data file - forwarding is needed");
// The ID needs to match that of the original request
message.setId(originalMessage.getId());
try {
// The ID needs to match that of the original request
message.setId(originalMessage.getId());
if (!requestingPeer.sendMessageWithTimeout(message, (int) ArbitraryDataManager.ARBITRARY_REQUEST_TIMEOUT)) {
LOGGER.debug("Failed to forward arbitrary data file to peer {}", requestingPeer);
requestingPeer.disconnect("failed to forward arbitrary data file");
}
else {
LOGGER.debug("Forwarded arbitrary data file to peer {}", requestingPeer);
PeerSendManagement.getInstance().getOrCreateSendManager(requestingPeer).queueMessage(message, SEND_TIMEOUT_MS);
} catch (Exception e) {
LOGGER.error(e.getMessage(), e);
}
}
@@ -597,13 +655,9 @@ public class ArbitraryDataFileManager extends Thread {
LOGGER.debug("Sending file {}...", arbitraryDataFile);
ArbitraryDataFileMessage arbitraryDataFileMessage = new ArbitraryDataFileMessage(signature, arbitraryDataFile);
arbitraryDataFileMessage.setId(message.getId());
if (!peer.sendMessageWithTimeout(arbitraryDataFileMessage, (int) ArbitraryDataManager.ARBITRARY_REQUEST_TIMEOUT)) {
LOGGER.debug("Couldn't send file {}", arbitraryDataFile);
peer.disconnect("failed to send file");
}
else {
LOGGER.debug("Sent file {}", arbitraryDataFile);
}
PeerSendManagement.getInstance().getOrCreateSendManager(peer).queueMessage(arbitraryDataFileMessage, SEND_TIMEOUT_MS);
}
else if (relayInfo != null) {
LOGGER.debug("We have relay info for hash {}", Base58.encode(hash));
@@ -615,7 +669,7 @@ public class ArbitraryDataFileManager extends Thread {
LOGGER.debug("Asking peer {} for hash {}", peerToAsk, hash58);
// No need to pass arbitraryTransactionData below because this is only used for metadata caching,
// and metadata isn't retained when relaying.
this.fetchArbitraryDataFile(peerToAsk, peer, null, signature, hash, message);
this.fetchFileForRelay(peerToAsk, peer, signature, hash, message);
}
else {
LOGGER.debug("Peer {} not found in relay info", peer);
@@ -637,7 +691,6 @@ public class ArbitraryDataFileManager extends Thread {
fileUnknownMessage.setId(message.getId());
if (!peer.sendMessage(fileUnknownMessage)) {
LOGGER.debug("Couldn't sent file-unknown response");
peer.disconnect("failed to send file-unknown response");
}
else {
LOGGER.debug("Sent file-unknown response for file {}", arbitraryDataFile);

View File

@@ -15,6 +15,7 @@ import org.qortal.settings.Settings;
import org.qortal.utils.ArbitraryTransactionUtils;
import org.qortal.utils.Base58;
import org.qortal.utils.NTP;
import org.qortal.utils.NamedThreadFactory;
import java.net.http.HttpResponse;
import java.util.ArrayList;
@@ -38,6 +39,9 @@ public class ArbitraryDataFileRequestThread {
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataFileRequestThread.class);
private static final Integer FETCHER_LIMIT_PER_PEER = Settings.getInstance().getMaxThreadsForMessageType(MessageType.GET_ARBITRARY_DATA_FILE);
private static final String FETCHER_THREAD_PREFIX = "Arbitrary Data Fetcher ";
private ConcurrentHashMap<String, ExecutorService> executorByPeer = new ConcurrentHashMap<>();
private ArbitraryDataFileRequestThread() {
@@ -64,8 +68,9 @@ public class ArbitraryDataFileRequestThread {
if (value instanceof ThreadPoolExecutor) {
ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) value;
if (threadPoolExecutor.getActiveCount() == 0) {
threadPoolExecutor.shutdown();
if (this.executorByPeer.computeIfPresent(key, (k, v) -> null) == null) {
LOGGER.info("removed executor: peer = " + key);
LOGGER.trace("removed executor: peer = " + key);
}
}
} else {
@@ -147,7 +152,9 @@ public class ArbitraryDataFileRequestThread {
.computeIfAbsent(
responseInfo.getPeer().toString(),
peer -> Executors.newFixedThreadPool(
Settings.getInstance().getMaxThreadsForMessageType(MessageType.GET_ARBITRARY_DATA_FILE))
FETCHER_LIMIT_PER_PEER,
new NamedThreadFactory(FETCHER_THREAD_PREFIX + responseInfo.getPeer().toString(), NORM_PRIORITY)
)
)
.execute(fetcher);
}

View File

@@ -42,10 +42,10 @@ public class ArbitraryDataManager extends Thread {
private int powDifficulty = 14; // Must not be final, as unit tests need to reduce this value
/** Request timeout when transferring arbitrary data */
public static final long ARBITRARY_REQUEST_TIMEOUT = 12 * 1000L; // ms
public static final long ARBITRARY_REQUEST_TIMEOUT = 24 * 1000L; // ms
/** Maximum time to hold information about an in-progress relay */
public static final long ARBITRARY_RELAY_TIMEOUT = 90 * 1000L; // ms
public static final long ARBITRARY_RELAY_TIMEOUT = 120 * 1000L; // ms
/** Maximum time to hold direct peer connection information */
public static final long ARBITRARY_DIRECT_CONNECTION_INFO_TIMEOUT = 2 * 60 * 1000L; // ms

View File

@@ -360,9 +360,8 @@ public class ArbitraryMetadataManager {
// Forward to requesting peer
LOGGER.debug("Forwarding metadata to requesting peer: {}", requestingPeer);
if (!requestingPeer.sendMessage(forwardArbitraryMetadataMessage)) {
requestingPeer.disconnect("failed to forward arbitrary metadata");
}
requestingPeer.sendMessage(forwardArbitraryMetadataMessage);
}
}
}
@@ -479,7 +478,6 @@ public class ArbitraryMetadataManager {
arbitraryMetadataMessage.setId(message.getId());
if (!peer.sendMessage(arbitraryMetadataMessage)) {
LOGGER.debug("Couldn't send metadata");
peer.disconnect("failed to send metadata");
continue;
}
LOGGER.debug("Sent metadata");

View File

@@ -0,0 +1,130 @@
package org.qortal.controller.arbitrary;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.data.transaction.ArbitraryTransactionData;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.settings.Settings;
import org.qortal.utils.ListUtils;
import org.qortal.utils.NamedThreadFactory;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.OptionalInt;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
 * Periodically fetches arbitrary-transaction data published by followed names.
 *
 * Two schedules run on a small thread pool: a frequent pass over transactions
 * from recent blocks, and a daily pass over transactions from any height.
 */
public class Follower {

    private static final Logger LOGGER = LogManager.getLogger(Follower.class);

    // Two threads: one for the frequent recent-blocks pass, one for the daily full pass
    private ScheduledExecutorService service
            = Executors.newScheduledThreadPool(2, new NamedThreadFactory("Follower", Thread.NORM_PRIORITY));

    private Follower() {
    }

    private static Follower instance;

    /**
     * Return the process-wide instance, creating it on first use.
     *
     * Synchronized so concurrent first callers cannot observe a half-built
     * instance or create two of them.
     */
    public static synchronized Follower getInstance() {
        if (instance == null) {
            instance = new Follower();
        }

        return instance;
    }

    /** Start both periodic fetch schedules. */
    public void start() {
        // fetch arbitrary transactions from followed names from the last 100 blocks every 2 minutes
        service.scheduleWithFixedDelay(() -> fetch(OptionalInt.of(100)), 10, 2, TimeUnit.MINUTES);

        // fetch arbitrary transactions from followed names from any block every 24 hours
        service.scheduleWithFixedDelay(() -> fetch(OptionalInt.empty()), 4, 24, TimeUnit.HOURS);
    }

    /**
     * For each followed name, examine its latest arbitrary transactions and request
     * the file lists for any data we don't hold locally that passes the storage
     * manager's pre-fetch evaluation.
     *
     * @param limit if present, only consider transactions within this many most recent
     *              blocks; if empty, consider transactions from any block height
     */
    private void fetch(OptionalInt limit) {
        try {
            // for each followed name, get arbitrary transactions, then examine those transactions before fetching
            for (String name : ListUtils.followedNames()) {
                List<ArbitraryTransactionData> transactionsInReverseOrder;

                // open database to get the transactions in reverse order for the followed name
                try (final Repository repository = RepositoryManager.getRepository()) {
                    List<ArbitraryTransactionData> latestArbitraryTransactionsByName
                            = repository.getArbitraryRepository().getLatestArbitraryTransactionsByName(name);

                    if (limit.isPresent()) {
                        // keep only transactions confirmed above the height threshold
                        final int blockHeightThreshold = repository.getBlockRepository().getBlockchainHeight() - limit.getAsInt();
                        transactionsInReverseOrder
                                = latestArbitraryTransactionsByName.stream().filter(tx -> tx.getBlockHeight() > blockHeightThreshold)
                                .collect(Collectors.toList());
                    } else {
                        transactionsInReverseOrder = latestArbitraryTransactionsByName;
                    }
                } catch (Exception e) {
                    LOGGER.error(e.getMessage(), e);
                    transactionsInReverseOrder = new ArrayList<>(0);
                }

                // track processed transaction hashes, so we don't fetch outdated transactions
                Set<ArbitraryTransactionDataHashWrapper> processedTransactions = new HashSet<>();
                ArbitraryDataStorageManager storageManager = ArbitraryDataStorageManager.getInstance();

                // for each arbitrary transaction for the followed name: process, evaluate, fetch
                for (ArbitraryTransactionData arbitraryTransaction : transactionsInReverseOrder) {
                    boolean examined = false;

                    try (final Repository repository = RepositoryManager.getRepository()) {
                        // if not processed already
                        if (!processedTransactions.contains(new ArbitraryTransactionDataHashWrapper(arbitraryTransaction))) {
                            boolean isLocal = repository.getArbitraryRepository().isDataLocal(arbitraryTransaction.getSignature());

                            // if not local, then continue to evaluate
                            if (!isLocal) {
                                // evaluate fetching status for this transaction on this node
                                ArbitraryDataExamination examination = storageManager.shouldPreFetchData(repository, arbitraryTransaction);

                                // if the evaluation passed, then fetch
                                examined = examination.isPass();
                            }
                            // if locally stored, then nothing needs to be done

                            // add to processed transactions
                            processedTransactions.add(new ArbitraryTransactionDataHashWrapper(arbitraryTransaction));
                        }
                    }

                    // if passed examination for fetching, then fetch
                    if (examined) {
                        LOGGER.info("for {} on {}, fetching {}", name, arbitraryTransaction.getService(), arbitraryTransaction.getIdentifier());
                        boolean fetched = ArbitraryDataFileListManager.getInstance().fetchArbitraryDataFileList(arbitraryTransaction);
                        // parameterized form avoids eager string concatenation
                        LOGGER.info("fetched = {}", fetched);
                    }

                    // pause a second before moving on to another transaction
                    Thread.sleep(1000);
                }
            }
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
    }
}

View File

@@ -8,6 +8,8 @@ import org.bitcoinj.core.*;
import org.bitcoinj.crypto.ChildNumber;
import org.bitcoinj.crypto.DeterministicHierarchy;
import org.bitcoinj.crypto.DeterministicKey;
import org.bitcoinj.crypto.HDPath;
import org.bitcoinj.params.AbstractBitcoinNetParams;
import org.bitcoinj.script.Script.ScriptType;
import org.bitcoinj.script.ScriptBuilder;
import org.bitcoinj.wallet.DeterministicKeyChain;
@@ -25,7 +27,7 @@ import java.util.*;
import java.util.stream.Collectors;
/** Bitcoin-like (Bitcoin, Litecoin, etc.) support */
public abstract class Bitcoiny implements ForeignBlockchain {
public abstract class Bitcoiny extends AbstractBitcoinNetParams implements ForeignBlockchain {
protected static final Logger LOGGER = LogManager.getLogger(Bitcoiny.class);
@@ -65,6 +67,7 @@ public abstract class Bitcoiny implements ForeignBlockchain {
// Constructors and instance
protected Bitcoiny(BitcoinyBlockchainProvider blockchainProvider, Context bitcoinjContext, String currencyCode, Coin feePerKb) {
this.genesisBlock = this.getGenesisBlock();
this.blockchainProvider = blockchainProvider;
this.bitcoinjContext = bitcoinjContext;
this.currencyCode = currencyCode;
@@ -74,6 +77,15 @@ public abstract class Bitcoiny implements ForeignBlockchain {
}
// Getters & setters
@Override
public String getPaymentProtocolId() {
return this.id;
}
@Override
public Block getGenesisBlock() {
return this.genesisBlock;
}
public BitcoinyBlockchainProvider getBlockchainProvider() {
return this.blockchainProvider;
@@ -590,15 +602,27 @@ public abstract class Bitcoiny implements ForeignBlockchain {
return new AddressInfo(
address.toString(),
toIntegerList( key.getPath()),
toIntegerList( key.getPath() ),
summingUnspentOutputs(address.toString()),
key.getPathAsString(),
transactionCount,
candidates.contains(address.toString()));
}
private static List<Integer> toIntegerList(ImmutableList<ChildNumber> path) {
/**
* <p>Convert BitcoinJ native type to List of Integers, BitcoinJ v16 compatible
* </p>
*
* @param path path to deterministic key
* @return Array of Ints representing the keys position in the tree
* @since v4.7.2
*/
private static List<Integer> toIntegerList(HDPath path) {
return path.stream().map(ChildNumber::num).collect(Collectors.toList());
}
// BitcoinJ v15 compatible
private static List<Integer> toIntegerList(ImmutableList<ChildNumber> path) {
return path.stream().map(ChildNumber::num).collect(Collectors.toList());
}

View File

@@ -1,5 +1,6 @@
package org.qortal.crosschain;
import org.bitcoinj.core.Block;
import org.bitcoinj.core.Coin;
import org.bitcoinj.core.Context;
import org.bitcoinj.core.NetworkParameters;
@@ -148,4 +149,16 @@ public class BitcoinyTBD extends Bitcoiny {
this.netTBD.setFeeRequired( fee );
}
@Override
public String getPaymentProtocolId() {
return params.getId();
}
@Override
public Block getGenesisBlock() {
if(genesisBlock == null)
genesisBlock = params.getGenesisBlock();
return this.genesisBlock;
}
}

View File

@@ -98,9 +98,10 @@ public class DeterminedNetworkParams extends NetworkParameters implements Altcoi
LOGGER.info( "Creating Genesis Block ...");
// BitcoinJ v16 has a new native method for this
//this.genesisBlock = CoinParamsUtil.createGenesisBlockFromRequest(this, request);
LOGGER.info("Created Genesis Block: genesisBlock = " + genesisBlock );
// LOGGER.info("Created Genesis Block: genesisBlock = " + genesisBlock );
// this is 100 for each coin from what I can tell
this.spendableCoinbaseDepth = 100;
@@ -113,8 +114,9 @@ public class DeterminedNetworkParams extends NetworkParameters implements Altcoi
//
// LOGGER.info("request = " + request);
//
// checkState(genesisHash.equals(request.getExpectedGenesisHash()));
this.alertSigningKey = Hex.decode(request.getPubKey());
// checkState(genesisHash.equals(request.getExpectedGenesisHash()))
// alertSigningKey is removed in v16
// this.alertSigningKey = Hex.decode(request.getPubKey());
this.majorityEnforceBlockUpgrade = request.getMajorityEnforceBlockUpgrade();
this.majorityRejectBlockOutdated = request.getMajorityRejectBlockOutdated();
@@ -221,6 +223,12 @@ public class DeterminedNetworkParams extends NetworkParameters implements Altcoi
}
}
@Override
public Block getGenesisBlock() {
//ToDo: Finish
return null;
}
/**
* Get the difficulty target expected for the next block. This includes all
* the weird cases for Litecoin such as testnet blocks which can be maximum

View File

@@ -184,6 +184,11 @@ public class LegacyZcashAddress extends Address {
return p2sh ? ScriptType.P2SH : ScriptType.P2PKH;
}
@Override
public int compareTo(Address address) {
return this.toString().compareTo(address.toString());
}
/**
* Given an address, examines the version byte and attempts to find a matching NetworkParameters. If you aren't sure
* which network the address is intended for (eg, it was provided by a user), you can use this to decide if it is

View File

@@ -640,10 +640,13 @@ public class Peer {
return false;
try {
this.outputBuffer = ByteBuffer.wrap(message.toBytes());
byte[] messageBytes = message.toBytes();
this.outputBuffer = ByteBuffer.wrap(messageBytes);
this.outputMessageType = message.getType().name();
this.outputMessageId = message.getId();
LOGGER.trace("[{}] Sending {} message with ID {} to peer {}",
this.peerConnectionId, this.outputMessageType, this.outputMessageId, this);
@@ -662,12 +665,22 @@ public class Peer {
// If output byte buffer is not null, send from that
int bytesWritten = this.socketChannel.write(outputBuffer);
LOGGER.trace("[{}] Sent {} bytes of {} message with ID {} to peer {} ({} total)", this.peerConnectionId,
bytesWritten, this.outputMessageType, this.outputMessageId, this, outputBuffer.limit());
int zeroSendCount = 0;
// If we've sent 0 bytes then socket buffer is full so we need to wait until it's empty again
if (bytesWritten == 0) {
return true;
while (bytesWritten == 0) {
if (zeroSendCount > 9) {
LOGGER.debug("Socket write stuck for too long, returning");
return true;
}
try {
Thread.sleep(10); // 10MS CPU Sleep to try and give it time to flush the socket
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false; // optional, if you want to signal shutdown
}
zeroSendCount++;
bytesWritten = this.socketChannel.write(outputBuffer);
}
// If we then exhaust the byte buffer, set it to null (otherwise loop and try to send more)
@@ -723,13 +736,18 @@ public class Peer {
* @return <code>true</code> if message successfully sent; <code>false</code> otherwise
*/
public boolean sendMessageWithTimeout(Message message, int timeout) {
return PeerSendManagement.getInstance().getOrCreateSendManager(this).queueMessage(message, timeout);
}
public boolean sendMessageWithTimeoutNow(Message message, int timeout) {
if (!this.socketChannel.isOpen()) {
return false;
}
try {
// Queue message, to be picked up by ChannelWriteTask and then peer.writeChannel()
LOGGER.trace("[{}] Queuing {} message with ID {} to peer {}", this.peerConnectionId,
LOGGER.debug("[{}] Queuing {} message with ID {} to peer {}", this.peerConnectionId,
message.getType().name(), message.getId(), this);
// Check message properly constructed

View File

@@ -0,0 +1,55 @@
package org.qortal.network;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Registry of per-peer {@link PeerSendManager} instances.
 *
 * Managers are created lazily per peer and a background task periodically
 * shuts down and removes managers that have been idle for a while.
 */
public class PeerSendManagement {

    private static final Logger LOGGER = LogManager.getLogger(PeerSendManagement.class);

    // One PeerSendManager per peer, keyed by the peer's string form
    private final Map<String, PeerSendManager> peerSendManagers = new ConcurrentHashMap<>();

    /** Return the send manager for this peer, creating one if none exists yet. */
    public PeerSendManager getOrCreateSendManager(Peer peer) {
        return peerSendManagers.computeIfAbsent(peer.toString(), p -> new PeerSendManager(peer));
    }

    private PeerSendManagement() {
        // Periodically drop managers that have been idle, shutting down their worker threads
        ScheduledExecutorService cleaner = Executors.newSingleThreadScheduledExecutor();

        cleaner.scheduleAtFixedRate(() -> {
            long idleCutoff = TimeUnit.MINUTES.toMillis(2);

            Iterator<Map.Entry<String, PeerSendManager>> iterator = peerSendManagers.entrySet().iterator();

            while (iterator.hasNext()) {
                Map.Entry<String, PeerSendManager> entry = iterator.next();

                PeerSendManager manager = entry.getValue();

                if (manager.isIdle(idleCutoff)) {
                    iterator.remove(); // SAFE removal during iteration
                    manager.shutdown();
                    LOGGER.debug("Cleaned up PeerSendManager for peer {}", entry.getKey());
                }
            }
        }, 0, 5, TimeUnit.MINUTES);
    }

    private static PeerSendManagement instance;

    /**
     * Return the process-wide instance, creating it on first use.
     *
     * Synchronized: the constructor starts a scheduled cleaner thread, so a racy
     * double-instantiation would leak an extra executor.
     */
    public static synchronized PeerSendManagement getInstance() {
        if (instance == null) {
            instance = new PeerSendManagement();
        }

        return instance;
    }
}

View File

@@ -0,0 +1,138 @@
package org.qortal.network;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.network.message.Message;
/**
 * Serializes all outbound messages to a single peer through one queue and one
 * dedicated worker thread.
 *
 * Messages are queued with a per-send timeout and delivered in FIFO order.
 * Stale messages are dropped unsent, each send is retried a limited number of
 * times, and a peer that fails too many times in a row is disconnected and
 * placed in a cooldown window during which new messages are rejected.
 */
public class PeerSendManager {
    private static final Logger LOGGER = LogManager.getLogger(PeerSendManager.class);

    // Consecutive send failures tolerated before disconnecting the peer
    private static final int MAX_FAILURES = 15;
    // Send attempts per message before counting it as one failure
    private static final int MAX_MESSAGE_ATTEMPTS = 2;
    // Pause between attempts for the same message, in milliseconds
    private static final int RETRY_DELAY_MS = 100;
    // Queued messages older than this are dropped unsent, in milliseconds
    private static final long MAX_QUEUE_DURATION_MS = 20_000;
    // After a disconnect, reject new messages for this long, in milliseconds
    private static final long COOLDOWN_DURATION_MS = 20_000;

    private final Peer peer;
    // Unbounded FIFO of messages awaiting send, each stamped with its enqueue time
    private final BlockingQueue<TimedMessage> queue = new LinkedBlockingQueue<>();
    // Single worker thread that drains the queue for this peer
    private final ExecutorService executor;
    // Consecutive-failure counter; reset to zero on any successful send
    private final AtomicInteger failureCount = new AtomicInteger(0);
    // Shared across all instances purely to give worker threads unique names
    private static final AtomicInteger threadCount = new AtomicInteger(1);

    // True while in the post-disconnect cooldown window; queueMessage() rejects during this
    private volatile boolean coolingDown = false;
    // Timestamp of the last queueMessage() call; read by isIdle()
    private volatile long lastUsed = System.currentTimeMillis();

    public PeerSendManager(Peer peer) {
        this.peer = peer;
        this.executor = Executors.newSingleThreadExecutor(r -> {
            Thread t = new Thread(r);
            t.setName("PeerSendManager-" + peer.getResolvedAddress().getHostString() + "-" + threadCount.getAndIncrement());
            return t;
        });
        start();
    }

    // Launch the worker loop on the single-threaded executor
    private void start() {
        executor.submit(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    TimedMessage timedMessage = queue.take();

                    // Drop messages that waited too long to still be useful
                    long age = System.currentTimeMillis() - timedMessage.timestamp;
                    if (age > MAX_QUEUE_DURATION_MS) {
                        LOGGER.debug("Dropping stale message {} ({}ms old)", timedMessage.message.getId(), age);
                        continue;
                    }

                    Message message = timedMessage.message;
                    int timeout = timedMessage.timeout;
                    boolean success = false;

                    // Retry the same message a bounded number of times before giving up on it
                    for (int attempt = 1; attempt <= MAX_MESSAGE_ATTEMPTS; attempt++) {
                        try {
                            if (peer.sendMessageWithTimeoutNow(message, timeout)) {
                                success = true;
                                failureCount.set(0); // reset on success
                                break;
                            }
                        } catch (Exception e) {
                            LOGGER.debug("Attempt {} failed for message {} to peer {}: {}", attempt, message.getId(), peer, e.getMessage());
                        }

                        Thread.sleep(RETRY_DELAY_MS);
                    }

                    if (!success) {
                        int totalFailures = failureCount.incrementAndGet();
                        LOGGER.debug("Failed to send message {} to peer {}. Total failures: {}", message.getId(), peer, totalFailures);

                        if (totalFailures >= MAX_FAILURES) {
                            // Too many consecutive failures: disconnect, discard the backlog,
                            // and block new messages for the cooldown period (this sleep
                            // intentionally stalls the worker thread for the whole window)
                            LOGGER.debug("Peer {} exceeded failure limit ({}). Disconnecting...", peer, totalFailures);
                            peer.disconnect("Too many message send failures");
                            coolingDown = true;
                            queue.clear();

                            try {
                                Thread.sleep(COOLDOWN_DURATION_MS);
                            } catch (InterruptedException e) {
                                Thread.currentThread().interrupt();
                                return;
                            } finally {
                                coolingDown = false;
                                failureCount.set(0);
                            }
                        }
                    }

                    Thread.sleep(50); // small throttle between consecutive sends
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                } catch (Exception e) {
                    LOGGER.error("Unexpected error in PeerSendManager for peer {}: {}", peer, e.getMessage(), e);
                }
            }
        });
    }

    /**
     * Queue a message for sending to this peer.
     *
     * @param message the message to send
     * @param timeout per-send timeout in milliseconds, forwarded to the peer's send call
     * @return true if accepted; false while cooling down or if the queue rejects the
     *         message (the queue is unbounded here, so offer() failing is not expected)
     */
    public boolean queueMessage(Message message, int timeout) {
        if (coolingDown) {
            LOGGER.debug("In cooldown, ignoring message {}", message.getId());
            return false;
        }

        lastUsed = System.currentTimeMillis();

        if (!queue.offer(new TimedMessage(message, timeout))) {
            LOGGER.debug("Send queue full, dropping message {}", message.getId());
            return false;
        }
        return true;
    }

    /** True if no message has been queued within the given number of milliseconds. */
    public boolean isIdle(long cutoffMillis) {
        return System.currentTimeMillis() - lastUsed > cutoffMillis;
    }

    /** Discard any pending messages and stop the worker thread. */
    public void shutdown() {
        queue.clear();
        executor.shutdownNow();
    }

    // A queued message plus its enqueue timestamp and per-send timeout
    private static class TimedMessage {
        final Message message;
        final long timestamp;
        final int timeout;

        TimedMessage(Message message, int timeout) {
            this.message = message;
            this.timestamp = System.currentTimeMillis();
            this.timeout = timeout;
        }
    }
}

View File

@@ -31,8 +31,28 @@ public class ChannelWriteTask implements Task {
@Override
public void perform() throws InterruptedException {
try {
boolean isSocketClogged = peer.writeChannel();
boolean isSocketClogged;
int clogCounter = 0;
do {
isSocketClogged = peer.writeChannel();
if (clogCounter > 9) {
LOGGER.warn("10 Socket Clogs - GIVING UP");
break;
}
if (isSocketClogged) {
LOGGER.debug(
"socket is clogged: peer = {} {}, retrying",
peer.getPeerData().getAddress().toString(),
Thread.currentThread().getName()
);
Thread.sleep(1000);
clogCounter++;
}
} while( isSocketClogged );
// Tell Network that we've finished
Network.getInstance().notifyChannelNotWriting(socketChannel);
@@ -49,4 +69,4 @@ public class ChannelWriteTask implements Task {
peer.disconnect("I/O error");
}
}
}
}

View File

@@ -44,11 +44,9 @@ public class HSQLDBChatRepository implements ChatRepository {
// if the PrimaryTable is available, then use it
if( this.repository.getBlockRepository().getBlockchainHeight() > BlockChain.getInstance().getMultipleNamesPerAccountHeight()) {
LOGGER.debug("using PrimaryNames for chat transactions");
tableName = "PrimaryNames";
}
else {
LOGGER.debug("using Names for chat transactions");
tableName = "Names";
}
@@ -164,11 +162,9 @@ public class HSQLDBChatRepository implements ChatRepository {
// if the PrimaryTable is available, then use it
if( this.repository.getBlockRepository().getBlockchainHeight() > BlockChain.getInstance().getMultipleNamesPerAccountHeight()) {
LOGGER.debug("using PrimaryNames for chat transactions");
tableName = "PrimaryNames";
}
else {
LOGGER.debug("using Names for chat transactions");
tableName = "Names";
}
@@ -218,11 +214,9 @@ public class HSQLDBChatRepository implements ChatRepository {
// if the PrimaryTable is available, then use it
if( this.repository.getBlockRepository().getBlockchainHeight() > BlockChain.getInstance().getMultipleNamesPerAccountHeight()) {
LOGGER.debug("using PrimaryNames for chat transactions");
tableName = "PrimaryNames";
}
else {
LOGGER.debug("using Names for chat transactions");
tableName = "Names";
}
@@ -322,11 +316,9 @@ public class HSQLDBChatRepository implements ChatRepository {
// if the PrimaryTable is available, then use it
if( this.repository.getBlockRepository().getBlockchainHeight() > BlockChain.getInstance().getMultipleNamesPerAccountHeight()) {
LOGGER.debug("using PrimaryNames for chat transactions");
tableName = "PrimaryNames";
}
else {
LOGGER.debug("using Names for chat transactions");
tableName = "Names";
}

View File

@@ -213,7 +213,7 @@ public class Settings {
public long recoveryModeTimeout = 9999999999999L;
/** Minimum peer version number required in order to sync with them */
private String minPeerVersion = "4.6.5";
private String minPeerVersion = "5.0.0";
/** Whether to allow connections with peers below minPeerVersion
* If true, we won't sync with them but they can still sync with us, and will show in the peers list
* If false, sync will be blocked both ways, and they will not appear in the peers list */

View File

@@ -212,7 +212,9 @@ public class BootstrapTests extends Common {
@Test
public void testBootstrapHosts() throws IOException {
String[] bootstrapHosts = Settings.getInstance().getBootstrapHosts();
String[] bootstrapTypes = { "archive" }; // , "toponly"
String[] bootstrapTypes = { "archive" }; // , "toponly", "full"
boolean invalidFile = false;
boolean invalidDate = false;
for (String host : bootstrapHosts) {
for (String type : bootstrapTypes) {
@@ -230,14 +232,20 @@ public class BootstrapTests extends Common {
// Ensure the bootstrap exists and has a size greated than 100MiB
System.out.println(String.format("%s %s size is %d bytes", host, type, fileSize));
assertTrue("Bootstrap size must be at least 100MiB", fileSize > 100*1024*1024L);
if(fileSize < 100*1024*1024L)
invalidFile = true;
//assertTrue("Bootstrap size must be at least 100MiB", fileSize > 100*1024*1024L);
// Ensure the bootstrap has been published recently (in the last 3 days)
long minimumLastMofifiedTimestamp = NTP.getTime() - (3 * 24 * 60 * 60 * 1000L);
System.out.println(String.format("%s %s last modified timestamp is %d", host, type, lastModified));
assertTrue("Bootstrap last modified date must be in the last 3 days", lastModified > minimumLastMofifiedTimestamp);
if(lastModified < minimumLastMofifiedTimestamp)
invalidDate = true;
//assertTrue("Bootstrap last modified date must be in the last 3 days", lastModified > minimumLastMofifiedTimestamp);
}
}
assertFalse("File size must be at least 100MiB", invalidFile);
assertFalse("Bootstrap last modified date must be in the last 3 days",invalidDate);
}
private void deleteBootstraps() throws IOException {

View File

@@ -304,7 +304,7 @@ public class CryptoTests extends Common {
@Test
public void testAESFileEncryption() throws NoSuchAlgorithmException, IOException, IllegalBlockSizeException,
InvalidKeyException, BadPaddingException, InvalidAlgorithmParameterException, NoSuchPaddingException {
InvalidKeyException, BadPaddingException, InvalidAlgorithmParameterException, NoSuchPaddingException, InterruptedException {
// Create temporary directory and file paths
java.nio.file.Path tempDir = Files.createTempDirectory("qortal-tests");
@@ -320,6 +320,7 @@ public class CryptoTests extends Common {
// Write it to the input file
FileOutputStream outputStream = new FileOutputStream(inputFilePath);
outputStream.write(randomBytes);
outputStream.close();
// Make sure only the input file exists
assertTrue(Files.exists(Paths.get(inputFilePath)));

View File

@@ -1,15 +1,22 @@
package org.qortal.test;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.qortal.gui.SplashFrame;
import org.qortal.gui.SysTray;
import org.qortal.repository.DataException;
import org.qortal.test.common.Common;
import java.awt.TrayIcon.MessageType;
@Ignore
public class GuiTests {
@Before
public void beforeTest() throws DataException {
Common.useDefaultSettings();
}
@Test
public void testSplashFrame() throws InterruptedException {
SplashFrame splashFrame = SplashFrame.getInstance();

View File

@@ -10,7 +10,7 @@ import java.util.Random;
import static org.junit.Assert.*;
@Ignore
@Ignore (value="Tests Work Fine - VERY Long Run time (1hr+)")
public class MemoryPoWTests {
private static final int workBufferLength = 8 * 1024 * 1024;
@@ -26,16 +26,16 @@ public class MemoryPoWTests {
long startTime = System.currentTimeMillis();
int nonce = MemoryPoW.compute2(data, workBufferLength, difficulty);
Integer nonce = MemoryPoW.compute2(data, workBufferLength, difficulty);
long finishTime = System.currentTimeMillis();
assertNotNull(nonce);
System.out.println(String.format("Memory-hard PoW (buffer size: %dKB, leading zeros: %d) took %dms, nonce: %d", workBufferLength / 1024,
System.out.printf("Memory-hard PoW (buffer size: %dKB, leading zeros: %d) took %dms, nonce: %d%n", workBufferLength / 1024,
difficulty,
finishTime - startTime,
nonce));
nonce);
assertTrue(MemoryPoW.verify2(data, workBufferLength, difficulty, nonce));
}
@@ -73,12 +73,12 @@ public class MemoryPoWTests {
double stddev = (double) Math.sqrt( (sampleSize * timesS2 - timesS1 * timesS1) / stddevDivisor );
System.out.println(String.format("Difficulty: %d, %d timings, mean: %d ms, stddev: %.2f ms, max nonce: %d",
System.out.printf("Difficulty: %d, %d timings, mean: %d ms, stddev: %.2f ms, max nonce: %d%n",
difficulty,
sampleSize,
timesS1 / sampleSize,
stddev,
maxNonce));
maxNonce);
}
}
@@ -97,7 +97,7 @@ public class MemoryPoWTests {
expectedNonce = 11032;
nonce = MemoryPoW.compute2(data, workBufferLength, difficulty);
System.out.println(String.format("Difficulty %d, nonce: %d", difficulty, nonce));
System.out.printf("Difficulty %d, nonce: %d%n", difficulty, nonce);
assertEquals(expectedNonce, nonce);
}

View File

@@ -1,82 +0,0 @@
package org.qortal.test;
import org.junit.Before;
import org.junit.Test;
import org.qortal.account.Account;
import org.qortal.account.PrivateKeyAccount;
import org.qortal.block.Block;
import org.qortal.controller.BlockMinter;
import org.qortal.data.transaction.PaymentTransactionData;
import org.qortal.data.transaction.TransactionData;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.settings.Settings;
import org.qortal.test.common.BlockUtils;
import org.qortal.test.common.Common;
import org.qortal.test.common.TransactionUtils;
import org.qortal.test.common.transaction.TestTransaction;
import org.qortal.utils.NTP;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Tests that the blocks-minted-penalty revert applied at a fixed block height
 * is idempotent across a re-org: orphaning the trigger block and re-minting it
 * must leave the penalty at its original value rather than applying it twice.
 */
public class PenaltyFixTests extends Common {

    @Before
    public void beforeTest() throws DataException {
        // Settings file carries the penalty-fix feature-trigger heights used below.
        Common.useSettings("test-settings-v2-penalty-fix.json");
        NTP.setFixedOffset(Settings.getInstance().getTestNtpOffset());
    }

    @Test
    public void testSingleSponsor() throws DataException {
        try (final Repository repository = RepositoryManager.getRepository()) {
            // Alice self share online, and will be used to mint the blocks
            PrivateKeyAccount aliceSelfShare = Common.getTestAccount(repository, "alice-reward-share");
            List<PrivateKeyAccount> onlineAccounts = new ArrayList<>();
            onlineAccounts.add(aliceSelfShare);
            PrivateKeyAccount bobAccount = Common.getTestAccount(repository, "bob");
            // Test account from real penalty data (pen-revert.json)
            Account penaltyAccount = new Account(repository, "QLcAQpko5egwNjifueCAeAsT8CAj2Sr5qJ");
            // Bob sends a payment to the penalty account, so that it gets a row in the Accounts table
            TransactionData paymentData = new PaymentTransactionData(TestTransaction.generateBase(bobAccount), penaltyAccount.getAddress(), 1);
            TransactionUtils.signAndImportValid(repository, paymentData, bobAccount); // updates paymentData's signature
            // Mint blocks up to height 4
            Block block = null;
            for (int i = 2; i <= 4; i++)
                block = BlockMinter.mintTestingBlock(repository, onlineAccounts.toArray(new PrivateKeyAccount[0]));
            assertEquals(4, (int)block.getBlockData().getHeight());
            // Check blocks minted penalty of penalty account
            assertEquals(0, (int) penaltyAccount.getBlocksMintedPenalty());
            // Penalty revert code runs at block 5
            block = BlockMinter.mintTestingBlock(repository, onlineAccounts.toArray(new PrivateKeyAccount[0]));
            assertEquals(5, (int)block.getBlockData().getHeight());
            // +5000000 blocks minted penalty should be applied
            assertEquals(5000000, (int) penaltyAccount.getBlocksMintedPenalty());
            // Orphan the last block, to simulate a re-org
            BlockUtils.orphanLastBlock(repository);
            assertEquals(0, (int) penaltyAccount.getBlocksMintedPenalty());
            // Penalty revert code runs again
            block = BlockMinter.mintTestingBlock(repository, onlineAccounts.toArray(new PrivateKeyAccount[0]));
            assertEquals(5, (int)block.getBlockData().getHeight());
            // Penalty should still be 5000000, rather than doubled up to 10000000
            assertEquals(5000000, (int) penaltyAccount.getBlocksMintedPenalty());
        }
    }
}

View File

@@ -1,9 +1,8 @@
package org.qortal.test.crosschain;
import com.google.common.collect.ImmutableList;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.crypto.ChildNumber;
import org.bitcoinj.crypto.DeterministicKey;
import org.bitcoinj.crypto.HDPath;
import org.bitcoinj.script.Script;
import org.bitcoinj.wallet.DeterministicKeyChain;
import org.bitcoinj.wallet.DeterministicSeed;
@@ -33,7 +32,7 @@ public class BitcoinyTestsUtils {
final Wallet wallet = Wallet.createDeterministic(networkParameters, Script.ScriptType.P2PKH);
final DeterministicSeed seed = wallet.getKeyChainSeed();
final DeterministicKeyChain keyChain = DeterministicKeyChain.builder().seed(seed).build();
final ImmutableList<ChildNumber> path = keyChain.getAccountPath();
final HDPath path = keyChain.getAccountPath();
final DeterministicKey parent = keyChain.getKeyByPath(path, true);
final String rootKey = parent.serializePrivB58(networkParameters);

View File

@@ -0,0 +1,130 @@
# Qortal Auto-Update Publisher Scripts
This toolkit modernizes and automates the Qortal auto-update process. It includes:
- A Bash script (`build-auto-update.sh`) to build and push the update
- A Python script (`publish-auto-update.py`) to publish the auto-update transaction
- Full support for dry-run mode, interactive or scripted use, and secure key input
---
## 🧰 Prerequisites
- You must be a **non-admin member** of the Qortal `dev` group
- A Qortal core node must be running locally (default API port: `12391`)
- You need the latest version of the `qortal` repo cloned locally
---
## 🚀 Workflow Overview
### 1. Run the Build Script
This script:
- Auto-increments the version in `pom.xml`
- Rebuilds the JAR file
- XORs it into a `.update` file
- Creates a new `auto-update-<hash>` branch with only the update
- Pushes it to the repo
```bash
./tools/auto-update-scripts/build-auto-update.sh
```
You'll be prompted to:
- Confirm or modify the version number
- Confirm pushing the version tag, the update branch, and the final commit
- Optionally run the publisher script at the end
> ✅ Dry-run mode is supported to preview the full process.
---
### 2. Publish the Auto-Update
You can either:
- Let the build script call it for you
- Or run it manually:
```bash
# Run manually with interactive key prompt and auto-detected latest update:
python3 tools/auto-update-scripts/publish-auto-update.py
# Or specify a commit hash:
python3 tools/auto-update-scripts/publish-auto-update.py 0b37666d
# Or pass both from another script:
python3 tools/auto-update-scripts/publish-auto-update.py <privkey> <commit_hash>
```
> 🔐 Private key is always prompted securely unless passed explicitly (e.g. from automation).
This script will:
- Detect the latest `auto-update-<hash>` branch (or use the one you specify)
- Validate that the commit exists
- Restore the `.update` file if missing
- Compute its SHA256 hash
- Build and sign the transaction
- Submit it to your local node
> ✅ `--dry-run` is supported to show what would happen without sending anything.
---
## 🛠 Advanced Options
- Log files are created in `~/qortal-auto-update-logs` by default
- You can override the log directory interactively
- Branch naming is standardized: `auto-update-<short-commit-hash>`
- The `.update` file is XOR-obfuscated using Qortal's built-in logic
- Your commit must already exist on the main repo (e.g. via push or PR merge)
---
## 📌 Notes
- **Do not use Git LFS** — Qortal nodes download `.update` files using raw HTTP from GitHub
We may build LFS support in the future, but for now it is NOT utilized, and will NOT work.
(Other locations for the publish of the .update file will be utilized in the future,
preferably utilizing QDN via gateway nodes, until auto-update setup can be re-written to
leverage QDN directly.)
- GitHub will warn if `.update` files exceed 50MB, but auto-update still works.
  (In the past, accounts have occasionally been banned for publishing `.update` files;
  as of April 2025, GitHub appears to only warn rather than ban. We plan to remove
  the need for this publishing method in the future anyway.)
- Update mirrors will be added in the future, and others can be added in settings as well.
---
## ✅ Example End-to-End (Manual)
```bash
cd ~/git-repos/qortal
./tools/auto-update-scripts/build-auto-update.sh
# follow prompts...
# then manually publish:
python3 tools/auto-update-scripts/publish-auto-update.py
```
---
## 🧪 Test Without Sending
```bash
./build-auto-update.sh # enable dry-run when prompted
# OR
python3 publish-auto-update.py 0b37666d --dry-run
```
---
## 🙌 Contributors
Modernization by [@crowetic](https://github.com/crowetic)
Based on original Perl scripts by Qortal core devs, specifically @catbref.
---
Questions or issues? Drop into the Qortal Dev group on Discord, Q-Chat, or reach out directly via Q-Mail to 'crowetic'.

View File

@@ -0,0 +1,264 @@
#!/usr/bin/env bash
# Build-and-push helper for Qortal auto-updates: bumps the pom.xml version,
# builds the JAR, XORs it into a .update file, and pushes an auto-update branch.
set -euo pipefail
# === Configurable Defaults ===
BASE_BRANCH="master"   # branch the release is expected to be cut from
DEFAULT_LOG_DIR="${HOME}/qortal-auto-update-logs"
LOG_FILE=""            # assigned once the log directory has been chosen
DRY_RUN=false          # when true, commands are printed but not executed
RUN_PUBLISH=false      # when true, chain into the Python publisher at the end
PUBLISH_SCRIPT="tools/auto-update-scripts/publish-auto-update.py"
# === Helper Functions ===
# Print an error message to stderr and terminate the script with status 1.
abort() {
    echo -e "\nERROR: $1" >&2
    exit 1
}
# Show a prompt message, then ask the operator to continue; anything
# other than "y"/"Y" aborts the whole script.
confirm_or_exit() {
    echo "$1"
    read -rp "Continue? (y/N): " confirm
    [[ "${confirm}" =~ ^[Yy]$ ]] || exit 1
}
function run_git() {
    # Echo the git command to the terminal and the log file, then execute it
    # unless DRY_RUN is true (dry-run only prints the command).
    echo "Running: git $*" | tee -a "$LOG_FILE"
    $DRY_RUN || git "$@"
}
# increment_version X.Y.Z -> echoes X.Y.(Z+1)
function increment_version() {
    local version=$1
    local major minor patch
    IFS='.' read -r major minor patch <<< "$version"
    # Use a plain arithmetic assignment instead of ((patch++)): the (( ))
    # command returns a non-zero status when the expression evaluates to 0
    # (post-increment yields the OLD value, so e.g. "5.1.0" -> patch=0),
    # which under `set -e` kills the $(...) subshell before the echo and
    # leaves the caller with an empty version string.
    patch=$((patch + 1))
    echo "$major.$minor.$patch"
}
# === Prompt for Logging Directory ===
echo "Default log directory: ${DEFAULT_LOG_DIR}"
read -rp "Use this log directory? (Y/n): " log_choice
if [[ "${log_choice}" =~ ^[Nn]$ ]]; then
    read -rp "Enter desired log directory path: " CUSTOM_LOG_DIR
    LOG_DIR="${CUSTOM_LOG_DIR}"
else
    LOG_DIR="${DEFAULT_LOG_DIR}"
fi
mkdir -p "${LOG_DIR}" || abort "Unable to create log directory: ${LOG_DIR}"
LOG_FILE="${LOG_DIR}/qortal-auto-update-log-$(date +%Y%m%d-%H%M%S).log"
echo "Logging to: ${LOG_FILE}"
# Log everything to file as well as terminal
exec > >(tee -a "$LOG_FILE") 2>&1
# === Dry Run Mode Option ===
read -rp "Enable dry-run mode? (y/N): " dry_choice
if [[ "${dry_choice}" =~ ^[Yy]$ ]]; then
    DRY_RUN=true
    echo "Dry-run mode ENABLED. Commands will be shown but not executed."
else
    echo "Dry-run mode DISABLED. Real commands will be executed."
fi
# === Run Python Publisher Option ===
read -rp "Run the Python publish_auto_update script at the end? (y/N): " pub_choice
if [[ "${pub_choice}" =~ ^[Yy]$ ]]; then
    RUN_PUBLISH=true
    read -rp "Run Python script in dry-run mode? (y/N): " pub_dry
    # PUBLISH_DRY_FLAG is only referenced later inside `if $RUN_PUBLISH`,
    # so it is safe to leave it unset on the else-path below.
    if [[ "${pub_dry}" =~ ^[Yy]$ ]]; then
        PUBLISH_DRY_FLAG="--dry-run"
    else
        PUBLISH_DRY_FLAG=""
    fi
else
    RUN_PUBLISH=false
fi
# === Detect Git Root ===
git_dir=$(git rev-parse --show-toplevel 2>/dev/null || true)
[[ -z "${git_dir}" ]] && abort "Not inside a git repository."
cd "${git_dir}"
echo
echo "Current Git identity:"
git config user.name || echo "(not set)"
git config user.email || echo "(not set)"
read -rp "Would you like to set/override the Git username and email for this repo? (y/N): " git_id_choice
if [[ "${git_id_choice}" =~ ^[Yy]$ ]]; then
    read -rp "Enter Git username (e.g. Qortal-Auto-Update): " git_user
    read -rp "Enter Git email (e.g. qortal-auto-update@example.com): " git_email
    run_git config user.name "${git_user}"
    run_git config user.email "${git_email}"
    echo "Git identity set to: ${git_user} <${git_email}>"
fi
# === Confirm Git Origin URL ===
git_origin=$(git config --get remote.origin.url)
echo "Git origin URL: ${git_origin}"
confirm_or_exit "Is this the correct repository?"
# === Verify Current Branch ===
current_branch=$(git rev-parse --abbrev-ref HEAD)
echo "Current git branch: ${current_branch}"
if [[ "${current_branch}" != "${BASE_BRANCH}" ]]; then
    echo "Expected to be on '${BASE_BRANCH}' branch, but found '${current_branch}'"
    # NOTE(review): confirm_or_exit already blocks on input, so the 5-second
    # sleep runs only after confirmation — the message wording is misleading.
    confirm_or_exit "Proceed anyway in 5 seconds or abort with CTRL+C."
    sleep 5
fi
# === Check for Uncommitted Changes ===
uncommitted=$(git status --short --untracked-files=no)
if [[ -n "${uncommitted}" ]]; then
    echo "Uncommitted changes detected:"
    echo "${uncommitted}"
    abort "Please commit or stash changes first."
fi
project=$(grep -oPm1 "(?<=<artifactId>)[^<]+" pom.xml)
[[ -z "${project}" ]] && abort "Unable to determine project name from pom.xml."
echo "Detected project: ${project}"
# === Auto-Increment Version in pom.xml ===
# Takes the FIRST <version> tag in pom.xml — assumes the project version
# appears before any dependency versions (true for this pom layout).
current_version=$(grep -oPm1 "(?<=<version>)[^<]+" pom.xml)
new_version=$(increment_version "$current_version")
$DRY_RUN || sed -i "s|<version>${current_version}</version>|<version>${new_version}</version>|" pom.xml
echo "Updated version from ${current_version} to ${new_version} in pom.xml"
git diff pom.xml
# Loop until the operator confirms the version; each correction is
# validated (x.y.z format, strictly greater than current_version).
while true; do
    read -rp "Is the updated version correct? (y/N): " version_ok
    if [[ "${version_ok}" =~ ^[Yy]$ ]]; then
        break
    fi
    read -rp "Enter the correct version number (e.g., 4.7.2): " user_version
    # Validate format x.y.z and version > current_version
    if [[ ! "${user_version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        echo "Invalid format. Use x.y.z (e.g., 4.7.2)."
        continue
    fi
    IFS='.' read -r curr_major curr_minor curr_patch <<< "${current_version}"
    IFS='.' read -r new_major new_minor new_patch <<< "${user_version}"
    if (( new_major < curr_major )) || \
       (( new_major == curr_major && new_minor < curr_minor )) || \
       (( new_major == curr_major && new_minor == curr_minor && new_patch <= curr_patch )); then
        echo "Version must be greater than current version (${current_version})."
        continue
    fi
    $DRY_RUN || sed -i "s|<version>${new_version}</version>|<version>${user_version}</version>|" pom.xml
    echo "Updated version to user-provided version: ${user_version}"
    git diff pom.xml
    new_version="${user_version}"
    echo
    echo "Rechecking updated version..."
done
run_git add pom.xml
run_git commit -m "Bump version to ${new_version}"
run_git tag "v${new_version}"
confirm_or_exit "About to push version tag 'v${new_version}' to origin."
run_git push origin "v${new_version}"
confirm_or_exit "Also push the ${current_branch} branch to origin?"
run_git push origin "${current_branch}"
# === Extract Info ===
short_hash=$(git rev-parse --short HEAD)
[[ -z "${short_hash}" ]] && abort "Unable to extract commit hash."
echo "Using commit hash: ${short_hash}"
# === Build JAR ===
echo "Building JAR for ${project}..."
if ! $DRY_RUN; then
    mvn clean package > /dev/null 2>&1 || {
        echo "Build failed. Check logs in ${LOG_FILE}" >&2
        abort "Maven build failed."
    }
fi
# NOTE(review): in dry-run mode no build ran, so this glob may match a
# stale jar from an earlier build, or nothing (which then aborts).
jar_file=$(ls target/${project}*.jar | head -n1)
[[ ! -f "${jar_file}" ]] && abort "Built JAR file not found."
# === XOR Obfuscation ===
echo "Creating ${project}.update..."
$DRY_RUN || java -cp "${jar_file}" org.qortal.XorUpdate "${jar_file}" "${project}.update"
# === Create Auto-Update Branch ===
update_branch="auto-update-${short_hash}"
echo "Creating update branch: ${update_branch}"
# Delete any stale local branch of the same name before re-creating it.
if git show-ref --verify --quiet refs/heads/${update_branch}; then
    run_git branch -D "${update_branch}"
fi
# Orphan branch contains ONLY the .update file (no project history).
run_git checkout --orphan "${update_branch}"
$DRY_RUN || git rm -rf . > /dev/null 2>&1 || true
run_git add "${project}.update"
run_git commit -m "XORed auto-update JAR for commit ${short_hash}"
confirm_or_exit "About to push auto-update branch '${update_branch}' to origin."
run_git push --set-upstream origin "${update_branch}"
# === Return to Original Branch ===
echo "Switching back to original branch: ${current_branch}"
run_git checkout --force "${current_branch}"
echo "Done. ${project}.update is committed to ${update_branch}."
# === Summary Output ===
echo
echo "======================================"
echo "✅ Auto-Update Build Complete!"
echo "--------------------------------------"
echo "Project: ${project}"
echo "Version: ${new_version}"
echo "Tag: v${new_version}"
echo "Commit Hash: ${short_hash}"
echo "Auto-Update Branch: auto-update-${short_hash}"
echo
echo "Pushed to: ${git_origin}"
echo "Logs saved to: ${LOG_FILE}"
echo "======================================"
echo
# === Provide additional information regarding publish script, and private key. ===
if $RUN_PUBLISH; then
    echo "...===...===...===...===...===..."
    echo
    echo "CONTINUING TO EXECUTE PUBLISH SCRIPT AS SELECTED"
    echo
    echo "This will publish the AUTO-UPDATE TRANSACTION for signing by the DEVELOPER GROUP ADMINS"
    echo
    echo "NOTICE: For security, when prompted for PRIVATE KEY, you will NOT see the input, SIMPLY PASTE/TYPE KEY AND PUSH ENTER."
    echo
    echo "...===...===...===...===...===..."
fi
# === Optionally Run Python Publisher ===
if $RUN_PUBLISH; then
    echo "Running Python publish_auto_update script..."
    if [[ -f "${PUBLISH_SCRIPT}" ]]; then
        # Key is read with -s (silent) so it never echoes to the terminal;
        # note it is still passed to the Python script as an argv argument.
        read -rsp "Enter your Base58 private key: " PRIVATE_KEY
        echo
        if [[ "${PUBLISH_DRY_FLAG}" == "--dry-run" ]]; then
            echo "Dry-run mode active for Python script."
            python3 "${PUBLISH_SCRIPT}" "${PRIVATE_KEY}" "${short_hash}" --dry-run
        else
            echo "Publishing auto-update for real..."
            python3 "${PUBLISH_SCRIPT}" "${PRIVATE_KEY}" "${short_hash}"
        fi
    else
        echo "WARNING: Python script not found at ${PUBLISH_SCRIPT}. Skipping."
    fi
fi

View File

@@ -0,0 +1,238 @@
#!/bin/bash
# Release packaging helper: collects qortal.jar/.exe, applies the commit
# timestamp, builds qortal.zip and generates hash-annotated release notes
# for the given version.
# Check if version argument is passed
if [ -z "$1" ]; then
    echo "Usage: $0 <VERSION>"
    exit 1
fi
VERSION="$1"
# Repository and branch information
REPO="Qortal/qortal"
BRANCH="master"
WORKING_QORTAL_DIR='./qortal'
# 1. Check if working directory exists
if [ ! -d "$WORKING_QORTAL_DIR" ]; then
    echo "Error: Working directory '$WORKING_QORTAL_DIR' not found."
    read -p "Would you like to: (1) Create a new directory here, or (2) Specify a full path? [1/2]: " choice
    if [ "$choice" = "1" ]; then
        mkdir -p "$WORKING_QORTAL_DIR"
        echo "Created new directory: $WORKING_QORTAL_DIR"
    elif [ "$choice" = "2" ]; then
        read -p "Enter full path to working directory: " new_path
        WORKING_QORTAL_DIR="$new_path"
        echo "Using specified directory: $WORKING_QORTAL_DIR"
    else
        echo "Invalid choice. Exiting."
        exit 1
    fi
fi
# 2. Check for qortal.jar
JAR_FILE="$WORKING_QORTAL_DIR/qortal.jar"
if [ ! -f "$JAR_FILE" ]; then
    echo "Error: $JAR_FILE not found."
    read -p "Would you like to: (1) Compile from source, (2) Use running qortal.jar, or (3) Specify a path? [1/2/3]: " choice
    if [ "$choice" = "1" ]; then
        echo "Cloning repo and compiling..."
        git clone https://github.com/Qortal/qortal.git /tmp/qortal
        if ! command -v mvn &> /dev/null; then
            echo "Error: Maven not found. Please install Maven and try again."
            exit 1
        fi
        cd /tmp/qortal || exit
        mvn clean package
        cp target/qortal-*.jar "$WORKING_QORTAL_DIR/qortal.jar"
        cd - || exit
    elif [ "$choice" = "2" ]; then
        # "Running" jar means the operator's installed node at ~/qortal.
        if [ -f "$HOME/qortal/qortal.jar" ]; then
            cp "$HOME/qortal/qortal.jar" "$WORKING_QORTAL_DIR/"
            echo "Copied from $HOME/qortal/qortal.jar"
        else
            echo "Error: $HOME/qortal/qortal.jar not found."
            exit 1
        fi
    elif [ "$choice" = "3" ]; then
        read -p "Enter full path to qortal.jar: " jar_path
        cp "$jar_path" "$WORKING_QORTAL_DIR/"
        echo "Used specified path: $jar_path"
    else
        echo "Invalid choice. Exiting."
        exit 1
    fi
fi
# 3. Check for required files (settings.json, log4j2.properties, etc.)
REQUIRED_FILES=("settings.json" "log4j2.properties" "start.sh" "stop.sh" "qort")
for file in "${REQUIRED_FILES[@]}"; do
    if [ ! -f "$WORKING_QORTAL_DIR/$file" ]; then
        echo "Error: $WORKING_QORTAL_DIR/$file not found."
        read -p "Would you like to: (1) Get files from GitHub (2) exit and copy files manually then re-run? [1/2]: " choice
        if [ "$choice" = "1" ]; then
            if [ "$file" = "settings.json" ]; then
                # settings.json is generated inline rather than downloaded.
                cat <<EOF > "$WORKING_QORTAL_DIR/settings.json"
{
"balanceRecorderEnabled": true,
"apiWhitelistEnabled": false,
"allowConnectionsWithOlderPeerVersions": false,
"apiRestricted": false
}
EOF
            elif [ "${file}" = "qort" ]; then
                # `qort` lives under tools/ in the repo, unlike the other files.
                echo "Downloading from GitHub..."
                curl -s "https://raw.githubusercontent.com/Qortal/qortal/refs/heads/$BRANCH/tools/$file" -o "$WORKING_QORTAL_DIR/$file"
                echo "Making $file script executable..."
                chmod +x "$WORKING_QORTAL_DIR/$file"
            elif [ "${file}" = "start.sh" ]; then
                echo "Downloading from GitHub..."
                curl -s "https://raw.githubusercontent.com/Qortal/qortal/refs/heads/$BRANCH/$file" -o "$WORKING_QORTAL_DIR/$file"
                echo "Making $file script executable..."
                chmod +x "$WORKING_QORTAL_DIR/$file"
            elif [ "${file}" = "stop.sh" ]; then
                echo "Downloading from GitHub..."
                curl -s "https://raw.githubusercontent.com/Qortal/qortal/refs/heads/$BRANCH/$file" -o "$WORKING_QORTAL_DIR/$file"
                echo "Making $file script executable..."
                chmod +x "$WORKING_QORTAL_DIR/$file"
            else
                echo "Downloading from GitHub..."
                curl -s "https://raw.githubusercontent.com/Qortal/qortal/refs/heads/$BRANCH/$file" -o "$WORKING_QORTAL_DIR/$file"
            fi
        elif [ "$choice" = "2" ]; then
            echo "copy files manually to this location then re-run script..."
            sleep 5
            exit 1
        else
            echo "Invalid choice. Exiting."
            exit 1
        fi
    fi
done
# Continue with the rest of the script...
# (The rest of the script remains unchanged)
# Fetch the latest 100 commits
COMMITS_JSON=$(curl -s "https://api.github.com/repos/${REPO}/commits?sha=${BRANCH}&per_page=100")
# Extract bump version commits
BUMP_COMMITS=$(echo "$COMMITS_JSON" | jq -r '.[] | select(.commit.message | test("bump version to"; "i")) | .sha')
CURRENT_BUMP_COMMIT=$(echo "$COMMITS_JSON" | jq -r ".[] | select(.commit.message | test(\"bump version to ${VERSION}\"; \"i\")) | .sha" | head -n1)
# NOTE(review): assumes the previous release's bump commit is the second
# newest bump commit within the last 100 commits — verify for old releases.
PREV_BUMP_COMMIT=$(echo "$BUMP_COMMITS" | sed -n '2p')
if [ -z "$CURRENT_BUMP_COMMIT" ]; then
    echo "Error: Could not find bump commit for version ${VERSION} in ${REPO}/${BRANCH}"
    exit 1
fi
# Get changelog between previous and current commit
echo "Generating changelog between ${PREV_BUMP_COMMIT} and ${CURRENT_BUMP_COMMIT}..."
CHANGELOG=$(curl -s "https://api.github.com/repos/${REPO}/compare/${PREV_BUMP_COMMIT}...${CURRENT_BUMP_COMMIT}" | jq -r '.commits[] | "- " + .sha[0:7] + " " + .commit.message')
# Fetch latest commit timestamp from GitHub API for final file timestamping
COMMIT_API_URL="https://api.github.com/repos/${REPO}/commits?sha=${BRANCH}&per_page=1"
COMMIT_TIMESTAMP=$(curl -s "${COMMIT_API_URL}" | jq -r '.[0].commit.committer.date')
if [ -z "${COMMIT_TIMESTAMP}" ] || [ "${COMMIT_TIMESTAMP}" == "null" ]; then
    echo "Error: Unable to retrieve the latest commit timestamp from GitHub API."
    exit 1
fi
# Define file names
JAR_FILE="qortal/qortal.jar"
EXE_FILE="qortal.exe"
ZIP_FILE="qortal.zip"
# Compute the MD5 / SHA1 / SHA256 digests of a file, announce them, and
# leave the results in the global variables MD5, SHA1 and SHA256 so that
# callers can copy them into per-artifact variables.
calculate_hashes() {
    local target="$1"
    echo "Calculating hashes for ${target}..."
    MD5=$(md5sum "${target}" | awk '{print $1}')
    SHA1=$(sha1sum "${target}" | awk '{print $1}')
    SHA256=$(sha256sum "${target}" | awk '{print $1}')
    echo "MD5: ${MD5}, SHA1: ${SHA1}, SHA256: ${SHA256}"
}
# Hashes for qortal.jar — required; abort if the jar is missing.
if [ -f "${JAR_FILE}" ]; then
    calculate_hashes "${JAR_FILE}"
    JAR_MD5=${MD5}
    JAR_SHA1=${SHA1}
    JAR_SHA256=${SHA256}
else
    echo "Error: ${JAR_FILE} not found."
    exit 1
fi
# Hashes for qortal.exe — optional; placeholders are used when absent.
if [ -f "${EXE_FILE}" ]; then
    calculate_hashes "${EXE_FILE}"
    EXE_MD5=${MD5}
    EXE_SHA1=${SHA1}
    EXE_SHA256=${SHA256}
else
    echo "Warning: ${EXE_FILE} not found. Skipping."
    EXE_MD5="<INPUT>"
    EXE_SHA1="<INPUT>"
    EXE_SHA256="<INPUT>"
fi
# Apply commit timestamp to files in qortal/
# qortal.exe is moved in temporarily so it gets the same timestamp,
# then moved back out (it is shipped separately, not inside the zip).
echo "Applying commit timestamp (${COMMIT_TIMESTAMP}) to files..."
mv qortal.exe ${WORKING_QORTAL_DIR} 2>/dev/null || true
find ${WORKING_QORTAL_DIR} -type f -exec touch -d "${COMMIT_TIMESTAMP}" {} \;
mv ${WORKING_QORTAL_DIR}/qortal.exe . 2>/dev/null || true
# Create qortal.zip (-stl sets the archive timestamp to the newest file's)
echo "Packing ${ZIP_FILE}..."
7z a -r -tzip "${ZIP_FILE}" ${WORKING_QORTAL_DIR}/ -stl
if [ $? -ne 0 ]; then
    echo "Error: Failed to create ${ZIP_FILE}."
    exit 1
fi
calculate_hashes "${ZIP_FILE}"
ZIP_MD5=${MD5}
ZIP_SHA1=${SHA1}
ZIP_SHA256=${SHA256}
# Generate release notes (heredoc content is emitted verbatim to the file)
cat <<EOF > release-notes.txt
### **_Qortal Core V${VERSION}_**
#### 🔄 Changes Included in This Release:
${CHANGELOG}
### [qortal.jar](https://github.com/Qortal/qortal/releases/download/v${VERSION}/qortal.jar)
\`MD5: ${JAR_MD5}\` qortal.jar
\`SHA1: ${JAR_SHA1}\` qortal.jar
\`SHA256: ${JAR_SHA256}\` qortal.jar
### [qortal.exe](https://github.com/Qortal/qortal/releases/download/v${VERSION}/qortal.exe)
\`MD5: ${EXE_MD5}\` qortal.exe
\`SHA1: ${EXE_SHA1}\` qortal.exe
\`SHA256: ${EXE_SHA256}\` qortal.exe
[VirusTotal report for qortal.exe](https://www.virustotal.com/gui/file/${EXE_SHA256}/detection)
### [qortal.zip](https://github.com/Qortal/qortal/releases/download/v${VERSION}/qortal.zip)
Contains bare minimum of:
* built \`qortal.jar\`
* \`log4j2.properties\` from git repo
* \`start.sh\` from git repo
* \`stop.sh\` from git repo
* \`qort\` script for linux/mac easy API utilization
* \`printf "{\n}\n" > settings.json\`
All timestamps set to same date-time as commit.
Packed with \`7z a -r -tzip qortal.zip qortal/\`
\`MD5: ${ZIP_MD5}\` qortal.zip
\`SHA1: ${ZIP_SHA1}\` qortal.zip
\`SHA256: ${ZIP_SHA256}\` qortal.zip
EOF
echo "Release notes generated: release-notes.txt"
View File

@@ -0,0 +1,234 @@
#!/usr/bin/env python3
import argparse
import subprocess
import requests
import json
import os
import sys
import time
import hashlib
from pathlib import Path
def run(cmd, cwd=None, capture_output=True):
    """Execute *cmd* in a shell and return its stripped stdout.

    On a non-zero exit code, print the command and its stderr, then
    terminate the whole process with exit status 1.
    """
    # NOTE(review): shell=True with interpolated arguments — callers must
    # pass trusted input only (here: local git hashes and fixed commands).
    completed = subprocess.run(cmd, shell=True, cwd=cwd, capture_output=capture_output, text=True)
    if completed.returncode == 0:
        return completed.stdout.strip()
    print(f"Command failed: {cmd}\n{completed.stderr}")
    sys.exit(1)
def get_project_name():
    """Return the <artifactId> value from ./pom.xml, exiting if absent."""
    pom_path = Path('pom.xml')
    if not pom_path.exists():
        sys.exit("pom.xml not found!")
    for raw_line in pom_path.read_text().splitlines():
        if '<artifactId>' not in raw_line:
            continue
        # Take the text between the first '>' and the following '<'.
        return raw_line.strip().split('>')[1].split('<')[0]
    sys.exit("artifactId not found in pom.xml")
def get_commit_info(commit_hash=None, dry_run=False):
    """Resolve (commit_hash, timestamp_ms, update_branch_hash).

    When *commit_hash* is None, the most recently committed
    origin/auto-update-* branch is detected and its short hash used.
    *dry_run* is accepted for interface compatibility but is not read here.
    """
    if not commit_hash:
        print("No commit hash provided, detecting most recent auto-update branch...")
        run("git fetch origin")  # Ensure up-to-date
        # Get latest auto-update branch by commit date
        branches = run("git for-each-ref --sort=-committerdate --format='%(refname:short)' refs/remotes/origin/")
        for branch in branches.splitlines():
            branch = branch.strip().strip("'")
            if branch.startswith("origin/auto-update-"):
                commit_hash = branch.replace("origin/auto-update-", "")
                print(f"Found latest auto-update branch: {branch}")
                break
        if not commit_hash:
            sys.exit("No auto-update branches found.")
    # Validate and get timestamp
    if not commit_exists(commit_hash):
        sys.exit(f"Commit hash '{commit_hash}' does not exist.")
    # git %ct is seconds since epoch; the transaction wants milliseconds.
    timestamp = int(run(f"git show --no-patch --format=%ct {commit_hash}")) * 1000
    # Use the remote branch hash if available
    # (run() raises SystemExit on failure, which is caught here).
    try:
        update_hash = run(f"git rev-parse origin/auto-update-{commit_hash}")
    except SystemExit:
        print(f"⚠️ Warning: remote branch origin/auto-update-{commit_hash} not found, using commit hash itself.")
        update_hash = run(f"git rev-parse {commit_hash}")
    return commit_hash, timestamp, update_hash
def commit_exists(commit_hash):
    """Return True when *commit_hash* resolves to a git object, else False."""
    try:
        # run() terminates via SystemExit when git reports an unknown object.
        run(f"git cat-file -t {commit_hash}")
    except SystemExit:
        return False
    return True
def get_sha256(update_file_path):
    """Return the hex SHA-256 digest of the file at *update_file_path*.

    Reads the file in fixed-size chunks so large .update files (tens of MB)
    are hashed without loading the entire payload into memory at once.
    """
    sha256 = hashlib.sha256()
    with open(update_file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            sha256.update(chunk)
    return sha256.hexdigest()
def get_public_key(base58_privkey, port):
    """Derive the Base58 public key for a Base58 private key via the local node API."""
    r = requests.post(f"http://localhost:{port}/utils/publickey", data=base58_privkey)
    r.raise_for_status()
    return r.text

def get_hex_key(base58_key, port):
    """Convert a Base58-encoded key/signature to hex via the local node API."""
    r = requests.post(f"http://localhost:{port}/utils/frombase58", data=base58_key)
    r.raise_for_status()
    return r.text

def get_address(pubkey, port):
    """Convert a public key to its Qortal address via the local node API."""
    r = requests.get(f"http://localhost:{port}/addresses/convert/{pubkey}")
    r.raise_for_status()
    return r.text

def get_reference(address, port):
    """Fetch the account's last-reference, needed to build a new transaction."""
    r = requests.get(f"http://localhost:{port}/addresses/lastreference/{address}")
    r.raise_for_status()
    return r.text

def to_base58(hex_str, port):
    """Convert a hex string to Base58 via the local node API."""
    r = requests.get(f"http://localhost:{port}/utils/tobase58/{hex_str}")
    r.raise_for_status()
    return r.text
def sign_transaction(privkey, tx_base58, port):
    """Sign Base58 transaction bytes with the private key via the local node API."""
    payload = json.dumps({"privateKey": privkey, "transactionBytes": tx_base58})
    headers = {"Content-Type": "application/json"}
    r = requests.post(f"http://localhost:{port}/transactions/sign", data=payload, headers=headers)
    r.raise_for_status()
    return r.text

def process_transaction(signed_tx, port):
    """Submit a signed transaction; the node answers literal 'true' on acceptance."""
    r = requests.post(f"http://localhost:{port}/transactions/process", data=signed_tx)
    r.raise_for_status()
    return r.text == 'true'

def decode_transaction(signed_tx, port):
    """Decode a signed transaction into its JSON representation for display."""
    r = requests.post(f"http://localhost:{port}/transactions/decode", data=signed_tx, headers={"Content-Type": "application/json"})
    r.raise_for_status()
    return r.text
def main():
    """Build, sign and submit the ARBITRARY auto-update transaction.

    Accepts either (privkey, commit_hash), a single commit hash, or no
    positional arguments (the latest auto-update branch is auto-detected);
    the private key is prompted for securely whenever it is not supplied.
    """
    import getpass
    parser = argparse.ArgumentParser(description="Modern auto-update publisher for Qortal")
    parser.add_argument("arg1", nargs="?", help="Private key OR commit hash")
    parser.add_argument("arg2", nargs="?", help="Commit hash if arg1 was private key")
    parser.add_argument("--port", type=int, default=12391, help="API port")
    parser.add_argument("--dry-run", action="store_true", help="Simulate without submitting transaction")
    args = parser.parse_args()
    # Handle argument combinations
    if args.arg1 and args.arg2:
        privkey = args.arg1
        commit_hash = args.arg2
    elif args.arg1 and not args.arg2:
        commit_hash = args.arg1
        privkey = getpass.getpass("Enter your Base58 private key: ")
    else:
        commit_hash = None  # Will auto-resolve to the latest auto-update branch
        privkey = getpass.getpass("Enter your Base58 private key: ")
    # Switch to repo root
    git_root = run("git rev-parse --show-toplevel")
    os.chdir(git_root)
    project = get_project_name()
    # Resolve and verify commit
    commit_hash, timestamp, update_hash = get_commit_info(commit_hash, args.dry_run)
    if not commit_exists(commit_hash):
        sys.exit(f"Commit hash '{commit_hash}' does not exist in this repo.")
    print(f"Commit: {commit_hash}, Timestamp: {timestamp}, Auto-update hash: {update_hash}")
    # (A duplicate nested definition of get_sha256 previously shadowed the
    # module-level helper here; it was dead weight and has been removed.)
    update_file = Path(f"{project}.update")
    if not update_file.exists():
        print(f"{project}.update not found locally. Attempting to restore from branch auto-update-{commit_hash}...")
        # NOTE(review): run() exits via SystemExit (not Exception) on git
        # failure, so the except below only catches non-run() errors.
        try:
            restore_cmd = f"git show auto-update-{commit_hash}:{project}.update > {project}.update"
            run(restore_cmd)
            print(f"✓ Restored {project}.update from branch auto-update-{commit_hash}")
        except Exception as e:
            sys.exit(f"Failed to restore {project}.update: {e}")
        # Final check to ensure the file was restored
        if not update_file.exists():
            sys.exit(f"{project}.update still not found after attempted restore")
    sha256 = get_sha256(update_file)
    print(f"Update SHA256: {sha256}")
    if args.dry_run:
        print("\n--- DRY RUN ---")
        print(f"Would use timestamp: {timestamp}")
        print(f"Would use update hash: {update_hash}")
        print(f"Would use SHA256: {sha256}")
        sys.exit(0)
    pubkey = get_public_key(privkey, args.port)
    pubkey_hex = get_hex_key(pubkey, args.port)
    address = get_address(pubkey, args.port)
    reference = get_reference(address, args.port)
    reference_hex = get_hex_key(reference, args.port)
    # 16 hex chars timestamp + 40 (git SHA-1) + 64 (SHA-256) = 120
    data_hex = f"{timestamp:016x}{update_hash}{sha256}"
    if len(data_hex) != 120:
        sys.exit("Data hex length invalid!")
    raw_tx_parts = [
        "0000000a",  # type 10 ARBITRARY
        f"{int(time.time() * 1000):016x}",  # current timestamp
        "00000001",  # dev group ID
        reference_hex,  # reference
        pubkey_hex,  # pubkey
        "00000000",  # nonce
        "00000000",  # name length
        "00000000",  # identifier length
        "00000000",  # method (PUT)
        "00000000",  # secret length
        "00000000",  # compression
        "00000000",  # number of payments
        "00000001",  # service ID
        "01",  # data type (RAW_DATA)
        f"{int(len(data_hex)//2):08x}",  # data length
        data_hex,  # payload
        f"{int(len(data_hex)//2):08x}",  # repeated data length
        "00000000",  # metadata hash length
        f"{int(0.01 * 1e8):016x}"  # fee
    ]
    tx_hex = "".join(raw_tx_parts)
    tx_base58 = to_base58(tx_hex, args.port)
    signed_tx = sign_transaction(privkey, tx_base58, args.port)
    # Last-chance cancellation window before broadcasting.
    print("Submitting in 5 seconds... press CTRL+C to cancel")
    for i in range(5, 0, -1):
        print(f"{i}...", end='\r', flush=True)
        time.sleep(1)
    if not process_transaction(signed_tx, args.port):
        sys.exit("Transaction submission failed")
    decoded = decode_transaction(signed_tx, args.port)
    print("\nTransaction submitted successfully:")
    print(decoded)

if __name__ == "__main__":
    main()