Mirror of https://github.com/Qortal/qortal.git, synced 2025-11-03 07:57:04 +00:00

Compare commits: online-acc ... blockminte (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 0d0af9a126 |  |
33  .github/workflows/pr-testing.yml (vendored)
@@ -1,33 +0,0 @@
name: PR testing

on:
  pull_request:
    branches: [ master ]

jobs:
  mavenTesting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-
      - name: Set up the Java JDK
        uses: actions/setup-java@v2
        with:
          java-version: '11'
          distribution: 'adopt'

      - name: Run all tests
        run: |
          mvn -B clean test -DskipTests=false --file pom.xml
          if [ -f "target/site/jacoco/index.html" ]; then echo "Total coverage: $(cat target/site/jacoco/index.html | grep -o 'Total[^%]*%' | grep -o '[0-9]*%')"; fi

      - name: Log coverage percentage
        run: |
          if [ ! -f "target/site/jacoco/index.html" ]; then echo "No coverage information available"; fi
          if [ -f "target/site/jacoco/index.html" ]; then echo "Total coverage: $(cat target/site/jacoco/index.html | grep -o 'Total[^%]*%' | grep -o '[0-9]*%')"; fi
19  .gitignore (vendored)
@@ -1,8 +1,6 @@
/db*
/lists/
/bin/
/target/
/qortal-backup/
/log.txt.*
/arbitrary*
/Qortal-BTC*
@@ -16,19 +14,8 @@
/settings.json
/testnet*
/settings*.json
/testchain*.json
/run-testnet*.sh
/testchain.json
/run-testnet.sh
/.idea
/qortal.iml
.DS_Store
/src/main/resources/resources
/*.jar
/run.pid
/run.log
/WindowsInstaller/Install Files/qortal.jar
/*.7z
/tmp
/wallets
/data*
/src/test/resources/arbitrary/*/.qortal/cache
apikey.txt
*.DS_Store
26  Dockerfile
@@ -1,26 +0,0 @@
FROM maven:3-openjdk-11 as builder

WORKDIR /work
COPY ./ /work/
RUN mvn clean package

###
FROM openjdk:11

RUN useradd -r -u 1000 -g users qortal && \
mkdir /usr/local/qortal /qortal && \
chown 1000:100 /qortal

COPY --from=builder /work/log4j2.properties /usr/local/qortal/
COPY --from=builder /work/target/qortal*.jar /usr/local/qortal/qortal.jar

USER 1000:100

EXPOSE 12391 12392
HEALTHCHECK --start-period=5m CMD curl -sf http://127.0.0.1:12391/admin/info || exit 1

WORKDIR /qortal
VOLUME /qortal

ENTRYPOINT ["java"]
CMD ["-Djava.net.preferIPv4Stack=false", "-jar", "/usr/local/qortal/qortal.jar"]
30  TestNets.md
@@ -41,39 +41,13 @@
- Start up at least as many nodes as `minBlockchainPeers` (or adjust this value instead)
- Probably best to perform API call `DELETE /peers/known`
- Add other nodes via API call `POST /peers <peer-hostname-or-IP>`
- Add minting private key to nodes via API call `POST /admin/mintingaccounts <minting-private-key>`
The keys must have corresponding `REWARD_SHARE` transactions in testnet genesis block
- You must have at least 2 separate minting keys and two separate nodes. Assign one minting key to each node.
- Alternatively, comment out the `if (mintedLastBlock) { }` conditional in BlockMinter.java to allow for a single node and key.
- Add minting private key to node(s) via API call `POST /admin/mintingaccounts <minting-private-key>`
This key must have corresponding `REWARD_SHARE` transaction in testnet genesis block
- Wait for genesis block timestamp to pass
- A node should mint block 2 approximately 60 seconds after genesis block timestamp
- Other testnet nodes will sync *as long as there is at least `minBlockchainPeers` peers with an "up-to-date" chain`
- You can also use API call `POST /admin/forcesync <connected-peer-IP-and-port>` on stuck nodes

## Single-node testnet

A single-node testnet is possible with code modifications, for basic testing, or to more easily start a new testnet.
To do so, follow these steps:
- Comment out the `if (mintedLastBlock) { }` conditional in BlockMinter.java
- Comment out the `minBlockchainPeers` validation in Settings.validate()
- Set `minBlockchainPeers` to 0 in settings.json
- Set `Synchronizer.RECOVERY_MODE_TIMEOUT` to `0`
- All other steps should remain the same. Only a single reward share key is needed.
- Remember to put these values back after introducing other nodes
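As a rough illustration of the single-node steps above: the only `settings.json` key this document actually names for this case is `minBlockchainPeers`, so the sketch below shows just that change. Any other keys a testnet node may need (API port, blockchain config path, and so on) are omitted here because the document does not specify them, and the `BlockMinter.java` and `Synchronizer.RECOVERY_MODE_TIMEOUT` changes are source-code edits rather than settings, so they do not appear in this file.

```
{
  "minBlockchainPeers": 0
}
```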

## Fixed network

To restrict a testnet to a set of private nodes, you can use the "fixed network" feature.
This ensures that the testnet nodes only communicate with each other and not other known peers.
To do this, add the following setting to each testnet node, substituting the IP addresses:
```
"fixedNetwork": [
"192.168.0.101:62392",
"192.168.0.102:62392",
"192.168.0.103:62392"
]
```

## Dealing with stuck chain

Maybe your nodes have been offline and no-one has minted a recent testnet block.
@@ -61,7 +61,7 @@ appender.rolling.type = RollingFile
appender.rolling.name = FILE
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
appender.rolling.filePattern = ./${filename}.%i
appender.rolling.filePattern = ${dirname:-}${filename}.%i
appender.rolling.policy.type = SizeBasedTriggeringPolicy
appender.rolling.policy.size = 4MB
# Set the immediate flush to true (default)
BIN  WindowsInstaller/Install Files/qortal.jar (Executable file) - Binary file not shown.
File diff suppressed because it is too large.
@@ -12,7 +12,7 @@ configured paths, or create a dummy `D:` drive with the expected layout.

Typical build procedure:

* Place the `qortal.jar` file in `Install-Files\`
* Overwrite the `qortal.jar` file in `Install-Files\`
* Open AdvancedInstaller with qortal.aip file
* If releasing a new version, change version number in:
+ "Product Information" side menu
BIN  WindowsInstaller/qortal.ico (Normal file → Executable file) - Binary file not shown.
Image changed: before size 42 KiB, after size 250 KiB (binary file not shown).
@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.dosse</groupId>
  <artifactId>WaifUPnP</artifactId>
  <version>1.1</version>
  <description>POM was created from install:install-file</description>
</project>
@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata>
  <groupId>com.dosse</groupId>
  <artifactId>WaifUPnP</artifactId>
  <versioning>
    <release>1.1</release>
    <versions>
      <version>1.1</version>
    </versions>
    <lastUpdated>20220218200127</lastUpdated>
  </versioning>
</metadata>
@@ -61,7 +61,7 @@ appender.rolling.type = RollingFile
appender.rolling.name = FILE
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
appender.rolling.filePattern = ./${filename}.%i
appender.rolling.filePattern = ${dirname:-}${filename}.%i
appender.rolling.policy.type = SizeBasedTriggeringPolicy
appender.rolling.policy.size = 4MB
# Set the immediate flush to true (default)
97  pom.xml
@@ -3,39 +3,28 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.qortal</groupId>
<artifactId>qortal</artifactId>
<version>3.5.0</version>
<version>1.4.6</version>
<packaging>jar</packaging>
<properties>
<skipTests>true</skipTests>
<altcoinj.version>7dc8c6f</altcoinj.version>
<bitcoinj.version>0.15.10</bitcoinj.version>
<bouncycastle.version>1.69</bouncycastle.version>
<altcoinj.version>bf9fb80</altcoinj.version>
<bitcoinj.version>0.15.6</bitcoinj.version>
<bouncycastle.version>1.64</bouncycastle.version>
<build.timestamp>${maven.build.timestamp}</build.timestamp>
<ciyam-at.version>1.3.8</ciyam-at.version>
<commons-net.version>3.6</commons-net.version>
<commons-text.version>1.8</commons-text.version>
<commons-io.version>2.6</commons-io.version>
<commons-compress.version>1.21</commons-compress.version>
<commons-lang3.version>3.12.0</commons-lang3.version>
<xz.version>1.9</xz.version>
<dagger.version>1.2.2</dagger.version>
<guava.version>28.1-jre</guava.version>
<hsqldb.version>2.5.1</hsqldb.version>
<homoglyph.version>1.2.1</homoglyph.version>
<icu4j.version>70.1</icu4j.version>
<upnp.version>1.1</upnp.version>
<jersey.version>2.29.1</jersey.version>
<jetty.version>9.4.29.v20200521</jetty.version>
<log4j.version>2.17.1</log4j.version>
<log4j.version>2.12.1</log4j.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<slf4j.version>1.7.12</slf4j.version>
<swagger-api.version>2.0.9</swagger-api.version>
<swagger-ui.version>3.23.8</swagger-ui.version>
<package-info-maven-plugin.version>1.1.0</package-info-maven-plugin.version>
<jsoup.version>1.13.1</jsoup.version>
<java-diff-utils.version>4.10</java-diff-utils.version>
<grpc.version>1.45.1</grpc.version>
<protobuf.version>3.19.4</protobuf.version>
</properties>
<build>
<sourceDirectory>src/main/java</sourceDirectory>
@@ -432,12 +421,6 @@
<artifactId>AT</artifactId>
<version>${ciyam-at.version}</version>
</dependency>
<!-- UPnP support -->
<dependency>
<groupId>com.dosse</groupId>
<artifactId>WaifUPnP</artifactId>
<version>${upnp.version}</version>
</dependency>
<!-- Bitcoin support -->
<dependency>
<groupId>org.bitcoinj</groupId>
@@ -446,7 +429,7 @@
</dependency>
<!-- For Litecoin, etc. support, requires bitcoinj -->
<dependency>
<groupId>com.github.qortal</groupId>
<groupId>com.github.jjos2372</groupId>
<artifactId>altcoinj</artifactId>
<version>${altcoinj.version}</version>
</dependency>
@@ -456,36 +439,11 @@
<artifactId>json-simple</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20210307</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>${commons-text.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons-io.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>${commons-compress.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>${commons-lang3.version}</version>
</dependency>
<dependency>
<groupId>org.tukaani</groupId>
<artifactId>xz</artifactId>
<version>${xz.version}</version>
</dependency>
<!-- For bitset/bitmap compression -->
<dependency>
<groupId>io.druid</groupId>
@@ -572,18 +530,7 @@
<dependency>
<groupId>net.codebox</groupId>
<artifactId>homoglyph</artifactId>
<version>${homoglyph.version}</version>
</dependency>
<!-- Unicode support -->
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
<version>${icu4j.version}</version>
</dependency>
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j-charset</artifactId>
<version>${icu4j.version}</version>
<version>1.2.0</version>
</dependency>
<!-- Jetty -->
<dependency>
@@ -697,35 +644,5 @@
<artifactId>bctls-jdk15on</artifactId>
<version>${bouncycastle.version}</version>
</dependency>
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>${jsoup.version}</version>
</dependency>
<dependency>
<groupId>io.github.java-diff-utils</groupId>
<artifactId>java-diff-utils</artifactId>
<version>${java-diff-utils.version}</version>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-netty</artifactId>
<version>${grpc.version}</version>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-protobuf</artifactId>
<version>${grpc.version}</version>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-stub</artifactId>
<version>${grpc.version}</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf.version}</version>
</dependency>
</dependencies>
</project>
BIN  src/.DS_Store (vendored, Normal file) - Binary file not shown.
BIN  src/main/.DS_Store (vendored, Normal file) - Binary file not shown.
Five further file diffs suppressed because they are too large.
@@ -1,100 +0,0 @@
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * LiteWalletJni code based on https://github.com/PirateNetwork/cordova-plugin-litewallet
 *
 * MIT License
 *
 * Copyright (c) 2020 Zero Currency Coin
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package com.rust.litewalletjni;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.controller.PirateChainWalletController;

import java.nio.file.Path;
import java.nio.file.Paths;

public class LiteWalletJni {

    protected static final Logger LOGGER = LogManager.getLogger(LiteWalletJni.class);

    public static native String initlogging();
    public static native String initnew(final String serveruri, final String params, final String saplingOutputb64, final String saplingSpendb64);
    public static native String initfromseed(final String serveruri, final String params, final String seed, final String birthday, final String saplingOutputb64, final String saplingSpendb64);
    public static native String initfromb64(final String serveruri, final String params, final String datab64, final String saplingOutputb64, final String saplingSpendb64);
    public static native String save();

    public static native String execute(final String cmd, final String args);
    public static native String getseedphrase();
    public static native String getseedphrasefromentropyb64(final String entropy64);
    public static native String checkseedphrase(final String input);

    private static boolean loaded = false;

    public static void loadLibrary() {
        if (loaded) {
            return;
        }
        String osName = System.getProperty("os.name");
        String osArchitecture = System.getProperty("os.arch");

        LOGGER.info("OS Name: {}", osName);
        LOGGER.info("OS Architecture: {}", osArchitecture);

        try {
            String libFileName = PirateChainWalletController.getRustLibFilename();
            if (libFileName == null) {
                LOGGER.info("Library not found for OS: {}, arch: {}", osName, osArchitecture);
                return;
            }

            Path libPath = Paths.get(PirateChainWalletController.getRustLibOuterDirectory().toString(), libFileName);
            System.load(libPath.toAbsolutePath().toString());
            loaded = true;
        }
        catch (UnsatisfiedLinkError e) {
            LOGGER.info("Unable to load library");
        }
    }

    public static boolean isLoaded() {
        return loaded;
    }

}
@@ -7,20 +7,18 @@ import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.security.Security;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.bouncycastle.jce.provider.BouncyCastleProvider;
|
||||
import org.bouncycastle.jsse.provider.BouncyCastleJsseProvider;
|
||||
import org.qortal.api.ApiKey;
|
||||
import org.qortal.api.ApiRequest;
|
||||
import org.qortal.controller.AutoUpdate;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import static org.qortal.controller.AutoUpdate.AGENTLIB_JVM_HOLDER_ARG;
|
||||
|
||||
public class ApplyUpdate {
|
||||
|
||||
static {
|
||||
@@ -40,7 +38,7 @@ public class ApplyUpdate {
|
||||
private static final String JAVA_TOOL_OPTIONS_NAME = "JAVA_TOOL_OPTIONS";
|
||||
private static final String JAVA_TOOL_OPTIONS_VALUE = "-XX:MaxRAMFraction=4";
|
||||
|
||||
private static final long CHECK_INTERVAL = 30 * 1000L; // ms
|
||||
private static final long CHECK_INTERVAL = 10 * 1000L; // ms
|
||||
private static final int MAX_ATTEMPTS = 12;
|
||||
|
||||
public static void main(String[] args) {
|
||||
@@ -72,40 +70,14 @@ public class ApplyUpdate {
|
||||
String baseUri = "http://localhost:" + Settings.getInstance().getApiPort() + "/";
|
||||
LOGGER.info(() -> String.format("Shutting down node using API via %s", baseUri));
|
||||
|
||||
// The /admin/stop endpoint requires an API key, which may or may not be already generated
|
||||
boolean apiKeyNewlyGenerated = false;
|
||||
ApiKey apiKey = null;
|
||||
try {
|
||||
apiKey = new ApiKey();
|
||||
if (!apiKey.generated()) {
|
||||
apiKey.generate();
|
||||
apiKeyNewlyGenerated = true;
|
||||
LOGGER.info("Generated API key");
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Error loading API key: {}", e.getMessage());
|
||||
}
|
||||
|
||||
// Create GET params
|
||||
Map<String, String> params = new HashMap<>();
|
||||
if (apiKey != null) {
|
||||
params.put("apiKey", apiKey.toString());
|
||||
}
|
||||
|
||||
// Attempt to stop the node
|
||||
int attempt;
|
||||
for (attempt = 0; attempt < MAX_ATTEMPTS; ++attempt) {
|
||||
final int attemptForLogging = attempt;
|
||||
LOGGER.info(() -> String.format("Attempt #%d out of %d to shutdown node", attemptForLogging + 1, MAX_ATTEMPTS));
|
||||
String response = ApiRequest.perform(baseUri + "admin/stop", params);
|
||||
if (response == null) {
|
||||
String response = ApiRequest.perform(baseUri + "admin/stop", null);
|
||||
if (response == null)
|
||||
// No response - consider node shut down
|
||||
if (apiKeyNewlyGenerated) {
|
||||
// API key was newly generated for this auto update, so we need to remove it
|
||||
ApplyUpdate.removeGeneratedApiKey();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
LOGGER.info(() -> String.format("Response from API: %s", response));
|
||||
|
||||
@@ -117,11 +89,6 @@ public class ApplyUpdate {
|
||||
}
|
||||
}
|
||||
|
||||
if (apiKeyNewlyGenerated) {
|
||||
// API key was newly generated for this auto update, so we need to remove it
|
||||
ApplyUpdate.removeGeneratedApiKey();
|
||||
}
|
||||
|
||||
if (attempt == MAX_ATTEMPTS) {
|
||||
LOGGER.error("Failed to shutdown node - giving up");
|
||||
return false;
|
||||
@@ -130,19 +97,6 @@ public class ApplyUpdate {
|
||||
return true;
|
||||
}
|
||||
|
||||
private static void removeGeneratedApiKey() {
|
||||
try {
|
||||
LOGGER.info("Removing newly generated API key...");
|
||||
|
||||
// Delete the API key since it was only generated for this auto update
|
||||
ApiKey apiKey = new ApiKey();
|
||||
apiKey.delete();
|
||||
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Error loading or deleting API key: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private static void replaceJar() {
|
||||
// Assuming current working directory contains the JAR files
|
||||
Path realJar = Paths.get(JAR_FILENAME);
|
||||
@@ -200,11 +154,6 @@ public class ApplyUpdate {
|
||||
// JVM arguments
|
||||
javaCmd.addAll(ManagementFactory.getRuntimeMXBean().getInputArguments());
|
||||
|
||||
// Reapply any retained, but disabled, -agentlib JVM arg
|
||||
javaCmd = javaCmd.stream()
|
||||
.map(arg -> arg.replace(AGENTLIB_JVM_HOLDER_ARG, "-agentlib"))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
// Call mainClass in JAR
|
||||
javaCmd.addAll(Arrays.asList("-jar", JAR_FILENAME));
|
||||
|
||||
@@ -213,7 +162,7 @@ public class ApplyUpdate {
|
||||
}
|
||||
|
||||
try {
|
||||
LOGGER.info(String.format("Restarting node with: %s", String.join(" ", javaCmd)));
|
||||
LOGGER.info(() -> String.format("Restarting node with: %s", String.join(" ", javaCmd)));
|
||||
|
||||
ProcessBuilder processBuilder = new ProcessBuilder(javaCmd);
|
||||
|
||||
@@ -222,15 +171,8 @@ public class ApplyUpdate {
|
||||
processBuilder.environment().put(JAVA_TOOL_OPTIONS_NAME, JAVA_TOOL_OPTIONS_VALUE);
|
||||
}
|
||||
|
||||
// New process will inherit our stdout and stderr
|
||||
processBuilder.redirectOutput(ProcessBuilder.Redirect.INHERIT);
|
||||
processBuilder.redirectError(ProcessBuilder.Redirect.INHERIT);
|
||||
|
||||
Process process = processBuilder.start();
|
||||
|
||||
// Nothing to pipe to new process, so close output stream (process's stdin)
|
||||
process.getOutputStream().close();
|
||||
} catch (Exception e) {
|
||||
processBuilder.start();
|
||||
} catch (IOException e) {
|
||||
LOGGER.error(String.format("Failed to restart node (BAD): %s", e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package org.qortal;
|
||||
|
||||
import java.security.Security;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
@@ -58,10 +57,10 @@ public class RepositoryMaintenance {
|
||||
|
||||
LOGGER.info("Starting repository periodic maintenance. This can take a while...");
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
repository.performPeriodicMaintenance(null);
|
||||
repository.performPeriodicMaintenance();
|
||||
|
||||
LOGGER.info("Repository periodic maintenance completed");
|
||||
} catch (DataException | TimeoutException e) {
|
||||
} catch (DataException e) {
|
||||
LOGGER.error("Repository periodic maintenance failed", e);
|
||||
}
|
||||
|
||||
|
||||
@@ -8,13 +8,11 @@ import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.block.BlockChain;
|
||||
import org.qortal.controller.LiteNode;
|
||||
import org.qortal.data.account.AccountBalanceData;
|
||||
import org.qortal.data.account.AccountData;
|
||||
import org.qortal.data.account.RewardShareData;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.NONE) // Stops JAX-RS errors when unmarshalling blockchain config
|
||||
@@ -61,17 +59,7 @@ public class Account {
|
||||
// Balance manipulations - assetId is 0 for QORT
|
||||
|
||||
public long getConfirmedBalance(long assetId) throws DataException {
|
||||
AccountBalanceData accountBalanceData;
|
||||
|
||||
if (Settings.getInstance().isLite()) {
|
||||
// Lite nodes request data from peers instead of the local db
|
||||
accountBalanceData = LiteNode.getInstance().fetchAccountBalance(this.address, assetId);
|
||||
}
|
||||
else {
|
||||
// All other node types fetch from the local db
|
||||
accountBalanceData = this.repository.getAccountRepository().getBalance(this.address, assetId);
|
||||
}
|
||||
|
||||
AccountBalanceData accountBalanceData = this.repository.getAccountRepository().getBalance(this.address, assetId);
|
||||
if (accountBalanceData == null)
|
||||
return 0;
|
||||
|
||||
@@ -217,12 +205,6 @@ public class Account {
|
||||
return false;
|
||||
}
|
||||
|
||||
/** Returns account's blockMinted (0+) or null if account not found in repository. */
|
||||
public Integer getBlocksMinted() throws DataException {
|
||||
return this.repository.getAccountRepository().getMintedBlockCount(this.address);
|
||||
}
|
||||
|
||||
|
||||
/** Returns whether account can build reward-shares.
|
||||
* <p>
|
||||
* To be able to create reward-shares, the account needs to pass at least one of these tests:<br>
|
||||
@@ -290,7 +272,7 @@ public class Account {
|
||||
/**
|
||||
* Returns 'effective' minting level, or zero if reward-share does not exist.
|
||||
* <p>
|
||||
* this is being used on src/main/java/org/qortal/api/resource/AddressesResource.java to fulfil the online accounts api call
|
||||
* For founder accounts, this returns "founderEffectiveMintingLevel" from blockchain config.
|
||||
*
|
||||
* @param repository
|
||||
* @param rewardSharePublicKey
|
||||
@@ -306,26 +288,5 @@ public class Account {
|
||||
Account rewardShareMinter = new Account(repository, rewardShareData.getMinter());
|
||||
return rewardShareMinter.getEffectiveMintingLevel();
|
||||
}
|
||||
/**
|
||||
* Returns 'effective' minting level, with a fix for the zero level.
|
||||
* <p>
|
||||
* For founder accounts, this returns "founderEffectiveMintingLevel" from blockchain config.
|
||||
*
|
||||
* @param repository
|
||||
* @param rewardSharePublicKey
|
||||
* @return 0+
|
||||
* @throws DataException
|
||||
*/
|
||||
public static int getRewardShareEffectiveMintingLevelIncludingLevelZero(Repository repository, byte[] rewardSharePublicKey) throws DataException {
|
||||
// Find actual minter and get their effective minting level
|
||||
RewardShareData rewardShareData = repository.getAccountRepository().getRewardShare(rewardSharePublicKey);
|
||||
if (rewardShareData == null)
|
||||
return 0;
|
||||
|
||||
else if(!rewardShareData.getMinter().equals(rewardShareData.getRecipient()))//the minter is different than the recipient this means sponsorship
|
||||
return 0;
|
||||
|
||||
Account rewardShareMinter = new Account(repository, rewardShareData.getMinter());
|
||||
return rewardShareMinter.getEffectiveMintingLevel();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,15 +11,15 @@ public class PrivateKeyAccount extends PublicKeyAccount {
|
||||
private final Ed25519PrivateKeyParameters edPrivateKeyParams;
|
||||
|
||||
/**
|
||||
* Create PrivateKeyAccount using byte[32] private key.
|
||||
* Create PrivateKeyAccount using byte[32] seed.
|
||||
*
|
||||
* @param privateKey
|
||||
* @param seed
|
||||
* byte[32] used to create private/public key pair
|
||||
* @throws IllegalArgumentException
|
||||
* if passed invalid privateKey
|
||||
* if passed invalid seed
|
||||
*/
|
||||
public PrivateKeyAccount(Repository repository, byte[] privateKey) {
|
||||
this(repository, new Ed25519PrivateKeyParameters(privateKey, 0));
|
||||
public PrivateKeyAccount(Repository repository, byte[] seed) {
|
||||
this(repository, new Ed25519PrivateKeyParameters(seed, 0));
|
||||
}
|
||||
|
||||
private PrivateKeyAccount(Repository repository, Ed25519PrivateKeyParameters edPrivateKeyParams) {
|
||||
@@ -37,6 +37,10 @@ public class PrivateKeyAccount extends PublicKeyAccount {
|
||||
return this.privateKey;
|
||||
}
|
||||
|
||||
public static byte[] toPublicKey(byte[] seed) {
|
||||
return new Ed25519PrivateKeyParameters(seed, 0).generatePublicKey().getEncoded();
|
||||
}
|
||||
|
||||
public byte[] sign(byte[] message) {
|
||||
return Crypto.sign(this.edPrivateKeyParams, message);
|
||||
}
|
||||
|
||||
@@ -129,14 +129,7 @@ public enum ApiError {
|
||||
// Foreign blockchain
|
||||
FOREIGN_BLOCKCHAIN_NETWORK_ISSUE(1201, 500),
|
||||
FOREIGN_BLOCKCHAIN_BALANCE_ISSUE(1202, 402),
|
||||
FOREIGN_BLOCKCHAIN_TOO_SOON(1203, 408),
|
||||
|
||||
// Trade portal
|
||||
ORDER_SIZE_TOO_SMALL(1300, 402),
|
||||
|
||||
// Data
|
||||
FILE_NOT_FOUND(1401, 404),
|
||||
NO_REPLY(1402, 404);
|
||||
FOREIGN_BLOCKCHAIN_TOO_SOON(1203, 408);
|
||||
|
||||
private static final Map<Integer, ApiError> map = stream(ApiError.values()).collect(toMap(apiError -> apiError.code, apiError -> apiError));
|
||||
|
||||
@@ -164,4 +157,4 @@ public enum ApiError {
|
||||
return this.status;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -16,8 +16,4 @@ public enum ApiExceptionFactory {
|
||||
return createException(request, apiError, null);
|
||||
}
|
||||
|
||||
public ApiException createCustomException(HttpServletRequest request, ApiError apiError, String message) {
|
||||
return new ApiException(apiError.getStatus(), apiError.getCode(), message, null);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,107 +0,0 @@
|
||||
package org.qortal.api;
|
||||
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.SecureRandom;
|
||||
|
||||
public class ApiKey {
|
||||
|
||||
private String apiKey;
|
||||
|
||||
public ApiKey() throws IOException {
|
||||
this.load();
|
||||
}
|
||||
|
||||
public void generate() throws IOException {
|
||||
byte[] apiKey = new byte[16];
|
||||
new SecureRandom().nextBytes(apiKey);
|
||||
this.apiKey = Base58.encode(apiKey);
|
||||
|
||||
this.save();
|
||||
}
|
||||
|
||||
|
||||
/* Filesystem */
|
||||
|
||||
private Path getFilePath() {
|
||||
return Paths.get(Settings.getInstance().getApiKeyPath(), "apikey.txt");
|
||||
}
|
||||
|
||||
private boolean load() throws IOException {
|
||||
Path path = this.getFilePath();
|
||||
File apiKeyFile = new File(path.toString());
|
||||
if (!apiKeyFile.exists()) {
|
||||
// Try settings - to allow legacy API keys to be supported
|
||||
return this.loadLegacyApiKey();
|
||||
}
|
||||
|
||||
try {
|
||||
this.apiKey = new String(Files.readAllBytes(path));
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new IOException(String.format("Couldn't read contents from file %s", path.toString()));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean loadLegacyApiKey() {
|
||||
String legacyApiKey = Settings.getInstance().getApiKey();
|
||||
if (legacyApiKey != null && !legacyApiKey.isEmpty()) {
|
||||
this.apiKey = Settings.getInstance().getApiKey();
|
||||
|
||||
try {
|
||||
// Save it to the apikey file
|
||||
this.save();
|
||||
} catch (IOException e) {
|
||||
// Ignore failures as it will be reloaded from settings next time
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public void save() throws IOException {
|
||||
if (this.apiKey == null || this.apiKey.isEmpty()) {
|
||||
throw new IllegalStateException("Unable to save a blank API key");
|
||||
}
|
||||
|
||||
Path filePath = this.getFilePath();
|
||||
|
||||
BufferedWriter writer = new BufferedWriter(new FileWriter(filePath.toString()));
|
||||
writer.write(this.apiKey);
|
||||
writer.close();
|
||||
}
|
||||
|
||||
public void delete() throws IOException {
|
||||
this.apiKey = null;
|
||||
|
||||
Path filePath = this.getFilePath();
|
||||
if (Files.exists(filePath)) {
|
||||
Files.delete(filePath);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public boolean generated() {
|
||||
return (this.apiKey != null);
|
||||
}
|
||||
|
||||
public boolean exists() {
|
||||
return this.getFilePath().toFile().exists();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.apiKey;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -14,7 +14,6 @@ import java.security.SecureRandom;
|
||||
import javax.net.ssl.KeyManagerFactory;
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.checkerframework.checker.units.qual.A;
|
||||
import org.eclipse.jetty.http.HttpVersion;
|
||||
import org.eclipse.jetty.rewrite.handler.RedirectPatternRule;
|
||||
import org.eclipse.jetty.rewrite.handler.RewriteHandler;
|
||||
@@ -40,7 +39,13 @@ import org.glassfish.jersey.server.ResourceConfig;
|
||||
import org.glassfish.jersey.servlet.ServletContainer;
|
||||
import org.qortal.api.resource.AnnotationPostProcessor;
|
||||
import org.qortal.api.resource.ApiDefinition;
|
||||
import org.qortal.api.websocket.*;
|
||||
import org.qortal.api.websocket.ActiveChatsWebSocket;
|
||||
import org.qortal.api.websocket.AdminStatusWebSocket;
|
||||
import org.qortal.api.websocket.BlocksWebSocket;
|
||||
import org.qortal.api.websocket.ChatMessagesWebSocket;
|
||||
import org.qortal.api.websocket.PresenceWebSocket;
|
||||
import org.qortal.api.websocket.TradeBotWebSocket;
|
||||
import org.qortal.api.websocket.TradeOffersWebSocket;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
public class ApiService {
|
||||
@@ -49,7 +54,6 @@ public class ApiService {
|
||||
|
||||
private final ResourceConfig config;
|
||||
private Server server;
|
||||
private ApiKey apiKey;
|
||||
|
||||
private ApiService() {
|
||||
this.config = new ResourceConfig();
|
||||
@@ -70,15 +74,6 @@ public class ApiService {
|
||||
return this.config.getClasses();
|
||||
}
|
||||
|
||||
public void setApiKey(ApiKey apiKey) {
|
||||
this.apiKey = apiKey;
|
||||
}
|
||||
|
||||
public ApiKey getApiKey() {
|
||||
return this.apiKey;
|
||||
}
|
||||
|
||||
|
||||
public void start() {
|
||||
try {
|
||||
// Create API server
|
||||
@@ -206,9 +201,6 @@ public class ApiService {
|
||||
context.addServlet(ChatMessagesWebSocket.class, "/websockets/chat/messages");
|
||||
context.addServlet(TradeOffersWebSocket.class, "/websockets/crosschain/tradeoffers");
|
||||
context.addServlet(TradeBotWebSocket.class, "/websockets/crosschain/tradebot");
|
||||
context.addServlet(TradePresenceWebSocket.class, "/websockets/crosschain/tradepresence");
|
||||
|
||||
// Deprecated
|
||||
context.addServlet(PresenceWebSocket.class, "/websockets/presence");
|
||||
|
||||
// Start server
|
||||
|
||||
@@ -2,7 +2,7 @@ package org.qortal.api;
|
||||
|
||||
import javax.xml.bind.annotation.adapters.XmlAdapter;
|
||||
|
||||
import org.qortal.utils.Base58;
|
||||
import org.bitcoinj.core.Base58;
|
||||
|
||||
public class Base58TypeAdapter extends XmlAdapter<String, byte[]> {
|
||||
|
||||
|
||||
@@ -1,171 +0,0 @@
|
||||
package org.qortal.api;
|
||||
|
||||
import io.swagger.v3.jaxrs2.integration.resources.OpenApiResource;
|
||||
import org.eclipse.jetty.http.HttpVersion;
|
||||
import org.eclipse.jetty.rewrite.handler.RewriteHandler;
|
||||
import org.eclipse.jetty.rewrite.handler.RewritePatternRule;
|
||||
import org.eclipse.jetty.server.*;
|
||||
import org.eclipse.jetty.server.handler.ErrorHandler;
|
||||
import org.eclipse.jetty.server.handler.InetAccessHandler;
|
||||
import org.eclipse.jetty.servlet.FilterHolder;
|
||||
import org.eclipse.jetty.servlet.ServletContextHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.eclipse.jetty.servlets.CrossOriginFilter;
|
||||
import org.eclipse.jetty.util.ssl.SslContextFactory;
|
||||
import org.glassfish.jersey.server.ResourceConfig;
|
||||
import org.glassfish.jersey.servlet.ServletContainer;
|
||||
import org.qortal.api.resource.AnnotationPostProcessor;
|
||||
import org.qortal.api.resource.ApiDefinition;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import javax.net.ssl.KeyManagerFactory;
|
||||
import javax.net.ssl.SSLContext;
|
||||
import java.io.InputStream;
|
||||
import java.net.InetAddress;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.KeyStore;
|
||||
import java.security.SecureRandom;
|
||||
|
||||
public class DomainMapService {
|
||||
|
||||
private static DomainMapService instance;
|
||||
|
||||
private final ResourceConfig config;
|
||||
private Server server;
|
||||
|
||||
private DomainMapService() {
|
||||
this.config = new ResourceConfig();
|
||||
this.config.packages("org.qortal.api.domainmap.resource");
|
||||
this.config.register(OpenApiResource.class);
|
||||
this.config.register(ApiDefinition.class);
|
||||
this.config.register(AnnotationPostProcessor.class);
|
||||
}
|
||||
|
||||
public static DomainMapService getInstance() {
|
||||
if (instance == null)
|
||||
instance = new DomainMapService();
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
public Iterable<Class<?>> getResources() {
|
||||
return this.config.getClasses();
|
||||
}
|
||||
|
||||
public void start() {
|
||||
try {
|
||||
// Create API server
|
||||
|
||||
// SSL support if requested
|
||||
String keystorePathname = Settings.getInstance().getSslKeystorePathname();
|
||||
String keystorePassword = Settings.getInstance().getSslKeystorePassword();
|
||||
|
||||
if (keystorePathname != null && keystorePassword != null) {
|
||||
// SSL version
|
||||
if (!Files.isReadable(Path.of(keystorePathname)))
|
||||
throw new RuntimeException("Failed to start SSL API due to broken keystore");
|
||||
|
||||
// BouncyCastle-specific SSLContext build
|
||||
SSLContext sslContext = SSLContext.getInstance("TLS", "BCJSSE");
|
||||
KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance("PKIX", "BCJSSE");
|
||||
|
||||
KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType(), "BC");
|
||||
|
||||
try (InputStream keystoreStream = Files.newInputStream(Paths.get(keystorePathname))) {
|
||||
keyStore.load(keystoreStream, keystorePassword.toCharArray());
|
||||
}
|
||||
|
||||
keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
|
||||
sslContext.init(keyManagerFactory.getKeyManagers(), null, new SecureRandom());
|
||||
|
||||
SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
|
||||
sslContextFactory.setSslContext(sslContext);
|
||||
|
||||
this.server = new Server();
|
||||
|
||||
HttpConfiguration httpConfig = new HttpConfiguration();
|
||||
httpConfig.setSecureScheme("https");
|
||||
httpConfig.setSecurePort(Settings.getInstance().getDomainMapPort());
|
||||
|
||||
SecureRequestCustomizer src = new SecureRequestCustomizer();
|
||||
httpConfig.addCustomizer(src);
|
||||
|
||||
HttpConnectionFactory httpConnectionFactory = new HttpConnectionFactory(httpConfig);
|
||||
SslConnectionFactory sslConnectionFactory = new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString());
|
||||
|
||||
ServerConnector portUnifiedConnector = new ServerConnector(this.server,
|
||||
new DetectorConnectionFactory(sslConnectionFactory),
|
||||
httpConnectionFactory);
|
||||
portUnifiedConnector.setHost(Settings.getInstance().getBindAddress());
|
||||
portUnifiedConnector.setPort(Settings.getInstance().getDomainMapPort());
|
||||
|
||||
this.server.addConnector(portUnifiedConnector);
|
||||
} else {
|
||||
// Non-SSL
|
||||
InetAddress bindAddr = InetAddress.getByName(Settings.getInstance().getBindAddress());
|
||||
InetSocketAddress endpoint = new InetSocketAddress(bindAddr, Settings.getInstance().getDomainMapPort());
|
||||
this.server = new Server(endpoint);
|
||||
}
|
||||
|
||||
// Error handler
|
||||
ErrorHandler errorHandler = new ApiErrorHandler();
|
||||
this.server.setErrorHandler(errorHandler);
|
||||
|
||||
// Request logging
|
||||
if (Settings.getInstance().isDomainMapLoggingEnabled()) {
|
||||
RequestLogWriter logWriter = new RequestLogWriter("domainmap-requests.log");
|
||||
logWriter.setAppend(true);
|
||||
logWriter.setTimeZone("UTC");
|
||||
RequestLog requestLog = new CustomRequestLog(logWriter, CustomRequestLog.EXTENDED_NCSA_FORMAT);
|
||||
this.server.setRequestLog(requestLog);
|
||||
}
|
||||
|
||||
// Access handler (currently no whitelist is used)
|
||||
InetAccessHandler accessHandler = new InetAccessHandler();
|
||||
this.server.setHandler(accessHandler);
|
||||
|
||||
// URL rewriting
|
||||
RewriteHandler rewriteHandler = new RewriteHandler();
|
||||
accessHandler.setHandler(rewriteHandler);
|
||||
|
||||
// Context
|
||||
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
|
||||
context.setContextPath("/");
|
||||
rewriteHandler.setHandler(context);
|
||||
|
||||
// Cross-origin resource sharing
|
||||
FilterHolder corsFilterHolder = new FilterHolder(CrossOriginFilter.class);
|
||||
corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*");
|
||||
corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, "GET, POST, DELETE");
|
||||
corsFilterHolder.setInitParameter(CrossOriginFilter.CHAIN_PREFLIGHT_PARAM, "false");
|
||||
context.addFilter(corsFilterHolder, "/*", null);
|
||||
|
||||
// API servlet
|
||||
ServletContainer container = new ServletContainer(this.config);
|
||||
ServletHolder apiServlet = new ServletHolder(container);
|
||||
apiServlet.setInitOrder(1);
|
||||
context.addServlet(apiServlet, "/*");
|
||||
|
||||
// Start server
|
||||
this.server.start();
|
||||
} catch (Exception e) {
|
||||
// Failed to start
|
||||
throw new RuntimeException("Failed to start API", e);
|
||||
}
|
||||
}
|
||||
|
||||
public void stop() {
|
||||
try {
|
||||
// Stop server
|
||||
this.server.stop();
|
||||
} catch (Exception e) {
|
||||
// Failed to stop
|
||||
}
|
||||
|
||||
this.server = null;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
package org.qortal.api;
|
||||
|
||||
import io.swagger.v3.jaxrs2.integration.resources.OpenApiResource;
|
||||
import org.eclipse.jetty.http.HttpVersion;
|
||||
import org.eclipse.jetty.rewrite.handler.RewriteHandler;
|
||||
import org.eclipse.jetty.server.*;
|
||||
import org.eclipse.jetty.server.handler.ErrorHandler;
|
||||
import org.eclipse.jetty.server.handler.InetAccessHandler;
|
||||
import org.eclipse.jetty.servlet.FilterHolder;
|
||||
import org.eclipse.jetty.servlet.ServletContextHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.eclipse.jetty.servlets.CrossOriginFilter;
|
||||
import org.eclipse.jetty.util.ssl.SslContextFactory;
|
||||
import org.glassfish.jersey.server.ResourceConfig;
|
||||
import org.glassfish.jersey.servlet.ServletContainer;
|
||||
import org.qortal.api.resource.AnnotationPostProcessor;
|
||||
import org.qortal.api.resource.ApiDefinition;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import javax.net.ssl.KeyManagerFactory;
|
||||
import javax.net.ssl.SSLContext;
|
||||
import java.io.InputStream;
|
||||
import java.net.InetAddress;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.KeyStore;
|
||||
import java.security.SecureRandom;
|
||||
|
||||
public class GatewayService {
|
||||
|
||||
private static GatewayService instance;
|
||||
|
||||
private final ResourceConfig config;
|
||||
private Server server;
|
||||
|
||||
private GatewayService() {
|
||||
this.config = new ResourceConfig();
|
||||
this.config.packages("org.qortal.api.gateway.resource");
|
||||
this.config.register(OpenApiResource.class);
|
||||
this.config.register(ApiDefinition.class);
|
||||
this.config.register(AnnotationPostProcessor.class);
|
||||
}
|
||||
|
||||
public static GatewayService getInstance() {
|
||||
if (instance == null)
|
||||
instance = new GatewayService();
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
public Iterable<Class<?>> getResources() {
|
||||
return this.config.getClasses();
|
||||
}
|
||||
|
||||
public void start() {
|
||||
try {
|
||||
// Create API server
|
||||
|
||||
// SSL support if requested
|
||||
String keystorePathname = Settings.getInstance().getSslKeystorePathname();
|
||||
String keystorePassword = Settings.getInstance().getSslKeystorePassword();
|
||||
|
||||
if (keystorePathname != null && keystorePassword != null) {
|
||||
// SSL version
|
||||
if (!Files.isReadable(Path.of(keystorePathname)))
|
||||
throw new RuntimeException("Failed to start SSL API due to broken keystore");
|
||||
|
||||
// BouncyCastle-specific SSLContext build
|
||||
SSLContext sslContext = SSLContext.getInstance("TLS", "BCJSSE");
|
||||
KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance("PKIX", "BCJSSE");
|
||||
|
||||
KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType(), "BC");
|
||||
|
||||
try (InputStream keystoreStream = Files.newInputStream(Paths.get(keystorePathname))) {
|
||||
keyStore.load(keystoreStream, keystorePassword.toCharArray());
|
||||
}
|
||||
|
||||
keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
|
||||
sslContext.init(keyManagerFactory.getKeyManagers(), null, new SecureRandom());
|
||||
|
||||
SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
|
||||
sslContextFactory.setSslContext(sslContext);
|
||||
|
||||
this.server = new Server();
|
||||
|
||||
HttpConfiguration httpConfig = new HttpConfiguration();
|
||||
httpConfig.setSecureScheme("https");
|
||||
httpConfig.setSecurePort(Settings.getInstance().getGatewayPort());
|
||||
|
||||
SecureRequestCustomizer src = new SecureRequestCustomizer();
|
||||
httpConfig.addCustomizer(src);
|
||||
|
||||
HttpConnectionFactory httpConnectionFactory = new HttpConnectionFactory(httpConfig);
|
||||
SslConnectionFactory sslConnectionFactory = new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString());
|
||||
|
||||
ServerConnector portUnifiedConnector = new ServerConnector(this.server,
|
||||
new DetectorConnectionFactory(sslConnectionFactory),
|
||||
httpConnectionFactory);
|
||||
portUnifiedConnector.setHost(Settings.getInstance().getBindAddress());
|
||||
portUnifiedConnector.setPort(Settings.getInstance().getGatewayPort());
|
||||
|
||||
this.server.addConnector(portUnifiedConnector);
|
||||
} else {
|
||||
// Non-SSL
|
||||
InetAddress bindAddr = InetAddress.getByName(Settings.getInstance().getBindAddress());
|
||||
InetSocketAddress endpoint = new InetSocketAddress(bindAddr, Settings.getInstance().getGatewayPort());
|
||||
this.server = new Server(endpoint);
|
||||
}
|
||||
|
||||
// Error handler
|
||||
ErrorHandler errorHandler = new ApiErrorHandler();
|
||||
this.server.setErrorHandler(errorHandler);
|
||||
|
||||
// Request logging
|
||||
if (Settings.getInstance().isGatewayLoggingEnabled()) {
|
||||
RequestLogWriter logWriter = new RequestLogWriter("gateway-requests.log");
|
||||
logWriter.setAppend(true);
|
||||
logWriter.setTimeZone("UTC");
|
||||
RequestLog requestLog = new CustomRequestLog(logWriter, CustomRequestLog.EXTENDED_NCSA_FORMAT);
|
||||
this.server.setRequestLog(requestLog);
|
||||
}
|
||||
|
||||
// Access handler (currently no whitelist is used)
|
||||
InetAccessHandler accessHandler = new InetAccessHandler();
|
||||
this.server.setHandler(accessHandler);
|
||||
|
||||
// URL rewriting
|
||||
RewriteHandler rewriteHandler = new RewriteHandler();
|
||||
accessHandler.setHandler(rewriteHandler);
|
||||
|
||||
// Context
|
||||
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
|
||||
context.setContextPath("/");
|
||||
rewriteHandler.setHandler(context);
|
||||
|
||||
// Cross-origin resource sharing
|
||||
FilterHolder corsFilterHolder = new FilterHolder(CrossOriginFilter.class);
|
||||
corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*");
|
||||
corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, "GET, POST, DELETE");
|
||||
corsFilterHolder.setInitParameter(CrossOriginFilter.CHAIN_PREFLIGHT_PARAM, "false");
|
||||
context.addFilter(corsFilterHolder, "/*", null);
|
||||
|
||||
// API servlet
|
||||
ServletContainer container = new ServletContainer(this.config);
|
||||
ServletHolder apiServlet = new ServletHolder(container);
|
||||
apiServlet.setInitOrder(1);
|
||||
context.addServlet(apiServlet, "/*");
|
||||
|
||||
// Start server
|
||||
this.server.start();
|
||||
} catch (Exception e) {
|
||||
// Failed to start
|
||||
throw new RuntimeException("Failed to start API", e);
|
||||
}
|
||||
}
|
||||
|
||||
public void stop() {
|
||||
try {
|
||||
// Stop server
|
||||
this.server.stop();
|
||||
} catch (Exception e) {
|
||||
// Failed to stop
|
||||
}
|
||||
|
||||
this.server = null;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
package org.qortal.api;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.select.Elements;
|
||||
|
||||
public class HTMLParser {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(HTMLParser.class);
|
||||
|
||||
private String linkPrefix;
|
||||
private byte[] data;
|
||||
|
||||
public HTMLParser(String resourceId, String inPath, String prefix, boolean usePrefix, byte[] data) {
|
||||
String inPathWithoutFilename = inPath.substring(0, inPath.lastIndexOf('/'));
|
||||
this.linkPrefix = usePrefix ? String.format("%s/%s%s", prefix, resourceId, inPathWithoutFilename) : "";
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
public void addAdditionalHeaderTags() {
|
||||
String fileContents = new String(data);
|
||||
Document document = Jsoup.parse(fileContents);
|
||||
String baseUrl = this.linkPrefix + "/";
|
||||
Elements head = document.getElementsByTag("head");
|
||||
if (!head.isEmpty()) {
|
||||
// Add base href tag
|
||||
String baseElement = String.format("<base href=\"%s\">", baseUrl);
|
||||
head.get(0).prepend(baseElement);
|
||||
|
||||
// Add meta charset tag
|
||||
String metaCharsetElement = "<meta charset=\"UTF-8\">";
|
||||
head.get(0).prepend(metaCharsetElement);
|
||||
|
||||
}
|
||||
String html = document.html();
|
||||
this.data = html.getBytes();
|
||||
}
|
||||
|
||||
public static boolean isHtmlFile(String path) {
|
||||
if (path.endsWith(".html") || path.endsWith(".htm")) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public byte[] getData() {
|
||||
return this.data;
|
||||
}
|
||||
}
|
||||
@@ -1,111 +1,33 @@
|
||||
package org.qortal.api;
|
||||
|
||||
import org.qortal.arbitrary.ArbitraryDataResource;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataRenderManager;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
public abstract class Security {
|
||||
|
||||
public static final String API_KEY_HEADER = "X-API-KEY";
|
||||
|
||||
public static void checkApiCallAllowed(HttpServletRequest request) {
|
||||
// We may want to allow automatic authentication for local requests, if enabled in settings
|
||||
boolean localAuthBypassEnabled = Settings.getInstance().isLocalAuthBypassEnabled();
|
||||
if (localAuthBypassEnabled) {
|
||||
try {
|
||||
InetAddress remoteAddr = InetAddress.getByName(request.getRemoteAddr());
|
||||
if (remoteAddr.isLoopbackAddress()) {
|
||||
// Request originates from loopback address, so allow it
|
||||
return;
|
||||
}
|
||||
} catch (UnknownHostException e) {
|
||||
// Ignore failure, and fallback to API key authentication
|
||||
}
|
||||
}
|
||||
|
||||
// Retrieve the API key
|
||||
ApiKey apiKey = Security.getApiKey(request);
|
||||
if (!apiKey.generated()) {
|
||||
// Not generated an API key yet, so disallow sensitive API calls
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "API key not generated");
|
||||
}
|
||||
|
||||
// We require an API key to be passed
|
||||
String expectedApiKey = Settings.getInstance().getApiKey();
|
||||
String passedApiKey = request.getHeader(API_KEY_HEADER);
|
||||
if (passedApiKey == null) {
|
||||
// Try query string - this is needed to avoid a CORS preflight. See: https://stackoverflow.com/a/43881141
|
||||
passedApiKey = request.getParameter("apiKey");
|
||||
}
|
||||
if (passedApiKey == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Missing 'X-API-KEY' header");
|
||||
}
|
||||
|
||||
// The API keys must match
|
||||
if (!apiKey.toString().equals(passedApiKey)) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "API key invalid");
|
||||
}
|
||||
}
|
||||
if ((expectedApiKey != null && !expectedApiKey.equals(passedApiKey)) ||
|
||||
(passedApiKey != null && !passedApiKey.equals(expectedApiKey)))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
|
||||
|
||||
public static void disallowLoopbackRequests(HttpServletRequest request) {
|
||||
InetAddress remoteAddr;
|
||||
try {
|
||||
InetAddress remoteAddr = InetAddress.getByName(request.getRemoteAddr());
|
||||
if (remoteAddr.isLoopbackAddress()) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Local requests not allowed");
|
||||
}
|
||||
remoteAddr = InetAddress.getByName(request.getRemoteAddr());
|
||||
} catch (UnknownHostException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
|
||||
}
|
||||
}
|
||||
|
||||
public static void disallowLoopbackRequestsIfAuthBypassEnabled(HttpServletRequest request) {
|
||||
if (Settings.getInstance().isLocalAuthBypassEnabled()) {
|
||||
try {
|
||||
InetAddress remoteAddr = InetAddress.getByName(request.getRemoteAddr());
|
||||
if (remoteAddr.isLoopbackAddress()) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Local requests not allowed when localAuthBypassEnabled is enabled in settings");
|
||||
}
|
||||
} catch (UnknownHostException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void requirePriorAuthorization(HttpServletRequest request, String resourceId, Service service, String identifier) {
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, service, identifier);
|
||||
if (!ArbitraryDataRenderManager.getInstance().isAuthorized(resource)) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Call /render/authorize first");
|
||||
}
|
||||
}
|
||||
|
||||
public static void requirePriorAuthorizationOrApiKey(HttpServletRequest request, String resourceId, Service service, String identifier) {
|
||||
try {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
} catch (ApiException e) {
|
||||
// API call wasn't allowed, but maybe it was pre-authorized
|
||||
Security.requirePriorAuthorization(request, resourceId, service, identifier);
|
||||
}
|
||||
}
|
||||
|
||||
public static ApiKey getApiKey(HttpServletRequest request) {
|
||||
ApiKey apiKey = ApiService.getInstance().getApiKey();
|
||||
if (apiKey == null) {
|
||||
try {
|
||||
apiKey = new ApiKey();
|
||||
} catch (IOException e) {
|
||||
// Couldn't load API key - so we need to treat it as not generated, and therefore unauthorized
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
|
||||
}
|
||||
ApiService.getInstance().setApiKey(apiKey);
|
||||
}
|
||||
return apiKey;
|
||||
if (!remoteAddr.isLoopbackAddress())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
package org.qortal.api.domainmap.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.arbitrary.ArbitraryDataRenderer;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
@Path("/")
|
||||
@Tag(name = "Domain Map")
|
||||
public class DomainMapResource {
|
||||
|
||||
@Context HttpServletRequest request;
|
||||
@Context HttpServletResponse response;
|
||||
@Context ServletContext context;
|
||||
|
||||
|
||||
@GET
|
||||
public HttpServletResponse getIndexByDomainMap() {
|
||||
return this.getDomainMap("/");
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{path:.*}")
|
||||
public HttpServletResponse getPathByDomainMap(@PathParam("path") String inPath) {
|
||||
return this.getDomainMap(inPath);
|
||||
}
|
||||
|
||||
private HttpServletResponse getDomainMap(String inPath) {
|
||||
Map<String, String> domainMap = Settings.getInstance().getSimpleDomainMap();
|
||||
if (domainMap != null && domainMap.containsKey(request.getServerName())) {
|
||||
// Build synchronously, so that we don't need to make the summary API endpoints available over
|
||||
// the domain map server. This means that there will be no loading screen, but this is potentially
|
||||
// preferred in this situation anyway (e.g. to avoid confusing search engine robots).
|
||||
return this.get(domainMap.get(request.getServerName()), ResourceIdType.NAME, Service.WEBSITE, inPath, null, "", false, false);
|
||||
}
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Error 404: File Not Found");
|
||||
}
|
||||
|
||||
private HttpServletResponse get(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
|
||||
String secret58, String prefix, boolean usePrefix, boolean async) {
|
||||
|
||||
ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(resourceId, resourceIdType, service, inPath,
|
||||
secret58, prefix, usePrefix, async, request, response, context);
|
||||
return renderer.render();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,126 +0,0 @@
|
||||
package org.qortal.api.gateway.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.arbitrary.ArbitraryDataReader;
|
||||
import org.qortal.arbitrary.ArbitraryDataRenderer;
|
||||
import org.qortal.arbitrary.ArbitraryDataResource;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.data.arbitrary.ArbitraryResourceStatus;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
|
||||
|
||||
@Path("/")
|
||||
@Tag(name = "Gateway")
|
||||
public class GatewayResource {
|
||||
|
||||
@Context HttpServletRequest request;
|
||||
@Context HttpServletResponse response;
|
||||
@Context ServletContext context;
|
||||
|
||||
/**
|
||||
* We need to allow resource status checking (and building) via the gateway, as the node's API port
|
||||
* may not be forwarded and will almost certainly not be authenticated. Since gateways allow for
|
||||
* all resources to be loaded except those that are blocked, there is no need for authentication.
|
||||
*/
|
||||
@GET
|
||||
@Path("/arbitrary/resource/status/{service}/{name}")
|
||||
public ArbitraryResourceStatus getDefaultResourceStatus(@PathParam("service") Service service,
|
||||
@PathParam("name") String name,
|
||||
@QueryParam("build") Boolean build) {
|
||||
|
||||
return this.getStatus(service, name, null, build);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/arbitrary/resource/status/{service}/{name}/{identifier}")
|
||||
public ArbitraryResourceStatus getResourceStatus(@PathParam("service") Service service,
|
||||
@PathParam("name") String name,
|
||||
@PathParam("identifier") String identifier,
|
||||
@QueryParam("build") Boolean build) {
|
||||
|
||||
return this.getStatus(service, name, identifier, build);
|
||||
}
|
||||
|
||||
private ArbitraryResourceStatus getStatus(Service service, String name, String identifier, Boolean build) {
|
||||
|
||||
// If "build=true" has been specified in the query string, build the resource before returning its status
|
||||
if (build != null && build == true) {
|
||||
ArbitraryDataReader reader = new ArbitraryDataReader(name, ArbitraryDataFile.ResourceIdType.NAME, service, null);
|
||||
try {
|
||||
if (!reader.isBuilding()) {
|
||||
reader.loadSynchronously(false);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// No need to handle exception, as it will be reflected in the status
|
||||
}
|
||||
}
|
||||
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(name, ResourceIdType.NAME, service, identifier);
|
||||
return resource.getStatus(false);
|
||||
}
|
||||
|
||||
|
||||
@GET
|
||||
public HttpServletResponse getRoot() {
|
||||
return ArbitraryDataRenderer.getResponse(response, 200, "");
|
||||
}
|
||||
|
||||
|
||||
@GET
|
||||
@Path("{name}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathByName(@PathParam("name") String name,
|
||||
@PathParam("path") String inPath) {
|
||||
// Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
|
||||
Security.disallowLoopbackRequests(request);
|
||||
return this.get(name, ResourceIdType.NAME, Service.WEBSITE, inPath, null, "", true, true);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{name}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexByName(@PathParam("name") String name) {
|
||||
// Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
|
||||
Security.disallowLoopbackRequests(request);
|
||||
return this.get(name, ResourceIdType.NAME, Service.WEBSITE, "/", null, "", true, true);
|
||||
}
|
||||
|
||||
|
||||
// Optional /site alternative for backwards support
|
||||
|
||||
@GET
|
||||
@Path("/site/{name}/{path:.*}")
|
||||
public HttpServletResponse getSitePathByName(@PathParam("name") String name,
|
||||
@PathParam("path") String inPath) {
|
||||
// Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
|
||||
Security.disallowLoopbackRequests(request);
|
||||
return this.get(name, ResourceIdType.NAME, Service.WEBSITE, inPath, null, "/site", true, true);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/site/{name}")
|
||||
public HttpServletResponse getSiteIndexByName(@PathParam("name") String name) {
|
||||
// Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
|
||||
Security.disallowLoopbackRequests(request);
|
||||
return this.get(name, ResourceIdType.NAME, Service.WEBSITE, "/", null, "/site", true, true);
|
||||
}
|
||||
|
||||
|
||||
private HttpServletResponse get(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
|
||||
String secret58, String prefix, boolean usePrefix, boolean async) {
|
||||
|
||||
ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(resourceId, resourceIdType, service, inPath,
|
||||
secret58, prefix, usePrefix, async, request, response, context);
|
||||
return renderer.render();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
package org.qortal.api.model;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class BlockMintingInfo {
|
||||
|
||||
public byte[] minterPublicKey;
|
||||
public int minterLevel;
|
||||
public int onlineAccountsCount;
|
||||
public BigDecimal maxDistance;
|
||||
public BigInteger keyDistance;
|
||||
public double keyDistanceRatio;
|
||||
public long timestamp;
|
||||
public long timeDelta;
|
||||
|
||||
public BlockMintingInfo() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,74 +1,61 @@
|
||||
package org.qortal.api.model;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
|
||||
import org.qortal.data.network.PeerChainTipData;
|
||||
import org.qortal.data.network.PeerData;
|
||||
import org.qortal.network.Handshake;
|
||||
import org.qortal.network.Peer;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class ConnectedPeer {
|
||||
|
||||
public enum Direction {
|
||||
INBOUND,
|
||||
OUTBOUND;
|
||||
}
|
||||
public enum Direction {
|
||||
INBOUND,
|
||||
OUTBOUND;
|
||||
}
|
||||
public Direction direction;
|
||||
public Handshake handshakeStatus;
|
||||
public Long lastPing;
|
||||
public Long connectedWhen;
|
||||
public Long peersConnectedWhen;
|
||||
|
||||
public Direction direction;
|
||||
public Handshake handshakeStatus;
|
||||
public Long lastPing;
|
||||
public Long connectedWhen;
|
||||
public Long peersConnectedWhen;
|
||||
public String address;
|
||||
public String version;
|
||||
|
||||
public String address;
|
||||
public String version;
|
||||
public String nodeId;
|
||||
|
||||
public String nodeId;
|
||||
public Integer lastHeight;
|
||||
@Schema(example = "base58")
|
||||
public byte[] lastBlockSignature;
|
||||
public Long lastBlockTimestamp;
|
||||
|
||||
public Integer lastHeight;
|
||||
@Schema(example = "base58")
|
||||
public byte[] lastBlockSignature;
|
||||
public Long lastBlockTimestamp;
|
||||
public UUID connectionId;
|
||||
public String age;
|
||||
protected ConnectedPeer() {
|
||||
}
|
||||
|
||||
protected ConnectedPeer() {
|
||||
}
|
||||
public ConnectedPeer(Peer peer) {
|
||||
this.direction = peer.isOutbound() ? Direction.OUTBOUND : Direction.INBOUND;
|
||||
this.handshakeStatus = peer.getHandshakeStatus();
|
||||
this.lastPing = peer.getLastPing();
|
||||
|
||||
public ConnectedPeer(Peer peer) {
|
||||
this.direction = peer.isOutbound() ? Direction.OUTBOUND : Direction.INBOUND;
|
||||
this.handshakeStatus = peer.getHandshakeStatus();
|
||||
this.lastPing = peer.getLastPing();
|
||||
PeerData peerData = peer.getPeerData();
|
||||
this.connectedWhen = peer.getConnectionTimestamp();
|
||||
this.peersConnectedWhen = peer.getPeersConnectionTimestamp();
|
||||
|
||||
PeerData peerData = peer.getPeerData();
|
||||
this.connectedWhen = peer.getConnectionTimestamp();
|
||||
this.peersConnectedWhen = peer.getPeersConnectionTimestamp();
|
||||
this.address = peerData.getAddress().toString();
|
||||
|
||||
this.address = peerData.getAddress().toString();
|
||||
this.version = peer.getPeersVersionString();
|
||||
this.nodeId = peer.getPeersNodeId();
|
||||
|
||||
this.version = peer.getPeersVersionString();
|
||||
this.nodeId = peer.getPeersNodeId();
|
||||
this.connectionId = peer.getPeerConnectionId();
|
||||
if (peer.getConnectionEstablishedTime() > 0) {
|
||||
long age = (System.currentTimeMillis() - peer.getConnectionEstablishedTime());
|
||||
long minutes = TimeUnit.MILLISECONDS.toMinutes(age);
|
||||
long seconds = TimeUnit.MILLISECONDS.toSeconds(age) - TimeUnit.MINUTES.toSeconds(minutes);
|
||||
this.age = String.format("%dm %ds", minutes, seconds);
|
||||
} else {
|
||||
this.age = "connecting...";
|
||||
}
|
||||
|
||||
PeerChainTipData peerChainTipData = peer.getChainTipData();
|
||||
if (peerChainTipData != null) {
|
||||
this.lastHeight = peerChainTipData.getLastHeight();
|
||||
this.lastBlockSignature = peerChainTipData.getLastBlockSignature();
|
||||
this.lastBlockTimestamp = peerChainTipData.getLastBlockTimestamp();
|
||||
}
|
||||
}
|
||||
PeerChainTipData peerChainTipData = peer.getChainTipData();
|
||||
if (peerChainTipData != null) {
|
||||
this.lastHeight = peerChainTipData.getLastHeight();
|
||||
this.lastBlockSignature = peerChainTipData.getLastBlockSignature();
|
||||
this.lastBlockTimestamp = peerChainTipData.getLastBlockTimestamp();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
package org.qortal.api.model;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class CrossChainDualSecretRequest {
|
||||
|
||||
@Schema(description = "Public key to match AT's trade 'partner'", example = "C6wuddsBV3HzRrXUtezE7P5MoRXp5m3mEDokRDGZB6ry")
|
||||
public byte[] partnerPublicKey;
|
||||
|
||||
@Schema(description = "Qortal AT address")
|
||||
public String atAddress;
|
||||
|
||||
@Schema(description = "secret-A (32 bytes)", example = "FHMzten4he9jZ4HGb4297Utj6F5g2w7serjq2EnAg2s1")
|
||||
public byte[] secretA;
|
||||
|
||||
@Schema(description = "secret-B (32 bytes)", example = "EN2Bgx3BcEMtxFCewmCVSMkfZjVKYhx3KEXC5A21KBGx")
|
||||
public byte[] secretB;
|
||||
|
||||
@Schema(description = "Qortal address for receiving QORT from AT")
|
||||
public String receivingAddress;
|
||||
|
||||
public CrossChainDualSecretRequest() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -8,14 +8,17 @@ import io.swagger.v3.oas.annotations.media.Schema;
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class CrossChainSecretRequest {
|
||||
|
||||
@Schema(description = "Private key to match AT's trade 'partner'", example = "C6wuddsBV3HzRrXUtezE7P5MoRXp5m3mEDokRDGZB6ry")
|
||||
public byte[] partnerPrivateKey;
|
||||
@Schema(description = "Public key to match AT's trade 'partner'", example = "C6wuddsBV3HzRrXUtezE7P5MoRXp5m3mEDokRDGZB6ry")
|
||||
public byte[] partnerPublicKey;
|
||||
|
||||
@Schema(description = "Qortal AT address")
|
||||
public String atAddress;
|
||||
|
||||
@Schema(description = "Secret (32 bytes)", example = "FHMzten4he9jZ4HGb4297Utj6F5g2w7serjq2EnAg2s1")
|
||||
public byte[] secret;
|
||||
@Schema(description = "secret-A (32 bytes)", example = "FHMzten4he9jZ4HGb4297Utj6F5g2w7serjq2EnAg2s1")
|
||||
public byte[] secretA;
|
||||
|
||||
@Schema(description = "secret-B (32 bytes)", example = "EN2Bgx3BcEMtxFCewmCVSMkfZjVKYhx3KEXC5A21KBGx")
|
||||
public byte[] secretB;
|
||||
|
||||
@Schema(description = "Qortal address for receiving QORT from AT")
|
||||
public String receivingAddress;
|
||||
|
||||
@@ -25,12 +25,6 @@ public class CrossChainTradeSummary {
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
private long foreignAmount;
|
||||
|
||||
private String atAddress;
|
||||
|
||||
private String sellerAddress;
|
||||
|
||||
private String buyerReceivingAddress;
|
||||
|
||||
protected CrossChainTradeSummary() {
|
||||
/* For JAXB */
|
||||
}
|
||||
@@ -40,9 +34,6 @@ public class CrossChainTradeSummary {
|
||||
this.qortAmount = crossChainTradeData.qortAmount;
|
||||
this.foreignAmount = crossChainTradeData.expectedForeignAmount;
|
||||
this.btcAmount = this.foreignAmount;
|
||||
this.sellerAddress = crossChainTradeData.qortalCreator;
|
||||
this.buyerReceivingAddress = crossChainTradeData.qortalPartnerReceivingAddress;
|
||||
this.atAddress = crossChainTradeData.qortalAtAddress;
|
||||
}
|
||||
|
||||
public long getTradeTimestamp() {
|
||||
@@ -57,11 +48,7 @@ public class CrossChainTradeSummary {
|
||||
return this.btcAmount;
|
||||
}
|
||||
|
||||
public long getForeignAmount() { return this.foreignAmount; }
|
||||
|
||||
public String getAtAddress() { return this.atAddress; }
|
||||
|
||||
public String getSellerAddress() { return this.sellerAddress; }
|
||||
|
||||
public String getBuyerReceivingAddressAddress() { return this.buyerReceivingAddress; }
|
||||
public long getForeignAmount() {
|
||||
return this.foreignAmount;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
package org.qortal.api.model;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import java.util.List;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class ListRequest {
|
||||
|
||||
@Schema(description = "A list of items")
|
||||
public List<String> items;
|
||||
|
||||
public ListRequest() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -12,7 +12,6 @@ public class NodeInfo {
|
||||
public long buildTimestamp;
|
||||
public String nodeId;
|
||||
public boolean isTestNet;
|
||||
public String type;
|
||||
|
||||
public NodeInfo() {
|
||||
}
|
||||
|
||||
@@ -4,8 +4,6 @@ import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.OnlineAccountsManager;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.network.Network;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
@@ -22,12 +20,12 @@ public class NodeStatus {
|
||||
public final int height;
|
||||
|
||||
public NodeStatus() {
|
||||
this.isMintingPossible = OnlineAccountsManager.getInstance().hasActiveOnlineAccountSignatures();
|
||||
this.isMintingPossible = Controller.getInstance().isMintingPossible();
|
||||
|
||||
this.syncPercent = Synchronizer.getInstance().getSyncPercent();
|
||||
this.isSynchronizing = Synchronizer.getInstance().isSynchronizing();
|
||||
this.syncPercent = Controller.getInstance().getSyncPercent();
|
||||
this.isSynchronizing = this.syncPercent != null;
|
||||
|
||||
this.numberOfConnections = Network.getInstance().getImmutableHandshakedPeers().size();
|
||||
this.numberOfConnections = Network.getInstance().getHandshakedPeers().size();
|
||||
|
||||
this.height = Controller.getInstance().getChainHeight();
|
||||
}
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
package org.qortal.api.model;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class PeersSummary {
|
||||
|
||||
public int inboundConnections;
|
||||
public int outboundConnections;
|
||||
|
||||
public PeersSummary() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
package org.qortal.api.model.crosschain;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class DigibyteSendRequest {
|
||||
|
||||
@Schema(description = "Digibyte BIP32 extended private key", example = "tprv___________________________________________________________________________________________________________")
|
||||
public String xprv58;
|
||||
|
||||
@Schema(description = "Recipient's Digibyte address ('legacy' P2PKH only)", example = "1DigByteEaterAddressDontSendf59kuE")
|
||||
public String receivingAddress;
|
||||
|
||||
@Schema(description = "Amount of DGB to send", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public long digibyteAmount;
|
||||
|
||||
@Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 DGB (100 sats) per byte", example = "0.00000100", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public Long feePerByte;
|
||||
|
||||
public DigibyteSendRequest() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
package org.qortal.api.model.crosschain;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class DogecoinSendRequest {
|
||||
|
||||
@Schema(description = "Dogecoin BIP32 extended private key", example = "tprv___________________________________________________________________________________________________________")
|
||||
public String xprv58;
|
||||
|
||||
@Schema(description = "Recipient's Dogecoin address ('legacy' P2PKH only)", example = "DoGecoinEaterAddressDontSendhLfzKD")
|
||||
public String receivingAddress;
|
||||
|
||||
@Schema(description = "Amount of DOGE to send", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public long dogecoinAmount;
|
||||
|
||||
@Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 DOGE (100 sats) per byte", example = "0.00000100", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public Long feePerByte;
|
||||
|
||||
public DogecoinSendRequest() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package org.qortal.api.model.crosschain;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class PirateChainSendRequest {
|
||||
|
||||
@Schema(description = "32 bytes of entropy, Base58 encoded", example = "5oSXF53qENtdUyKhqSxYzP57m6RhVFP9BJKRr9E5kRGV")
|
||||
public String entropy58;
|
||||
|
||||
@Schema(description = "Recipient's Pirate Chain address", example = "zc...")
|
||||
public String receivingAddress;
|
||||
|
||||
@Schema(description = "Amount of ARRR to send", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public long arrrAmount;
|
||||
|
||||
@Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 ARRR (100 sats) per byte", example = "0.00000100", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public Long feePerByte;
|
||||
|
||||
@Schema(description = "Optional memo to include information for the recipient", example = "zc...")
|
||||
public String memo;
|
||||
|
||||
public PirateChainSendRequest() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
package org.qortal.api.model.crosschain;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
@XmlAccessorType(XmlAccessType.FIELD)
|
||||
public class RavencoinSendRequest {
|
||||
|
||||
@Schema(description = "Ravencoin BIP32 extended private key", example = "tprv___________________________________________________________________________________________________________")
|
||||
public String xprv58;
|
||||
|
||||
@Schema(description = "Recipient's Ravencoin address ('legacy' P2PKH only)", example = "1RvnCoinEaterAddressDontSendf59kuE")
|
||||
public String receivingAddress;
|
||||
|
||||
@Schema(description = "Amount of RVN to send", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public long ravencoinAmount;
|
||||
|
||||
@Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 RVN (100 sats) per byte", example = "0.00000100", type = "number")
|
||||
@XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
|
||||
public Long feePerByte;
|
||||
|
||||
public RavencoinSendRequest() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -12,11 +12,14 @@ import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
@@ -30,13 +33,11 @@ import org.qortal.api.Security;
|
||||
import org.qortal.api.model.ApiOnlineAccount;
|
||||
import org.qortal.api.model.RewardShareKeyRequest;
|
||||
import org.qortal.asset.Asset;
|
||||
import org.qortal.controller.LiteNode;
|
||||
import org.qortal.controller.OnlineAccountsManager;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.account.AccountData;
|
||||
import org.qortal.data.account.RewardShareData;
|
||||
import org.qortal.data.network.OnlineAccountData;
|
||||
import org.qortal.data.network.OnlineAccountLevel;
|
||||
import org.qortal.data.transaction.PublicizeTransactionData;
|
||||
import org.qortal.data.transaction.RewardShareTransactionData;
|
||||
import org.qortal.data.transaction.TransactionData;
|
||||
@@ -110,26 +111,18 @@ public class AddressesResource {
|
||||
if (!Crypto.isValidAddress(address))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
AccountData accountData;
|
||||
byte[] lastReference = null;
|
||||
|
||||
if (Settings.getInstance().isLite()) {
|
||||
// Lite nodes request data from peers instead of the local db
|
||||
accountData = LiteNode.getInstance().fetchAccountData(address);
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
AccountData accountData = repository.getAccountRepository().getAccount(address);
|
||||
// Not found?
|
||||
if (accountData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
lastReference = accountData.getReference();
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
else {
|
||||
// All other node types request data from local db
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
accountData = repository.getAccountRepository().getAccount(address);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
// Not found?
|
||||
if (accountData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
byte[] lastReference = accountData.getReference();
|
||||
|
||||
if (lastReference == null || lastReference.length == 0)
|
||||
return "false";
|
||||
@@ -165,7 +158,7 @@ public class AddressesResource {
|
||||
)
|
||||
@ApiErrors({ApiError.PUBLIC_KEY_NOT_FOUND, ApiError.REPOSITORY_ISSUE})
|
||||
public List<ApiOnlineAccount> getOnlineAccounts() {
|
||||
List<OnlineAccountData> onlineAccounts = OnlineAccountsManager.getInstance().getOnlineAccounts();
|
||||
List<OnlineAccountData> onlineAccounts = Controller.getInstance().getOnlineAccounts();
|
||||
|
||||
// Map OnlineAccountData entries to OnlineAccount via reward-share data
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
@@ -187,66 +180,6 @@ public class AddressesResource {
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/online/levels")
|
||||
@Operation(
|
||||
summary = "Return currently 'online' accounts counts, grouped by level",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "online accounts",
|
||||
content = @Content(mediaType = MediaType.APPLICATION_JSON, array = @ArraySchema(schema = @Schema(implementation = ApiOnlineAccount.class)))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.PUBLIC_KEY_NOT_FOUND, ApiError.REPOSITORY_ISSUE})
|
||||
public List<OnlineAccountLevel> getOnlineAccountsByLevel() {
|
||||
List<OnlineAccountData> onlineAccounts = OnlineAccountsManager.getInstance().getOnlineAccounts();
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<OnlineAccountLevel> onlineAccountLevels = new ArrayList<>();
|
||||
|
||||
for (OnlineAccountData onlineAccountData : onlineAccounts) {
|
||||
try {
|
||||
final int minterLevel = Account.getRewardShareEffectiveMintingLevelIncludingLevelZero(repository, onlineAccountData.getPublicKey());
|
||||
|
||||
OnlineAccountLevel onlineAccountLevel = onlineAccountLevels.stream()
|
||||
.filter(a -> a.getLevel() == minterLevel)
|
||||
.findFirst().orElse(null);
|
||||
|
||||
// Note: I don't think we can use the level as the List index here because there will be gaps.
|
||||
// So we are forced to manually look up the existing item each time.
|
||||
// There's probably a nice shorthand java way of doing this, but this approach gets the same result.
|
||||
|
||||
if (onlineAccountLevel == null) {
|
||||
// No entry exists for this level yet, so create one
|
||||
onlineAccountLevel = new OnlineAccountLevel(minterLevel, 1);
|
||||
onlineAccountLevels.add(onlineAccountLevel);
|
||||
}
|
||||
else {
|
||||
// Already exists - so increment the count
|
||||
int existingCount = onlineAccountLevel.getCount();
|
||||
onlineAccountLevel.setCount(++existingCount);
|
||||
|
||||
// Then replace the existing item
|
||||
int index = onlineAccountLevels.indexOf(onlineAccountLevel);
|
||||
onlineAccountLevels.set(index, onlineAccountLevel);
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by level
|
||||
onlineAccountLevels.sort(Comparator.comparingInt(OnlineAccountLevel::getLevel));
|
||||
|
||||
return onlineAccountLevels;
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/balance/{address}")
|
||||
@Operation(
|
||||
@@ -542,7 +475,7 @@ public class AddressesResource {
|
||||
)
|
||||
@ApiErrors({ApiError.TRANSACTION_INVALID, ApiError.INVALID_DATA, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String computePublicize(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String rawBytes58) {
|
||||
public String computePublicize(String rawBytes58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
@@ -22,28 +22,32 @@ import java.time.OffsetDateTime;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.core.LoggerContext;
|
||||
import org.apache.logging.log4j.core.appender.RollingFileAppender;
|
||||
import org.qortal.account.Account;
|
||||
import org.qortal.account.PrivateKeyAccount;
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.ActivitySummary;
|
||||
import org.qortal.api.model.NodeInfo;
|
||||
import org.qortal.api.model.NodeStatus;
|
||||
import org.qortal.block.BlockChain;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.controller.Synchronizer.SynchronizationResult;
|
||||
import org.qortal.data.account.MintingAccountData;
|
||||
import org.qortal.data.account.RewardShareData;
|
||||
@@ -63,8 +67,6 @@ import com.google.common.collect.Lists;
|
||||
@Tag(name = "Admin")
|
||||
public class AdminResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(AdminResource.class);
|
||||
|
||||
private static final int MAX_LOG_LINES = 500;
|
||||
|
||||
@Context
|
||||
@@ -74,8 +76,7 @@ public class AdminResource {
|
||||
@Path("/unused")
|
||||
@Parameter(in = ParameterIn.PATH, name = "assetid", description = "Asset ID, 0 is native coin", schema = @Schema(type = "integer"))
|
||||
@Parameter(in = ParameterIn.PATH, name = "otherassetid", description = "Asset ID, 0 is native coin", schema = @Schema(type = "integer"))
|
||||
@Parameter(in = ParameterIn.PATH, name = "address", description = "An account address", example = "QgV4s3xnzLhVBEJxcYui4u4q11yhUHsd9v")
|
||||
@Parameter(in = ParameterIn.PATH, name = "path", description = "Local path to folder containing the files", schema = @Schema(type = "String", defaultValue = "/Users/user/Documents/MyStaticWebsite"))
|
||||
@Parameter(in = ParameterIn.PATH, name = "address", description = "an account address", example = "QgV4s3xnzLhVBEJxcYui4u4q11yhUHsd9v")
|
||||
@Parameter(in = ParameterIn.QUERY, name = "count", description = "Maximum number of entries to return, 0 means none", schema = @Schema(type = "integer", defaultValue = "20"))
|
||||
@Parameter(in = ParameterIn.QUERY, name = "limit", description = "Maximum number of entries to return, 0 means unlimited", schema = @Schema(type = "integer", defaultValue = "20"))
|
||||
@Parameter(in = ParameterIn.QUERY, name = "offset", description = "Starting entry in results, 0 is first entry", schema = @Schema(type = "integer"))
|
||||
@@ -119,23 +120,10 @@ public class AdminResource {
|
||||
nodeInfo.buildTimestamp = Controller.getInstance().getBuildTimestamp();
|
||||
nodeInfo.nodeId = Network.getInstance().getOurNodeId();
|
||||
nodeInfo.isTestNet = Settings.getInstance().isTestNet();
|
||||
nodeInfo.type = getNodeType();
|
||||
|
||||
return nodeInfo;
|
||||
}
|
||||
|
||||
private String getNodeType() {
|
||||
if (Settings.getInstance().isLite()) {
|
||||
return "lite";
|
||||
}
|
||||
else if (Settings.getInstance().isTopOnly()) {
|
||||
return "topOnly";
|
||||
}
|
||||
else {
|
||||
return "full";
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/status")
|
||||
@Operation(
|
||||
@@ -146,7 +134,10 @@ public class AdminResource {
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public NodeStatus status() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
NodeStatus nodeStatus = new NodeStatus();
|
||||
|
||||
return nodeStatus;
|
||||
@@ -165,7 +156,7 @@ public class AdminResource {
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String shutdown(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public String shutdown() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
new Thread(() -> {
|
||||
@@ -194,7 +185,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public ActivitySummary summary(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public ActivitySummary summary() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
ActivitySummary summary = new ActivitySummary();
|
||||
@@ -240,7 +231,7 @@ public class AdminResource {
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public Controller.StatsSnapshot getEngineStats(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public Controller.StatsSnapshot getEngineStats() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
return Controller.getInstance().getStatsSnapshot();
|
||||
@@ -258,7 +249,9 @@ public class AdminResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<MintingAccountData> getMintingAccounts() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<MintingAccountData> mintingAccounts = repository.getAccountRepository().getMintingAccounts();
|
||||
@@ -304,7 +297,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.REPOSITORY_ISSUE, ApiError.CANNOT_MINT})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String addMintingAccount(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String seed58) {
|
||||
public String addMintingAccount(String seed58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
@@ -327,7 +320,6 @@ public class AdminResource {
|
||||
|
||||
repository.getAccountRepository().save(mintingAccountData);
|
||||
repository.saveChanges();
|
||||
repository.exportNodeLocalData();//after adding new minting account let's persist it to the backup MintingAccounts.json
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY, e);
|
||||
} catch (DataException e) {
|
||||
@@ -358,7 +350,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String deleteMintingAccount(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
public String deleteMintingAccount(String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
@@ -368,7 +360,6 @@ public class AdminResource {
|
||||
return "false";
|
||||
|
||||
repository.saveChanges();
|
||||
repository.exportNodeLocalData();//after removing new minting account let's persist it to the backup MintingAccounts.json
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY, e);
|
||||
} catch (DataException e) {
|
||||
@@ -394,10 +385,6 @@ public class AdminResource {
|
||||
) @QueryParam("limit") Integer limit, @Parameter(
|
||||
ref = "offset"
|
||||
) @QueryParam("offset") Integer offset, @Parameter(
|
||||
name = "tail",
|
||||
description = "Fetch most recent log lines",
|
||||
schema = @Schema(type = "boolean")
|
||||
) @QueryParam("tail") Boolean tail, @Parameter(
|
||||
ref = "reverse"
|
||||
) @QueryParam("reverse") Boolean reverse) {
|
||||
LoggerContext loggerContext = (LoggerContext) LogManager.getContext();
|
||||
@@ -413,13 +400,6 @@ public class AdminResource {
|
||||
if (reverse != null && reverse)
|
||||
logLines = Lists.reverse(logLines);
|
||||
|
||||
// Tail mode - return the last X lines (where X = limit)
|
||||
if (tail != null && tail) {
|
||||
if (limit != null && limit > 0) {
|
||||
offset = logLines.size() - limit;
|
||||
}
|
||||
}
|
||||
|
||||
// offset out of bounds?
|
||||
if (offset != null && (offset < 0 || offset >= logLines.size()))
|
||||
return "";
|
||||
@@ -440,7 +420,7 @@ public class AdminResource {
|
||||
|
||||
limit = Math.min(limit, logLines.size());
|
||||
|
||||
logLines.subList(limit, logLines.size()).clear();
|
||||
logLines.subList(limit - 1, logLines.size()).clear();
|
||||
|
||||
return String.join("\n", logLines);
|
||||
} catch (IOException e) {
|
||||
@@ -470,7 +450,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_HEIGHT, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String orphan(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String targetHeightString) {
|
||||
public String orphan(String targetHeightString) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -479,23 +459,6 @@ public class AdminResource {
|
||||
if (targetHeight <= 0 || targetHeight > Controller.getInstance().getChainHeight())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_HEIGHT);
|
||||
|
||||
// Make sure we're not orphaning as far back as the archived blocks
|
||||
// FUTURE: we could support this by first importing earlier blocks from the archive
|
||||
if (Settings.getInstance().isTopOnly() ||
|
||||
Settings.getInstance().isArchiveEnabled()) {
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
// Find the first unarchived block
|
||||
int oldestBlock = repository.getBlockArchiveRepository().getBlockArchiveHeight();
|
||||
// Add some extra blocks just in case we're currently archiving/pruning
|
||||
oldestBlock += 100;
|
||||
if (targetHeight <= oldestBlock) {
|
||||
LOGGER.info("Unable to orphan beyond block {} because it is archived", oldestBlock);
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_HEIGHT);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (BlockChain.orphan(targetHeight))
|
||||
return "true";
|
||||
else
|
||||
@@ -529,7 +492,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String forceSync(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String targetPeerAddress) {
|
||||
public String forceSync(String targetPeerAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -537,7 +500,7 @@ public class AdminResource {
|
||||
PeerAddress peerAddress = PeerAddress.fromString(targetPeerAddress);
|
||||
InetSocketAddress resolvedAddress = peerAddress.toSocketAddress();
|
||||
|
||||
List<Peer> peers = Network.getInstance().getImmutableHandshakedPeers();
|
||||
List<Peer> peers = Network.getInstance().getHandshakedPeers();
|
||||
Peer targetPeer = peers.stream().filter(peer -> peer.getResolvedAddress().equals(resolvedAddress)).findFirst().orElse(null);
|
||||
|
||||
if (targetPeer == null)
|
||||
@@ -551,7 +514,7 @@ public class AdminResource {
|
||||
SynchronizationResult syncResult;
|
||||
try {
|
||||
do {
|
||||
syncResult = Synchronizer.getInstance().actuallySynchronize(targetPeer, true);
|
||||
syncResult = Controller.getInstance().actuallySynchronize(targetPeer, true);
|
||||
} while (syncResult == SynchronizationResult.OK);
|
||||
} finally {
|
||||
blockchainLock.unlock();
|
||||
@@ -571,16 +534,27 @@ public class AdminResource {
|
||||
@Path("/repository/data")
|
||||
@Operation(
|
||||
summary = "Export sensitive/node-local data from repository.",
|
||||
description = "Exports data to .json files on local machine"
|
||||
description = "Exports data to .script files on local machine"
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String exportRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public String exportRepository() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
repository.exportNodeLocalData();
|
||||
return "true";
|
||||
ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();
|
||||
|
||||
blockchainLock.lockInterruptibly();
|
||||
|
||||
try {
|
||||
repository.exportNodeLocalData();
|
||||
return "true";
|
||||
} finally {
|
||||
blockchainLock.unlock();
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// We couldn't lock blockchain to perform export
|
||||
return "false";
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -590,13 +564,13 @@ public class AdminResource {
|
||||
@Path("/repository/data")
|
||||
@Operation(
|
||||
summary = "Import data into repository.",
|
||||
description = "Imports data from file on local machine. Filename is forced to 'qortal-backup/TradeBotStates.json' if apiKey is not set.",
|
||||
description = "Imports data from file on local machine. Filename is forced to 'import.script' if apiKey is not set.",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string", example = "qortal-backup/TradeBotStates.json"
|
||||
type = "string", example = "MintingAccounts.script"
|
||||
)
|
||||
)
|
||||
),
|
||||
@@ -609,9 +583,13 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String importRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String filename) {
|
||||
public String importRepository(String filename) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
// Hard-coded because it's too dangerous to allow user-supplied filenames in weaker security contexts
|
||||
if (Settings.getInstance().getApiKey() == null)
|
||||
filename = "import.script";
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();
|
||||
|
||||
@@ -622,10 +600,6 @@ public class AdminResource {
|
||||
repository.saveChanges();
|
||||
|
||||
return "true";
|
||||
|
||||
} catch (IOException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA, e);
|
||||
|
||||
} finally {
|
||||
blockchainLock.unlock();
|
||||
}
|
||||
@@ -651,7 +625,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String checkpointRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public String checkpointRepository() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
RepositoryManager.setRequestedCheckpoint(Boolean.TRUE);
|
||||
@@ -672,7 +646,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String backupRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public String backupRepository() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
@@ -681,16 +655,14 @@ public class AdminResource {
|
||||
blockchainLock.lockInterruptibly();
|
||||
|
||||
try {
|
||||
// Timeout if the database isn't ready for backing up after 60 seconds
|
||||
long timeout = 60 * 1000L;
|
||||
repository.backup(true, "backup", timeout);
|
||||
repository.backup(true);
|
||||
repository.saveChanges();
|
||||
|
||||
return "true";
|
||||
} finally {
|
||||
blockchainLock.unlock();
|
||||
}
|
||||
} catch (InterruptedException | TimeoutException e) {
|
||||
} catch (InterruptedException e) {
|
||||
// We couldn't lock blockchain to perform backup
|
||||
return "false";
|
||||
} catch (DataException e) {
|
||||
@@ -706,7 +678,7 @@ public class AdminResource {
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public void performRepositoryMaintenance(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
public void performRepositoryMaintenance() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
@@ -715,114 +687,15 @@ public class AdminResource {
|
||||
blockchainLock.lockInterruptibly();
|
||||
|
||||
try {
|
||||
// Timeout if the database isn't ready to start after 60 seconds
|
||||
long timeout = 60 * 1000L;
|
||||
repository.performPeriodicMaintenance(timeout);
|
||||
repository.performPeriodicMaintenance();
|
||||
} finally {
|
||||
blockchainLock.unlock();
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// No big deal
|
||||
} catch (DataException | TimeoutException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/repository/importarchivedtrades")
|
||||
@Operation(
|
||||
summary = "Imports archived trades from TradeBotStatesArchive.json",
|
||||
description = "This can be used to recover trades that exist in the archive only, which may be needed if a<br />" +
|
||||
"problem occurred during the proof-of-work computation stage of a buy request.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean importArchivedTrades(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();
|
||||
|
||||
blockchainLock.lockInterruptibly();
|
||||
|
||||
try {
|
||||
repository.importDataFromFile("qortal-backup/TradeBotStatesArchive.json");
|
||||
repository.saveChanges();
|
||||
|
||||
return true;
|
||||
|
||||
} catch (IOException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA, e);
|
||||
|
||||
} finally {
|
||||
blockchainLock.unlock();
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// We couldn't lock blockchain to perform import
|
||||
return false;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/apikey/generate")
|
||||
@Operation(
|
||||
summary = "Generate an API key",
|
||||
description = "This request is unauthenticated if no API key has been generated yet. " +
|
||||
"If an API key already exists, it needs to be passed as a header and this endpoint " +
|
||||
"will then generate a new key which replaces the existing one.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "API key string",
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String generateApiKey(@HeaderParam(Security.API_KEY_HEADER) String apiKeyHeader) {
|
||||
ApiKey apiKey = Security.getApiKey(request);
|
||||
|
||||
// If the API key is already generated, we need to authenticate this request
|
||||
if (apiKey.generated() && apiKey.exists()) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
}
|
||||
|
||||
// Not generated yet - so we are safe to generate one
|
||||
// FUTURE: we may want to restrict this to local/loopback only?
|
||||
|
||||
try {
|
||||
apiKey.generate();
|
||||
} catch (IOException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Unable to generate API key");
|
||||
}
|
||||
|
||||
return apiKey.toString();
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apikey/test")
|
||||
@Operation(
|
||||
summary = "Test an API key",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "true if authenticated",
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String testApiKey(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
return "true";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,5 @@
package org.qortal.api.resource;

import com.google.common.primitives.Ints;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
@@ -9,14 +8,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
@@ -28,25 +20,18 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

import org.qortal.account.Account;
import org.qortal.api.ApiError;
import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.model.BlockMintingInfo;
import org.qortal.api.model.BlockSignerSummary;
import org.qortal.block.Block;
import org.qortal.controller.Controller;
import org.qortal.crypto.Crypto;
import org.qortal.data.account.AccountData;
import org.qortal.data.block.BlockData;
import org.qortal.data.block.BlockSummaryData;
import org.qortal.data.transaction.TransactionData;
import org.qortal.repository.BlockArchiveReader;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.transform.TransformationException;
import org.qortal.transform.block.BlockTransformer;
import org.qortal.utils.Base58;

@Path("/blocks")
@@ -75,8 +60,7 @@ public class BlocksResource {
@ApiErrors({
ApiError.INVALID_SIGNATURE, ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockData getBlock(@PathParam("signature") String signature58,
@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
public BlockData getBlock(@PathParam("signature") String signature58) {
// Decode signature
byte[] signature;
try {
@@ -86,101 +70,16 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
// Check the database first
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

// Not found, so try the block archive
blockData = repository.getBlockArchiveRepository().fromSignature(signature);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
return blockData;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

@GET
@Path("/signature/{signature}/data")
@Operation(
summary = "Fetch serialized, base58 encoded block data using base58 signature",
description = "Returns serialized data for the block that matches the given signature, and an optional block serialization version",
responses = {
@ApiResponse(
description = "the block data",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
)
}
)
@ApiErrors({
ApiError.INVALID_SIGNATURE, ApiError.BLOCK_UNKNOWN, ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE
})
public String getSerializedBlockData(@PathParam("signature") String signature58, @QueryParam("version") Integer version) {
// Decode signature
byte[] signature;
try {
signature = Base58.decode(signature58);
} catch (NumberFormatException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_SIGNATURE, e);
}

try (final Repository repository = RepositoryManager.getRepository()) {

// Default to version 1
if (version == null) {
version = 1;
}

// Check the database first
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
Block block = new Block(repository, blockData);
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bytes.write(Ints.toByteArray(block.getBlockData().getHeight()));

switch (version) {
case 1:
bytes.write(BlockTransformer.toBytes(block));
break;

case 2:
bytes.write(BlockTransformer.toBytesV2(block));
break;

default:
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
}

return Base58.encode(bytes.toByteArray());
}

// Not found, so try the block archive
byte[] bytes = BlockArchiveReader.getInstance().fetchSerializedBlockBytesForSignature(signature, false, repository);
if (bytes != null) {
if (version != 1) {
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.INVALID_CRITERIA, "Archived blocks require version 1");
}
return Base58.encode(bytes);
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
} catch (TransformationException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA, e);
} catch (DataException | IOException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

@GET
@Path("/signature/{signature}/transactions")
@Operation(
@@ -218,12 +117,8 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
// Check if the block exists in either the database or archive
if (repository.getBlockRepository().getHeightFromSignature(signature) == 0 &&
repository.getBlockArchiveRepository().getHeightFromSignature(signature) == 0) {
// Not found in either the database or archive
if (repository.getBlockRepository().getHeightFromSignature(signature) == 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

return repository.getBlockRepository().getTransactionsFromSignature(signature, limit, offset, reverse);
} catch (DataException e) {
@@ -252,19 +147,7 @@ public class BlocksResource {
})
public BlockData getFirstBlock() {
try (final Repository repository = RepositoryManager.getRepository()) {
// Check the database first
BlockData blockData = repository.getBlockRepository().fromHeight(1);
if (blockData != null) {
return blockData;
}

// Try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(1);
if (blockData != null) {
return blockData;
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
return repository.getBlockRepository().fromHeight(1);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -289,15 +172,9 @@ public class BlocksResource {
@ApiErrors({
ApiError.REPOSITORY_ISSUE
})
public BlockData getLastBlock(@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
public BlockData getLastBlock() {
try (final Repository repository = RepositoryManager.getRepository()) {
BlockData blockData = repository.getBlockRepository().getLastBlock();

if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}

return blockData;
return repository.getBlockRepository().getLastBlock();
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -332,28 +209,17 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
BlockData childBlockData = null;

// Check if block exists in database
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
return repository.getBlockRepository().fromReference(signature);
}

// Not found, so try the archive
// This also checks that the parent block exists
// It will return null if either the parent or child don't exist
childBlockData = repository.getBlockArchiveRepository().fromReference(signature);
// Check block exists
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

BlockData childBlockData = repository.getBlockRepository().fromReference(signature);

// Check child block exists
if (childBlockData == null) {
if (childBlockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

// Check child block's reference matches the supplied signature
if (!Arrays.equals(childBlockData.getReference(), signature)) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

return childBlockData;
} catch (DataException e) {
@@ -419,20 +285,13 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
// Firstly check the database
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
return blockData.getHeight();
}

// Not found, so try the archive
blockData = repository.getBlockArchiveRepository().fromSignature(signature);
if (blockData != null) {
return blockData.getHeight();
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
// Check block exists
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

return blockData.getHeight();
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -457,101 +316,13 @@ public class BlocksResource {
@ApiErrors({
ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockData getByHeight(@PathParam("height") int height,
@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
public BlockData getByHeight(@PathParam("height") int height) {
try (final Repository repository = RepositoryManager.getRepository()) {
// Firstly check the database
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

// Not found, so try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

@GET
@Path("/byheight/{height}/mintinginfo")
@Operation(
summary = "Fetch block minter info using block height",
description = "Returns the minter info for the block with given height",
responses = {
@ApiResponse(
description = "the block",
content = @Content(
schema = @Schema(
implementation = BlockData.class
)
)
)
}
)
@ApiErrors({
ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockMintingInfo getBlockMintingInfoByHeight(@PathParam("height") int height) {
try (final Repository repository = RepositoryManager.getRepository()) {
// Try the database
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null) {

// Not found, so try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
if (blockData == null) {

// Still not found
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}
}

Block block = new Block(repository, blockData);
BlockData parentBlockData = repository.getBlockRepository().fromSignature(blockData.getReference());
if (parentBlockData == null) {
// Parent block not found - try the archive
parentBlockData = repository.getBlockArchiveRepository().fromSignature(blockData.getReference());
if (parentBlockData == null) {

// Still not found
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}
}

int minterLevel = Account.getRewardShareEffectiveMintingLevel(repository, blockData.getMinterPublicKey());
if (minterLevel == 0)
// This may be unavailable when requesting a trimmed block
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);

BigInteger distance = block.calcKeyDistance(parentBlockData.getHeight(), parentBlockData.getSignature(), blockData.getMinterPublicKey(), minterLevel);
double ratio = new BigDecimal(distance).divide(new BigDecimal(block.MAX_DISTANCE), 40, RoundingMode.DOWN).doubleValue();
long timestamp = block.calcTimestamp(parentBlockData, blockData.getMinterPublicKey(), minterLevel);
long timeDelta = timestamp - parentBlockData.getTimestamp();

BlockMintingInfo blockMintingInfo = new BlockMintingInfo();
blockMintingInfo.minterPublicKey = blockData.getMinterPublicKey();
blockMintingInfo.minterLevel = minterLevel;
blockMintingInfo.onlineAccountsCount = blockData.getOnlineAccountsCount();
blockMintingInfo.maxDistance = new BigDecimal(block.MAX_DISTANCE);
blockMintingInfo.keyDistance = distance;
blockMintingInfo.keyDistanceRatio = ratio;
blockMintingInfo.timestamp = timestamp;
blockMintingInfo.timeDelta = timeDelta;

return blockMintingInfo;
return blockData;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -575,37 +346,15 @@ public class BlocksResource {
@ApiErrors({
ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockData getByTimestamp(@PathParam("timestamp") long timestamp,
@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
public BlockData getByTimestamp(@PathParam("timestamp") long timestamp) {
try (final Repository repository = RepositoryManager.getRepository()) {
BlockData blockData = null;

// Try the Blocks table
int height = repository.getBlockRepository().getHeightFromTimestamp(timestamp);
if (height > 1) {
// Found match in Blocks table
blockData = repository.getBlockRepository().fromHeight(height);
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

// Not found in Blocks table, so try the archive
height = repository.getBlockArchiveRepository().getHeightFromTimestamp(timestamp);
if (height > 1) {
// Found match in archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
}

// Ensure block exists
if (blockData == null) {
if (height == 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

return blockData;
} catch (DataException e) {
@@ -642,14 +391,9 @@ public class BlocksResource {

for (/* count already set */; count > 0; --count, ++height) {
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null) {
// Not found - try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
if (blockData == null) {
// Run out of blocks!
break;
}
}
if (blockData == null)
// Run out of blocks!
break;

blocks.add(blockData);
}
@@ -694,29 +438,7 @@ public class BlocksResource {
if (accountData == null || accountData.getPublicKey() == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.PUBLIC_KEY_NOT_FOUND);


List<BlockSummaryData> summaries = repository.getBlockRepository()
.getBlockSummariesBySigner(accountData.getPublicKey(), limit, offset, reverse);

// Add any from the archive
List<BlockSummaryData> archivedSummaries = repository.getBlockArchiveRepository()
.getBlockSummariesBySigner(accountData.getPublicKey(), limit, offset, reverse);
if (archivedSummaries != null && !archivedSummaries.isEmpty()) {
summaries.addAll(archivedSummaries);
}
else {
summaries = archivedSummaries;
}

// Sort the results (because they may have been obtained from two places)
if (reverse != null && reverse) {
summaries.sort((s1, s2) -> Integer.valueOf(s2.getHeight()).compareTo(Integer.valueOf(s1.getHeight())));
}
else {
summaries.sort(Comparator.comparing(s -> Integer.valueOf(s.getHeight())));
}

return summaries;
return repository.getBlockRepository().getBlockSummariesBySigner(accountData.getPublicKey(), limit, offset, reverse);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -752,8 +474,7 @@ public class BlocksResource {
if (!Crypto.isValidAddress(address))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

// This method pulls data from both Blocks and BlockArchive, so no need to query separately
return repository.getBlockArchiveRepository().getBlockSigners(addresses, limit, offset, reverse);
return repository.getBlockRepository().getBlockSigners(addresses, limit, offset, reverse);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -793,76 +514,7 @@ public class BlocksResource {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

try (final Repository repository = RepositoryManager.getRepository()) {

/*
* start  end   count   result
* 10     40    null    blocks 10 to 39 (excludes end block, ignore count)
*
* null   null  null    blocks 1 to 50 (assume count=50, maybe start=1)
* 30     null  null    blocks 30 to 79 (assume count=50)
* 30     null  10      blocks 30 to 39
*
* null   null  50      last 50 blocks? so if max(blocks.height) is 200, then blocks 151 to 200
* null   200   null    blocks 150 to 199 (excludes end block, assume count=50)
* null   200   10      blocks 190 to 199 (excludes end block)
*/

List<BlockSummaryData> blockSummaries = new ArrayList<>();

// Use the latest X blocks if only a count is specified
if (startHeight == null && endHeight == null && count != null) {
BlockData chainTip = repository.getBlockRepository().getLastBlock();
startHeight = chainTip.getHeight() - count;
endHeight = chainTip.getHeight();
}

// ... otherwise default the start height to 1
if (startHeight == null && endHeight == null) {
startHeight = 1;
}

// Default the count to 50
if (count == null) {
count = 50;
}

// If both a start and end height exist, ignore the count
if (startHeight != null && endHeight != null) {
if (startHeight > 0 && endHeight > 0) {
count = Integer.MAX_VALUE;
}
}

// Derive start height from end height if missing
if (startHeight == null || startHeight == 0) {
if (endHeight != null && endHeight > 0) {
if (count != null) {
startHeight = endHeight - count;
}
}
}

for (/* count already set */; count > 0; --count, ++startHeight) {
if (endHeight != null && startHeight >= endHeight) {
break;
}
BlockData blockData = repository.getBlockRepository().fromHeight(startHeight);
if (blockData == null) {
// Not found - try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(startHeight);
if (blockData == null) {
// Run out of blocks!
break;
}
}

if (blockData != null) {
BlockSummaryData blockSummaryData = new BlockSummaryData(blockData);
blockSummaries.add(blockSummaryData);
}
}

return blockSummaries;
return repository.getBlockRepository().getBlockSummaries(startHeight, endHeight, count);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}

@@ -1,95 +0,0 @@
package org.qortal.api.resource;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.api.ApiError;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.repository.Bootstrap;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.io.IOException;


@Path("/bootstrap")
@Tag(name = "Bootstrap")
public class BootstrapResource {

private static final Logger LOGGER = LogManager.getLogger(BootstrapResource.class);

@Context
HttpServletRequest request;

@POST
@Path("/create")
@Operation(
summary = "Create bootstrap",
description = "Builds a bootstrap file for distribution",
responses = {
@ApiResponse(
description = "path to file on success, an exception on failure",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
)
}
)
@SecurityRequirement(name = "apiKey")
public String createBootstrap(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {

Bootstrap bootstrap = new Bootstrap(repository);
try {
bootstrap.checkRepositoryState();
} catch (DataException e) {
LOGGER.info("Not ready to create bootstrap: {}", e.getMessage());
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.REPOSITORY_ISSUE, e.getMessage());
}
bootstrap.validateBlockchain();
return bootstrap.create();

} catch (DataException | InterruptedException | IOException e) {
LOGGER.info("Unable to create bootstrap", e);
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.REPOSITORY_ISSUE, e.getMessage());
}
}

@GET
@Path("/validate")
@Operation(
summary = "Validate blockchain",
description = "Useful to check database integrity prior to creating or after installing a bootstrap. " +
"This process is intensive and can take over an hour to run.",
responses = {
@ApiResponse(
description = "true if valid, false if invalid",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
)
}
)
@SecurityRequirement(name = "apiKey")
public boolean validateBootstrap(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {

Bootstrap bootstrap = new Bootstrap(repository);
return bootstrap.validateCompleteBlockchain();

} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE);
}
}
}
@@ -13,7 +13,11 @@ import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

@@ -154,7 +158,7 @@ public class ChatResource {
)
@ApiErrors({ApiError.TRANSACTION_INVALID, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildChat(@HeaderParam(Security.API_KEY_HEADER) String apiKey, ChatTransactionData transactionData) {
public String buildChat(ChatTransactionData transactionData) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -202,7 +206,7 @@ public class ChatResource {
)
@ApiErrors({ApiError.TRANSACTION_INVALID, ApiError.INVALID_DATA, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildChat(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String rawBytes58) {
public String buildChat(String rawBytes58) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {

@@ -5,14 +5,12 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.Arrays;
import java.util.Random;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
@@ -24,7 +22,7 @@ import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.CrossChainBuildRequest;
import org.qortal.api.model.CrossChainDualSecretRequest;
import org.qortal.api.model.CrossChainSecretRequest;
import org.qortal.api.model.CrossChainTradeRequest;
import org.qortal.asset.Asset;
import org.qortal.crosschain.BitcoinACCTv1;
@@ -81,8 +79,7 @@ public class CrossChainBitcoinACCTv1Resource {
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_DATA, ApiError.INVALID_REFERENCE, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildTrade(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainBuildRequest tradeRequest) {
public String buildTrade(CrossChainBuildRequest tradeRequest) {
Security.checkApiCallAllowed(request);

byte[] creatorPublicKey = tradeRequest.creatorPublicKey;
@@ -177,8 +174,7 @@ public class CrossChainBitcoinACCTv1Resource {
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildTradeMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainTradeRequest tradeRequest) {
public String buildTradeMessage(CrossChainTradeRequest tradeRequest) {
Security.checkApiCallAllowed(request);

byte[] tradePublicKey = tradeRequest.tradePublicKey;
@@ -246,7 +242,7 @@ public class CrossChainBitcoinACCTv1Resource {
content = @Content(
mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(
implementation = CrossChainDualSecretRequest.class
implementation = CrossChainSecretRequest.class
)
)
),
@@ -261,8 +257,7 @@ public class CrossChainBitcoinACCTv1Resource {
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_DATA, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildRedeemMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainDualSecretRequest secretRequest) {
public String buildRedeemMessage(CrossChainSecretRequest secretRequest) {
Security.checkApiCallAllowed(request);

byte[] partnerPublicKey = secretRequest.partnerPublicKey;
@@ -365,4 +360,4 @@ public class CrossChainBitcoinACCTv1Resource {
}
}

}
}
@@ -6,13 +6,11 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
@@ -25,8 +23,8 @@ import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.crosschain.BitcoinSendRequest;
import org.qortal.crosschain.Bitcoin;
import org.qortal.crosschain.BitcoinyTransaction;
import org.qortal.crosschain.ForeignBlockchainException;
import org.qortal.crosschain.SimpleTransaction;

@Path("/crosschain/btc")
@Tag(name = "Cross-Chain (Bitcoin)")
@@ -58,8 +56,7 @@ public class CrossChainBitcoinResource {
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public String getBitcoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
public String getBitcoinWalletBalance(String key58) {
Security.checkApiCallAllowed(request);

Bitcoin bitcoin = Bitcoin.getInstance();
@@ -67,16 +64,11 @@ public class CrossChainBitcoinResource {
if (!bitcoin.isValidDeterministicKey(key58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

try {
Long balance = bitcoin.getWalletBalanceFromTransactions(key58);
if (balance == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);

return balance.toString();

} catch (ForeignBlockchainException e) {
Long balance = bitcoin.getWalletBalance(key58);
if (balance == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}

return balance.toString();
}

@POST
@@ -97,13 +89,12 @@ public class CrossChainBitcoinResource {
),
responses = {
@ApiResponse(
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
content = @Content(array = @ArraySchema( schema = @Schema( implementation = BitcoinyTransaction.class ) ) )
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public List<SimpleTransaction> getBitcoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
public List<BitcoinyTransaction> getBitcoinWalletTransactions(String key58) {
Security.checkApiCallAllowed(request);

Bitcoin bitcoin = Bitcoin.getInstance();
@@ -122,7 +113,7 @@ public class CrossChainBitcoinResource {
@Path("/send")
@Operation(
summary = "Sends BTC from hierarchical, deterministic BIP32 wallet to specific address",
description = "Currently supports 'legacy' P2PKH Bitcoin addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
description = "Currently only supports 'legacy' P2PKH Bitcoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
@@ -139,8 +130,7 @@ public class CrossChainBitcoinResource {
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, BitcoinSendRequest bitcoinSendRequest) {
public String sendBitcoin(BitcoinSendRequest bitcoinSendRequest) {
Security.checkApiCallAllowed(request);

if (bitcoinSendRequest.bitcoinAmount <= 0)
@@ -174,4 +164,4 @@ public class CrossChainBitcoinResource {
return spendTransaction.getTxId().toString();
}

}
}
@@ -1,177 +0,0 @@
package org.qortal.api.resource;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

import org.bitcoinj.core.Transaction;
import org.qortal.api.ApiError;
import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.crosschain.DigibyteSendRequest;
import org.qortal.crosschain.Digibyte;
import org.qortal.crosschain.ForeignBlockchainException;
import org.qortal.crosschain.SimpleTransaction;

@Path("/crosschain/dgb")
@Tag(name = "Cross-Chain (Digibyte)")
public class CrossChainDigibyteResource {

@Context
HttpServletRequest request;

@POST
@Path("/walletbalance")
@Operation(
summary = "Returns DGB balance for hierarchical, deterministic BIP32 wallet",
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string",
description = "BIP32 'm' private/public key in base58",
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
)
)
),
responses = {
@ApiResponse(
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public String getDigibyteWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

Digibyte digibyte = Digibyte.getInstance();

if (!digibyte.isValidDeterministicKey(key58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

try {
Long balance = digibyte.getWalletBalanceFromTransactions(key58);
if (balance == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);

return balance.toString();

} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}
}

@POST
@Path("/wallettransactions")
@Operation(
summary = "Returns transactions for hierarchical, deterministic BIP32 wallet",
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string",
description = "BIP32 'm' private/public key in base58",
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
)
)
),
responses = {
@ApiResponse(
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public List<SimpleTransaction> getDigibyteWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

Digibyte digibyte = Digibyte.getInstance();

if (!digibyte.isValidDeterministicKey(key58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

try {
return digibyte.getWalletTransactions(key58);
} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}
}

@POST
@Path("/send")
@Operation(
summary = "Sends DGB from hierarchical, deterministic BIP32 wallet to specific address",
description = "Currently supports 'legacy' P2PKH Digibyte addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(
implementation = DigibyteSendRequest.class
)
)
),
responses = {
@ApiResponse(
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, DigibyteSendRequest digibyteSendRequest) {
Security.checkApiCallAllowed(request);

if (digibyteSendRequest.digibyteAmount <= 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

if (digibyteSendRequest.feePerByte != null && digibyteSendRequest.feePerByte <= 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

Digibyte digibyte = Digibyte.getInstance();

if (!digibyte.isValidAddress(digibyteSendRequest.receivingAddress))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

if (!digibyte.isValidDeterministicKey(digibyteSendRequest.xprv58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

Transaction spendTransaction = digibyte.buildSpend(digibyteSendRequest.xprv58,
digibyteSendRequest.receivingAddress,
digibyteSendRequest.digibyteAmount,
digibyteSendRequest.feePerByte);

if (spendTransaction == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE);

try {
digibyte.broadcastTransaction(spendTransaction);
} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}

return spendTransaction.getTxId().toString();
}

}
@@ -1,143 +0,0 @@
package org.qortal.api.resource;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.qortal.account.PrivateKeyAccount;
import org.qortal.api.ApiError;
import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.CrossChainSecretRequest;
import org.qortal.crosschain.AcctMode;
import org.qortal.crosschain.DogecoinACCTv1;
import org.qortal.crypto.Crypto;
import org.qortal.data.at.ATData;
import org.qortal.data.crosschain.CrossChainTradeData;
import org.qortal.group.Group;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.transaction.MessageTransaction;
import org.qortal.transaction.Transaction.ValidationResult;
import org.qortal.transform.Transformer;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.util.Arrays;

@Path("/crosschain/DogecoinACCTv1")
@Tag(name = "Cross-Chain (DogecoinACCTv1)")
public class CrossChainDogecoinACCTv1Resource {

@Context
HttpServletRequest request;

@POST
@Path("/redeemmessage")
@Operation(
summary = "Signs and broadcasts a 'redeem' MESSAGE transaction that sends secrets to AT, releasing funds to partner",
description = "Specify address of cross-chain AT that needs to be messaged, Alice's trade private key, the 32-byte secret,<br>"
+ "and an address for receiving QORT from AT. All of these can be found in Alice's trade bot data.<br>"
+ "AT needs to be in 'trade' mode. Messages sent to an AT in any other mode will be ignored, but still cost fees to send!<br>"
+ "You need to use the private key that the AT considers the trade 'partner' otherwise the MESSAGE transaction will be invalid.",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(
implementation = CrossChainSecretRequest.class
)
)
),
responses = {
@ApiResponse(
content = @Content(
schema = @Schema(
type = "string"
)
)
)
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_DATA, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public boolean buildRedeemMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainSecretRequest secretRequest) {
Security.checkApiCallAllowed(request);

byte[] partnerPrivateKey = secretRequest.partnerPrivateKey;

if (partnerPrivateKey == null || partnerPrivateKey.length != Transformer.PRIVATE_KEY_LENGTH)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

if (secretRequest.atAddress == null || !Crypto.isValidAtAddress(secretRequest.atAddress))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

if (secretRequest.secret == null || secretRequest.secret.length != DogecoinACCTv1.SECRET_LENGTH)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);

if (secretRequest.receivingAddress == null || !Crypto.isValidAddress(secretRequest.receivingAddress))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

try (final Repository repository = RepositoryManager.getRepository()) {
ATData atData = fetchAtDataWithChecking(repository, secretRequest.atAddress);
CrossChainTradeData crossChainTradeData = DogecoinACCTv1.getInstance().populateTradeData(repository, atData);

if (crossChainTradeData.mode != AcctMode.TRADING)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

byte[] partnerPublicKey = new PrivateKeyAccount(null, partnerPrivateKey).getPublicKey();
String partnerAddress = Crypto.toAddress(partnerPublicKey);

// MESSAGE must come from address that AT considers trade partner
if (!crossChainTradeData.qortalPartnerAddress.equals(partnerAddress))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

// Good to make MESSAGE

byte[] messageData = DogecoinACCTv1.buildRedeemMessage(secretRequest.secret, secretRequest.receivingAddress);

PrivateKeyAccount sender = new PrivateKeyAccount(repository, partnerPrivateKey);
MessageTransaction messageTransaction = MessageTransaction.build(repository, sender, Group.NO_GROUP, secretRequest.atAddress, messageData, false, false);

messageTransaction.computeNonce();
messageTransaction.sign(sender);

// reset repository state to prevent deadlock
repository.discardChanges();
ValidationResult result = messageTransaction.importAsUnconfirmed();

if (result != ValidationResult.OK)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSACTION_INVALID);

return true;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

private ATData fetchAtDataWithChecking(Repository repository, String atAddress) throws DataException {
ATData atData = repository.getATRepository().fromATAddress(atAddress);
if (atData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);

// Must be correct AT - check functionality using code hash
if (!Arrays.equals(atData.getCodeHash(), DogecoinACCTv1.CODE_BYTES_HASH))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

// No point sending message to AT that's finished
if (atData.getIsFinished())
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

return atData;
}

}
@@ -1,175 +0,0 @@
package org.qortal.api.resource;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.bitcoinj.core.Transaction;
import org.qortal.api.ApiError;
import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.crosschain.DogecoinSendRequest;
import org.qortal.crosschain.ForeignBlockchainException;
import org.qortal.crosschain.Dogecoin;
import org.qortal.crosschain.SimpleTransaction;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.util.List;

@Path("/crosschain/doge")
@Tag(name = "Cross-Chain (Dogecoin)")
public class CrossChainDogecoinResource {

@Context
HttpServletRequest request;

@POST
@Path("/walletbalance")
@Operation(
summary = "Returns DOGE balance for hierarchical, deterministic BIP32 wallet",
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string",
description = "BIP32 'm' private/public key in base58",
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
)
)
),
responses = {
@ApiResponse(
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public String getDogecoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

Dogecoin dogecoin = Dogecoin.getInstance();

if (!dogecoin.isValidDeterministicKey(key58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

try {
Long balance = dogecoin.getWalletBalanceFromTransactions(key58);
if (balance == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);

return balance.toString();

} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}
}

@POST
@Path("/wallettransactions")
@Operation(
summary = "Returns transactions for hierarchical, deterministic BIP32 wallet",
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string",
description = "BIP32 'm' private/public key in base58",
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
)
)
),
responses = {
@ApiResponse(
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public List<SimpleTransaction> getDogecoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

Dogecoin dogecoin = Dogecoin.getInstance();

if (!dogecoin.isValidDeterministicKey(key58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

try {
return dogecoin.getWalletTransactions(key58);
} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}
}

@POST
@Path("/send")
@Operation(
summary = "Sends DOGE from hierarchical, deterministic BIP32 wallet to specific address",
description = "Currently only supports 'legacy' P2PKH Dogecoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(
implementation = DogecoinSendRequest.class
)
)
),
responses = {
@ApiResponse(
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
@SecurityRequirement(name = "apiKey")
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, DogecoinSendRequest dogecoinSendRequest) {
Security.checkApiCallAllowed(request);

if (dogecoinSendRequest.dogecoinAmount <= 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

if (dogecoinSendRequest.feePerByte != null && dogecoinSendRequest.feePerByte <= 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

Dogecoin dogecoin = Dogecoin.getInstance();

if (!dogecoin.isValidAddress(dogecoinSendRequest.receivingAddress))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

if (!dogecoin.isValidDeterministicKey(dogecoinSendRequest.xprv58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

Transaction spendTransaction = dogecoin.buildSpend(dogecoinSendRequest.xprv58,
dogecoinSendRequest.receivingAddress,
dogecoinSendRequest.dogecoinAmount,
dogecoinSendRequest.feePerByte);

if (spendTransaction == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE);

try {
dogecoin.broadcastTransaction(spendTransaction);
} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}

return spendTransaction.getTxId().toString();
}

}
@@ -4,44 +4,36 @@ import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.bitcoinj.core.*;
|
||||
import org.bitcoinj.script.Script;
|
||||
import org.qortal.api.*;
|
||||
import org.bitcoinj.core.TransactionOutput;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.CrossChainBitcoinyHTLCStatus;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.crosschain.*;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.at.ATData;
|
||||
import org.qortal.data.crosschain.CrossChainTradeData;
|
||||
import org.qortal.data.crosschain.TradeBotData;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.crosschain.Bitcoiny;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.SupportedBlockchain;
|
||||
import org.qortal.crosschain.BitcoinyHTLC;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import com.google.common.hash.HashCode;
|
||||
|
||||
@Path("/crosschain/htlc")
|
||||
@Tag(name = "Cross-Chain (Hash time-locked contracts)")
|
||||
public class CrossChainHtlcResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(CrossChainHtlcResource.class);
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@@ -49,7 +41,7 @@ public class CrossChainHtlcResource {
|
||||
@Path("/address/{blockchain}/{refundPKH}/{locktime}/{redeemPKH}/{hashOfSecret}")
|
||||
@Operation(
|
||||
summary = "Returns HTLC address based on trade info",
|
||||
description = "Public key hashes (PKH) and hash of secret should be 20 bytes (base58 encoded). Locktime is seconds since epoch.",
|
||||
description = "Blockchain can be BITCOIN or LITECOIN. Public key hashes (PKH) and hash of secret should be 20 bytes (hex). Locktime is seconds since epoch.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
|
||||
@@ -58,21 +50,21 @@ public class CrossChainHtlcResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_CRITERIA})
|
||||
public String deriveHtlcAddress(@PathParam("blockchain") String blockchainName,
|
||||
@PathParam("refundPKH") String refundPKH,
|
||||
@PathParam("refundPKH") String refundHex,
|
||||
@PathParam("locktime") int lockTime,
|
||||
@PathParam("redeemPKH") String redeemPKH,
|
||||
@PathParam("hashOfSecret") String hashOfSecret) {
|
||||
@PathParam("redeemPKH") String redeemHex,
|
||||
@PathParam("hashOfSecret") String hashOfSecretHex) {
|
||||
SupportedBlockchain blockchain = SupportedBlockchain.valueOf(blockchainName);
|
||||
if (blockchain == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
byte[] refunderPubKeyHash;
|
||||
byte[] redeemerPubKeyHash;
|
||||
byte[] decodedHashOfSecret;
|
||||
byte[] hashOfSecret;
|
||||
|
||||
try {
|
||||
refunderPubKeyHash = Base58.decode(refundPKH);
|
||||
redeemerPubKeyHash = Base58.decode(redeemPKH);
|
||||
refunderPubKeyHash = HashCode.fromString(refundHex).asBytes();
|
||||
redeemerPubKeyHash = HashCode.fromString(redeemHex).asBytes();
|
||||
|
||||
if (refunderPubKeyHash.length != 20 || redeemerPubKeyHash.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PUBLIC_KEY);
|
||||
@@ -81,14 +73,14 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
|
||||
try {
|
||||
decodedHashOfSecret = Base58.decode(hashOfSecret);
|
||||
if (decodedHashOfSecret.length != 20)
|
||||
hashOfSecret = HashCode.fromString(hashOfSecretHex).asBytes();
|
||||
if (hashOfSecret.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, decodedHashOfSecret);
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, hashOfSecret);
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) blockchain.getInstance();
|
||||
|
||||
@@ -99,7 +91,7 @@ public class CrossChainHtlcResource {
|
||||
@Path("/status/{blockchain}/{refundPKH}/{locktime}/{redeemPKH}/{hashOfSecret}")
|
||||
@Operation(
|
||||
summary = "Checks HTLC status",
|
||||
description = "Public key hashes (PKH) and hash of secret should be 20 bytes (base58 encoded). Locktime is seconds since epoch.",
|
||||
description = "Blockchain can be BITCOIN or LITECOIN. Public key hashes (PKH) and hash of secret should be 20 bytes (hex). Locktime is seconds since epoch.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.APPLICATION_JSON, schema = @Schema(implementation = CrossChainBitcoinyHTLCStatus.class))
|
||||
@@ -107,13 +99,11 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public CrossChainBitcoinyHTLCStatus checkHtlcStatus(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("blockchain") String blockchainName,
|
||||
@PathParam("refundPKH") String refundPKH,
|
||||
public CrossChainBitcoinyHTLCStatus checkHtlcStatus(@PathParam("blockchain") String blockchainName,
|
||||
@PathParam("refundPKH") String refundHex,
|
||||
@PathParam("locktime") int lockTime,
|
||||
@PathParam("redeemPKH") String redeemPKH,
|
||||
@PathParam("hashOfSecret") String hashOfSecret) {
|
||||
@PathParam("redeemPKH") String redeemHex,
|
||||
@PathParam("hashOfSecret") String hashOfSecretHex) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
SupportedBlockchain blockchain = SupportedBlockchain.valueOf(blockchainName);
|
||||
@@ -122,11 +112,11 @@ public class CrossChainHtlcResource {
|
||||
|
||||
byte[] refunderPubKeyHash;
|
||||
byte[] redeemerPubKeyHash;
|
||||
byte[] decodedHashOfSecret;
|
||||
byte[] hashOfSecret;
|
||||
|
||||
try {
|
||||
refunderPubKeyHash = Base58.decode(refundPKH);
|
||||
redeemerPubKeyHash = Base58.decode(redeemPKH);
|
||||
refunderPubKeyHash = HashCode.fromString(refundHex).asBytes();
|
||||
redeemerPubKeyHash = HashCode.fromString(redeemHex).asBytes();
|
||||
|
||||
if (refunderPubKeyHash.length != 20 || redeemerPubKeyHash.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PUBLIC_KEY);
|
||||
@@ -135,14 +125,14 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
|
||||
try {
|
||||
decodedHashOfSecret = Base58.decode(hashOfSecret);
|
||||
if (decodedHashOfSecret.length != 20)
|
||||
hashOfSecret = HashCode.fromString(hashOfSecretHex).asBytes();
|
||||
if (hashOfSecret.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, decodedHashOfSecret);
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, hashOfSecret);
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) blockchain.getInstance();
|
||||
|
||||
@@ -178,495 +168,8 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/redeem/{ataddress}")
|
||||
@Operation(
|
||||
summary = "Redeems HTLC associated with supplied AT",
|
||||
description = "To be used by a QORT seller (Bob) who needs to redeem LTC/DOGE/etc proceeds that are stuck in a P2SH.<br>" +
|
||||
"This requires Bob's trade bot data to be present in the database for this AT.<br>" +
|
||||
"It will fail if the buyer has yet to redeem the QORT held in the AT.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean redeemHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("ataddress") String atAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
// TODO: refund
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
// TODO: redeem
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Attempt to find secret from the buyer's message to AT
|
||||
byte[] decodedSecret = acct.findSecretA(repository, crossChainTradeData);
|
||||
if (decodedSecret == null) {
|
||||
LOGGER.info(() -> String.format("Unable to find secret-A from redeem message to AT %s", atAddress));
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
TradeBotData tradeBotData = allTradeBotData.stream().filter(tradeBotDataItem -> tradeBotDataItem.getAtAddress().equals(atAddress)).findFirst().orElse(null);
|
||||
|
||||
// Search for the tradePrivateKey in the tradebot data
|
||||
byte[] decodedPrivateKey = null;
|
||||
if (tradeBotData != null)
|
||||
decodedPrivateKey = tradeBotData.getTradePrivateKey();
|
||||
|
||||
// Search for the foreign blockchain receiving address in the tradebot data
|
||||
byte[] foreignBlockchainReceivingAccountInfo = null;
|
||||
if (tradeBotData != null)
|
||||
// Use receiving address PKH from tradebot data
|
||||
foreignBlockchainReceivingAccountInfo = tradeBotData.getReceivingAccountInfo();
|
||||
|
||||
return this.doRedeemHtlc(atAddress, decodedPrivateKey, decodedSecret, foreignBlockchainReceivingAccountInfo);
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/redeemAll")
|
||||
@Operation(
|
||||
summary = "Redeems HTLC for all applicable ATs in tradebot data",
|
||||
description = "To be used by a QORT seller (Bob) who needs to redeem LTC/DOGE/etc proceeds that are stuck in P2SH transactions.<br>" +
|
||||
"This requires Bob's trade bot data to be present in the database for any ATs that need redeeming.<br>" +
|
||||
"Returns true if at least one trade is redeemed. More detail is available in the log.txt.* file.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean redeemAllHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
boolean success = false;
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
|
||||
for (TradeBotData tradeBotData : allTradeBotData) {
|
||||
String atAddress = tradeBotData.getAtAddress();
|
||||
if (atAddress == null) {
|
||||
LOGGER.info("Missing AT address in tradebot data", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
String tradeState = tradeBotData.getState();
|
||||
if (tradeState == null) {
|
||||
LOGGER.info("Missing trade state for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tradeState.startsWith("ALICE")) {
|
||||
LOGGER.info("AT {} isn't redeemable because it is a buy order", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null) {
|
||||
LOGGER.info("Couldn't find AT with address {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
if (Objects.equals(bitcoiny.getCurrencyCode(), "ARRR")) {
|
||||
LOGGER.info("Skipping AT {} because ARRR is currently unsupported", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null) {
|
||||
LOGGER.info("Couldn't find crosschain trade data for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Attempt to find secret from the buyer's message to AT
|
||||
byte[] decodedSecret = acct.findSecretA(repository, crossChainTradeData);
|
||||
if (decodedSecret == null) {
|
||||
LOGGER.info("Unable to find secret-A from redeem message to AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Search for the tradePrivateKey in the tradebot data
|
||||
byte[] decodedPrivateKey = tradeBotData.getTradePrivateKey();
|
||||
|
||||
// Search for the foreign blockchain receiving address PKH in the tradebot data
|
||||
byte[] foreignBlockchainReceivingAccountInfo = tradeBotData.getReceivingAccountInfo();
|
||||
|
||||
try {
|
||||
LOGGER.info("Attempting to redeem P2SH balance associated with AT {}...", atAddress);
|
||||
boolean redeemed = this.doRedeemHtlc(atAddress, decodedPrivateKey, decodedSecret, foreignBlockchainReceivingAccountInfo);
|
||||
if (redeemed) {
|
||||
LOGGER.info("Redeemed P2SH balance associated with AT {}", atAddress);
|
||||
success = true;
|
||||
}
|
||||
else {
|
||||
LOGGER.info("Couldn't redeem P2SH balance associated with AT {}. Already redeemed?", atAddress);
|
||||
}
|
||||
} catch (ApiException e) {
|
||||
LOGGER.info("Couldn't redeem P2SH balance associated with AT {}. Missing data?", atAddress);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
private boolean doRedeemHtlc(String atAddress, byte[] decodedTradePrivateKey, byte[] decodedSecret,
|
||||
byte[] foreignBlockchainReceivingAccountInfo) {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Validate trade private key
|
||||
if (decodedTradePrivateKey == null || decodedTradePrivateKey.length != 32)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Validate secret
|
||||
if (decodedSecret == null || decodedSecret.length != 32)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Validate receiving address
|
||||
if (foreignBlockchainReceivingAccountInfo == null || foreignBlockchainReceivingAccountInfo.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Make sure the receiving address isn't a QORT address, given that we can share the same field for both QORT and foreign blockchains
|
||||
if (Crypto.isValidAddress(foreignBlockchainReceivingAccountInfo))
|
||||
if (Base58.encode(foreignBlockchainReceivingAccountInfo).startsWith("Q"))
|
||||
// This is likely a QORT address, not a foreign blockchain
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
|
||||
// Use secret-A to redeem P2SH-A
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
if (bitcoiny.getClass() == Bitcoin.class) {
|
||||
LOGGER.info("Redeeming a Bitcoin HTLC is not yet supported");
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int lockTime = crossChainTradeData.lockTimeA;
|
||||
byte[] redeemScriptA = BitcoinyHTLC.buildScript(crossChainTradeData.partnerForeignPKH, lockTime, crossChainTradeData.creatorForeignPKH, crossChainTradeData.hashOfSecretA);
|
||||
String p2shAddressA = bitcoiny.deriveP2shAddress(redeemScriptA);
|
||||
LOGGER.info(String.format("Redeeming P2SH address: %s", p2shAddressA));
|
||||
|
||||
// Fee for redeem/refund is subtracted from P2SH-A balance.
|
||||
long feeTimestamp = calcFeeTimestamp(lockTime, crossChainTradeData.tradeTimeout);
|
||||
long p2shFee = bitcoiny.getP2shFee(feeTimestamp);
|
||||
long minimumAmountA = crossChainTradeData.expectedForeignAmount + p2shFee;
|
||||
BitcoinyHTLC.Status htlcStatusA = BitcoinyHTLC.determineHtlcStatus(bitcoiny.getBlockchainProvider(), p2shAddressA, minimumAmountA);
|
||||
|
||||
switch (htlcStatusA) {
|
||||
case UNFUNDED:
|
||||
case FUNDING_IN_PROGRESS:
|
||||
// P2SH-A suddenly not funded? Our best bet at this point is to hope for AT auto-refund
|
||||
return false;
|
||||
|
||||
case REDEEM_IN_PROGRESS:
|
||||
case REDEEMED:
|
||||
// Double-check that we have redeemed P2SH-A...
|
||||
return false;
|
||||
|
||||
case REFUND_IN_PROGRESS:
|
||||
case REFUNDED:
|
||||
// Wait for AT to auto-refund
|
||||
return false;
|
||||
|
||||
case FUNDED: {
|
||||
Coin redeemAmount = Coin.valueOf(crossChainTradeData.expectedForeignAmount);
|
||||
ECKey redeemKey = ECKey.fromPrivate(decodedTradePrivateKey);
|
||||
List<TransactionOutput> fundingOutputs = bitcoiny.getUnspentOutputs(p2shAddressA);
|
||||
|
||||
Transaction p2shRedeemTransaction = BitcoinyHTLC.buildRedeemTransaction(bitcoiny.getNetworkParameters(), redeemAmount, redeemKey,
|
||||
fundingOutputs, redeemScriptA, decodedSecret, foreignBlockchainReceivingAccountInfo);
|
||||
|
||||
bitcoiny.broadcastTransaction(p2shRedeemTransaction);
|
||||
LOGGER.info(String.format("P2SH address %s redeemed!", p2shAddressA));
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, e);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/refund/{ataddress}")
|
||||
@Operation(
|
||||
summary = "Refunds HTLC associated with supplied AT",
|
||||
description = "To be used by a QORT buyer (Alice) who needs to refund their LTC/DOGE/etc that is stuck in a P2SH.<br>" +
|
||||
"This requires Alice's trade bot data to be present in the database for this AT.<br>" +
|
||||
"It will fail if it's already redeemed by the seller, or if the lockTime (60 minutes) hasn't passed yet.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean refundHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("ataddress") String atAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
TradeBotData tradeBotData = allTradeBotData.stream().filter(tradeBotDataItem -> tradeBotDataItem.getAtAddress().equals(atAddress)).findFirst().orElse(null);
|
||||
if (tradeBotData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (tradeBotData.getForeignKey() == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Determine foreign blockchain receive address for refund
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
String receiveAddress = bitcoiny.getUnusedReceiveAddress(tradeBotData.getForeignKey());
|
||||
|
||||
return this.doRefundHtlc(atAddress, receiveAddress);
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/refundAll")
|
||||
@Operation(
|
||||
summary = "Refunds HTLC for all applicable ATs in tradebot data",
|
||||
description = "To be used by a QORT buyer (Alice) who needs to refund their LTC/DOGE/etc proceeds that are stuck in P2SH transactions.<br>" +
|
||||
"This requires Alice's trade bot data to be present in the database for this AT.<br>" +
|
||||
"It will fail if it's already redeemed by the seller, or if the lockTime (60 minutes) hasn't passed yet.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean refundAllHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
boolean success = false;
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
|
||||
for (TradeBotData tradeBotData : allTradeBotData) {
|
||||
String atAddress = tradeBotData.getAtAddress();
|
||||
if (atAddress == null) {
|
||||
LOGGER.info("Missing AT address in tradebot data", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
String tradeState = tradeBotData.getState();
|
||||
if (tradeState == null) {
|
||||
LOGGER.info("Missing trade state for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tradeState.startsWith("BOB")) {
|
||||
LOGGER.info("AT {} isn't refundable because it is a sell order", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null) {
|
||||
LOGGER.info("Couldn't find AT with address {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null) {
|
||||
LOGGER.info("Couldn't find crosschain trade data for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tradeBotData.getForeignKey() == null) {
|
||||
LOGGER.info("Couldn't find foreign key for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
// Determine foreign blockchain receive address for refund
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
if (Objects.equals(bitcoiny.getCurrencyCode(), "ARRR")) {
|
||||
LOGGER.info("Skipping AT {} because ARRR is currently unsupported", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
String receivingAddress = bitcoiny.getUnusedReceiveAddress(tradeBotData.getForeignKey());
|
||||
|
||||
LOGGER.info("Attempting to refund P2SH balance associated with AT {}...", atAddress);
|
||||
boolean refunded = this.doRefundHtlc(atAddress, receivingAddress);
|
||||
if (refunded) {
|
||||
LOGGER.info("Refunded P2SH balance associated with AT {}", atAddress);
|
||||
success = true;
|
||||
}
|
||||
else {
|
||||
LOGGER.info("Couldn't refund P2SH balance associated with AT {}. Already redeemed?", atAddress);
|
||||
}
|
||||
} catch (ApiException | ForeignBlockchainException e) {
|
||||
LOGGER.info("Couldn't refund P2SH balance associated with AT {}. Missing data?", atAddress);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
|
||||
private boolean doRefundHtlc(String atAddress, String receiveAddress) {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// If the AT is "finished" then it will have a zero balance
|
||||
// In these cases we should avoid HTLC refunds if tbe QORT haven't been returned to the seller
|
||||
if (atData.getIsFinished() && crossChainTradeData.mode != AcctMode.REFUNDED && crossChainTradeData.mode != AcctMode.CANCELLED) {
|
||||
LOGGER.info(String.format("Skipping AT %s because the QORT has already been redemed", atAddress));
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
TradeBotData tradeBotData = allTradeBotData.stream().filter(tradeBotDataItem -> tradeBotDataItem.getAtAddress().equals(atAddress)).findFirst().orElse(null);
|
||||
if (tradeBotData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
if (bitcoiny.getClass() == Bitcoin.class) {
|
||||
LOGGER.info("Refunding a Bitcoin HTLC is not yet supported");
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int lockTime = tradeBotData.getLockTimeA();
|
||||
|
||||
// We can't refund P2SH-A until lockTime-A has passed
|
||||
if (NTP.getTime() <= lockTime * 1000L)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_TOO_SOON);
|
||||
|
||||
// We can't refund P2SH-A until median block time has passed lockTime-A (see BIP113)
|
||||
int medianBlockTime = bitcoiny.getMedianBlockTime();
|
||||
if (medianBlockTime <= lockTime)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_TOO_SOON);
|
||||
|
||||
byte[] redeemScriptA = BitcoinyHTLC.buildScript(tradeBotData.getTradeForeignPublicKeyHash(), lockTime, crossChainTradeData.creatorForeignPKH, tradeBotData.getHashOfSecret());
|
||||
String p2shAddressA = bitcoiny.deriveP2shAddress(redeemScriptA);
|
||||
LOGGER.info(String.format("Refunding P2SH address: %s", p2shAddressA));
|
||||
|
||||
// Fee for redeem/refund is subtracted from P2SH-A balance.
|
||||
long feeTimestamp = calcFeeTimestamp(lockTime, crossChainTradeData.tradeTimeout);
|
||||
long p2shFee = bitcoiny.getP2shFee(feeTimestamp);
|
||||
long minimumAmountA = crossChainTradeData.expectedForeignAmount + p2shFee;
|
||||
BitcoinyHTLC.Status htlcStatusA = BitcoinyHTLC.determineHtlcStatus(bitcoiny.getBlockchainProvider(), p2shAddressA, minimumAmountA);
|
||||
|
||||
switch (htlcStatusA) {
|
||||
case UNFUNDED:
|
||||
case FUNDING_IN_PROGRESS:
|
||||
// Still waiting for P2SH-A to be funded...
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_TOO_SOON);
|
||||
|
||||
case REDEEM_IN_PROGRESS:
|
||||
case REDEEMED:
|
||||
case REFUND_IN_PROGRESS:
|
||||
case REFUNDED:
|
||||
// Too late!
|
||||
return false;
|
||||
|
||||
case FUNDED:{
|
||||
Coin refundAmount = Coin.valueOf(crossChainTradeData.expectedForeignAmount);
|
||||
ECKey refundKey = ECKey.fromPrivate(tradeBotData.getTradePrivateKey());
|
||||
List<TransactionOutput> fundingOutputs = bitcoiny.getUnspentOutputs(p2shAddressA);
|
||||
|
||||
// Validate the destination foreign blockchain address
|
||||
Address receiving = Address.fromString(bitcoiny.getNetworkParameters(), receiveAddress);
|
||||
if (receiving.getOutputScriptType() != Script.ScriptType.P2PKH)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Transaction p2shRefundTransaction = BitcoinyHTLC.buildRefundTransaction(bitcoiny.getNetworkParameters(), refundAmount, refundKey,
|
||||
fundingOutputs, redeemScriptA, lockTime, receiving.getHash());
|
||||
|
||||
bitcoiny.broadcastTransaction(p2shRefundTransaction);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, e);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private long calcFeeTimestamp(int lockTimeA, int tradeTimeout) {
|
||||
return (lockTimeA - tradeTimeout * 60) * 1000L;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,148 +0,0 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.qortal.account.PrivateKeyAccount;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.CrossChainSecretRequest;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
import org.qortal.crosschain.LitecoinACCTv1;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.at.ATData;
|
||||
import org.qortal.data.crosschain.CrossChainTradeData;
|
||||
import org.qortal.group.Group;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.transaction.MessageTransaction;
|
||||
import org.qortal.transaction.Transaction.ValidationResult;
|
||||
import org.qortal.transform.TransformationException;
|
||||
import org.qortal.transform.Transformer;
|
||||
import org.qortal.transform.transaction.MessageTransactionTransformer;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.util.Arrays;
|
||||
import java.util.Random;
|
||||
|
||||
@Path("/crosschain/LitecoinACCTv1")
|
||||
@Tag(name = "Cross-Chain (LitecoinACCTv1)")
|
||||
public class CrossChainLitecoinACCTv1Resource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/redeemmessage")
|
||||
@Operation(
|
||||
summary = "Signs and broadcasts a 'redeem' MESSAGE transaction that sends secrets to AT, releasing funds to partner",
|
||||
description = "Specify address of cross-chain AT that needs to be messaged, Alice's trade private key, the 32-byte secret,<br>"
|
||||
+ "and an address for receiving QORT from AT. All of these can be found in Alice's trade bot data.<br>"
|
||||
+ "AT needs to be in 'trade' mode. Messages sent to an AT in any other mode will be ignored, but still cost fees to send!<br>"
|
||||
+ "You need to use the private key that the AT considers the trade 'partner' otherwise the MESSAGE transaction will be invalid.",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = CrossChainSecretRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_DATA, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean buildRedeemMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainSecretRequest secretRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
byte[] partnerPrivateKey = secretRequest.partnerPrivateKey;
|
||||
|
||||
if (partnerPrivateKey == null || partnerPrivateKey.length != Transformer.PRIVATE_KEY_LENGTH)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
if (secretRequest.atAddress == null || !Crypto.isValidAtAddress(secretRequest.atAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
if (secretRequest.secret == null || secretRequest.secret.length != LitecoinACCTv1.SECRET_LENGTH)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
|
||||
if (secretRequest.receivingAddress == null || !Crypto.isValidAddress(secretRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ATData atData = fetchAtDataWithChecking(repository, secretRequest.atAddress);
|
||||
CrossChainTradeData crossChainTradeData = LitecoinACCTv1.getInstance().populateTradeData(repository, atData);
|
||||
|
||||
if (crossChainTradeData.mode != AcctMode.TRADING)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
byte[] partnerPublicKey = new PrivateKeyAccount(null, partnerPrivateKey).getPublicKey();
|
||||
String partnerAddress = Crypto.toAddress(partnerPublicKey);
|
||||
|
||||
// MESSAGE must come from address that AT considers trade partner
|
||||
if (!crossChainTradeData.qortalPartnerAddress.equals(partnerAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
// Good to make MESSAGE
|
||||
|
||||
byte[] messageData = LitecoinACCTv1.buildRedeemMessage(secretRequest.secret, secretRequest.receivingAddress);
|
||||
|
||||
PrivateKeyAccount sender = new PrivateKeyAccount(repository, partnerPrivateKey);
|
||||
MessageTransaction messageTransaction = MessageTransaction.build(repository, sender, Group.NO_GROUP, secretRequest.atAddress, messageData, false, false);
|
||||
|
||||
messageTransaction.computeNonce();
|
||||
messageTransaction.sign(sender);
|
||||
|
||||
// reset repository state to prevent deadlock
|
||||
repository.discardChanges();
|
||||
ValidationResult result = messageTransaction.importAsUnconfirmed();
|
||||
|
||||
if (result != ValidationResult.OK)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSACTION_INVALID);
|
||||
|
||||
return true;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
private ATData fetchAtDataWithChecking(Repository repository, String atAddress) throws DataException {
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
// Must be correct AT - check functionality using code hash
|
||||
if (!Arrays.equals(atData.getCodeHash(), LitecoinACCTv1.CODE_BYTES_HASH))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// No point sending message to AT that's finished
|
||||
if (atData.getIsFinished())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
return atData;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -6,13 +6,11 @@ import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
@@ -24,9 +22,9 @@ import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.LitecoinSendRequest;
|
||||
import org.qortal.crosschain.BitcoinyTransaction;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.Litecoin;
|
||||
import org.qortal.crosschain.SimpleTransaction;
|
||||
|
||||
@Path("/crosschain/ltc")
|
||||
@Tag(name = "Cross-Chain (Litecoin)")
|
||||
@@ -58,8 +56,7 @@ public class CrossChainLitecoinResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getLitecoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
public String getLitecoinWalletBalance(String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Litecoin litecoin = Litecoin.getInstance();
|
||||
@@ -67,16 +64,11 @@ public class CrossChainLitecoinResource {
|
||||
if (!litecoin.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
Long balance = litecoin.getWalletBalanceFromTransactions(key58);
|
||||
if (balance == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
|
||||
return balance.toString();
|
||||
|
||||
} catch (ForeignBlockchainException e) {
|
||||
Long balance = litecoin.getWalletBalance(key58);
|
||||
if (balance == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
|
||||
return balance.toString();
|
||||
}
|
||||
|
||||
@POST
|
||||
@@ -97,13 +89,12 @@ public class CrossChainLitecoinResource {
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = BitcoinyTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<SimpleTransaction> getLitecoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
public List<BitcoinyTransaction> getLitecoinWalletTransactions(String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Litecoin litecoin = Litecoin.getInstance();
|
||||
@@ -122,7 +113,7 @@ public class CrossChainLitecoinResource {
|
||||
@Path("/send")
|
||||
@Operation(
|
||||
summary = "Sends LTC from hierarchical, deterministic BIP32 wallet to specific address",
|
||||
description = "Currently supports 'legacy' P2PKH Litecoin addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
|
||||
description = "Currently only supports 'legacy' P2PKH Litecoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
@@ -139,8 +130,7 @@ public class CrossChainLitecoinResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, LitecoinSendRequest litecoinSendRequest) {
|
||||
public String sendBitcoin(LitecoinSendRequest litecoinSendRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (litecoinSendRequest.litecoinAmount <= 0)
|
||||
@@ -174,4 +164,4 @@ public class CrossChainLitecoinResource {
|
||||
return spendTransaction.getTxId().toString();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,229 +0,0 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.PirateChainSendRequest;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.PirateChain;
|
||||
import org.qortal.crosschain.SimpleTransaction;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.util.List;
|
||||
|
||||
@Path("/crosschain/arrr")
|
||||
@Tag(name = "Cross-Chain (Pirate Chain)")
|
||||
public class CrossChainPirateChainResource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/walletbalance")
|
||||
@Operation(
|
||||
summary = "Returns ARRR balance",
|
||||
description = "Supply 32 bytes of entropy, Base58 encoded",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "32 bytes of entropy, Base58 encoded",
|
||||
example = "5oSXF53qENtdUyKhqSxYzP57m6RhVFP9BJKRr9E5kRGV"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getPirateChainWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String entropy58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
PirateChain pirateChain = PirateChain.getInstance();
|
||||
|
||||
try {
|
||||
Long balance = pirateChain.getWalletBalance(entropy58);
|
||||
if (balance == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
|
||||
return balance.toString();
|
||||
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/wallettransactions")
|
||||
@Operation(
|
||||
summary = "Returns transactions",
|
||||
description = "Supply 32 bytes of entropy, Base58 encoded",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "32 bytes of entropy, Base58 encoded",
|
||||
example = "5oSXF53qENtdUyKhqSxYzP57m6RhVFP9BJKRr9E5kRGV"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<SimpleTransaction> getPirateChainWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String entropy58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
PirateChain pirateChain = PirateChain.getInstance();
|
||||
|
||||
try {
|
||||
return pirateChain.getWalletTransactions(entropy58);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/send")
|
||||
@Operation(
|
||||
summary = "Sends ARRR from wallet",
|
||||
description = "Currently supports 'legacy' P2PKH PirateChain addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "32 bytes of entropy, Base58 encoded",
|
||||
example = "5oSXF53qENtdUyKhqSxYzP57m6RhVFP9BJKRr9E5kRGV"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, PirateChainSendRequest pirateChainSendRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (pirateChainSendRequest.arrrAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (pirateChainSendRequest.feePerByte != null && pirateChainSendRequest.feePerByte <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
PirateChain pirateChain = PirateChain.getInstance();
|
||||
|
||||
try {
|
||||
return pirateChain.sendCoins(pirateChainSendRequest);
|
||||
|
||||
} catch (ForeignBlockchainException e) {
|
||||
// TODO
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/walletaddress")
|
||||
@Operation(
|
||||
summary = "Returns main wallet address",
|
||||
description = "Supply 32 bytes of entropy, Base58 encoded",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "32 bytes of entropy, Base58 encoded",
|
||||
example = "5oSXF53qENtdUyKhqSxYzP57m6RhVFP9BJKRr9E5kRGV"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getPirateChainWalletAddress(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String entropy58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
PirateChain pirateChain = PirateChain.getInstance();
|
||||
|
||||
try {
|
||||
return pirateChain.getWalletAddress(entropy58);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/syncstatus")
|
||||
@Operation(
|
||||
summary = "Returns synchronization status",
|
||||
description = "Supply 32 bytes of entropy, Base58 encoded",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "32 bytes of entropy, Base58 encoded",
|
||||
example = "5oSXF53qENtdUyKhqSxYzP57m6RhVFP9BJKRr9E5kRGV"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getPirateChainSyncStatus(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String entropy58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
PirateChain pirateChain = PirateChain.getInstance();
|
||||
|
||||
try {
|
||||
return pirateChain.getSyncStatus(entropy58);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,177 +0,0 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.bitcoinj.core.Transaction;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.RavencoinSendRequest;
|
||||
import org.qortal.crosschain.Ravencoin;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.SimpleTransaction;
|
||||
|
||||
@Path("/crosschain/rvn")
|
||||
@Tag(name = "Cross-Chain (Ravencoin)")
|
||||
public class CrossChainRavencoinResource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/walletbalance")
|
||||
@Operation(
|
||||
summary = "Returns RVN balance for hierarchical, deterministic BIP32 wallet",
|
||||
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "BIP32 'm' private/public key in base58",
|
||||
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getRavencoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Ravencoin ravencoin = Ravencoin.getInstance();
|
||||
|
||||
if (!ravencoin.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
Long balance = ravencoin.getWalletBalanceFromTransactions(key58);
|
||||
if (balance == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
|
||||
return balance.toString();
|
||||
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/wallettransactions")
|
||||
@Operation(
|
||||
summary = "Returns transactions for hierarchical, deterministic BIP32 wallet",
|
||||
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "BIP32 'm' private/public key in base58",
|
||||
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<SimpleTransaction> getRavencoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Ravencoin ravencoin = Ravencoin.getInstance();
|
||||
|
||||
if (!ravencoin.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
return ravencoin.getWalletTransactions(key58);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/send")
|
||||
@Operation(
|
||||
summary = "Sends RVN from hierarchical, deterministic BIP32 wallet to specific address",
|
||||
description = "Currently only supports 'legacy' P2PKH Ravencoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = RavencoinSendRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, RavencoinSendRequest ravencoinSendRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (ravencoinSendRequest.ravencoinAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (ravencoinSendRequest.feePerByte != null && ravencoinSendRequest.feePerByte <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Ravencoin ravencoin = Ravencoin.getInstance();
|
||||
|
||||
if (!ravencoin.isValidAddress(ravencoinSendRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
if (!ravencoin.isValidDeterministicKey(ravencoinSendRequest.xprv58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
Transaction spendTransaction = ravencoin.buildSpend(ravencoinSendRequest.xprv58,
|
||||
ravencoinSendRequest.receivingAddress,
|
||||
ravencoinSendRequest.ravencoinAmount,
|
||||
ravencoinSendRequest.feePerByte);
|
||||
|
||||
if (spendTransaction == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE);
|
||||
|
||||
try {
|
||||
ravencoin.broadcastTransaction(spendTransaction);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
|
||||
return spendTransaction.getTxId().toString();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import com.google.common.primitives.Longs;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
@@ -11,11 +10,20 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Random;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
@@ -25,7 +33,6 @@ import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.CrossChainCancelRequest;
|
||||
import org.qortal.api.model.CrossChainTradeSummary;
|
||||
import org.qortal.controller.tradebot.TradeBot;
|
||||
import org.qortal.crosschain.SupportedBlockchain;
|
||||
import org.qortal.crosschain.ACCT;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
@@ -88,7 +95,7 @@ public class CrossChainResource {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
final boolean isExecutable = true;
|
||||
List<CrossChainTradeData> crossChainTrades = new ArrayList<>();
|
||||
List<CrossChainTradeData> crossChainTradesData = new ArrayList<>();
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
Map<ByteArray, Supplier<ACCT>> acctsByCodeHash = SupportedBlockchain.getFilteredAcctMap(foreignBlockchain);
|
||||
@@ -101,29 +108,11 @@ public class CrossChainResource {
|
||||
|
||||
for (ATData atData : atsData) {
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData.mode == AcctMode.OFFERING) {
|
||||
crossChainTrades.add(crossChainTradeData);
|
||||
}
|
||||
crossChainTradesData.add(crossChainTradeData);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the trades by timestamp
|
||||
if (reverse != null && reverse) {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(b.creationTimestamp, a.creationTimestamp));
|
||||
}
|
||||
else {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(a.creationTimestamp, b.creationTimestamp));
|
||||
}
|
||||
|
||||
if (limit != null && limit > 0) {
|
||||
// Make sure to not return more than the limit
|
||||
int upperLimit = Math.min(limit, crossChainTrades.size());
|
||||
crossChainTrades = crossChainTrades.subList(0, upperLimit);
|
||||
}
|
||||
|
||||
crossChainTrades.stream().forEach(CrossChainResource::decorateTradeDataWithPresence);
|
||||
|
||||
return crossChainTrades;
|
||||
return crossChainTradesData;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -154,11 +143,7 @@ public class CrossChainResource {
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
|
||||
decorateTradeDataWithPresence(crossChainTradeData);
|
||||
|
||||
return crossChainTradeData;
|
||||
return acct.populateTradeData(repository, atData);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -210,11 +195,6 @@ public class CrossChainResource {
|
||||
|
||||
if (minimumTimestamp != null) {
|
||||
minimumFinalHeight = repository.getBlockRepository().getHeightFromTimestamp(minimumTimestamp);
|
||||
// If not found in the block repository it will return either 0 or 1
|
||||
if (minimumFinalHeight == 0 || minimumFinalHeight == 1) {
|
||||
// Try the archive
|
||||
minimumFinalHeight = repository.getBlockArchiveRepository().getHeightFromTimestamp(minimumTimestamp);
|
||||
}
|
||||
|
||||
if (minimumFinalHeight == 0)
|
||||
// We don't have any blocks since minimumTimestamp, let alone trades, so nothing to return
|
||||
@@ -242,30 +222,12 @@ public class CrossChainResource {
|
||||
|
||||
// We also need block timestamp for use as trade timestamp
|
||||
long timestamp = repository.getBlockRepository().getTimestampFromHeight(atState.getHeight());
|
||||
if (timestamp == 0) {
|
||||
// Try the archive
|
||||
timestamp = repository.getBlockArchiveRepository().getTimestampFromHeight(atState.getHeight());
|
||||
}
|
||||
|
||||
CrossChainTradeSummary crossChainTradeSummary = new CrossChainTradeSummary(crossChainTradeData, timestamp);
|
||||
crossChainTrades.add(crossChainTradeSummary);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the trades by timestamp
|
||||
if (reverse != null && reverse) {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(b.getTradeTimestamp(), a.getTradeTimestamp()));
|
||||
}
|
||||
else {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(a.getTradeTimestamp(), b.getTradeTimestamp()));
|
||||
}
|
||||
|
||||
if (limit != null && limit > 0) {
|
||||
// Make sure to not return more than the limit
|
||||
int upperLimit = Math.min(limit, crossChainTrades.size());
|
||||
crossChainTrades = crossChainTrades.subList(0, upperLimit);
|
||||
}
|
||||
|
||||
return crossChainTrades;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
@@ -293,27 +255,15 @@ public class CrossChainResource {
|
||||
description = "foreign blockchain",
|
||||
example = "LITECOIN",
|
||||
schema = @Schema(implementation = SupportedBlockchain.class)
|
||||
) @PathParam("blockchain") SupportedBlockchain foreignBlockchain,
|
||||
@Parameter(
|
||||
description = "Maximum number of trades to include in price calculation",
|
||||
example = "10",
|
||||
schema = @Schema(type = "integer", defaultValue = "10")
|
||||
) @QueryParam("maxtrades") Integer maxtrades,
|
||||
@Parameter(
|
||||
description = "Display price in terms of foreign currency per unit QORT",
|
||||
example = "false",
|
||||
schema = @Schema(type = "boolean", defaultValue = "false")
|
||||
) @QueryParam("inverse") Boolean inverse) {
|
||||
) @PathParam("blockchain") SupportedBlockchain foreignBlockchain) {
|
||||
// foreignBlockchain is required
|
||||
if (foreignBlockchain == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// We want both a minimum of 5 trades and enough trades to span at least 4 hours
|
||||
int minimumCount = 5;
|
||||
int maximumCount = maxtrades != null ? maxtrades : 10;
|
||||
long minimumPeriod = 4 * 60 * 60 * 1000L; // ms
|
||||
Boolean isFinished = Boolean.TRUE;
|
||||
boolean useInversePrice = (inverse != null && inverse == true);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
Map<ByteArray, Supplier<ACCT>> acctsByCodeHash = SupportedBlockchain.getFilteredAcctMap(foreignBlockchain);
|
||||
@@ -321,49 +271,21 @@ public class CrossChainResource {
|
||||
long totalForeign = 0;
|
||||
long totalQort = 0;
|
||||
|
||||
Map<Long, CrossChainTradeData> reverseSortedTradeData = new TreeMap<>(Collections.reverseOrder());
|
||||
|
||||
// Collect recent AT states for each ACCT version
|
||||
for (Map.Entry<ByteArray, Supplier<ACCT>> acctInfo : acctsByCodeHash.entrySet()) {
|
||||
byte[] codeHash = acctInfo.getKey().value;
|
||||
ACCT acct = acctInfo.getValue().get();
|
||||
|
||||
List<ATStateData> atStates = repository.getATRepository().getMatchingFinalATStatesQuorum(codeHash,
|
||||
isFinished, acct.getModeByteOffset(), (long) AcctMode.REDEEMED.value, minimumCount, maximumCount, minimumPeriod);
|
||||
isFinished, acct.getModeByteOffset(), (long) AcctMode.REDEEMED.value, minimumCount, minimumPeriod);
|
||||
|
||||
for (ATStateData atState : atStates) {
|
||||
// We also need block timestamp for use as trade timestamp
|
||||
long timestamp = repository.getBlockRepository().getTimestampFromHeight(atState.getHeight());
|
||||
if (timestamp == 0) {
|
||||
// Try the archive
|
||||
timestamp = repository.getBlockArchiveRepository().getTimestampFromHeight(atState.getHeight());
|
||||
}
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atState);
|
||||
reverseSortedTradeData.put(timestamp, crossChainTradeData);
|
||||
totalForeign += crossChainTradeData.expectedForeignAmount;
|
||||
totalQort += crossChainTradeData.qortAmount;
|
||||
}
|
||||
}
|
||||
|
||||
// Loop through the sorted map and calculate the average price
|
||||
// Also remove elements beyond the maxtrades limit
|
||||
Set set = reverseSortedTradeData.entrySet();
|
||||
Iterator i = set.iterator();
|
||||
int index = 0;
|
||||
while (i.hasNext()) {
|
||||
Map.Entry tradeDataMap = (Map.Entry)i.next();
|
||||
CrossChainTradeData crossChainTradeData = (CrossChainTradeData) tradeDataMap.getValue();
|
||||
|
||||
if (maxtrades != null && index >= maxtrades) {
|
||||
// We've reached the limit
|
||||
break;
|
||||
}
|
||||
|
||||
totalForeign += crossChainTradeData.expectedForeignAmount;
|
||||
totalQort += crossChainTradeData.qortAmount;
|
||||
index++;
|
||||
}
|
||||
|
||||
return useInversePrice ? Amounts.scaledDivide(totalForeign, totalQort) : Amounts.scaledDivide(totalQort, totalForeign);
|
||||
return Amounts.scaledDivide(totalQort, totalForeign);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -398,7 +320,7 @@ public class CrossChainResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String cancelTrade(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainCancelRequest cancelRequest) {
|
||||
public String cancelTrade(CrossChainCancelRequest cancelRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
byte[] creatorPublicKey = cancelRequest.creatorPublicKey;
|
||||
@@ -493,7 +415,4 @@ public class CrossChainResource {
|
||||
}
|
||||
}
|
||||
|
||||
private static void decorateTradeDataWithPresence(CrossChainTradeData crossChainTradeData) {
|
||||
TradeBot.getInstance().decorateTradeDataWithPresence(crossChainTradeData);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,14 +7,17 @@ import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
@@ -27,7 +30,6 @@ import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.TradeBotCreateRequest;
|
||||
import org.qortal.api.model.crosschain.TradeBotRespondRequest;
|
||||
import org.qortal.asset.Asset;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.tradebot.AcctTradeBot;
|
||||
import org.qortal.controller.tradebot.TradeBot;
|
||||
import org.qortal.crosschain.ForeignBlockchain;
|
||||
@@ -42,7 +44,6 @@ import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
@Path("/crosschain/tradebot")
|
||||
@Tag(name = "Cross-Chain (Trade-Bot)")
|
||||
@@ -67,9 +68,7 @@ public class CrossChainTradeBotResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<TradeBotData> getTradeBotStates(
|
||||
@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@Parameter(
|
||||
description = "Limit to specific blockchain",
|
||||
example = "LITECOIN",
|
||||
@@ -108,10 +107,9 @@ public class CrossChainTradeBotResource {
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.INSUFFICIENT_BALANCE, ApiError.REPOSITORY_ISSUE, ApiError.ORDER_SIZE_TOO_SMALL})
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.INSUFFICIENT_BALANCE, ApiError.REPOSITORY_ISSUE})
|
||||
@SuppressWarnings("deprecation")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String tradeBotCreator(@HeaderParam(Security.API_KEY_HEADER) String apiKey, TradeBotCreateRequest tradeBotCreateRequest) {
|
||||
public String tradeBotCreator(TradeBotCreateRequest tradeBotCreateRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (tradeBotCreateRequest.foreignBlockchain == null)
|
||||
@@ -130,17 +128,10 @@ public class CrossChainTradeBotResource {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (tradeBotCreateRequest.foreignAmount == null || tradeBotCreateRequest.foreignAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ORDER_SIZE_TOO_SMALL);
|
||||
|
||||
if (tradeBotCreateRequest.foreignAmount < foreignBlockchain.getMinimumOrderAmount())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ORDER_SIZE_TOO_SMALL);
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (tradeBotCreateRequest.qortAmount <= 0 || tradeBotCreateRequest.fundingQortAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ORDER_SIZE_TOO_SMALL);
|
||||
|
||||
final Long minLatestBlockTimestamp = NTP.getTime() - (60 * 60 * 1000L);
|
||||
if (!Controller.getInstance().isUpToDate(minLatestBlockTimestamp))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCKCHAIN_NEEDS_SYNC);
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
// Do some simple checking first
|
||||
@@ -155,7 +146,7 @@ public class CrossChainTradeBotResource {
|
||||
|
||||
return Base58.encode(unsignedBytes);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.REPOSITORY_ISSUE, e.getMessage());
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -181,8 +172,7 @@ public class CrossChainTradeBotResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, ApiError.REPOSITORY_ISSUE})
|
||||
@SuppressWarnings("deprecation")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String tradeBotResponder(@HeaderParam(Security.API_KEY_HEADER) String apiKey, TradeBotRespondRequest tradeBotRespondRequest) {
|
||||
public String tradeBotResponder(TradeBotRespondRequest tradeBotRespondRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
final String atAddress = tradeBotRespondRequest.atAddress;
|
||||
@@ -200,10 +190,6 @@ public class CrossChainTradeBotResource {
|
||||
if (tradeBotRespondRequest.receivingAddress == null || !Crypto.isValidAddress(tradeBotRespondRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
final Long minLatestBlockTimestamp = NTP.getTime() - (60 * 60 * 1000L);
|
||||
if (!Controller.getInstance().isUpToDate(minLatestBlockTimestamp))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCKCHAIN_NEEDS_SYNC);
|
||||
|
||||
// Extract data from cross-chain trading AT
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ATData atData = fetchAtDataWithChecking(repository, atAddress);
|
||||
@@ -240,7 +226,7 @@ public class CrossChainTradeBotResource {
|
||||
return "false";
|
||||
}
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.REPOSITORY_ISSUE, e.getMessage());
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -264,8 +250,7 @@ public class CrossChainTradeBotResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_ADDRESS, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String tradeBotDelete(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String tradePrivateKey58) {
|
||||
public String tradeBotDelete(String tradePrivateKey58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
final byte[] tradePrivateKey;
|
||||
@@ -298,4 +283,4 @@ public class CrossChainTradeBotResource {
|
||||
return atData;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -98,15 +98,7 @@ public class GroupsResource {
|
||||
ref = "reverse"
|
||||
) @QueryParam("reverse") Boolean reverse) {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<GroupData> allGroupData = repository.getGroupRepository().getAllGroups(limit, offset, reverse);
|
||||
allGroupData.forEach(groupData -> {
|
||||
try {
|
||||
groupData.memberCount = repository.getGroupRepository().countGroupMembers(groupData.getGroupId());
|
||||
} catch (DataException e) {
|
||||
// Exclude memberCount for this group
|
||||
}
|
||||
});
|
||||
return allGroupData;
|
||||
return repository.getGroupRepository().getAllGroups(limit, offset, reverse);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -158,15 +150,7 @@ public class GroupsResource {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<GroupData> allGroupData = repository.getGroupRepository().getGroupsWithMember(member);
|
||||
allGroupData.forEach(groupData -> {
|
||||
try {
|
||||
groupData.memberCount = repository.getGroupRepository().countGroupMembers(groupData.getGroupId());
|
||||
} catch (DataException e) {
|
||||
// Exclude memberCount for this group
|
||||
}
|
||||
});
|
||||
return allGroupData;
|
||||
return repository.getGroupRepository().getGroupsWithMember(member);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -193,7 +177,6 @@ public class GroupsResource {
|
||||
if (groupData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.GROUP_UNKNOWN);
|
||||
|
||||
groupData.memberCount = repository.getGroupRepository().countGroupMembers(groupId);
|
||||
return groupData;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
@@ -939,4 +922,4 @@ public class GroupsResource {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,176 +0,0 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.model.ListRequest;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.account.AccountData;
|
||||
import org.qortal.list.ResourceListManager;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
|
||||
@Path("/lists")
|
||||
@Tag(name = "Lists")
|
||||
public class ListsResource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/{listName}")
|
||||
@Operation(
|
||||
summary = "Add items to a new or existing list",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = ListRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "Returns true if all items were processed, false if any couldn't be " +
|
||||
"processed, or an exception on failure. If false or an exception is returned, " +
|
||||
"the list will not be updated, and the request will need to be re-issued.",
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String addItemstoList(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("listName") String listName,
|
||||
ListRequest listRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (listName == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
if (listRequest == null || listRequest.items == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int successCount = 0;
|
||||
int errorCount = 0;
|
||||
|
||||
for (String item : listRequest.items) {
|
||||
|
||||
boolean success = ResourceListManager.getInstance().addToList(listName, item, false);
|
||||
if (success) {
|
||||
successCount++;
|
||||
}
|
||||
else {
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (successCount > 0 && errorCount == 0) {
|
||||
// All were successful, so save the list
|
||||
ResourceListManager.getInstance().saveList(listName);
|
||||
return "true";
|
||||
}
|
||||
else {
|
||||
// Something went wrong, so revert
|
||||
ResourceListManager.getInstance().revertList(listName);
|
||||
return "false";
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("/{listName}")
|
||||
@Operation(
|
||||
summary = "Remove one or more items from a list",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = ListRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "Returns true if all items were processed, false if any couldn't be " +
|
||||
"processed, or an exception on failure. If false or an exception is returned, " +
|
||||
"the list will not be updated, and the request will need to be re-issued.",
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String removeItemsFromList(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("listName") String listName,
|
||||
ListRequest listRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (listRequest == null || listRequest.items == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int successCount = 0;
|
||||
int errorCount = 0;
|
||||
|
||||
for (String address : listRequest.items) {
|
||||
|
||||
// Attempt to remove the item
|
||||
// Don't save as we will do this at the end of the process
|
||||
boolean success = ResourceListManager.getInstance().removeFromList(listName, address, false);
|
||||
if (success) {
|
||||
successCount++;
|
||||
}
|
||||
else {
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (successCount > 0 && errorCount == 0) {
|
||||
// All were successful, so save the list
|
||||
ResourceListManager.getInstance().saveList(listName);
|
||||
return "true";
|
||||
}
|
||||
else {
|
||||
// Something went wrong, so revert
|
||||
ResourceListManager.getInstance().revertList(listName);
|
||||
return "false";
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{listName}")
|
||||
@Operation(
|
||||
summary = "Fetch all items in a list",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "A JSON array of items",
|
||||
content = @Content(mediaType = MediaType.APPLICATION_JSON, array = @ArraySchema(schema = @Schema(implementation = String.class)))
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getItemsInList(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("listName") String listName) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
return ResourceListManager.getInstance().getJSONStringForList(listName);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -26,7 +26,6 @@ import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiException;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.model.NameSummary;
|
||||
import org.qortal.controller.LiteNode;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.naming.NameData;
|
||||
import org.qortal.data.transaction.BuyNameTransactionData;
|
||||
@@ -102,14 +101,7 @@ public class NamesResource {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<NameData> names;
|
||||
|
||||
if (Settings.getInstance().isLite()) {
|
||||
names = LiteNode.getInstance().fetchAccountNames(address);
|
||||
}
|
||||
else {
|
||||
names = repository.getNameRepository().getNamesByOwner(address, limit, offset, reverse);
|
||||
}
|
||||
List<NameData> names = repository.getNameRepository().getNamesByOwner(address, limit, offset, reverse);
|
||||
|
||||
return names.stream().map(NameSummary::new).collect(Collectors.toList());
|
||||
} catch (DataException e) {
|
||||
@@ -134,18 +126,10 @@ public class NamesResource {
|
||||
@ApiErrors({ApiError.NAME_UNKNOWN, ApiError.REPOSITORY_ISSUE})
|
||||
public NameData getName(@PathParam("name") String name) {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
NameData nameData;
|
||||
NameData nameData = repository.getNameRepository().fromName(name);
|
||||
|
||||
if (Settings.getInstance().isLite()) {
|
||||
nameData = LiteNode.getInstance().fetchNameData(name);
|
||||
}
|
||||
else {
|
||||
nameData = repository.getNameRepository().fromName(name);
|
||||
}
|
||||
|
||||
if (nameData == null) {
|
||||
if (nameData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.NAME_UNKNOWN);
|
||||
}
|
||||
|
||||
return nameData;
|
||||
} catch (ApiException e) {
|
||||
|
||||
@@ -16,18 +16,19 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.apache.logging.log4j.Level;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.core.config.Configuration;
|
||||
import org.apache.logging.log4j.core.config.LoggerConfig;
|
||||
import org.apache.logging.log4j.core.LoggerContext;
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiException;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.ConnectedPeer;
|
||||
import org.qortal.api.model.PeersSummary;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.controller.Synchronizer.SynchronizationResult;
|
||||
@@ -66,7 +67,7 @@ public class PeersResource {
|
||||
}
|
||||
)
|
||||
public List<ConnectedPeer> getPeers() {
|
||||
return Network.getInstance().getImmutableConnectedPeers().stream().map(ConnectedPeer::new).collect(Collectors.toList());
|
||||
return Network.getInstance().getConnectedPeers().stream().map(ConnectedPeer::new).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@GET
|
||||
@@ -132,29 +133,9 @@ public class PeersResource {
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public ExecuteProduceConsume.StatsSnapshot getEngineStats(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @QueryParam("newLoggingLevel") Level newLoggingLevel) {
|
||||
public ExecuteProduceConsume.StatsSnapshot getEngineStats() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (newLoggingLevel != null) {
|
||||
final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
|
||||
final Configuration config = ctx.getConfiguration();
|
||||
|
||||
String epcClassName = "org.qortal.network.Network.NetworkProcessor";
|
||||
LoggerConfig loggerConfig = config.getLoggerConfig(epcClassName);
|
||||
LoggerConfig specificConfig = loggerConfig;
|
||||
|
||||
// We need a specific configuration for this logger,
|
||||
// otherwise we would change the level of all other loggers
|
||||
// having the original configuration as parent as well
|
||||
if (!loggerConfig.getName().equals(epcClassName)) {
|
||||
specificConfig = new LoggerConfig(epcClassName, newLoggingLevel, true);
|
||||
specificConfig.setParent(loggerConfig);
|
||||
config.addLogger(epcClassName, specificConfig);
|
||||
}
|
||||
specificConfig.setLevel(newLoggingLevel);
|
||||
ctx.updateLoggers();
|
||||
}
|
||||
|
||||
return Network.getInstance().getStatsSnapshot();
|
||||
}
|
||||
|
||||
@@ -190,7 +171,7 @@ public class PeersResource {
|
||||
ApiError.INVALID_NETWORK_ADDRESS, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String addPeer(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String address) {
|
||||
public String addPeer(String address) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
final Long addedWhen = NTP.getTime();
|
||||
@@ -245,7 +226,7 @@ public class PeersResource {
|
||||
ApiError.INVALID_NETWORK_ADDRESS, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String removePeer(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String address) {
|
||||
public String removePeer(String address) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -281,7 +262,7 @@ public class PeersResource {
|
||||
ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String removeKnownPeers(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String address) {
|
||||
public String removeKnownPeers(String address) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -321,7 +302,7 @@ public class PeersResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<BlockSummaryData> commonBlock(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String targetPeerAddress) {
|
||||
public List<BlockSummaryData> commonBlock(String targetPeerAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -329,7 +310,7 @@ public class PeersResource {
|
||||
PeerAddress peerAddress = PeerAddress.fromString(targetPeerAddress);
|
||||
InetSocketAddress resolvedAddress = peerAddress.toSocketAddress();
|
||||
|
||||
List<Peer> peers = Network.getInstance().getImmutableHandshakedPeers();
|
||||
List<Peer> peers = Network.getInstance().getHandshakedPeers();
|
||||
Peer targetPeer = peers.stream().filter(peer -> peer.getResolvedAddress().equals(resolvedAddress)).findFirst().orElse(null);
|
||||
|
||||
if (targetPeer == null)
|
||||
@@ -340,7 +321,7 @@ public class PeersResource {
|
||||
boolean force = true;
|
||||
List<BlockSummaryData> peerBlockSummaries = new ArrayList<>();
|
||||
|
||||
SynchronizationResult findCommonBlockResult = Synchronizer.getInstance().fetchSummariesFromCommonBlock(repository, targetPeer, ourInitialHeight, force, peerBlockSummaries, true);
|
||||
SynchronizationResult findCommonBlockResult = Synchronizer.getInstance().fetchSummariesFromCommonBlock(repository, targetPeer, ourInitialHeight, force, peerBlockSummaries);
|
||||
if (findCommonBlockResult != SynchronizationResult.OK)
|
||||
return null;
|
||||
|
||||
@@ -357,36 +338,4 @@ public class PeersResource {
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/summary")
|
||||
@Operation(
|
||||
summary = "Returns total inbound and outbound connections for connected peers",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
array = @ArraySchema(
|
||||
schema = @Schema(
|
||||
implementation = PeersSummary.class
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
public PeersSummary peersSummary() {
|
||||
PeersSummary peersSummary = new PeersSummary();
|
||||
|
||||
List<Peer> connectedPeers = Network.getInstance().getImmutableConnectedPeers().stream().collect(Collectors.toList());
|
||||
for (Peer peer : connectedPeers) {
|
||||
if (!peer.isOutbound()) {
|
||||
peersSummary.inboundConnections++;
|
||||
}
|
||||
else {
|
||||
peersSummary.outboundConnections++;
|
||||
}
|
||||
}
|
||||
return peersSummary;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,212 +0,0 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.io.*;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.arbitrary.*;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataRenderManager;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.*;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
|
||||
@Path("/render")
|
||||
@Tag(name = "Render")
|
||||
public class RenderResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(RenderResource.class);
|
||||
|
||||
@Context HttpServletRequest request;
|
||||
@Context HttpServletResponse response;
|
||||
@Context ServletContext context;
|
||||
|
||||
@POST
|
||||
@Path("/preview")
|
||||
@Operation(
|
||||
summary = "Generate preview URL based on a user-supplied path and service",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string", example = "/Users/user/Documents/MyStaticWebsite"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "a temporary URL to preview the website",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String preview(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String directoryPath) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Method method = Method.PUT;
|
||||
Compression compression = Compression.ZIP;
|
||||
|
||||
ArbitraryDataWriter arbitraryDataWriter = new ArbitraryDataWriter(Paths.get(directoryPath),
|
||||
null, Service.WEBSITE, null, method, compression,
|
||||
null, null, null, null);
|
||||
try {
|
||||
arbitraryDataWriter.save();
|
||||
} catch (IOException | DataException | InterruptedException | MissingDataException e) {
|
||||
LOGGER.info("Unable to create arbitrary data file: {}", e.getMessage());
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE);
|
||||
} catch (RuntimeException e) {
|
||||
LOGGER.info("Unable to create arbitrary data file: {}", e.getMessage());
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
}
|
||||
|
||||
ArbitraryDataFile arbitraryDataFile = arbitraryDataWriter.getArbitraryDataFile();
|
||||
if (arbitraryDataFile != null) {
|
||||
String digest58 = arbitraryDataFile.digest58();
|
||||
if (digest58 != null) {
|
||||
return "http://localhost:12393/render/hash/" + digest58 + "?secret=" + Base58.encode(arbitraryDataFile.getSecret());
|
||||
}
|
||||
}
|
||||
return "Unable to generate preview URL";
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/authorize/{resourceId}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean authorizeResource(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("resourceId") String resourceId) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Security.disallowLoopbackRequestsIfAuthBypassEnabled(request);
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, null, null);
|
||||
ArbitraryDataRenderManager.getInstance().addToAuthorizedResources(resource);
|
||||
return true;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("authorize/{service}/{resourceId}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean authorizeResource(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("service") Service service,
|
||||
@PathParam("resourceId") String resourceId) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Security.disallowLoopbackRequestsIfAuthBypassEnabled(request);
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, service, null);
|
||||
ArbitraryDataRenderManager.getInstance().addToAuthorizedResources(resource);
|
||||
return true;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("authorize/{service}/{resourceId}/{identifier}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean authorizeResource(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("service") Service service,
|
||||
@PathParam("resourceId") String resourceId,
|
||||
@PathParam("identifier") String identifier) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Security.disallowLoopbackRequestsIfAuthBypassEnabled(request);
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, service, identifier);
|
||||
ArbitraryDataRenderManager.getInstance().addToAuthorizedResources(resource);
|
||||
return true;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/signature/{signature}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexBySignature(@PathParam("signature") String signature,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, signature, Service.WEBSITE, null);
|
||||
return this.get(signature, ResourceIdType.SIGNATURE, null, "/", null, "/render/signature", true, true, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/signature/{signature}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathBySignature(@PathParam("signature") String signature, @PathParam("path") String inPath,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, signature, Service.WEBSITE, null);
|
||||
return this.get(signature, ResourceIdType.SIGNATURE, null, inPath,null, "/render/signature", true, true, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/hash/{hash}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexByHash(@PathParam("hash") String hash58, @QueryParam("secret") String secret58,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, hash58, Service.WEBSITE, null);
|
||||
return this.get(hash58, ResourceIdType.FILE_HASH, Service.WEBSITE, "/", secret58, "/render/hash", true, false, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/hash/{hash}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathByHash(@PathParam("hash") String hash58, @PathParam("path") String inPath,
|
||||
@QueryParam("secret") String secret58,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, hash58, Service.WEBSITE, null);
|
||||
return this.get(hash58, ResourceIdType.FILE_HASH, Service.WEBSITE, inPath, secret58, "/render/hash", true, false, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{service}/{name}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathByName(@PathParam("service") Service service,
|
||||
@PathParam("name") String name,
|
||||
@PathParam("path") String inPath,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, name, service, null);
|
||||
String prefix = String.format("/render/%s", service);
|
||||
return this.get(name, ResourceIdType.NAME, service, inPath, null, prefix, true, true, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{service}/{name}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexByName(@PathParam("service") Service service,
|
||||
@PathParam("name") String name,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, name, service, null);
|
||||
String prefix = String.format("/render/%s", service);
|
||||
return this.get(name, ResourceIdType.NAME, service, "/", null, prefix, true, true, theme);
|
||||
}
|
||||
|
||||
|
||||
|
||||
private HttpServletResponse get(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
|
||||
String secret58, String prefix, boolean usePrefix, boolean async, String theme) {
|
||||
|
||||
ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(resourceId, resourceIdType, service, inPath,
|
||||
secret58, prefix, usePrefix, async, request, response, context);
|
||||
|
||||
if (theme != null) {
|
||||
renderer.setTheme(theme);
|
||||
}
|
||||
return renderer.render();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -9,10 +9,7 @@ import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
@@ -33,8 +30,6 @@ import org.qortal.api.ApiException;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.model.SimpleTransactionSignRequest;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.LiteNode;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.transaction.TransactionData;
|
||||
import org.qortal.globalization.Translator;
|
||||
import org.qortal.repository.DataException;
|
||||
@@ -49,7 +44,6 @@ import org.qortal.transform.transaction.TransactionTransformer;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
import com.google.common.primitives.Bytes;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
@Path("/transactions")
|
||||
@Tag(name = "Transactions")
|
||||
@@ -253,29 +247,14 @@ public class TransactionsResource {
|
||||
ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
public List<TransactionData> getUnconfirmedTransactions(@Parameter(
|
||||
description = "A list of transaction types"
|
||||
) @QueryParam("txType") List<TransactionType> txTypes, @Parameter(
|
||||
description = "Transaction creator's base58 encoded public key"
|
||||
) @QueryParam("creator") String creatorPublicKey58, @Parameter(
|
||||
ref = "limit"
|
||||
) @QueryParam("limit") Integer limit, @Parameter(
|
||||
ref = "offset"
|
||||
) @QueryParam("offset") Integer offset, @Parameter(
|
||||
ref = "reverse"
|
||||
) @QueryParam("reverse") Boolean reverse) {
|
||||
|
||||
// Decode public key if supplied
|
||||
byte[] creatorPublicKey = null;
|
||||
if (creatorPublicKey58 != null) {
|
||||
try {
|
||||
creatorPublicKey = Base58.decode(creatorPublicKey58);
|
||||
} catch (NumberFormatException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PUBLIC_KEY, e);
|
||||
}
|
||||
}
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
return repository.getTransactionRepository().getUnconfirmedTransactions(txTypes, creatorPublicKey, limit, offset, reverse);
|
||||
return repository.getTransactionRepository().getUnconfirmedTransactions(limit, offset, reverse);
|
||||
} catch (ApiException e) {
|
||||
throw e;
|
||||
} catch (DataException e) {
|
||||
@@ -369,7 +348,7 @@ public class TransactionsResource {
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<byte[]> signatures = repository.getTransactionRepository().getSignaturesMatchingCriteria(startBlock, blockLimit, txGroupId,
|
||||
txTypes, null, null, address, confirmationStatus, limit, offset, reverse);
|
||||
txTypes, null, address, confirmationStatus, limit, offset, reverse);
|
||||
|
||||
// Expand signatures to transactions
|
||||
List<TransactionData> transactions = new ArrayList<>(signatures.size());
|
||||
@@ -384,150 +363,6 @@ public class TransactionsResource {
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/address/{address}")
|
||||
@Operation(
|
||||
summary = "Returns transactions for given address",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "transactions",
|
||||
content = @Content(
|
||||
array = @ArraySchema(
|
||||
schema = @Schema(
|
||||
implementation = TransactionData.class
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_ADDRESS, ApiError.REPOSITORY_ISSUE})
|
||||
public List<TransactionData> getAddressTransactions(@PathParam("address") String address,
|
||||
@Parameter(ref = "limit") @QueryParam("limit") Integer limit,
|
||||
@Parameter(ref = "offset") @QueryParam("offset") Integer offset,
|
||||
@Parameter(ref = "reverse") @QueryParam("reverse") Boolean reverse) {
|
||||
if (!Crypto.isValidAddress(address)) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
}
|
||||
|
||||
if (limit == null) {
|
||||
limit = 0;
|
||||
}
|
||||
if (offset == null) {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
List<TransactionData> transactions;
|
||||
|
||||
if (Settings.getInstance().isLite()) {
|
||||
// Fetch from network
|
||||
transactions = LiteNode.getInstance().fetchAccountTransactions(address, limit, offset);
|
||||
|
||||
// Sort the data, since we can't guarantee the order that a peer sent it in
|
||||
if (reverse) {
|
||||
transactions.sort(Comparator.comparingLong(TransactionData::getTimestamp).reversed());
|
||||
} else {
|
||||
transactions.sort(Comparator.comparingLong(TransactionData::getTimestamp));
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Fetch from local db
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<byte[]> signatures = repository.getTransactionRepository().getSignaturesMatchingCriteria(null, null, null,
|
||||
null, null, null, address, TransactionsResource.ConfirmationStatus.CONFIRMED, limit, offset, reverse);
|
||||
|
||||
// Expand signatures to transactions
|
||||
transactions = new ArrayList<>(signatures.size());
|
||||
for (byte[] signature : signatures) {
|
||||
transactions.add(repository.getTransactionRepository().fromSignature(signature));
|
||||
}
|
||||
} catch (ApiException e) {
|
||||
throw e;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
return transactions;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/unitfee")
|
||||
@Operation(
|
||||
summary = "Get transaction unit fee",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "number"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({
|
||||
ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
public long getTransactionUnitFee(@QueryParam("txType") TransactionType txType,
|
||||
@QueryParam("timestamp") Long timestamp,
|
||||
@QueryParam("level") Integer accountLevel) {
|
||||
try {
|
||||
if (timestamp == null) {
|
||||
timestamp = NTP.getTime();
|
||||
}
|
||||
|
||||
Constructor<?> constructor = txType.constructor;
|
||||
Transaction transaction = (Transaction) constructor.newInstance(null, null);
|
||||
// FUTURE: add accountLevel parameter to transaction.getUnitFee() if needed
|
||||
return transaction.getUnitFee(timestamp);
|
||||
|
||||
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA, e);
|
||||
}
|
||||
}

@POST
@Path("/fee")
@Operation(
summary = "Get recommended fee for supplied transaction data",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string"
)
)
)
)
@ApiErrors({
ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE
})
public long getRecommendedTransactionFee(String rawInputBytes58) {
byte[] rawInputBytes = Base58.decode(rawInputBytes58);
if (rawInputBytes.length == 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.JSON);

try (final Repository repository = RepositoryManager.getRepository()) {

// Append null signature on the end before transformation
byte[] rawBytes = Bytes.concat(rawInputBytes, new byte[TransactionTransformer.SIGNATURE_LENGTH]);

TransactionData transactionData = TransactionTransformer.fromBytes(rawBytes);
if (transactionData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);

Transaction transaction = Transaction.fromData(repository, transactionData);
return transaction.calcRecommendedFee();

} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
} catch (TransformationException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSFORMATION_ERROR, e);
}
}
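The fee handler above accepts an unsigned transaction in Base58 and pads it with an empty signature before parsing, because the serialized layout ends with a signature field. Below is a minimal sketch of that padding step, assuming a 64-byte signature length (the project constant is `TransactionTransformer.SIGNATURE_LENGTH`); Guava's `Bytes.concat` is the same helper used above.

```java
import com.google.common.primitives.Bytes;
import org.qortal.utils.Base58;

public class NullSignaturePaddingExample {
    // Assumed signature length; inside the project use TransactionTransformer.SIGNATURE_LENGTH
    private static final int SIGNATURE_LENGTH = 64;

    public static byte[] padWithNullSignature(String rawInputBytes58) {
        byte[] rawInputBytes = Base58.decode(rawInputBytes58);

        // Append a zero-filled signature so the bytes can be parsed as a complete transaction
        return Bytes.concat(rawInputBytes, new byte[SIGNATURE_LENGTH]);
    }
}
```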

@GET
@Path("/creator/{publickey}")
@Operation(
@@ -582,84 +417,33 @@ public class TransactionsResource {
}
}

@POST
@Path("/convert")
@Operation(
summary = "Convert transaction bytes into bytes for signing",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string",
description = "raw, unsigned transaction in base58 encoding",
example = "raw transaction base58"
)
)
),
responses = {
@ApiResponse(
description = "raw, unsigned transaction encoded in Base58, ready for signing",
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string"
)
)
)
}
)
@ApiErrors({
ApiError.NON_PRODUCTION, ApiError.TRANSFORMATION_ERROR
})
public String convertTransactionForSigning(String rawInputBytes58) {
byte[] rawInputBytes = Base58.decode(rawInputBytes58);
if (rawInputBytes.length == 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.JSON);

try {
// Append null signature on the end before transformation
byte[] rawBytes = Bytes.concat(rawInputBytes, new byte[TransactionTransformer.SIGNATURE_LENGTH]);

TransactionData transactionData = TransactionTransformer.fromBytes(rawBytes);
if (transactionData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);

byte[] convertedBytes = TransactionTransformer.toBytesForSigning(transactionData);

return Base58.encode(convertedBytes);
} catch (TransformationException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSFORMATION_ERROR, e);
}
}

@POST
@Path("/sign")
@Operation(
summary = "Sign a raw, unsigned transaction",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(
implementation = SimpleTransactionSignRequest.class
)
)
),
responses = {
@ApiResponse(
description = "raw, signed transaction encoded in Base58",
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string"
)
)
)
}
)
@ApiErrors({
ApiError.NON_PRODUCTION, ApiError.INVALID_PRIVATE_KEY, ApiError.TRANSFORMATION_ERROR
})
public String signTransaction(SimpleTransactionSignRequest signRequest) {
if (Settings.getInstance().isApiRestricted())
@@ -723,10 +507,7 @@ public class TransactionsResource {
ApiError.BLOCKCHAIN_NEEDS_SYNC, ApiError.INVALID_SIGNATURE, ApiError.INVALID_DATA, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE
})
public String processTransaction(String rawBytes58) {
// Only allow a transaction to be processed if our latest block is less than 60 minutes old
// If older than this, we should first wait until the blockchain is synced
final Long minLatestBlockTimestamp = NTP.getTime() - (60 * 60 * 1000L);
if (!Controller.getInstance().isUpToDate(minLatestBlockTimestamp))
if (!Controller.getInstance().isUpToDate())
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCKCHAIN_NEEDS_SYNC);

byte[] rawBytes = Base58.decode(rawBytes58);
@@ -748,7 +529,7 @@ public class TransactionsResource {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_SIGNATURE);

ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();
if (!blockchainLock.tryLock(60, TimeUnit.SECONDS))
if (!blockchainLock.tryLock(30, TimeUnit.SECONDS))
throw createTransactionInvalidException(request, ValidationResult.NO_BLOCKCHAIN_LOCK);

try {

@@ -33,6 +33,7 @@ import org.qortal.transaction.Transaction.TransactionType;
import org.qortal.transform.Transformer;
import org.qortal.transform.transaction.TransactionTransformer;
import org.qortal.transform.transaction.TransactionTransformer.Transformation;
import org.qortal.utils.BIP39;
import org.qortal.utils.Base58;

import com.google.common.hash.HashCode;
@@ -194,6 +195,123 @@ public class UtilsResource {
return Base58.encode(random);
}

@GET
@Path("/mnemonic")
@Operation(
summary = "Generate 12-word BIP39 mnemonic",
description = "Optionally pass 16-byte, base58-encoded entropy or entropy will be internally generated.<br>"
+ "Example entropy input: YcVfxkQb6JRzqk5kF2tNLv",
responses = {
@ApiResponse(
description = "mnemonic",
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string"
)
)
)
}
)
@ApiErrors({ApiError.NON_PRODUCTION, ApiError.INVALID_DATA})
public String getMnemonic(@QueryParam("entropy") String suppliedEntropy) {
if (Settings.getInstance().isApiRestricted())
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.NON_PRODUCTION);

/*
* BIP39 word lists have 2048 entries so can be represented by 11 bits.
* UUID (128bits) and another 4 bits gives 132 bits.
* 132 bits, divided by 11, gives 12 words.
*/
byte[] entropy;
if (suppliedEntropy != null) {
// Use caller-supplied entropy input
try {
entropy = Base58.decode(suppliedEntropy);
} catch (NumberFormatException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
}

// Must be 16 bytes
if (entropy.length != 16)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
} else {
// Generate entropy internally
UUID uuid = UUID.randomUUID();

byte[] uuidMSB = Longs.toByteArray(uuid.getMostSignificantBits());
byte[] uuidLSB = Longs.toByteArray(uuid.getLeastSignificantBits());
entropy = Bytes.concat(uuidMSB, uuidLSB);
}

// Use SHA256 to generate more bits
byte[] hash = Crypto.digest(entropy);

// Append first 4 bits from hash to end. (Actually 8 bits but we only use 4).
byte checksum = (byte) (hash[0] & 0xf0);
entropy = Bytes.concat(entropy, new byte[] {
checksum
});

return BIP39.encode(entropy, "en");
}
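The comment block above reasons that 16 bytes of entropy plus a 4-bit checksum gives 132 bits, which divides exactly into twelve 11-bit word indices. A standalone arithmetic check of that claim (illustration only, not project code):

```java
public class Bip39MathCheck {
    public static void main(String[] args) {
        int wordListSize = 2048;                                        // standard BIP39 English word list
        int bitsPerWord = Integer.numberOfTrailingZeros(wordListSize);  // 11
        int entropyBits = 16 * 8;                                       // 16 bytes of entropy = 128 bits
        int checksumBits = entropyBits / 32;                            // BIP39 checksum is ENT/32 bits = 4
        int totalBits = entropyBits + checksumBits;                     // 132
        int words = totalBits / bitsPerWord;                            // 12

        System.out.printf("bits/word=%d total=%d words=%d%n", bitsPerWord, totalBits, words);
    }
}
```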

@POST
@Path("/mnemonic")
@Operation(
summary = "Calculate binary entropy from 12-word BIP39 mnemonic",
description = "Returns the base58-encoded binary form, or \"false\" if mnemonic is invalid.",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string"
)
)
),
responses = {
@ApiResponse(
description = "entropy in base58",
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string"
)
)
)
}
)
@ApiErrors({ApiError.NON_PRODUCTION})
public String fromMnemonic(String mnemonic) {
if (Settings.getInstance().isApiRestricted())
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.NON_PRODUCTION);

if (mnemonic.isEmpty())
return "false";

// Strip leading/trailing whitespace if any
mnemonic = mnemonic.trim();

String[] phraseWords = mnemonic.split(" ");
if (phraseWords.length != 12)
return "false";

// Convert BIP39 mnemonic to binary
byte[] binary = BIP39.decode(phraseWords, "en");
if (binary == null)
return "false";

byte[] entropy = Arrays.copyOf(binary, 16); // 132 bits is 16.5 bytes, but we're discarding checksum nybble

byte checksumNybble = (byte) (binary[16] & 0xf0);
byte[] checksum = Crypto.digest(entropy);
if (checksumNybble != (byte) (checksum[0] & 0xf0))
return "false";

return Base58.encode(entropy);
}
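Together, the two `/mnemonic` handlers round-trip 16 bytes of entropy through a 12-word phrase, with the top nybble of a SHA-256 digest acting as the checksum. A standalone sketch of that checksum verification using only the JDK (`Crypto.digest` above is described as SHA-256); the sample input is hypothetical.

```java
import java.security.MessageDigest;
import java.util.Arrays;

public class MnemonicChecksumExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical decoded output: 16 entropy bytes plus one byte whose top nybble is the checksum
        byte[] binary = new byte[17];
        byte[] entropy = Arrays.copyOf(binary, 16);

        // Recompute the checksum nybble exactly as the handlers above do
        byte[] hash = MessageDigest.getInstance("SHA-256").digest(entropy);
        byte expectedNybble = (byte) (hash[0] & 0xf0);
        byte actualNybble = (byte) (binary[16] & 0xf0);

        System.out.println(actualNybble == expectedNybble ? "valid" : "invalid");
    }
}
```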
|
||||
|
||||
@POST
|
||||
@Path("/privatekey")
|
||||
@Operation(
|
||||
|
||||
@@ -115,9 +115,6 @@ public class ChatMessagesWebSocket extends ApiWebSocket {
|
||||
}
|
||||
|
||||
private void onNotify(Session session, ChatTransactionData chatTransactionData, List<String> involvingAddresses) {
|
||||
if (chatTransactionData == null)
|
||||
return;
|
||||
|
||||
// We only want direct/non-group messages where sender/recipient match our addresses
|
||||
String recipient = chatTransactionData.getRecipient();
|
||||
if (recipient == null)
|
||||
|
||||
@@ -20,7 +20,6 @@ import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage;
|
||||
import org.eclipse.jetty.websocket.api.annotations.WebSocket;
|
||||
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.transaction.PresenceTransactionData;
|
||||
import org.qortal.data.transaction.TransactionData;
|
||||
@@ -100,13 +99,13 @@ public class PresenceWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
@Override
|
||||
public void listen(Event event) {
|
||||
// We use Synchronizer.NewChainTipEvent as a proxy for 1-minute timer
|
||||
if (!(event instanceof Controller.NewTransactionEvent) && !(event instanceof Synchronizer.NewChainTipEvent))
|
||||
// We use NewBlockEvent as a proxy for 1-minute timer
|
||||
if (!(event instanceof Controller.NewTransactionEvent) && !(event instanceof Controller.NewBlockEvent))
|
||||
return;
|
||||
|
||||
removeOldEntries();
|
||||
|
||||
if (event instanceof Synchronizer.NewChainTipEvent)
|
||||
if (event instanceof Controller.NewBlockEvent)
|
||||
// We only wanted a chance to cull old entries
|
||||
return;
|
||||
|
||||
|
||||
@@ -2,7 +2,10 @@ package org.qortal.api.websocket;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.*;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.eclipse.jetty.websocket.api.Session;
|
||||
@@ -82,7 +85,6 @@ public class TradeBotWebSocket extends ApiWebSocket implements Listener {
|
||||
@Override
|
||||
public void onWebSocketConnect(Session session) {
|
||||
Map<String, List<String>> queryParams = session.getUpgradeRequest().getParameterMap();
|
||||
final boolean excludeInitialData = queryParams.get("excludeInitialData") != null;
|
||||
|
||||
List<String> foreignBlockchains = queryParams.get("foreignBlockchain");
|
||||
final String foreignBlockchain = foreignBlockchains == null ? null : foreignBlockchains.get(0);
|
||||
@@ -96,22 +98,15 @@ public class TradeBotWebSocket extends ApiWebSocket implements Listener {
|
||||
// save session's preferred blockchain (if any)
|
||||
sessionBlockchain.put(session, foreignBlockchain);
|
||||
|
||||
|
||||
|
||||
// Maybe send all known trade-bot entries
|
||||
// Send all known trade-bot entries
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> tradeBotEntries = new ArrayList<>();
|
||||
List<TradeBotData> tradeBotEntries = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
|
||||
// We might need to exclude the initial data from the response
|
||||
if (!excludeInitialData) {
|
||||
tradeBotEntries = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
|
||||
// Optional filtering
|
||||
if (foreignBlockchain != null)
|
||||
tradeBotEntries = tradeBotEntries.stream()
|
||||
.filter(tradeBotData -> tradeBotData.getForeignBlockchain().equals(foreignBlockchain))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
// Optional filtering
|
||||
if (foreignBlockchain != null)
|
||||
tradeBotEntries = tradeBotEntries.stream()
|
||||
.filter(tradeBotData -> tradeBotData.getForeignBlockchain().equals(foreignBlockchain))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (!sendEntries(session, tradeBotEntries)) {
|
||||
session.close(4002, "websocket issue");
|
||||
|
||||
@@ -23,7 +23,6 @@ import org.eclipse.jetty.websocket.api.annotations.WebSocket;
|
||||
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
|
||||
import org.qortal.api.model.CrossChainOfferSummary;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.crosschain.SupportedBlockchain;
|
||||
import org.qortal.crosschain.ACCT;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
@@ -81,10 +80,10 @@ public class TradeOffersWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
@Override
|
||||
public void listen(Event event) {
|
||||
if (!(event instanceof Synchronizer.NewChainTipEvent))
|
||||
if (!(event instanceof Controller.NewBlockEvent))
|
||||
return;
|
||||
|
||||
BlockData blockData = ((Synchronizer.NewChainTipEvent) event).getNewChainTip();
|
||||
BlockData blockData = ((Controller.NewBlockEvent) event).getBlockData();
|
||||
|
||||
// Process any new info
|
||||
|
||||
@@ -173,7 +172,6 @@ public class TradeOffersWebSocket extends ApiWebSocket implements Listener {
|
||||
public void onWebSocketConnect(Session session) {
|
||||
Map<String, List<String>> queryParams = session.getUpgradeRequest().getParameterMap();
|
||||
final boolean includeHistoric = queryParams.get("includeHistoric") != null;
|
||||
final boolean excludeInitialData = queryParams.get("excludeInitialData") != null;
|
||||
|
||||
List<String> foreignBlockchains = queryParams.get("foreignBlockchain");
|
||||
final String foreignBlockchain = foreignBlockchains == null ? null : foreignBlockchains.get(0);
|
||||
@@ -190,23 +188,20 @@ public class TradeOffersWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
List<CrossChainOfferSummary> crossChainOfferSummaries = new ArrayList<>();
|
||||
|
||||
// We might need to exclude the initial data from the response
|
||||
if (!excludeInitialData) {
|
||||
synchronized (cachedInfoByBlockchain) {
|
||||
Collection<CachedOfferInfo> cachedInfos;
|
||||
synchronized (cachedInfoByBlockchain) {
|
||||
Collection<CachedOfferInfo> cachedInfos;
|
||||
|
||||
if (foreignBlockchain == null)
|
||||
// No preferred blockchain, so iterate through all of them
|
||||
cachedInfos = cachedInfoByBlockchain.values();
|
||||
else
|
||||
cachedInfos = Collections.singleton(cachedInfoByBlockchain.computeIfAbsent(foreignBlockchain, k -> new CachedOfferInfo()));
|
||||
if (foreignBlockchain == null)
|
||||
// No preferred blockchain, so iterate through all of them
|
||||
cachedInfos = cachedInfoByBlockchain.values();
|
||||
else
|
||||
cachedInfos = Collections.singleton(cachedInfoByBlockchain.computeIfAbsent(foreignBlockchain, k -> new CachedOfferInfo()));
|
||||
|
||||
for (CachedOfferInfo cachedInfo : cachedInfos) {
|
||||
crossChainOfferSummaries.addAll(cachedInfo.currentSummaries.values());
|
||||
for (CachedOfferInfo cachedInfo : cachedInfos) {
|
||||
crossChainOfferSummaries.addAll(cachedInfo.currentSummaries.values());
|
||||
|
||||
if (includeHistoric)
|
||||
crossChainOfferSummaries.addAll(cachedInfo.historicSummaries.values());
|
||||
}
|
||||
if (includeHistoric)
|
||||
crossChainOfferSummaries.addAll(cachedInfo.historicSummaries.values());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,141 +0,0 @@
|
||||
package org.qortal.api.websocket;
|
||||
|
||||
import org.eclipse.jetty.websocket.api.Session;
|
||||
import org.eclipse.jetty.websocket.api.annotations.*;
|
||||
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.tradebot.TradeBot;
|
||||
import org.qortal.data.network.TradePresenceData;
|
||||
import org.qortal.event.Event;
|
||||
import org.qortal.event.EventBus;
|
||||
import org.qortal.event.Listener;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.*;
|
||||
|
||||
@WebSocket
|
||||
@SuppressWarnings("serial")
|
||||
public class TradePresenceWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
/** Map key is public key in base58, map value is trade presence */
|
||||
private static final Map<String, TradePresenceData> currentEntries = Collections.synchronizedMap(new HashMap<>());
|
||||
|
||||
@Override
|
||||
public void configure(WebSocketServletFactory factory) {
|
||||
factory.register(TradePresenceWebSocket.class);
|
||||
|
||||
populateCurrentInfo();
|
||||
|
||||
EventBus.INSTANCE.addListener(this::listen);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void listen(Event event) {
|
||||
// XXX - Suggest we change this to something like Synchronizer.NewChainTipEvent?
|
||||
// We use NewBlockEvent as a proxy for 1-minute timer
|
||||
if (!(event instanceof TradeBot.TradePresenceEvent) && !(event instanceof Controller.NewBlockEvent))
|
||||
return;
|
||||
|
||||
removeOldEntries();
|
||||
|
||||
if (event instanceof Controller.NewBlockEvent)
|
||||
// We only wanted a chance to cull old entries
|
||||
return;
|
||||
|
||||
TradePresenceData tradePresence = ((TradeBot.TradePresenceEvent) event).getTradePresenceData();
|
||||
|
||||
boolean somethingChanged = mergePresence(tradePresence);
|
||||
|
||||
if (!somethingChanged)
|
||||
// nothing changed
|
||||
return;
|
||||
|
||||
List<TradePresenceData> tradePresences = Collections.singletonList(tradePresence);
|
||||
|
||||
// Notify sessions
|
||||
for (Session session : getSessions()) {
|
||||
sendTradePresences(session, tradePresences);
|
||||
}
|
||||
}
|
||||
|
||||
@OnWebSocketConnect
|
||||
@Override
|
||||
public void onWebSocketConnect(Session session) {
|
||||
Map<String, List<String>> queryParams = session.getUpgradeRequest().getParameterMap();
|
||||
final boolean excludeInitialData = queryParams.get("excludeInitialData") != null;
|
||||
|
||||
List<TradePresenceData> tradePresences = new ArrayList<>();
|
||||
|
||||
// We might need to exclude the initial data from the response
|
||||
if (!excludeInitialData) {
|
||||
synchronized (currentEntries) {
|
||||
tradePresences = List.copyOf(currentEntries.values());
|
||||
}
|
||||
}
|
||||
|
||||
if (!sendTradePresences(session, tradePresences)) {
|
||||
session.close(4002, "websocket issue");
|
||||
return;
|
||||
}
|
||||
|
||||
super.onWebSocketConnect(session);
|
||||
}
|
||||
|
||||
@OnWebSocketClose
|
||||
@Override
|
||||
public void onWebSocketClose(Session session, int statusCode, String reason) {
|
||||
// clean up
|
||||
super.onWebSocketClose(session, statusCode, reason);
|
||||
}
|
||||
|
||||
@OnWebSocketError
|
||||
public void onWebSocketError(Session session, Throwable throwable) {
|
||||
/* ignored */
|
||||
}
|
||||
|
||||
@OnWebSocketMessage
|
||||
public void onWebSocketMessage(Session session, String message) {
|
||||
/* ignored */
|
||||
}
|
||||
|
||||
private boolean sendTradePresences(Session session, List<TradePresenceData> tradePresences) {
|
||||
try {
|
||||
StringWriter stringWriter = new StringWriter();
|
||||
marshall(stringWriter, tradePresences);
|
||||
|
||||
String output = stringWriter.toString();
|
||||
session.getRemote().sendStringByFuture(output);
|
||||
} catch (IOException e) {
|
||||
// No output this time?
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static void populateCurrentInfo() {
|
||||
// We want ALL trade presences
|
||||
TradeBot.getInstance().getAllTradePresences().stream()
|
||||
.forEach(TradePresenceWebSocket::mergePresence);
|
||||
}
|
||||
|
||||
/** Merge trade presence into cache of current entries, returns true if cache was updated. */
|
||||
private static boolean mergePresence(TradePresenceData tradePresence) {
|
||||
// Put/replace for this publickey making sure we keep newest timestamp
|
||||
String pubKey58 = Base58.encode(tradePresence.getPublicKey());
|
||||
|
||||
TradePresenceData newEntry = currentEntries.compute(pubKey58, (k, v) -> v == null || v.getTimestamp() < tradePresence.getTimestamp() ? tradePresence : v);
|
||||
|
||||
return newEntry == tradePresence;
|
||||
}
|
||||
|
||||
private static void removeOldEntries() {
|
||||
long now = NTP.getTime();
|
||||
|
||||
currentEntries.values().removeIf(v -> v.getTimestamp() < now);
|
||||
}
|
||||
|
||||
}
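`mergePresence()` above keeps, for each public key, whichever presence entry carries the newest timestamp, and reports whether the cache actually changed. A self-contained illustration of that `Map.compute` pattern follows; `PresenceEntry` is a hypothetical stand-in for `TradePresenceData`.

```java
import java.util.HashMap;
import java.util.Map;

public class KeepNewestMergeExample {
    // Hypothetical stand-in for TradePresenceData: just a key and a timestamp
    static final class PresenceEntry {
        final String pubKey58;
        final long timestamp;
        PresenceEntry(String pubKey58, long timestamp) { this.pubKey58 = pubKey58; this.timestamp = timestamp; }
    }

    private static final Map<String, PresenceEntry> currentEntries = new HashMap<>();

    /** Returns true only if the incoming entry was newer and therefore replaced (or created) the cached one. */
    static boolean merge(PresenceEntry incoming) {
        PresenceEntry kept = currentEntries.compute(incoming.pubKey58,
                (k, existing) -> existing == null || existing.timestamp < incoming.timestamp ? incoming : existing);
        return kept == incoming;
    }

    public static void main(String[] args) {
        System.out.println(merge(new PresenceEntry("pubkey", 200L))); // true  - nothing cached yet
        System.out.println(merge(new PresenceEntry("pubkey", 100L))); // false - older entry is ignored
        System.out.println(merge(new PresenceEntry("pubkey", 300L))); // true  - newer timestamp wins
    }
}
```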
|
||||
@@ -1,101 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class ArbitraryDataBuildQueueItem extends ArbitraryDataResource {
|
||||
|
||||
private final Long creationTimestamp;
|
||||
private Long buildStartTimestamp = null;
|
||||
private Long buildEndTimestamp = null;
|
||||
private Integer priority = 0;
|
||||
private boolean failed = false;
|
||||
|
||||
private static int HIGH_PRIORITY_THRESHOLD = 5;
|
||||
|
||||
/* The maximum amount of time to spend on a single build */
|
||||
// TODO: interrupt an in-progress build
|
||||
public static long BUILD_TIMEOUT = 60*1000L; // 60 seconds
|
||||
/* The amount of time to remember that a build has failed, to avoid retries */
|
||||
public static long FAILURE_TIMEOUT = 5*60*1000L; // 5 minutes
|
||||
|
||||
public ArbitraryDataBuildQueueItem(String resourceId, ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
super(resourceId, resourceIdType, service, identifier);
|
||||
|
||||
this.creationTimestamp = NTP.getTime();
|
||||
}
|
||||
|
||||
public void prepareForBuild() {
|
||||
this.buildStartTimestamp = NTP.getTime();
|
||||
}
|
||||
|
||||
public void build() throws IOException, DataException, MissingDataException {
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
this.buildStartTimestamp = null;
|
||||
throw new DataException("NTP time hasn't synced yet");
|
||||
}
|
||||
|
||||
if (this.buildStartTimestamp == null) {
|
||||
this.buildStartTimestamp = now;
|
||||
}
|
||||
ArbitraryDataReader arbitraryDataReader =
|
||||
new ArbitraryDataReader(this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
|
||||
try {
|
||||
arbitraryDataReader.loadSynchronously(true);
|
||||
} finally {
|
||||
this.buildEndTimestamp = NTP.getTime();
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isBuilding() {
|
||||
return this.buildStartTimestamp != null;
|
||||
}
|
||||
|
||||
public boolean isQueued() {
|
||||
return this.buildStartTimestamp == null;
|
||||
}
|
||||
|
||||
public boolean hasReachedBuildTimeout(Long now) {
|
||||
if (now == null || this.creationTimestamp == null) {
|
||||
return true;
|
||||
}
|
||||
return now - this.creationTimestamp > BUILD_TIMEOUT;
|
||||
}
|
||||
|
||||
public boolean hasReachedFailureTimeout(Long now) {
|
||||
if (now == null || this.buildStartTimestamp == null) {
|
||||
return true;
|
||||
}
|
||||
return now - this.buildStartTimestamp > FAILURE_TIMEOUT;
|
||||
}
|
||||
|
||||
public Long getBuildStartTimestamp() {
|
||||
return this.buildStartTimestamp;
|
||||
}
|
||||
|
||||
public Integer getPriority() {
|
||||
if (this.priority != null) {
|
||||
return this.priority;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
public void setPriority(Integer priority) {
|
||||
this.priority = priority;
|
||||
}
|
||||
|
||||
public boolean isHighPriority() {
|
||||
return this.priority >= HIGH_PRIORITY_THRESHOLD;
|
||||
}
|
||||
|
||||
public void setFailed(boolean failed) {
|
||||
this.failed = failed;
|
||||
}
|
||||
}
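`ArbitraryDataBuildQueueItem` exposes a priority (with a high-priority threshold of 5) plus build and failure timeouts, but how the queue is drained is not part of this excerpt. The sketch below only illustrates one plausible ordering, highest priority first, using the getters shown above; the real scheduling policy is an assumption.

```java
import java.util.Comparator;
import java.util.PriorityQueue;

import org.qortal.arbitrary.ArbitraryDataBuildQueueItem; // the class defined above

public class BuildQueueOrderingSketch {
    // Illustration only: highest getPriority() value is served first,
    // so isHighPriority() items (priority >= 5) naturally jump ahead.
    static PriorityQueue<ArbitraryDataBuildQueueItem> newBuildQueue() {
        return new PriorityQueue<>(
                Comparator.comparingInt(ArbitraryDataBuildQueueItem::getPriority).reversed());
    }
}
```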
|
||||
@@ -1,280 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataCache;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.Method;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class ArbitraryDataBuilder {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataBuilder.class);
|
||||
|
||||
private final String name;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
|
||||
private boolean canRequestMissingFiles;
|
||||
|
||||
private List<ArbitraryTransactionData> transactions;
|
||||
private ArbitraryTransactionData latestPutTransaction;
|
||||
private final List<Path> paths;
|
||||
private byte[] latestSignature;
|
||||
private Path finalPath;
|
||||
private int layerCount;
|
||||
|
||||
public ArbitraryDataBuilder(String name, Service service, String identifier) {
|
||||
this.name = name;
|
||||
this.service = service;
|
||||
this.identifier = identifier;
|
||||
this.paths = new ArrayList<>();
|
||||
|
||||
// By default we can request missing files
|
||||
// Callers can use setCanRequestMissingFiles(false) to prevent it
|
||||
this.canRequestMissingFiles = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process transactions, but do not build anything
|
||||
* This is useful for checking the status of a given resource
|
||||
*
|
||||
* @throws DataException
|
||||
* @throws IOException
|
||||
* @throws MissingDataException
|
||||
*/
|
||||
public void process() throws DataException, IOException, MissingDataException {
|
||||
this.fetchTransactions();
|
||||
this.validateTransactions();
|
||||
this.processTransactions();
|
||||
this.validatePaths();
|
||||
this.findLatestSignature();
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the latest state of a given resource
|
||||
*
|
||||
* @throws DataException
|
||||
* @throws IOException
|
||||
* @throws MissingDataException
|
||||
*/
|
||||
public void build() throws DataException, IOException, MissingDataException {
|
||||
this.process();
|
||||
this.buildLatestState();
|
||||
this.cacheLatestSignature();
|
||||
}
|
||||
|
||||
private void fetchTransactions() throws DataException {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Get the most recent PUT
|
||||
ArbitraryTransactionData latestPut = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.name, this.service, Method.PUT, this.identifier);
|
||||
if (latestPut == null) {
|
||||
String message = String.format("Couldn't find PUT transaction for name %s, service %s and identifier %s",
|
||||
this.name, this.service, this.identifierString());
|
||||
throw new DataException(message);
|
||||
}
|
||||
this.latestPutTransaction = latestPut;
|
||||
|
||||
// Load all transactions since the latest PUT
|
||||
List<ArbitraryTransactionData> transactionDataList = repository.getArbitraryRepository()
|
||||
.getArbitraryTransactions(this.name, this.service, this.identifier, latestPut.getTimestamp());
|
||||
|
||||
this.transactions = transactionDataList;
|
||||
this.layerCount = transactionDataList.size();
|
||||
}
|
||||
}
|
||||
|
||||
private void validateTransactions() throws DataException {
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
ArbitraryTransactionData latestPut = this.latestPutTransaction;
|
||||
|
||||
if (latestPut == null) {
|
||||
throw new DataException("Cannot PATCH without existing PUT. Deploy using PUT first.");
|
||||
}
|
||||
if (latestPut.getMethod() != Method.PUT) {
|
||||
throw new DataException("Expected PUT but received PATCH");
|
||||
}
|
||||
if (transactionDataList.size() == 0) {
|
||||
throw new DataException(String.format("No transactions found for name %s, service %s, " +
|
||||
"identifier: %s, since %d", name, service, this.identifierString(), latestPut.getTimestamp()));
|
||||
}
|
||||
|
||||
// Verify that the signature of the first transaction matches the latest PUT
|
||||
ArbitraryTransactionData firstTransaction = transactionDataList.get(0);
|
||||
if (!Arrays.equals(firstTransaction.getSignature(), latestPut.getSignature())) {
|
||||
throw new DataException("First transaction did not match latest PUT transaction");
|
||||
}
|
||||
|
||||
// Remove the first transaction, as it should be the only PUT
|
||||
transactionDataList.remove(0);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
if (transactionData == null) {
|
||||
throw new DataException("Transaction not found");
|
||||
}
|
||||
if (transactionData.getMethod() != Method.PATCH) {
|
||||
throw new DataException("Expected PATCH but received PUT");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void processTransactions() throws IOException, DataException, MissingDataException {
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
int count = 0;
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
LOGGER.trace("Found arbitrary transaction {}", Base58.encode(transactionData.getSignature()));
|
||||
count++;
|
||||
|
||||
// Build the data file, overwriting anything that was previously there
|
||||
String sig58 = Base58.encode(transactionData.getSignature());
|
||||
ArbitraryDataReader arbitraryDataReader = new ArbitraryDataReader(sig58, ResourceIdType.TRANSACTION_DATA,
|
||||
this.service, this.identifier);
|
||||
arbitraryDataReader.setTransactionData(transactionData);
|
||||
arbitraryDataReader.setCanRequestMissingFiles(this.canRequestMissingFiles);
|
||||
boolean hasMissingData = false;
|
||||
try {
|
||||
arbitraryDataReader.loadSynchronously(true);
|
||||
}
|
||||
catch (MissingDataException e) {
|
||||
hasMissingData = true;
|
||||
}
|
||||
|
||||
// Handle missing data
|
||||
if (hasMissingData) {
|
||||
if (!this.canRequestMissingFiles) {
|
||||
throw new MissingDataException("Files are missing but were not requested.");
|
||||
}
|
||||
if (count == transactionDataList.size()) {
|
||||
// This is the final transaction in the list, so we need to fail
|
||||
throw new MissingDataException("Requesting missing files. Please wait and try again.");
|
||||
}
|
||||
// There are more transactions, so we should process them to give them the opportunity to request data
|
||||
continue;
|
||||
}
|
||||
|
||||
// By this point we should have all data needed to build the layers
|
||||
Path path = arbitraryDataReader.getFilePath();
|
||||
if (path == null) {
|
||||
throw new DataException(String.format("Null path when building data from transaction %s", sig58));
|
||||
}
|
||||
if (!Files.exists(path)) {
|
||||
throw new DataException(String.format("Path doesn't exist when building data from transaction %s", sig58));
|
||||
}
|
||||
paths.add(path);
|
||||
}
|
||||
}
|
||||
|
||||
private void findLatestSignature() throws DataException {
|
||||
if (this.transactions.size() == 0) {
|
||||
throw new DataException("Unable to find latest signature from empty transaction list");
|
||||
}
|
||||
|
||||
// Find the latest signature
|
||||
ArbitraryTransactionData latestTransaction = this.transactions.get(this.transactions.size() - 1);
|
||||
if (latestTransaction == null) {
|
||||
throw new DataException("Unable to find latest signature from null transaction");
|
||||
}
|
||||
|
||||
this.latestSignature = latestTransaction.getSignature();
|
||||
}
|
||||
|
||||
private void validatePaths() throws DataException {
|
||||
if (this.paths.isEmpty()) {
|
||||
throw new DataException("No paths available from which to build latest state");
|
||||
}
|
||||
}
|
||||
|
||||
private void buildLatestState() throws IOException, DataException {
|
||||
if (this.paths.size() == 1) {
|
||||
// No patching needed
|
||||
this.finalPath = this.paths.get(0);
|
||||
return;
|
||||
}
|
||||
|
||||
Path pathBefore = this.paths.get(0);
|
||||
boolean validateAllLayers = Settings.getInstance().shouldValidateAllDataLayers();
|
||||
|
||||
// Loop from the second path onwards
|
||||
for (int i=1; i<paths.size(); i++) {
|
||||
String identifierPrefix = this.identifier != null ? String.format("[%s]", this.identifier) : "";
|
||||
LOGGER.debug(String.format("[%s][%s]%s Applying layer %d...", this.service, this.name, identifierPrefix, i));
|
||||
|
||||
// Create an instance of ArbitraryDataCombiner
|
||||
Path pathAfter = this.paths.get(i);
|
||||
byte[] signatureBefore = this.transactions.get(i-1).getSignature();
|
||||
ArbitraryDataCombiner combiner = new ArbitraryDataCombiner(pathBefore, pathAfter, signatureBefore);
|
||||
|
||||
// We only want to validate this layer's hash if it's the final layer, or if the settings
|
||||
// indicate that we should validate interim layers too
|
||||
boolean isFinalLayer = (i == paths.size() - 1);
|
||||
combiner.setShouldValidateHashes(isFinalLayer || validateAllLayers);
|
||||
|
||||
// Now combine this layer with the last, and set the output path to the "before" path for the next cycle
|
||||
combiner.combine();
|
||||
combiner.cleanup();
|
||||
pathBefore = combiner.getFinalPath();
|
||||
}
|
||||
this.finalPath = pathBefore;
|
||||
}
|
||||
|
||||
private void cacheLatestSignature() throws IOException, DataException {
|
||||
byte[] latestTransactionSignature = this.transactions.get(this.transactions.size()-1).getSignature();
|
||||
if (latestTransactionSignature == null) {
|
||||
throw new DataException("Missing latest transaction signature");
|
||||
}
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
throw new DataException("NTP time not synced yet");
|
||||
}
|
||||
|
||||
ArbitraryDataMetadataCache cache = new ArbitraryDataMetadataCache(this.finalPath);
|
||||
cache.setSignature(latestTransactionSignature);
|
||||
cache.setTimestamp(NTP.getTime());
|
||||
cache.write();
|
||||
}
|
||||
|
||||
private String identifierString() {
|
||||
return identifier != null ? identifier : "";
|
||||
}
|
||||
|
||||
public Path getFinalPath() {
|
||||
return this.finalPath;
|
||||
}
|
||||
|
||||
public byte[] getLatestSignature() {
|
||||
return this.latestSignature;
|
||||
}
|
||||
|
||||
public int getLayerCount() {
|
||||
return this.layerCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the below setter to ensure that we only read existing
|
||||
* data without requesting any missing files.
|
||||
*
|
||||
* @param canRequestMissingFiles
|
||||
*/
|
||||
public void setCanRequestMissingFiles(boolean canRequestMissingFiles) {
|
||||
this.canRequestMissingFiles = canRequestMissingFiles;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,162 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataCache;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataManager;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ArbitraryDataCache {
|
||||
|
||||
private final boolean overwrite;
|
||||
private final Path filePath;
|
||||
private final String resourceId;
|
||||
private final ResourceIdType resourceIdType;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
|
||||
public ArbitraryDataCache(Path filePath, boolean overwrite, String resourceId,
|
||||
ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
this.filePath = filePath;
|
||||
this.overwrite = overwrite;
|
||||
this.resourceId = resourceId;
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
this.identifier = identifier;
|
||||
}
|
||||
|
||||
public boolean isCachedDataAvailable() {
|
||||
return !this.shouldInvalidate();
|
||||
}
|
||||
|
||||
public boolean shouldInvalidate() {
|
||||
try {
|
||||
// If the user has requested an overwrite, always invalidate the cache
|
||||
if (this.overwrite) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Overwrite is false, but we still need to invalidate if no files exist
|
||||
if (!Files.exists(this.filePath) || FilesystemUtils.isDirectoryEmpty(this.filePath)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// We might want to overwrite anyway, if an updated version is available
|
||||
if (this.shouldInvalidateResource()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
// Something went wrong, so invalidate the cache just in case
|
||||
return true;
|
||||
}
|
||||
|
||||
// No need to invalidate the cache
|
||||
// Remember that it's up to date, so that we won't check again for a while
|
||||
ArbitraryDataManager.getInstance().addResourceToCache(this.getArbitraryDataResource());
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean shouldInvalidateResource() {
|
||||
switch (this.resourceIdType) {
|
||||
|
||||
case NAME:
|
||||
return this.shouldInvalidateName();
|
||||
|
||||
default:
|
||||
// Other resource ID types remain constant, so no need to invalidate
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean shouldInvalidateName() {
|
||||
// To avoid spamming the database too often, we shouldn't check sigs or invalidate when rate limited
|
||||
if (this.rateLimitInEffect()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the state's sig doesn't match the latest transaction's sig, we need to invalidate
|
||||
// This means that an updated layer is available
|
||||
return this.shouldInvalidateDueToSignatureMismatch();
|
||||
}
|
||||
|
||||
/**
|
||||
* rateLimitInEffect()
|
||||
*
|
||||
* When loading a website, we need to check the cache for every static asset loaded by the page.
|
||||
* This would involve asking the database for the latest transaction every time.
|
||||
* To reduce database load and page load times, we maintain an in-memory list to "rate limit" lookups.
|
||||
* Once a resource ID is in this in-memory list, we will avoid cache invalidations until it
|
||||
* has been present in the list for a certain amount of time.
|
||||
* Items are automatically removed from the list when a new arbitrary transaction arrives, so this
|
||||
* should not prevent updates from taking effect immediately.
|
||||
*
|
||||
* @return whether to avoid lookups for this resource due to the in-memory cache
|
||||
*/
|
||||
private boolean rateLimitInEffect() {
|
||||
return ArbitraryDataManager.getInstance().isResourceCached(this.getArbitraryDataResource());
|
||||
}
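The javadoc above describes an in-memory "recently checked" list that avoids hitting the database for every static asset a page loads, while still letting new arbitrary transactions invalidate entries immediately. A minimal standalone sketch of that idea using a timestamp map; the time-to-live value and class shape are assumptions, the real implementation lives in `ArbitraryDataManager` and may differ.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class RecentlyCheckedCache {
    private static final long TTL_MS = 60 * 1000L; // assumed time-to-live; the real value may differ

    private final Map<String, Long> lastChecked = new ConcurrentHashMap<>();

    /** Record that a resource was just checked against the database. */
    public void markChecked(String resourceKey) {
        lastChecked.put(resourceKey, System.currentTimeMillis());
    }

    /** True if we checked this resource recently enough to skip another database lookup. */
    public boolean rateLimitInEffect(String resourceKey) {
        Long timestamp = lastChecked.get(resourceKey);
        return timestamp != null && System.currentTimeMillis() - timestamp < TTL_MS;
    }

    /** Called when a new arbitrary transaction arrives, so updates take effect immediately. */
    public void invalidate(String resourceKey) {
        lastChecked.remove(resourceKey);
    }
}
```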
|
||||
|
||||
private boolean shouldInvalidateDueToSignatureMismatch() {
|
||||
|
||||
// Fetch the latest transaction for this name and service
|
||||
byte[] latestTransactionSig = this.fetchLatestTransactionSignature();
|
||||
|
||||
// Now fetch the transaction signature stored in the cache metadata
|
||||
byte[] cachedSig = this.fetchCachedSignature();
|
||||
|
||||
// If either are null, we should invalidate
|
||||
if (latestTransactionSig == null || cachedSig == null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if they match
|
||||
return !Arrays.equals(latestTransactionSig, cachedSig);
|
||||
}
|
||||
|
||||
private byte[] fetchLatestTransactionSignature() {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Find latest transaction for name and service, with any method
|
||||
ArbitraryTransactionData latestTransaction = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.resourceId, this.service, null, this.identifier);
|
||||
|
||||
if (latestTransaction != null) {
|
||||
return latestTransaction.getSignature();
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private byte[] fetchCachedSignature() {
|
||||
try {
|
||||
// Fetch the transaction signature stored in the cache metadata
|
||||
ArbitraryDataMetadataCache cache = new ArbitraryDataMetadataCache(this.filePath);
|
||||
cache.read();
|
||||
return cache.getSignature();
|
||||
|
||||
} catch (IOException | DataException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private ArbitraryDataResource getArbitraryDataResource() {
|
||||
// TODO: pass an ArbitraryDataResource into the constructor, rather than individual components
|
||||
return new ArbitraryDataResource(this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InvalidObjectException;
|
||||
import java.nio.file.DirectoryNotEmptyException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ArbitraryDataCombiner {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataCombiner.class);
|
||||
|
||||
private final Path pathBefore;
|
||||
private final Path pathAfter;
|
||||
private final byte[] signatureBefore;
|
||||
private boolean shouldValidateHashes;
|
||||
private Path finalPath;
|
||||
private ArbitraryDataMetadataPatch metadata;
|
||||
|
||||
public ArbitraryDataCombiner(Path pathBefore, Path pathAfter, byte[] signatureBefore) {
|
||||
this.pathBefore = pathBefore;
|
||||
this.pathAfter = pathAfter;
|
||||
this.signatureBefore = signatureBefore;
|
||||
}
|
||||
|
||||
public void combine() throws IOException, DataException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.readMetadata();
|
||||
this.validatePreviousSignature();
|
||||
this.validatePreviousHash();
|
||||
this.process();
|
||||
this.validateCurrentHash();
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
public void cleanup() {
|
||||
this.cleanupPath(this.pathBefore);
|
||||
this.cleanupPath(this.pathAfter);
|
||||
}
|
||||
|
||||
private void cleanupPath(Path path) {
|
||||
// Delete pathBefore, if it exists in our data/temp directory
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(path)) {
|
||||
File directory = new File(path.toString());
|
||||
try {
|
||||
FileUtils.deleteDirectory(directory);
|
||||
} catch (IOException e) {
|
||||
// This will eventually be cleaned up by a maintenance process, so log the error and continue
|
||||
LOGGER.debug("Unable to cleanup directory {}", directory.toString());
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the parent directory of pathBefore if it is empty (and exists in our data/temp directory)
|
||||
Path parentDirectory = path.getParent();
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(parentDirectory)) {
|
||||
try {
|
||||
Files.deleteIfExists(parentDirectory);
|
||||
} catch (DirectoryNotEmptyException e) {
|
||||
// No need to log anything
|
||||
} catch (IOException e) {
|
||||
// This will eventually be cleaned up by a maintenance process, so log the error and continue
|
||||
LOGGER.debug("Unable to cleanup parent directory {}", parentDirectory.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
if (this.pathBefore == null || this.pathAfter == null) {
|
||||
throw new DataException("No paths available to build patch");
|
||||
}
|
||||
if (!Files.exists(this.pathBefore) || !Files.exists(this.pathAfter)) {
|
||||
throw new DataException("Unable to create patch because at least one path doesn't exist");
|
||||
}
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
|
||||
}
|
||||
|
||||
private void readMetadata() throws IOException, DataException {
|
||||
this.metadata = new ArbitraryDataMetadataPatch(this.pathAfter);
|
||||
this.metadata.read();
|
||||
}
|
||||
|
||||
private void validatePreviousSignature() throws DataException {
|
||||
if (this.signatureBefore == null) {
|
||||
throw new DataException("No previous signature passed to the combiner");
|
||||
}
|
||||
|
||||
byte[] previousSignature = this.metadata.getPreviousSignature();
|
||||
if (previousSignature == null) {
|
||||
throw new DataException("Unable to extract previous signature from patch metadata");
|
||||
}
|
||||
|
||||
// Compare the signatures
|
||||
if (!Arrays.equals(previousSignature, this.signatureBefore)) {
|
||||
throw new DataException("Previous signatures do not match - transactions out of order?");
|
||||
}
|
||||
}
|
||||
|
||||
private void validatePreviousHash() throws IOException, DataException {
|
||||
if (!Settings.getInstance().shouldValidateAllDataLayers()) {
|
||||
return;
|
||||
}
|
||||
|
||||
byte[] previousHash = this.metadata.getPreviousHash();
|
||||
if (previousHash == null) {
|
||||
throw new DataException("Unable to extract previous hash from patch metadata");
|
||||
}
|
||||
|
||||
ArbitraryDataDigest digest = new ArbitraryDataDigest(this.pathBefore);
|
||||
digest.compute();
|
||||
boolean valid = digest.isHashValid(previousHash);
|
||||
if (!valid) {
|
||||
String previousHash58 = Base58.encode(previousHash);
|
||||
throw new InvalidObjectException(String.format("Previous state hash mismatch. " +
|
||||
"Patch prevHash: %s, actual: %s", previousHash58, digest.getHash58()));
|
||||
}
|
||||
}
|
||||
|
||||
private void process() throws IOException, DataException {
|
||||
ArbitraryDataMerge merge = new ArbitraryDataMerge(this.pathBefore, this.pathAfter);
|
||||
merge.compute();
|
||||
this.finalPath = merge.getMergePath();
|
||||
}
|
||||
|
||||
private void validateCurrentHash() throws IOException, DataException {
|
||||
if (!this.shouldValidateHashes) {
|
||||
return;
|
||||
}
|
||||
|
||||
byte[] currentHash = this.metadata.getCurrentHash();
|
||||
if (currentHash == null) {
|
||||
throw new DataException("Unable to extract current hash from patch metadata");
|
||||
}
|
||||
|
||||
ArbitraryDataDigest digest = new ArbitraryDataDigest(this.finalPath);
|
||||
digest.compute();
|
||||
boolean valid = digest.isHashValid(currentHash);
|
||||
if (!valid) {
|
||||
String currentHash58 = Base58.encode(currentHash);
|
||||
throw new InvalidObjectException(String.format("Current state hash mismatch. " +
|
||||
"Patch curHash: %s, actual: %s", currentHash58, digest.getHash58()));
|
||||
}
|
||||
}
|
||||
|
||||
public void setShouldValidateHashes(boolean shouldValidateHashes) {
|
||||
this.shouldValidateHashes = shouldValidateHashes;
|
||||
}
|
||||
|
||||
public Path getFinalPath() {
|
||||
return this.finalPath;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,141 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.UUID;
|
||||
|
||||
public class ArbitraryDataCreatePatch {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataCreatePatch.class);
|
||||
|
||||
private final Path pathBefore;
|
||||
private Path pathAfter;
|
||||
private final byte[] previousSignature;
|
||||
|
||||
private Path finalPath;
|
||||
private int totalFileCount;
|
||||
private int fileDifferencesCount;
|
||||
private ArbitraryDataMetadataPatch metadata;
|
||||
|
||||
private Path workingPath;
|
||||
private String identifier;
|
||||
|
||||
public ArbitraryDataCreatePatch(Path pathBefore, Path pathAfter, byte[] previousSignature) {
|
||||
this.pathBefore = pathBefore;
|
||||
this.pathAfter = pathAfter;
|
||||
this.previousSignature = previousSignature;
|
||||
}
|
||||
|
||||
public void create() throws DataException, IOException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.copyFiles();
|
||||
this.process();
|
||||
|
||||
} catch (Exception e) {
|
||||
this.cleanupOnFailure();
|
||||
throw e;
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
if (this.pathBefore == null || this.pathAfter == null) {
|
||||
throw new DataException("No paths available to build patch");
|
||||
}
|
||||
if (!Files.exists(this.pathBefore) || !Files.exists(this.pathAfter)) {
|
||||
throw new DataException("Unable to create patch because at least one path doesn't exist");
|
||||
}
|
||||
|
||||
this.createRandomIdentifier();
|
||||
this.createWorkingDirectory();
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
this.cleanupWorkingPath();
|
||||
}
|
||||
|
||||
private void cleanupWorkingPath() {
|
||||
try {
|
||||
FilesystemUtils.safeDeleteDirectory(this.workingPath, true);
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Unable to cleanup working directory");
|
||||
}
|
||||
}
|
||||
|
||||
private void cleanupOnFailure() {
|
||||
try {
|
||||
FilesystemUtils.safeDeleteDirectory(this.finalPath, true);
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Unable to cleanup diff directory on failure");
|
||||
}
|
||||
}
|
||||
|
||||
private void createRandomIdentifier() {
|
||||
this.identifier = UUID.randomUUID().toString();
|
||||
}
|
||||
|
||||
private void createWorkingDirectory() throws DataException {
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
Path tempDir = Paths.get(baseDir, "patch", this.identifier);
|
||||
try {
|
||||
Files.createDirectories(tempDir);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create temp directory");
|
||||
}
|
||||
this.workingPath = tempDir;
|
||||
}
|
||||
|
||||
private void copyFiles() throws IOException {
|
||||
// When dealing with single files, we need to copy them to a container directory
|
||||
// in order for the structure to align with the previous revision and therefore
|
||||
// make comparisons possible.
|
||||
|
||||
if (this.pathAfter.toFile().isFile()) {
|
||||
// Create a "data" directory within the working directory
|
||||
Path workingDataPath = Paths.get(this.workingPath.toString(), "data");
|
||||
Files.createDirectories(workingDataPath);
|
||||
// Copy to temp directory
|
||||
// Filename is currently hardcoded to "data"
|
||||
String filename = "data"; //this.pathAfter.getFileName().toString();
|
||||
Files.copy(this.pathAfter, Paths.get(workingDataPath.toString(), filename));
|
||||
// Update pathAfter to point to the new path
|
||||
this.pathAfter = workingDataPath;
|
||||
}
|
||||
}
|
||||
|
||||
private void process() throws IOException, DataException {
|
||||
|
||||
ArbitraryDataDiff diff = new ArbitraryDataDiff(this.pathBefore, this.pathAfter, this.previousSignature);
|
||||
this.finalPath = diff.getDiffPath();
|
||||
diff.compute();
|
||||
|
||||
this.totalFileCount = diff.getTotalFileCount();
|
||||
this.metadata = diff.getMetadata();
|
||||
}
|
||||
|
||||
public Path getFinalPath() {
|
||||
return this.finalPath;
|
||||
}
|
||||
|
||||
public int getTotalFileCount() {
|
||||
return this.totalFileCount;
|
||||
}
|
||||
|
||||
public ArbitraryDataMetadataPatch getMetadata() {
|
||||
return this.metadata;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,383 +0,0 @@
package org.qortal.arbitrary;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONObject;
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
import org.qortal.arbitrary.patch.UnifiedDiffPatch;
import org.qortal.crypto.Crypto;
import org.qortal.repository.DataException;
import org.qortal.settings.Settings;

import java.io.*;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.*;


public class ArbitraryDataDiff {

/** Only create a patch if both the before and after file sizes are within defined limit **/
private static final long MAX_DIFF_FILE_SIZE = 100 * 1024L; // 100kiB


public enum DiffType {
COMPLETE_FILE,
UNIFIED_DIFF
}

public static class ModifiedPath {
private Path path;
private DiffType diffType;

public ModifiedPath(Path path, DiffType diffType) {
this.path = path;
this.diffType = diffType;
}

public ModifiedPath(JSONObject jsonObject) {
String pathString = jsonObject.getString("path");
if (pathString != null) {
this.path = Paths.get(pathString);
}

String diffTypeString = jsonObject.getString("type");
if (diffTypeString != null) {
this.diffType = DiffType.valueOf(diffTypeString);
}
}

public Path getPath() {
return this.path;
}

public DiffType getDiffType() {
return this.diffType;
}

public String toString() {
return this.path.toString();
}
}

private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataDiff.class);

private final Path pathBefore;
private final Path pathAfter;
private final byte[] previousSignature;
private byte[] previousHash;
private byte[] currentHash;
private Path diffPath;
private String identifier;

private final List<Path> addedPaths;
private final List<ModifiedPath> modifiedPaths;
private final List<Path> removedPaths;

private int totalFileCount;
private ArbitraryDataMetadataPatch metadata;

public ArbitraryDataDiff(Path pathBefore, Path pathAfter, byte[] previousSignature) throws DataException {
this.pathBefore = pathBefore;
this.pathAfter = pathAfter;
this.previousSignature = previousSignature;

this.addedPaths = new ArrayList<>();
this.modifiedPaths = new ArrayList<>();
this.removedPaths = new ArrayList<>();

this.createRandomIdentifier();
this.createOutputDirectory();
}

public void compute() throws IOException, DataException {
try {
this.preExecute();
this.hashPreviousState();
this.findAddedOrModifiedFiles();
this.findRemovedFiles();
this.validate();
this.hashCurrentState();
this.writeMetadata();

} finally {
this.postExecute();
}
}

private void preExecute() {
LOGGER.debug("Generating diff...");
}

private void postExecute() {

}

private void createRandomIdentifier() {
this.identifier = UUID.randomUUID().toString();
}

private void createOutputDirectory() throws DataException {
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
String baseDir = Settings.getInstance().getTempDataPath();
Path tempDir = Paths.get(baseDir, "diff", this.identifier);
try {
Files.createDirectories(tempDir);
} catch (IOException e) {
throw new DataException("Unable to create temp directory");
}
this.diffPath = tempDir;
}

private void hashPreviousState() throws IOException, DataException {
ArbitraryDataDigest digest = new ArbitraryDataDigest(this.pathBefore);
digest.compute();
this.previousHash = digest.getHash();
}

private void findAddedOrModifiedFiles() throws IOException {
try {
final Path pathBeforeAbsolute = this.pathBefore.toAbsolutePath();
final Path pathAfterAbsolute = this.pathAfter.toAbsolutePath();
final Path diffPathAbsolute = this.diffPath.toAbsolutePath();
final ArbitraryDataDiff diff = this;

// Check for additions or modifications
Files.walkFileTree(this.pathAfter, new FileVisitor<>() {

@Override
public FileVisitResult preVisitDirectory(Path after, BasicFileAttributes attrs) {
return FileVisitResult.CONTINUE;
}

@Override
public FileVisitResult visitFile(Path afterPathAbsolute, BasicFileAttributes attrs) throws IOException {
Path afterPathRelative = pathAfterAbsolute.relativize(afterPathAbsolute.toAbsolutePath());
Path beforePathAbsolute = pathBeforeAbsolute.resolve(afterPathRelative);

if (afterPathRelative.startsWith(".qortal")) {
// Ignore the .qortal metadata folder
return FileVisitResult.CONTINUE;
}

boolean wasAdded = false;
boolean wasModified = false;

if (!Files.exists(beforePathAbsolute)) {
LOGGER.trace("File was added: {}", afterPathRelative.toString());
diff.addedPaths.add(afterPathRelative);
wasAdded = true;
}
else if (Files.size(afterPathAbsolute) != Files.size(beforePathAbsolute)) {
// Check file size first because it's quicker
LOGGER.trace("File size was modified: {}", afterPathRelative.toString());
wasModified = true;
}
else if (!Arrays.equals(ArbitraryDataDiff.digestFromPath(afterPathAbsolute), ArbitraryDataDiff.digestFromPath(beforePathAbsolute))) {
// Check hashes as a last resort
LOGGER.trace("File contents were modified: {}", afterPathRelative.toString());
wasModified = true;
}

if (wasAdded) {
diff.copyFilePathToBaseDir(afterPathAbsolute, diffPathAbsolute, afterPathRelative);
}
if (wasModified) {
try {
diff.pathModified(beforePathAbsolute, afterPathAbsolute, afterPathRelative, diffPathAbsolute);
} catch (DataException e) {
// We can only throw IOExceptions because we are overriding FileVisitor.visitFile()
throw new IOException(e);
}
}

// Keep a tally of the total number of files to help with decision making
diff.totalFileCount++;

return FileVisitResult.CONTINUE;
}

@Override
public FileVisitResult visitFileFailed(Path file, IOException e){
LOGGER.info("File visit failed: {}, error: {}", file.toString(), e.getMessage());
// TODO: throw exception?
return FileVisitResult.TERMINATE;
}

@Override
public FileVisitResult postVisitDirectory(Path dir, IOException e) {
return FileVisitResult.CONTINUE;
}

});
} catch (IOException e) {
LOGGER.info("IOException when walking through file tree: {}", e.getMessage());
throw(e);
}
}

private void findRemovedFiles() throws IOException {
try {
final Path pathBeforeAbsolute = this.pathBefore.toAbsolutePath();
final Path pathAfterAbsolute = this.pathAfter.toAbsolutePath();
final ArbitraryDataDiff diff = this;

// Check for removals
Files.walkFileTree(this.pathBefore, new FileVisitor<>() {

@Override
public FileVisitResult preVisitDirectory(Path before, BasicFileAttributes attrs) {
Path directoryPathBefore = pathBeforeAbsolute.relativize(before.toAbsolutePath());
Path directoryPathAfter = pathAfterAbsolute.resolve(directoryPathBefore);

if (directoryPathBefore.startsWith(".qortal")) {
// Ignore the .qortal metadata folder
return FileVisitResult.CONTINUE;
}

if (!Files.exists(directoryPathAfter)) {
LOGGER.trace("Directory was removed: {}", directoryPathAfter.toString());
diff.removedPaths.add(directoryPathBefore);
// TODO: we might need to mark directories differently to files
}

return FileVisitResult.CONTINUE;
}

@Override
public FileVisitResult visitFile(Path before, BasicFileAttributes attrs) {
Path filePathBefore = pathBeforeAbsolute.relativize(before.toAbsolutePath());
Path filePathAfter = pathAfterAbsolute.resolve(filePathBefore);

if (filePathBefore.startsWith(".qortal")) {
// Ignore the .qortal metadata folder
return FileVisitResult.CONTINUE;
}

if (!Files.exists(filePathAfter)) {
LOGGER.trace("File was removed: {}", filePathBefore.toString());
diff.removedPaths.add(filePathBefore);
}

// Keep a tally of the total number of files to help with decision making
diff.totalFileCount++;

return FileVisitResult.CONTINUE;
}

@Override
public FileVisitResult visitFileFailed(Path file, IOException e){
LOGGER.info("File visit failed: {}, error: {}", file.toString(), e.getMessage());
// TODO: throw exception?
return FileVisitResult.TERMINATE;
}

@Override
public FileVisitResult postVisitDirectory(Path dir, IOException e) {
return FileVisitResult.CONTINUE;
}

});
} catch (IOException e) {
throw new IOException(String.format("IOException when walking through file tree: %s", e.getMessage()));
}
}

private void validate() throws DataException {
if (this.addedPaths.isEmpty() && this.modifiedPaths.isEmpty() && this.removedPaths.isEmpty()) {
throw new DataException("Current state matches previous state. Nothing to do.");
}
}

private void hashCurrentState() throws IOException, DataException {
ArbitraryDataDigest digest = new ArbitraryDataDigest(this.pathAfter);
digest.compute();
this.currentHash = digest.getHash();
}

private void writeMetadata() throws IOException, DataException {
ArbitraryDataMetadataPatch metadata = new ArbitraryDataMetadataPatch(this.diffPath);
metadata.setAddedPaths(this.addedPaths);
metadata.setModifiedPaths(this.modifiedPaths);
metadata.setRemovedPaths(this.removedPaths);
metadata.setPreviousSignature(this.previousSignature);
metadata.setPreviousHash(this.previousHash);
metadata.setCurrentHash(this.currentHash);
metadata.write();
this.metadata = metadata;
}


private void pathModified(Path beforePathAbsolute, Path afterPathAbsolute, Path afterPathRelative,
Path destinationBasePathAbsolute) throws IOException, DataException {

Path destination = Paths.get(destinationBasePathAbsolute.toString(), afterPathRelative.toString());
long beforeSize = Files.size(beforePathAbsolute);
long afterSize = Files.size(afterPathAbsolute);
DiffType diffType;

if (beforeSize > MAX_DIFF_FILE_SIZE || afterSize > MAX_DIFF_FILE_SIZE) {
// Files are large, so don't attempt a diff
this.copyFilePathToBaseDir(afterPathAbsolute, destinationBasePathAbsolute, afterPathRelative);
diffType = DiffType.COMPLETE_FILE;
}
else {
// Attempt to create patch using java-diff-utils
UnifiedDiffPatch unifiedDiffPatch = new UnifiedDiffPatch(beforePathAbsolute, afterPathAbsolute, destination);
unifiedDiffPatch.create();
if (unifiedDiffPatch.isValid()) {
diffType = DiffType.UNIFIED_DIFF;
}
else {
// Diff failed validation, so copy the whole file instead
this.copyFilePathToBaseDir(afterPathAbsolute, destinationBasePathAbsolute, afterPathRelative);
diffType = DiffType.COMPLETE_FILE;
}
}

ModifiedPath modifiedPath = new ModifiedPath(afterPathRelative, diffType);
this.modifiedPaths.add(modifiedPath);
}

private void copyFilePathToBaseDir(Path source, Path base, Path relativePath) throws IOException {
if (!Files.exists(source)) {
throw new IOException(String.format("File not found: %s", source.toString()));
}

// Ensure parent folders exist in the destination
Path dest = Paths.get(base.toString(), relativePath.toString());
File file = new File(dest.toString());
File parent = file.getParentFile();
if (parent != null) {
parent.mkdirs();
}

LOGGER.trace("Copying {} to {}", source, dest);
Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);
}


public Path getDiffPath() {
return this.diffPath;
}

public int getTotalFileCount() {
return this.totalFileCount;
}

public ArbitraryDataMetadataPatch getMetadata() {
return this.metadata;
}


// Utils

private static byte[] digestFromPath(Path path) {
try {
return Crypto.digest(path.toFile());
} catch (IOException e) {
return null;
}
}

}

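For reference, a minimal usage sketch of the ArbitraryDataDiff class removed above (illustrative only, not part of this changeset). It assumes the caller already holds the two directory states and the signature of the transaction that published the earlier state:

static Path buildPatch(Path pathBefore, Path pathAfter, byte[] previousSignature) throws IOException, DataException {
    // Walks both trees, writing added/modified/removed entries plus patch metadata to a temp directory
    ArbitraryDataDiff diff = new ArbitraryDataDiff(pathBefore, pathAfter, previousSignature);
    diff.compute();
    return diff.getDiffPath(); // temporary directory containing the generated patch
}
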
@@ -1,73 +0,0 @@
package org.qortal.arbitrary;

import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ArbitraryDataDigest {

private final Path path;
private byte[] hash;

public ArbitraryDataDigest(Path path) {
this.path = path;
}

public void compute() throws IOException, DataException {
List<Path> allPaths = Files.walk(path).filter(Files::isRegularFile).sorted().collect(Collectors.toList());
Path basePathAbsolute = this.path.toAbsolutePath();

MessageDigest sha256;
try {
sha256 = MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
throw new DataException("SHA-256 hashing algorithm unavailable");
}

for (Path path : allPaths) {
// We need to work with paths relative to the base path, to ensure the same hash
// is generated on different systems
Path relativePath = basePathAbsolute.relativize(path.toAbsolutePath());

// Exclude Qortal folder since it can be different each time
// We only care about hashing the actual user data
if (relativePath.startsWith(".qortal/")) {
continue;
}

// Hash path
byte[] filePathBytes = relativePath.toString().getBytes(StandardCharsets.UTF_8);
sha256.update(filePathBytes);

// Hash contents
byte[] fileContent = Files.readAllBytes(path);
sha256.update(fileContent);
}
this.hash = sha256.digest();
}

public boolean isHashValid(byte[] hash) {
return Arrays.equals(hash, this.hash);
}

public byte[] getHash() {
return this.hash;
}

public String getHash58() {
if (this.hash == null) {
return null;
}
return Base58.encode(this.hash);
}

}

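A minimal sketch of how the digest above can be used to compare two directory states (illustrative only; not part of this changeset):

static boolean statesMatch(Path before, Path after) throws IOException, DataException {
    ArbitraryDataDigest beforeDigest = new ArbitraryDataDigest(before);
    beforeDigest.compute();
    ArbitraryDataDigest afterDigest = new ArbitraryDataDigest(after);
    afterDigest.compute();
    // Digests cover relative paths and file contents, excluding the .qortal folder
    return afterDigest.isHashValid(beforeDigest.getHash());
}
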
@@ -1,790 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static java.util.Arrays.stream;
|
||||
import static java.util.stream.Collectors.toMap;
|
||||
|
||||
|
||||
public class ArbitraryDataFile {
|
||||
|
||||
// Validation results
|
||||
public enum ValidationResult {
|
||||
OK(1),
|
||||
FILE_TOO_LARGE(10),
|
||||
FILE_NOT_FOUND(11);
|
||||
|
||||
public final int value;
|
||||
|
||||
private static final Map<Integer, ArbitraryDataFile.ValidationResult> map = stream(ArbitraryDataFile.ValidationResult.values()).collect(toMap(result -> result.value, result -> result));
|
||||
|
||||
ValidationResult(int value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile.ValidationResult valueOf(int value) {
|
||||
return map.get(value);
|
||||
}
|
||||
}
|
||||
|
||||
// Resource ID types
|
||||
public enum ResourceIdType {
|
||||
SIGNATURE,
|
||||
FILE_HASH,
|
||||
TRANSACTION_DATA,
|
||||
NAME
|
||||
}
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataFile.class);
|
||||
|
||||
public static final long MAX_FILE_SIZE = 500 * 1024 * 1024; // 500MiB
|
||||
protected static final int MAX_CHUNK_SIZE = 1 * 1024 * 1024; // 1MiB
|
||||
public static final int CHUNK_SIZE = 512 * 1024; // 0.5MiB
|
||||
public static int SHORT_DIGEST_LENGTH = 8;
|
||||
|
||||
protected Path filePath;
|
||||
protected String hash58;
|
||||
protected byte[] signature;
|
||||
private ArrayList<ArbitraryDataFileChunk> chunks;
|
||||
private byte[] secret;
|
||||
|
||||
// Metadata
|
||||
private byte[] metadataHash;
|
||||
private ArbitraryDataFile metadataFile;
|
||||
private ArbitraryDataTransactionMetadata metadata;
|
||||
|
||||
|
||||
public ArbitraryDataFile() {
|
||||
}
|
||||
|
||||
public ArbitraryDataFile(String hash58, byte[] signature) throws DataException {
|
||||
this.filePath = ArbitraryDataFile.getOutputFilePath(hash58, signature, false);
|
||||
this.chunks = new ArrayList<>();
|
||||
this.hash58 = hash58;
|
||||
this.signature = signature;
|
||||
}
|
||||
|
||||
public ArbitraryDataFile(byte[] fileContent, byte[] signature) throws DataException {
|
||||
if (fileContent == null) {
|
||||
LOGGER.error("fileContent is null");
|
||||
return;
|
||||
}
|
||||
|
||||
this.hash58 = Base58.encode(Crypto.digest(fileContent));
|
||||
this.signature = signature;
|
||||
LOGGER.trace(String.format("File digest: %s, size: %d bytes", this.hash58, fileContent.length));
|
||||
|
||||
Path outputFilePath = getOutputFilePath(this.hash58, signature, true);
|
||||
File outputFile = outputFilePath.toFile();
|
||||
try (FileOutputStream outputStream = new FileOutputStream(outputFile)) {
|
||||
outputStream.write(fileContent);
|
||||
this.filePath = outputFilePath;
|
||||
} catch (IOException e) {
|
||||
this.delete();
|
||||
throw new DataException(String.format("Unable to write data with hash %s: %s", this.hash58, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromHash58(String hash58, byte[] signature) throws DataException {
|
||||
return new ArbitraryDataFile(hash58, signature);
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromHash(byte[] hash, byte[] signature) throws DataException {
|
||||
if (hash == null) {
|
||||
return null;
|
||||
}
|
||||
return ArbitraryDataFile.fromHash58(Base58.encode(hash), signature);
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromPath(Path path, byte[] signature) {
|
||||
if (path == null) {
|
||||
return null;
|
||||
}
|
||||
File file = path.toFile();
|
||||
if (file.exists()) {
|
||||
try {
|
||||
byte[] digest = Crypto.digest(file);
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
|
||||
|
||||
// Copy file to data directory if needed
|
||||
if (Files.exists(path) && !arbitraryDataFile.isInBaseDirectory(path)) {
|
||||
arbitraryDataFile.copyToDataDirectory(path, signature);
|
||||
}
|
||||
// Or, if it's already in the data directory, we may need to move it
|
||||
else if (!path.equals(arbitraryDataFile.getFilePath())) {
|
||||
// Wrong path, so relocate (but don't cleanup, as the source folder may still be needed by the caller)
|
||||
Path dest = arbitraryDataFile.getFilePath();
|
||||
FilesystemUtils.moveFile(path, dest, false);
|
||||
}
|
||||
return arbitraryDataFile;
|
||||
|
||||
} catch (IOException | DataException e) {
|
||||
LOGGER.error("Couldn't compute digest for ArbitraryDataFile");
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromFile(File file, byte[] signature) {
|
||||
return ArbitraryDataFile.fromPath(Paths.get(file.getPath()), signature);
|
||||
}
|
||||
|
||||
private Path copyToDataDirectory(Path sourcePath, byte[] signature) throws DataException {
|
||||
if (this.hash58 == null || this.filePath == null) {
|
||||
return null;
|
||||
}
|
||||
Path outputFilePath = getOutputFilePath(this.hash58, signature, true);
|
||||
sourcePath = sourcePath.toAbsolutePath();
|
||||
Path destPath = outputFilePath.toAbsolutePath();
|
||||
try {
|
||||
return Files.copy(sourcePath, destPath, StandardCopyOption.REPLACE_EXISTING);
|
||||
} catch (IOException e) {
|
||||
throw new DataException(String.format("Unable to copy file %s to data directory %s", sourcePath, destPath));
|
||||
}
|
||||
}
|
||||
|
||||
public static Path getOutputFilePath(String hash58, byte[] signature, boolean createDirectories) throws DataException {
|
||||
Path directory;
|
||||
|
||||
if (hash58 == null) {
|
||||
return null;
|
||||
}
|
||||
if (signature != null) {
|
||||
// Key by signature
|
||||
String signature58 = Base58.encode(signature);
|
||||
String sig58First2Chars = signature58.substring(0, 2).toLowerCase();
|
||||
String sig58Next2Chars = signature58.substring(2, 4).toLowerCase();
|
||||
directory = Paths.get(Settings.getInstance().getDataPath(), sig58First2Chars, sig58Next2Chars, signature58);
|
||||
}
|
||||
else {
|
||||
// Put files without signatures in a "_misc" directory, and the files will be relocated later
|
||||
String hash58First2Chars = hash58.substring(0, 2).toLowerCase();
|
||||
String hash58Next2Chars = hash58.substring(2, 4).toLowerCase();
|
||||
directory = Paths.get(Settings.getInstance().getDataPath(), "_misc", hash58First2Chars, hash58Next2Chars);
|
||||
}
|
||||
|
||||
if (createDirectories) {
|
||||
try {
|
||||
Files.createDirectories(directory);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create data subdirectory");
|
||||
}
|
||||
}
|
||||
return Paths.get(directory.toString(), hash58);
|
||||
}
|
||||
|
||||
public ValidationResult isValid() {
|
||||
try {
|
||||
// Ensure the file exists on disk
|
||||
if (!Files.exists(this.filePath)) {
|
||||
LOGGER.error("File doesn't exist at path {}", this.filePath);
|
||||
return ValidationResult.FILE_NOT_FOUND;
|
||||
}
|
||||
|
||||
// Validate the file size
|
||||
long fileSize = Files.size(this.filePath);
|
||||
if (fileSize > MAX_FILE_SIZE) {
|
||||
LOGGER.error(String.format("ArbitraryDataFile is too large: %d bytes (max size: %d bytes)", fileSize, MAX_FILE_SIZE));
|
||||
return ArbitraryDataFile.ValidationResult.FILE_TOO_LARGE;
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
return ValidationResult.FILE_NOT_FOUND;
|
||||
}
|
||||
|
||||
return ValidationResult.OK;
|
||||
}
|
||||
|
||||
public void validateFileSize(long expectedSize) throws DataException {
|
||||
// Verify that we can determine the file's size
|
||||
long fileSize = 0;
|
||||
try {
|
||||
fileSize = Files.size(this.getFilePath());
|
||||
} catch (IOException e) {
|
||||
throw new DataException(String.format("Couldn't get file size for transaction %s", Base58.encode(signature)));
|
||||
}
|
||||
|
||||
// Ensure the file's size matches the size reported by the transaction
|
||||
if (fileSize != expectedSize) {
|
||||
throw new DataException(String.format("File size mismatch for transaction %s", Base58.encode(signature)));
|
||||
}
|
||||
}
|
||||
|
||||
private void addChunk(ArbitraryDataFileChunk chunk) {
|
||||
this.chunks.add(chunk);
|
||||
}
|
||||
|
||||
private void addChunkHashes(List<byte[]> chunkHashes) throws DataException {
|
||||
if (chunkHashes == null || chunkHashes.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
for (byte[] chunkHash : chunkHashes) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
this.addChunk(chunk);
|
||||
}
|
||||
}
|
||||
|
||||
public List<byte[]> getChunkHashes() {
|
||||
List<byte[]> hashes = new ArrayList<>();
|
||||
if (this.chunks == null || this.chunks.isEmpty()) {
|
||||
return hashes;
|
||||
}
|
||||
|
||||
for (ArbitraryDataFileChunk chunkData : this.chunks) {
|
||||
hashes.add(chunkData.getHash());
|
||||
}
|
||||
|
||||
return hashes;
|
||||
}
|
||||
|
||||
public int split(int chunkSize) throws DataException {
|
||||
try {
|
||||
|
||||
File file = this.getFile();
|
||||
byte[] buffer = new byte[chunkSize];
|
||||
this.chunks = new ArrayList<>();
|
||||
|
||||
if (file != null) {
|
||||
try (FileInputStream fileInputStream = new FileInputStream(file);
|
||||
BufferedInputStream bis = new BufferedInputStream(fileInputStream)) {
|
||||
|
||||
int numberOfBytes;
|
||||
while ((numberOfBytes = bis.read(buffer)) > 0) {
|
||||
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
|
||||
out.write(buffer, 0, numberOfBytes);
|
||||
out.flush();
|
||||
|
||||
ArbitraryDataFileChunk chunk = new ArbitraryDataFileChunk(out.toByteArray(), this.signature);
|
||||
ValidationResult validationResult = chunk.isValid();
|
||||
if (validationResult == ValidationResult.OK) {
|
||||
this.chunks.add(chunk);
|
||||
} else {
|
||||
throw new DataException(String.format("Chunk %s is invalid", chunk));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new DataException("Unable to split file into chunks");
|
||||
}
|
||||
|
||||
return this.chunks.size();
|
||||
}
|
||||
|
||||
public boolean join() {
|
||||
// Ensure we have chunks
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
|
||||
// Create temporary path for joined file
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
Path tempDir = Paths.get(baseDir, "join");
|
||||
try {
|
||||
Files.createDirectories(tempDir);
|
||||
} catch (IOException e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Join the chunks
|
||||
Path outputPath = Paths.get(tempDir.toString(), this.chunks.get(0).digest58());
|
||||
File outputFile = new File(outputPath.toString());
|
||||
try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
File sourceFile = chunk.filePath.toFile();
|
||||
BufferedInputStream in = new BufferedInputStream(new FileInputStream(sourceFile));
|
||||
byte[] buffer = new byte[2048];
|
||||
int inSize;
|
||||
while ((inSize = in.read(buffer)) != -1) {
|
||||
out.write(buffer, 0, inSize);
|
||||
}
|
||||
in.close();
|
||||
}
|
||||
out.close();
|
||||
|
||||
// Copy temporary file to data directory
|
||||
this.filePath = this.copyToDataDirectory(outputPath, this.signature);
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(outputPath)) {
|
||||
Files.delete(outputPath);
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (FileNotFoundException e) {
|
||||
return false;
|
||||
} catch (IOException | DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean delete() {
|
||||
// Delete the complete file
|
||||
// ... but only if it's inside the Qortal data or temp directory
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
if (Files.exists(this.filePath)) {
|
||||
try {
|
||||
Files.delete(this.filePath);
|
||||
this.cleanupFilesystem();
|
||||
LOGGER.debug("Deleted file {}", this.filePath);
|
||||
return true;
|
||||
} catch (IOException e) {
|
||||
LOGGER.warn("Couldn't delete file at path {}", this.filePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean delete(int attempts) {
|
||||
// Keep trying to delete the data until it is deleted, or we reach 10 attempts
|
||||
for (int i=0; i<attempts; i++) {
|
||||
if (this.delete()) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
Thread.sleep(1000L);
|
||||
} catch (InterruptedException e) {
|
||||
// Fall through to exit method
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean deleteAllChunks() {
|
||||
boolean success = false;
|
||||
|
||||
// Delete the individual chunks
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
Iterator iterator = this.chunks.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
ArbitraryDataFileChunk chunk = (ArbitraryDataFileChunk) iterator.next();
|
||||
success = chunk.delete();
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
public boolean deleteMetadata() {
|
||||
if (this.metadataFile != null && this.metadataFile.exists()) {
|
||||
return this.metadataFile.delete();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean deleteAll() {
|
||||
// Delete the complete file
|
||||
boolean fileDeleted = this.delete();
|
||||
|
||||
// Delete the metadata file
|
||||
boolean metadataDeleted = this.deleteMetadata();
|
||||
|
||||
// Delete the individual chunks
|
||||
boolean chunksDeleted = this.deleteAllChunks();
|
||||
|
||||
return fileDeleted || metadataDeleted || chunksDeleted;
|
||||
}
|
||||
|
||||
protected void cleanupFilesystem() throws IOException {
|
||||
// It is essential that use a separate path reference in this method
|
||||
// as we don't want to modify this.filePath
|
||||
Path path = this.filePath;
|
||||
|
||||
FilesystemUtils.safeDeleteEmptyParentDirectories(path);
|
||||
}
|
||||
|
||||
public byte[] getBytes() {
|
||||
try {
|
||||
return Files.readAllBytes(this.filePath);
|
||||
} catch (IOException e) {
|
||||
LOGGER.error("Unable to read bytes for file");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* Helper methods */
|
||||
|
||||
private boolean isInBaseDirectory(Path filePath) {
|
||||
Path path = filePath.toAbsolutePath();
|
||||
String dataPath = Settings.getInstance().getDataPath();
|
||||
String basePath = Paths.get(dataPath).toAbsolutePath().toString();
|
||||
return path.startsWith(basePath);
|
||||
}
|
||||
|
||||
public boolean exists() {
|
||||
File file = this.filePath.toFile();
|
||||
return file.exists();
|
||||
}
|
||||
|
||||
public boolean chunkExists(byte[] hash) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
if (Arrays.equals(hash, chunk.getHash())) {
|
||||
return chunk.exists();
|
||||
}
|
||||
}
|
||||
if (Arrays.equals(hash, this.metadataHash)) {
|
||||
if (this.metadataFile != null) {
|
||||
return this.metadataFile.exists();
|
||||
}
|
||||
}
|
||||
if (Arrays.equals(this.getHash(), hash)) {
|
||||
return this.exists();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean allChunksExist() {
|
||||
try {
|
||||
if (this.metadataHash == null) {
|
||||
// We don't have any metadata so can't check if we have the chunks
|
||||
// Even if this transaction has no chunks, we don't have the file either (already checked above)
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadataFile == null) {
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(this.metadataHash, this.signature);
|
||||
}
|
||||
|
||||
// If the metadata file doesn't exist, we can't check if we have the chunks
|
||||
if (!metadataFile.getFilePath().toFile().exists()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadata == null) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
}
|
||||
|
||||
// Read the metadata
|
||||
List<byte[]> chunks = metadata.getChunks();
|
||||
|
||||
// If the chunks array is empty, then this resource has no chunks,
|
||||
// so we must return false to avoid confusing the caller.
|
||||
if (chunks.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, we need to check each chunk individually
|
||||
for (byte[] chunkHash : chunks) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
if (!chunk.exists()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so assume we don't have all the chunks
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean anyChunksExist() throws DataException {
|
||||
try {
|
||||
if (this.metadataHash == null) {
|
||||
// We don't have any metadata so can't check if we have the chunks
|
||||
// Even if this transaction has no chunks, we don't have the file either (already checked above)
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadataFile == null) {
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(this.metadataHash, this.signature);
|
||||
}
|
||||
|
||||
// If the metadata file doesn't exist, we can't check if we have any chunks
|
||||
if (!metadataFile.getFilePath().toFile().exists()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadata == null) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
}
|
||||
|
||||
// Read the metadata
|
||||
List<byte[]> chunks = metadata.getChunks();
|
||||
for (byte[] chunkHash : chunks) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
if (chunk.exists()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so assume we don't have all the chunks
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean allFilesExist() {
|
||||
if (this.exists()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Complete file doesn't exist, so check the chunks
|
||||
if (this.allChunksExist()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a list of file hashes for this transaction that we do not hold locally
|
||||
*
|
||||
* @return a List of chunk hashes, or null if we are unable to determine what is missing
|
||||
*/
|
||||
public List<byte[]> missingHashes() {
|
||||
List<byte[]> missingHashes = new ArrayList<>();
|
||||
try {
|
||||
if (this.metadataHash == null) {
|
||||
// We don't have any metadata so can't check if we have the chunks
|
||||
// Even if this transaction has no chunks, we don't have the file either (already checked above)
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.metadataFile == null) {
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(this.metadataHash, this.signature);
|
||||
}
|
||||
|
||||
// If the metadata file doesn't exist, we can't check if we have the chunks
|
||||
if (!metadataFile.getFilePath().toFile().exists()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.metadata == null) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
}
|
||||
|
||||
// Read the metadata
|
||||
List<byte[]> chunks = metadata.getChunks();
|
||||
for (byte[] chunkHash : chunks) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
if (!chunk.exists()) {
|
||||
missingHashes.add(chunkHash);
|
||||
}
|
||||
}
|
||||
|
||||
return missingHashes;
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so we can't make a sensible decision
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean containsChunk(byte[] hash) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
if (Arrays.equals(hash, chunk.getHash())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public long size() {
|
||||
try {
|
||||
return Files.size(this.filePath);
|
||||
} catch (IOException e) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
public int chunkCount() {
|
||||
return this.chunks.size();
|
||||
}
|
||||
|
||||
public List<ArbitraryDataFileChunk> getChunks() {
|
||||
return this.chunks;
|
||||
}
|
||||
|
||||
public byte[] chunkHashes() throws DataException {
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
// Return null if we only have one chunk, with the same hash as the parent
|
||||
if (Arrays.equals(this.digest(), this.chunks.get(0).digest())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
byte[] chunkHash = chunk.digest();
|
||||
if (chunkHash.length != 32) {
|
||||
LOGGER.info("Invalid chunk hash length: {}", chunkHash.length);
|
||||
throw new DataException("Invalid chunk hash length");
|
||||
}
|
||||
outputStream.write(chunk.digest());
|
||||
}
|
||||
return outputStream.toByteArray();
|
||||
} catch (IOException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public List<byte[]> chunkHashList() {
|
||||
List<byte[]> chunks = new ArrayList<>();
|
||||
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
// Return null if we only have one chunk, with the same hash as the parent
|
||||
if (Arrays.equals(this.digest(), this.chunks.get(0).digest())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
byte[] chunkHash = chunk.digest();
|
||||
if (chunkHash.length != 32) {
|
||||
LOGGER.info("Invalid chunk hash length: {}", chunkHash.length);
|
||||
throw new DataException("Invalid chunk hash length");
|
||||
}
|
||||
chunks.add(chunkHash);
|
||||
}
|
||||
return chunks;
|
||||
|
||||
} catch (DataException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private void loadMetadata() throws DataException {
|
||||
try {
|
||||
this.metadata.read();
|
||||
|
||||
} catch (DataException | IOException e) {
|
||||
throw new DataException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private File getFile() {
|
||||
File file = this.filePath.toFile();
|
||||
if (file.exists()) {
|
||||
return file;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public Path getFilePath() {
|
||||
return this.filePath;
|
||||
}
|
||||
|
||||
public byte[] digest() {
|
||||
File file = this.getFile();
|
||||
if (file != null && file.exists()) {
|
||||
try {
|
||||
return Crypto.digest(file);
|
||||
|
||||
} catch (IOException e) {
|
||||
LOGGER.error("Couldn't compute digest for ArbitraryDataFile");
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String digest58() {
|
||||
if (this.digest() != null) {
|
||||
return Base58.encode(this.digest());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String shortHash58() {
|
||||
if (this.hash58 == null) {
|
||||
return null;
|
||||
}
|
||||
return this.hash58.substring(0, Math.min(this.hash58.length(), SHORT_DIGEST_LENGTH));
|
||||
}
|
||||
|
||||
public String getHash58() {
|
||||
return this.hash58;
|
||||
}
|
||||
|
||||
public byte[] getHash() {
|
||||
return Base58.decode(this.hash58);
|
||||
}
|
||||
|
||||
public String printChunks() {
|
||||
String outputString = "";
|
||||
if (this.chunkCount() > 0) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
if (outputString.length() > 0) {
|
||||
outputString = outputString.concat(",");
|
||||
}
|
||||
outputString = outputString.concat(chunk.digest58());
|
||||
}
|
||||
}
|
||||
return outputString;
|
||||
}
|
||||
|
||||
public void setSecret(byte[] secret) {
|
||||
this.secret = secret;
|
||||
}
|
||||
|
||||
public byte[] getSecret() {
|
||||
return this.secret;
|
||||
}
|
||||
|
||||
public byte[] getSignature() {
|
||||
return this.signature;
|
||||
}
|
||||
|
||||
public void setMetadataFile(ArbitraryDataFile metadataFile) {
|
||||
this.metadataFile = metadataFile;
|
||||
}
|
||||
|
||||
public ArbitraryDataFile getMetadataFile() {
|
||||
return this.metadataFile;
|
||||
}
|
||||
|
||||
public void setMetadataHash(byte[] hash) throws DataException {
|
||||
this.metadataHash = hash;
|
||||
|
||||
if (hash == null) {
|
||||
return;
|
||||
}
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(hash, this.signature);
|
||||
if (metadataFile.exists()) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
this.addChunkHashes(this.metadata.getChunks());
|
||||
}
|
||||
}
|
||||
|
||||
public byte[] getMetadataHash() {
|
||||
return this.metadataHash;
|
||||
}
|
||||
|
||||
public void setMetadata(ArbitraryDataTransactionMetadata metadata) throws DataException {
|
||||
this.metadata = metadata;
|
||||
this.loadMetadata();
|
||||
}
|
||||
|
||||
public ArbitraryDataTransactionMetadata getMetadata() {
|
||||
return this.metadata;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.shortHash58();
|
||||
}
|
||||
}
|
||||
@@ -1,54 +0,0 @@
package org.qortal.arbitrary;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.io.IOException;
import java.nio.file.Files;


public class ArbitraryDataFileChunk extends ArbitraryDataFile {

private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataFileChunk.class);

public ArbitraryDataFileChunk(String hash58, byte[] signature) throws DataException {
super(hash58, signature);
}

public ArbitraryDataFileChunk(byte[] fileContent, byte[] signature) throws DataException {
super(fileContent, signature);
}

public static ArbitraryDataFileChunk fromHash58(String hash58, byte[] signature) throws DataException {
return new ArbitraryDataFileChunk(hash58, signature);
}

public static ArbitraryDataFileChunk fromHash(byte[] hash, byte[] signature) throws DataException {
return ArbitraryDataFileChunk.fromHash58(Base58.encode(hash), signature);
}

@Override
public ValidationResult isValid() {
// DataChunk validation applies here too
ValidationResult superclassValidationResult = super.isValid();
if (superclassValidationResult != ValidationResult.OK) {
return superclassValidationResult;
}

try {
// Validate the file size (chunks have stricter limits)
long fileSize = Files.size(this.filePath);
if (fileSize > MAX_CHUNK_SIZE) {
LOGGER.error(String.format("DataFileChunk is too large: %d bytes (max chunk size: %d bytes)", fileSize, MAX_CHUNK_SIZE));
return ValidationResult.FILE_TOO_LARGE;
}

} catch (IOException e) {
return ValidationResult.FILE_NOT_FOUND;
}

return ValidationResult.OK;
}
}

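A minimal sketch of the chunking round trip handled by the parent ArbitraryDataFile class (illustrative only; assumes the source path and transaction signature are already known to the caller):

static int splitAndRejoin(Path sourcePath, byte[] signature) throws DataException {
    ArbitraryDataFile dataFile = ArbitraryDataFile.fromPath(sourcePath, signature); // may return null if the path is missing
    int chunkCount = dataFile.split(ArbitraryDataFile.CHUNK_SIZE); // 0.5 MiB chunks
    dataFile.join(); // rebuilds the complete file from its chunks
    return chunkCount;
}
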
@@ -1,176 +0,0 @@
package org.qortal.arbitrary;

import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.arbitrary.ArbitraryDataDiff.*;
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
import org.qortal.arbitrary.patch.UnifiedDiffPatch;
import org.qortal.repository.DataException;
import org.qortal.settings.Settings;
import org.qortal.utils.FilesystemUtils;

import java.io.File;
import java.io.IOException;
import java.nio.file.*;
import java.util.List;
import java.util.UUID;

public class ArbitraryDataMerge {

private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataMerge.class);

private final Path pathBefore;
private final Path pathAfter;
private Path mergePath;
private String identifier;
private ArbitraryDataMetadataPatch metadata;

public ArbitraryDataMerge(Path pathBefore, Path pathAfter) {
this.pathBefore = pathBefore;
this.pathAfter = pathAfter;
}

public void compute() throws IOException, DataException {
try {
this.preExecute();
this.copyPreviousStateToMergePath();
this.loadMetadata();
this.applyDifferences();
this.copyMetadata();

} finally {
this.postExecute();
}
}

private void preExecute() throws DataException {
this.createRandomIdentifier();
this.createOutputDirectory();
}

private void postExecute() {

}

private void createRandomIdentifier() {
this.identifier = UUID.randomUUID().toString();
}

private void createOutputDirectory() throws DataException {
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
String baseDir = Settings.getInstance().getTempDataPath();
Path tempDir = Paths.get(baseDir, "merge", this.identifier);
try {
Files.createDirectories(tempDir);
} catch (IOException e) {
throw new DataException("Unable to create temp directory");
}
this.mergePath = tempDir;
}

private void copyPreviousStateToMergePath() throws IOException {
ArbitraryDataMerge.copyDirPathToBaseDir(this.pathBefore, this.mergePath, Paths.get(""));
}

private void loadMetadata() throws IOException, DataException {
this.metadata = new ArbitraryDataMetadataPatch(this.pathAfter);
this.metadata.read();
}

private void applyDifferences() throws IOException, DataException {

List<Path> addedPaths = this.metadata.getAddedPaths();
for (Path path : addedPaths) {
LOGGER.trace("File was added: {}", path.toString());
Path filePath = Paths.get(this.pathAfter.toString(), path.toString());
ArbitraryDataMerge.copyPathToBaseDir(filePath, this.mergePath, path);
}

List<ModifiedPath> modifiedPaths = this.metadata.getModifiedPaths();
for (ModifiedPath modifiedPath : modifiedPaths) {
LOGGER.trace("File was modified: {}", modifiedPath.toString());
this.applyPatch(modifiedPath);
}

List<Path> removedPaths = this.metadata.getRemovedPaths();
for (Path path : removedPaths) {
LOGGER.trace("File was removed: {}", path.toString());
ArbitraryDataMerge.deletePathInBaseDir(this.mergePath, path);
}
}

private void applyPatch(ModifiedPath modifiedPath) throws IOException, DataException {
if (modifiedPath.getDiffType() == DiffType.UNIFIED_DIFF) {
// Create destination file from patch
UnifiedDiffPatch unifiedDiffPatch = new UnifiedDiffPatch(pathBefore, pathAfter, mergePath);
unifiedDiffPatch.apply(modifiedPath.getPath());
}
else if (modifiedPath.getDiffType() == DiffType.COMPLETE_FILE) {
// Copy complete file
Path filePath = Paths.get(this.pathAfter.toString(), modifiedPath.getPath().toString());
ArbitraryDataMerge.copyPathToBaseDir(filePath, this.mergePath, modifiedPath.getPath());
}
else {
throw new DataException(String.format("Unrecognized patch diff type: %s", modifiedPath.getDiffType()));
}
}

private void copyMetadata() throws IOException {
Path filePath = Paths.get(this.pathAfter.toString(), ".qortal");
ArbitraryDataMerge.copyPathToBaseDir(filePath, this.mergePath, Paths.get(".qortal"));
}


private static void copyPathToBaseDir(Path source, Path base, Path relativePath) throws IOException {
if (!Files.exists(source)) {
throw new IOException(String.format("File not found: %s", source.toString()));
}

File sourceFile = source.toFile();
Path dest = Paths.get(base.toString(), relativePath.toString());
LOGGER.trace("Copying {} to {}", source, dest);

if (sourceFile.isFile()) {
Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);
}
else if (sourceFile.isDirectory()) {
FilesystemUtils.copyAndReplaceDirectory(source.toString(), dest.toString());
}
else {
throw new IOException(String.format("Invalid file: %s", source.toString()));
}
}

private static void copyDirPathToBaseDir(Path source, Path base, Path relativePath) throws IOException {
if (!Files.exists(source)) {
throw new IOException(String.format("File not found: %s", source.toString()));
}

Path dest = Paths.get(base.toString(), relativePath.toString());
LOGGER.trace("Copying {} to {}", source, dest);
FilesystemUtils.copyAndReplaceDirectory(source.toString(), dest.toString());
}

private static void deletePathInBaseDir(Path base, Path relativePath) throws IOException {
Path dest = Paths.get(base.toString(), relativePath.toString());
File file = new File(dest.toString());
if (file.exists() && file.isFile()) {
if (FilesystemUtils.pathInsideDataOrTempPath(dest)) {
LOGGER.trace("Deleting file {}", dest);
Files.delete(dest);
}
}
if (file.exists() && file.isDirectory()) {
if (FilesystemUtils.pathInsideDataOrTempPath(dest)) {
LOGGER.trace("Deleting directory {}", dest);
FileUtils.deleteDirectory(file);
}
}
}

public Path getMergePath() {
return this.mergePath;
}

}

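A minimal sketch showing how a patch produced by ArbitraryDataDiff is applied with the merge class above (illustrative only; not part of this changeset):

static Path applyPatch(Path previousState, Path patchDirectory) throws IOException, DataException {
    ArbitraryDataMerge merge = new ArbitraryDataMerge(previousState, patchDirectory);
    merge.compute(); // copies the previous state, then applies added/modified/removed paths
    return merge.getMergePath(); // temporary directory containing the merged result
}
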
@@ -1,580 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataBuildManager;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataManager;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataStorageManager;
|
||||
import org.qortal.crypto.AES;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.*;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.transform.Transformer;
|
||||
import org.qortal.utils.ArbitraryTransactionUtils;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
import org.qortal.utils.ZipUtils;
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
||||
import javax.crypto.IllegalBlockSizeException;
|
||||
import javax.crypto.NoSuchPaddingException;
|
||||
import javax.crypto.SecretKey;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InvalidObjectException;
|
||||
import java.nio.file.*;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.security.InvalidAlgorithmParameterException;
|
||||
import java.security.InvalidKeyException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ArbitraryDataReader {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataReader.class);
|
||||
|
||||
private final String resourceId;
|
||||
private final ResourceIdType resourceIdType;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
private ArbitraryTransactionData transactionData;
|
||||
private String secret58;
|
||||
private Path filePath;
|
||||
private boolean canRequestMissingFiles;
|
||||
|
||||
// Intermediate paths
|
||||
private final Path workingPath;
|
||||
private final Path uncompressedPath;
|
||||
|
||||
// Stats (available for synchronous builds only)
|
||||
private int layerCount;
|
||||
private byte[] latestSignature;
|
||||
|
||||
public ArbitraryDataReader(String resourceId, ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
// Ensure names are always lowercase
|
||||
if (resourceIdType == ResourceIdType.NAME) {
|
||||
resourceId = resourceId.toLowerCase();
|
||||
}
|
||||
|
||||
// If identifier is a blank string, or reserved keyword "default", treat it as null
|
||||
if (identifier == null || identifier.equals("") || identifier.equals("default")) {
|
||||
identifier = null;
|
||||
}
|
||||
|
||||
this.resourceId = resourceId;
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
this.identifier = identifier;
|
||||
|
||||
this.workingPath = this.buildWorkingPath();
|
||||
this.uncompressedPath = Paths.get(this.workingPath.toString(), "data");
|
||||
|
||||
// By default we can request missing files
|
||||
// Callers can use setCanRequestMissingFiles(false) to prevent it
|
||||
this.canRequestMissingFiles = true;
|
||||
}
|
||||
|
||||
private Path buildWorkingPath() {
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
String identifier = this.identifier != null ? this.identifier : "default";
|
||||
return Paths.get(baseDir, "reader", this.resourceIdType.toString(), this.resourceId, this.service.toString(), identifier);
|
||||
}
|
||||
|
||||
public boolean isCachedDataAvailable() {
|
||||
// If this resource is in the build queue then we shouldn't attempt to serve
|
||||
// cached data, as it may not be fully built
|
||||
if (ArbitraryDataBuildManager.getInstance().isInBuildQueue(this.createQueueItem())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Not in the build queue - so check the cache itself
|
||||
ArbitraryDataCache cache = new ArbitraryDataCache(this.uncompressedPath, false,
|
||||
this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
if (cache.isCachedDataAvailable()) {
|
||||
this.filePath = this.uncompressedPath;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean isBuilding() {
|
||||
return ArbitraryDataBuildManager.getInstance().isInBuildQueue(this.createQueueItem());
|
||||
}
|
||||
|
||||
private ArbitraryDataBuildQueueItem createQueueItem() {
|
||||
return new ArbitraryDataBuildQueueItem(this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* loadAsynchronously
|
||||
*
|
||||
* Attempts to load the resource asynchronously
|
||||
* This adds the build task to a queue, and the result will be cached when complete
|
||||
* To check the status of the build, periodically call isCachedDataAvailable()
|
||||
* Once it returns true, you can then use getFilePath() to access the data itself.
|
||||
*
|
||||
* @param overwrite - set to true to force rebuild an existing cache
|
||||
* @return true if added or already present in queue; false if not
|
||||
*/
|
||||
public boolean loadAsynchronously(boolean overwrite, int priority) {
|
||||
ArbitraryDataCache cache = new ArbitraryDataCache(this.uncompressedPath, overwrite,
|
||||
this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
if (cache.isCachedDataAvailable()) {
|
||||
// Use cached data
|
||||
this.filePath = this.uncompressedPath;
|
||||
return true;
|
||||
}
|
||||
|
||||
ArbitraryDataBuildQueueItem item = this.createQueueItem();
|
||||
item.setPriority(priority);
|
||||
return ArbitraryDataBuildManager.getInstance().addToBuildQueue(item);
|
||||
}
|
||||
|
||||
/**
|
||||
* loadSynchronously
|
||||
*
|
||||
* Attempts to load the resource synchronously
|
||||
* Warning: this can block for a long time when building or fetching complex data
|
||||
* If no exception is thrown, you can then use getFilePath() to access the data immediately after returning
|
||||
*
|
||||
* @param overwrite - set to true to force rebuild an existing cache
|
||||
* @throws IOException
|
||||
* @throws DataException
|
||||
* @throws MissingDataException
|
||||
*/
|
||||
public void loadSynchronously(boolean overwrite) throws DataException, IOException, MissingDataException {
|
||||
try {
|
||||
ArbitraryDataCache cache = new ArbitraryDataCache(this.uncompressedPath, overwrite,
|
||||
this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
if (cache.isCachedDataAvailable()) {
|
||||
// Use cached data
|
||||
this.filePath = this.uncompressedPath;
|
||||
return;
|
||||
}
|
||||
|
||||
this.preExecute();
|
||||
this.deleteExistingFiles();
|
||||
this.fetch();
|
||||
this.decrypt();
|
||||
this.uncompress();
|
||||
this.validate();
|
||||
|
||||
} catch (DataException e) {
|
||||
LOGGER.info("DataException when trying to load QDN resource", e);
|
||||
this.deleteWorkingDirectory();
|
||||
throw new DataException(e.getMessage());
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
ArbitraryDataBuildManager.getInstance().setBuildInProgress(true);
|
||||
this.checkEnabled();
|
||||
this.createWorkingDirectory();
|
||||
this.createUncompressedDirectory();
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
ArbitraryDataBuildManager.getInstance().setBuildInProgress(false);
|
||||
}
|
||||
|
||||
private void checkEnabled() throws DataException {
|
||||
if (!Settings.getInstance().isQdnEnabled()) {
|
||||
throw new DataException("QDN is disabled in settings");
|
||||
}
|
||||
}
|
||||
|
||||
private void createWorkingDirectory() throws DataException {
|
||||
try {
|
||||
Files.createDirectories(this.workingPath);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create temp directory");
|
||||
}
|
||||
}
|
||||
|
||||
/**
* The working directory should only be deleted on failure, since it is currently used to
* serve a cached version of the resource for subsequent requests.
*/
|
||||
private void deleteWorkingDirectory() {
|
||||
try {
|
||||
FilesystemUtils.safeDeleteDirectory(this.workingPath, true);
|
||||
} catch (IOException e) {
|
||||
// Ignore failures as this isn't an essential step
|
||||
LOGGER.info("Unable to delete working path {}: {}", this.workingPath, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private void createUncompressedDirectory() throws DataException {
|
||||
try {
|
||||
// Create parent directory
|
||||
Files.createDirectories(this.uncompressedPath.getParent());
|
||||
// Ensure child directory doesn't already exist
|
||||
FileUtils.deleteDirectory(this.uncompressedPath.toFile());
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create uncompressed directory");
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteExistingFiles() {
|
||||
final Path uncompressedPath = this.uncompressedPath;
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(uncompressedPath)) {
|
||||
if (Files.exists(uncompressedPath)) {
|
||||
LOGGER.trace("Attempting to delete path {}", this.uncompressedPath);
|
||||
try {
|
||||
Files.walkFileTree(uncompressedPath, new SimpleFileVisitor<>() {
|
||||
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
Files.delete(file);
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException {
|
||||
// Don't delete the parent directory, as we want to leave an empty folder
|
||||
if (dir.compareTo(uncompressedPath) == 0) {
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
if (e == null) {
|
||||
Files.delete(dir);
|
||||
return FileVisitResult.CONTINUE;
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Unable to delete file or directory: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void fetch() throws DataException, IOException, MissingDataException {
|
||||
switch (resourceIdType) {
|
||||
|
||||
case FILE_HASH:
|
||||
this.fetchFromFileHash();
|
||||
break;
|
||||
|
||||
case NAME:
|
||||
this.fetchFromName();
|
||||
break;
|
||||
|
||||
case SIGNATURE:
|
||||
this.fetchFromSignature();
|
||||
break;
|
||||
|
||||
case TRANSACTION_DATA:
|
||||
this.fetchFromTransactionData(this.transactionData);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new DataException(String.format("Unknown resource ID type specified: %s", resourceIdType.toString()));
|
||||
}
|
||||
}
|
||||
|
||||
private void fetchFromFileHash() throws DataException {
|
||||
// Load data file directly from the hash (without a signature)
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash58(resourceId, null);
|
||||
// Set filePath to the location of the ArbitraryDataFile
|
||||
this.filePath = arbitraryDataFile.getFilePath();
|
||||
}
|
||||
|
||||
private void fetchFromName() throws DataException, IOException, MissingDataException {
|
||||
try {
|
||||
|
||||
// Build the existing state using past transactions
|
||||
ArbitraryDataBuilder builder = new ArbitraryDataBuilder(this.resourceId, this.service, this.identifier);
|
||||
builder.build();
|
||||
Path builtPath = builder.getFinalPath();
|
||||
if (builtPath == null) {
|
||||
throw new DataException("Unable to build path");
|
||||
}
|
||||
|
||||
// Update stats
|
||||
this.layerCount = builder.getLayerCount();
|
||||
this.latestSignature = builder.getLatestSignature();
|
||||
|
||||
// Set filePath to the builtPath
|
||||
this.filePath = builtPath;
|
||||
|
||||
} catch (InvalidObjectException e) {
|
||||
// Hash validation failed. Invalidate the cache for this name, so it can be rebuilt
|
||||
LOGGER.info("Deleting {}", this.workingPath.toString());
|
||||
FilesystemUtils.safeDeleteDirectory(this.workingPath, false);
|
||||
throw(e);
|
||||
}
|
||||
}
|
||||
|
||||
private void fetchFromSignature() throws DataException, IOException, MissingDataException {
|
||||
|
||||
// Load the full transaction data from the database so we can access the file hashes
|
||||
ArbitraryTransactionData transactionData;
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
transactionData = (ArbitraryTransactionData) repository.getTransactionRepository().fromSignature(Base58.decode(resourceId));
|
||||
}
|
||||
if (transactionData == null) {
|
||||
throw new DataException(String.format("Transaction data not found for signature %s", this.resourceId));
|
||||
}
|
||||
|
||||
this.fetchFromTransactionData(transactionData);
|
||||
}
|
||||
|
||||
private void fetchFromTransactionData(ArbitraryTransactionData transactionData) throws DataException, IOException, MissingDataException {
|
||||
if (transactionData == null) {
|
||||
throw new DataException(String.format("Transaction data not found for signature %s", this.resourceId));
|
||||
}
|
||||
|
||||
// Load hashes
|
||||
byte[] digest = transactionData.getData();
|
||||
byte[] metadataHash = transactionData.getMetadataHash();
|
||||
byte[] signature = transactionData.getSignature();
|
||||
|
||||
// Load secret
|
||||
byte[] secret = transactionData.getSecret();
|
||||
if (secret != null) {
|
||||
this.secret58 = Base58.encode(secret);
|
||||
}
|
||||
|
||||
// Load data file(s)
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
|
||||
ArbitraryTransactionUtils.checkAndRelocateMiscFiles(transactionData);
|
||||
arbitraryDataFile.setMetadataHash(metadataHash);
|
||||
|
||||
if (!arbitraryDataFile.allFilesExist()) {
|
||||
if (ArbitraryDataStorageManager.getInstance().isNameBlocked(transactionData.getName())) {
|
||||
throw new DataException(
|
||||
String.format("Unable to request missing data for file %s because the name is blocked", arbitraryDataFile));
|
||||
}
|
||||
else {
|
||||
// Ask the arbitrary data manager to fetch data for this transaction
|
||||
String message;
|
||||
if (this.canRequestMissingFiles) {
|
||||
boolean requested = ArbitraryDataManager.getInstance().fetchData(transactionData);
|
||||
|
||||
if (requested) {
|
||||
message = String.format("Requested missing data for file %s", arbitraryDataFile);
|
||||
} else {
|
||||
message = String.format("Unable to reissue request for missing file %s for signature %s due to rate limit. Please try again later.", arbitraryDataFile, Base58.encode(transactionData.getSignature()));
|
||||
}
|
||||
}
|
||||
else {
|
||||
message = String.format("Missing data for file %s", arbitraryDataFile);
|
||||
}
|
||||
|
||||
// Throw a missing data exception, which allows subsequent layers to fetch data
|
||||
LOGGER.trace(message);
|
||||
throw new MissingDataException(message);
|
||||
}
|
||||
}
|
||||
|
||||
if (arbitraryDataFile.allChunksExist() && !arbitraryDataFile.exists()) {
|
||||
// We have all the chunks but not the complete file, so join them
|
||||
arbitraryDataFile.join();
|
||||
}
|
||||
|
||||
// If the complete file still doesn't exist then something went wrong
|
||||
if (!arbitraryDataFile.exists()) {
|
||||
throw new IOException(String.format("File doesn't exist: %s", arbitraryDataFile));
|
||||
}
|
||||
// Ensure the complete hash matches the joined chunks
|
||||
if (!Arrays.equals(arbitraryDataFile.digest(), digest)) {
|
||||
// Delete the invalid file
|
||||
arbitraryDataFile.delete();
|
||||
throw new DataException("Unable to validate complete file hash");
|
||||
}
|
||||
// Ensure the file's size matches the size reported by the transaction (throws a DataException if not)
|
||||
arbitraryDataFile.validateFileSize(transactionData.getSize());
|
||||
|
||||
// Set filePath to the location of the ArbitraryDataFile
|
||||
this.filePath = arbitraryDataFile.getFilePath();
|
||||
}
|
||||
|
||||
private void decrypt() throws DataException {
|
||||
try {
|
||||
// First try with explicit parameters (CBC mode with PKCS5 padding)
|
||||
this.decryptUsingAlgo("AES/CBC/PKCS5Padding");
|
||||
|
||||
} catch (DataException e) {
|
||||
LOGGER.info("Unable to decrypt using specific parameters: {}", e.getMessage());
|
||||
// Something went wrong, so fall back to default AES params (necessary for legacy resource support)
|
||||
this.decryptUsingAlgo("AES");
|
||||
|
||||
// TODO: delete files and block this resource if privateDataEnabled is false and the second attempt fails too
|
||||
}
|
||||
}
|
||||
|
||||
private void decryptUsingAlgo(String algorithm) throws DataException {
|
||||
// Decrypt if we have the secret key.
|
||||
byte[] secret = this.secret58 != null ? Base58.decode(this.secret58) : null;
|
||||
if (secret != null && secret.length == Transformer.AES256_LENGTH) {
|
||||
try {
|
||||
LOGGER.info("Decrypting using algorithm {}...", algorithm);
|
||||
Path unencryptedPath = Paths.get(this.workingPath.toString(), "zipped.zip");
|
||||
SecretKey aesKey = new SecretKeySpec(secret, 0, secret.length, "AES");
|
||||
AES.decryptFile(algorithm, aesKey, this.filePath.toString(), unencryptedPath.toString());
|
||||
|
||||
// Replace filePath pointer with the decrypted file path
|
||||
// Don't delete the original ArbitraryDataFile, as this is handled in the cleanup phase
|
||||
this.filePath = unencryptedPath;
|
||||
|
||||
} catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException | NoSuchPaddingException
|
||||
| BadPaddingException | IllegalBlockSizeException | IOException | InvalidKeyException e) {
|
||||
LOGGER.info(String.format("Exception when decrypting using algorithm %s", algorithm), e);
|
||||
throw new DataException(String.format("Unable to decrypt file at path %s using algorithm %s: %s", this.filePath, algorithm, e.getMessage()));
|
||||
}
|
||||
} else {
|
||||
// Assume it is unencrypted. This will be the case when we have built a custom path by combining
|
||||
// multiple decrypted archives into a single state.
|
||||
}
|
||||
}
|
||||
|
||||
private void uncompress() throws IOException, DataException {
|
||||
if (this.filePath == null || !Files.exists(this.filePath)) {
|
||||
throw new DataException("Can't uncompress non-existent file path");
|
||||
}
|
||||
File file = new File(this.filePath.toString());
|
||||
if (file.isDirectory()) {
|
||||
// Already a directory - nothing to uncompress
|
||||
// We still need to copy the directory to its final destination if it's not already there
|
||||
this.moveFilePathToFinalDestination();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Default to ZIP compression - this is needed for previews
|
||||
Compression compression = transactionData != null ? transactionData.getCompression() : Compression.ZIP;
|
||||
|
||||
// Handle each type of compression
|
||||
if (compression == Compression.ZIP) {
|
||||
ZipUtils.unzip(this.filePath.toString(), this.uncompressedPath.getParent().toString());
|
||||
}
|
||||
else if (compression == Compression.NONE) {
|
||||
Files.createDirectories(this.uncompressedPath);
|
||||
Path finalPath = Paths.get(this.uncompressedPath.toString(), "data");
|
||||
this.filePath.toFile().renameTo(finalPath.toFile());
|
||||
}
|
||||
else {
|
||||
throw new DataException(String.format("Unrecognized compression type: %s", transactionData.getCompression()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new DataException(String.format("Unable to unzip file: %s", e.getMessage()));
|
||||
}
|
||||
|
||||
if (!this.uncompressedPath.toFile().exists()) {
|
||||
throw new DataException(String.format("Unable to unzip file: %s", this.filePath));
|
||||
}
|
||||
|
||||
// Delete original compressed file
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
if (Files.exists(this.filePath)) {
|
||||
try {
|
||||
Files.delete(this.filePath);
|
||||
} catch (IOException e) {
|
||||
// Ignore failures as this isn't an essential step
|
||||
LOGGER.info("Unable to delete file at path {}", this.filePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Replace filePath pointer with the uncompressed file path
|
||||
this.filePath = this.uncompressedPath;
|
||||
}
|
||||
|
||||
private void validate() throws IOException, DataException {
|
||||
if (this.service.isValidationRequired()) {
|
||||
Service.ValidationResult result = this.service.validate(this.filePath);
|
||||
if (result != Service.ValidationResult.OK) {
|
||||
throw new DataException(String.format("Validation of %s failed: %s", this.service, result.toString()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void moveFilePathToFinalDestination() throws IOException, DataException {
|
||||
if (this.filePath.compareTo(this.uncompressedPath) != 0) {
|
||||
File source = new File(this.filePath.toString());
|
||||
File dest = new File(this.uncompressedPath.toString());
|
||||
if (!source.exists()) {
|
||||
throw new DataException("Source directory doesn't exist");
|
||||
}
|
||||
// Ensure destination directory doesn't exist
|
||||
FileUtils.deleteDirectory(dest);
|
||||
// Move files to destination
|
||||
FilesystemUtils.copyAndReplaceDirectory(source.toString(), dest.toString());
|
||||
|
||||
try {
|
||||
// Delete existing
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
File directory = new File(this.filePath.toString());
|
||||
FileUtils.deleteDirectory(directory);
|
||||
}
|
||||
|
||||
// ... and its parent directory if empty
|
||||
Path parentDirectory = this.filePath.getParent();
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(parentDirectory)) {
|
||||
Files.deleteIfExists(parentDirectory);
|
||||
}
|
||||
|
||||
} catch (DirectoryNotEmptyException e) {
|
||||
// No need to log anything
|
||||
} catch (IOException e) {
|
||||
// This will eventually be cleaned up by a maintenance process, so log the error and continue
|
||||
LOGGER.debug("Unable to cleanup directories: {}", e.getMessage());
|
||||
}
|
||||
|
||||
// Finally, update filePath to point to uncompressedPath
|
||||
this.filePath = this.uncompressedPath;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void setTransactionData(ArbitraryTransactionData transactionData) {
|
||||
this.transactionData = transactionData;
|
||||
}
|
||||
|
||||
public void setSecret58(String secret58) {
|
||||
this.secret58 = secret58;
|
||||
}
|
||||
|
||||
public Path getFilePath() {
|
||||
return this.filePath;
|
||||
}
|
||||
|
||||
public int getLayerCount() {
|
||||
return this.layerCount;
|
||||
}
|
||||
|
||||
public byte[] getLatestSignature() {
|
||||
return this.latestSignature;
|
||||
}
|
||||
|
||||
/**
* Use this setter to ensure that we only read existing
* data without requesting any missing files.
*
* @param canRequestMissingFiles - whether or not fetching missing files is allowed
*/
|
||||
public void setCanRequestMissingFiles(boolean canRequestMissingFiles) {
|
||||
this.canRequestMissingFiles = canRequestMissingFiles;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,219 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import com.google.common.io.Resources;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.api.HTMLParser;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class ArbitraryDataRenderer {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataRenderer.class);
|
||||
|
||||
private final String resourceId;
|
||||
private final ResourceIdType resourceIdType;
|
||||
private final Service service;
|
||||
private String theme = "light";
|
||||
private String inPath;
|
||||
private final String secret58;
|
||||
private final String prefix;
|
||||
private final boolean usePrefix;
|
||||
private final boolean async;
|
||||
private final HttpServletRequest request;
|
||||
private final HttpServletResponse response;
|
||||
private final ServletContext context;
|
||||
|
||||
public ArbitraryDataRenderer(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
|
||||
String secret58, String prefix, boolean usePrefix, boolean async,
|
||||
HttpServletRequest request, HttpServletResponse response, ServletContext context) {
|
||||
|
||||
this.resourceId = resourceId;
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
this.inPath = inPath;
|
||||
this.secret58 = secret58;
|
||||
this.prefix = prefix;
|
||||
this.usePrefix = usePrefix;
|
||||
this.async = async;
|
||||
this.request = request;
|
||||
this.response = response;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
public HttpServletResponse render() {
|
||||
if (!inPath.startsWith(File.separator)) {
|
||||
inPath = File.separator + inPath;
|
||||
}
|
||||
|
||||
// Don't render data if QDN is disabled
|
||||
if (!Settings.getInstance().isQdnEnabled()) {
|
||||
return ArbitraryDataRenderer.getResponse(response, 500, "QDN is disabled in settings");
|
||||
}
|
||||
|
||||
ArbitraryDataReader arbitraryDataReader = new ArbitraryDataReader(resourceId, resourceIdType, service, null);
|
||||
arbitraryDataReader.setSecret58(secret58); // Optional, used for loading encrypted file hashes only
|
||||
try {
|
||||
if (!arbitraryDataReader.isCachedDataAvailable()) {
|
||||
// If async is requested, show a loading screen whilst build is in progress
|
||||
if (async) {
|
||||
arbitraryDataReader.loadAsynchronously(false, 10);
|
||||
return this.getLoadingResponse(service, resourceId, theme);
|
||||
}
|
||||
|
||||
// Otherwise, loop until we have data
|
||||
int attempts = 0;
|
||||
while (!Controller.isStopping()) {
|
||||
attempts++;
|
||||
if (!arbitraryDataReader.isBuilding()) {
|
||||
try {
|
||||
arbitraryDataReader.loadSynchronously(false);
|
||||
break;
|
||||
} catch (MissingDataException e) {
|
||||
if (attempts > 5) {
|
||||
// Give up after 5 attempts
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Data unavailable. Please try again later.");
|
||||
}
|
||||
}
|
||||
}
|
||||
Thread.sleep(3000L);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
LOGGER.info(String.format("Unable to load %s %s: %s", service, resourceId, e.getMessage()));
|
||||
return ArbitraryDataRenderer.getResponse(response, 500, "Error 500: Internal Server Error");
|
||||
}
|
||||
|
||||
java.nio.file.Path path = arbitraryDataReader.getFilePath();
|
||||
if (path == null) {
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Error 404: File Not Found");
|
||||
}
|
||||
String unzippedPath = path.toString();
|
||||
|
||||
try {
|
||||
String filename = this.getFilename(unzippedPath, inPath);
|
||||
String filePath = Paths.get(unzippedPath, filename).toString();
|
||||
|
||||
if (HTMLParser.isHtmlFile(filename)) {
|
||||
// HTML file - needs to be parsed
|
||||
byte[] data = Files.readAllBytes(Paths.get(filePath)); // TODO: limit file size that can be read into memory
|
||||
HTMLParser htmlParser = new HTMLParser(resourceId, inPath, prefix, usePrefix, data);
|
||||
htmlParser.addAdditionalHeaderTags();
|
||||
response.addHeader("Content-Security-Policy", "default-src 'self' 'unsafe-inline' 'unsafe-eval'; media-src 'self' blob:; img-src 'self' data: blob:;");
|
||||
response.setContentType(context.getMimeType(filename));
|
||||
response.setContentLength(htmlParser.getData().length);
|
||||
response.getOutputStream().write(htmlParser.getData());
|
||||
}
|
||||
else {
|
||||
// Regular file - can be streamed directly
|
||||
File file = new File(filePath);
|
||||
FileInputStream inputStream = new FileInputStream(file);
|
||||
response.addHeader("Content-Security-Policy", "default-src 'self'");
|
||||
response.setContentType(context.getMimeType(filename));
|
||||
int bytesRead, length = 0;
|
||||
byte[] buffer = new byte[10240];
|
||||
while ((bytesRead = inputStream.read(buffer)) != -1) {
|
||||
response.getOutputStream().write(buffer, 0, bytesRead);
|
||||
length += bytesRead;
|
||||
}
|
||||
response.setContentLength(length);
|
||||
inputStream.close();
|
||||
}
|
||||
return response;
|
||||
} catch (FileNotFoundException | NoSuchFileException e) {
|
||||
LOGGER.info("Unable to serve file: {}", e.getMessage());
|
||||
if (inPath.equals("/")) {
|
||||
// Delete the unzipped folder if no index file was found
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(unzippedPath));
|
||||
} catch (IOException ioException) {
|
||||
LOGGER.debug("Unable to delete directory: {}", unzippedPath, e);
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Unable to serve file at path {}: {}", inPath, e.getMessage());
|
||||
}
|
||||
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Error 404: File Not Found");
|
||||
}
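
// Hypothetical caller sketch (wiring assumed, not taken from the Qortal API layer): build a
// renderer for a registered name and stream the rendered result back to the HTTP client.
// Service.WEBSITE, the root inPath "/" and async=true are illustrative choices only.
private static HttpServletResponse exampleRender(String name, HttpServletRequest request,
		HttpServletResponse response, ServletContext context) {
	ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(name, ResourceIdType.NAME, Service.WEBSITE,
			"/", null, "", false, true, request, response, context);
	return renderer.render();
}
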
private String getFilename(String directory, String userPath) {
|
||||
if (userPath == null || userPath.endsWith("/") || userPath.equals("")) {
|
||||
// Locate index file
|
||||
List<String> indexFiles = ArbitraryDataRenderer.indexFiles();
|
||||
for (String indexFile : indexFiles) {
|
||||
Path path = Paths.get(directory, indexFile);
|
||||
if (Files.exists(path)) {
|
||||
return userPath + indexFile;
|
||||
}
|
||||
}
|
||||
}
|
||||
return userPath;
|
||||
}
|
||||
|
||||
private HttpServletResponse getLoadingResponse(Service service, String name, String theme) {
|
||||
String responseString = "";
|
||||
URL url = Resources.getResource("loading/index.html");
|
||||
try {
|
||||
responseString = Resources.toString(url, StandardCharsets.UTF_8);
|
||||
|
||||
// Replace vars
|
||||
responseString = responseString.replace("%%SERVICE%%", service.toString());
|
||||
responseString = responseString.replace("%%NAME%%", name);
|
||||
responseString = responseString.replace("%%THEME%%", theme);
|
||||
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Unable to show loading screen: {}", e.getMessage());
|
||||
}
|
||||
return ArbitraryDataRenderer.getResponse(response, 503, responseString);
|
||||
}
|
||||
|
||||
public static HttpServletResponse getResponse(HttpServletResponse response, int responseCode, String responseString) {
|
||||
try {
|
||||
byte[] responseData = responseString.getBytes();
|
||||
response.setStatus(responseCode);
|
||||
response.setContentLength(responseData.length);
|
||||
response.getOutputStream().write(responseData);
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Error writing {} response", responseCode);
|
||||
}
|
||||
return response;
|
||||
}
|
||||
|
||||
public static List<String> indexFiles() {
|
||||
List<String> indexFiles = new ArrayList<>();
|
||||
indexFiles.add("index.html");
|
||||
indexFiles.add("index.htm");
|
||||
indexFiles.add("default.html");
|
||||
indexFiles.add("default.htm");
|
||||
indexFiles.add("home.html");
|
||||
indexFiles.add("home.htm");
|
||||
return indexFiles;
|
||||
}
|
||||
|
||||
public void setTheme(String theme) {
|
||||
this.theme = theme;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,407 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataBuildManager;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataManager;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataStorageManager;
|
||||
import org.qortal.data.arbitrary.ArbitraryResourceStatus;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.list.ResourceListManager;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.ArbitraryTransactionUtils;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.qortal.data.arbitrary.ArbitraryResourceStatus.Status;
|
||||
|
||||
public class ArbitraryDataResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataResource.class);
|
||||
|
||||
protected final String resourceId;
|
||||
protected final ResourceIdType resourceIdType;
|
||||
protected final Service service;
|
||||
protected final String identifier;
|
||||
|
||||
private List<ArbitraryTransactionData> transactions;
|
||||
private ArbitraryTransactionData latestPutTransaction;
|
||||
private ArbitraryTransactionData latestTransaction;
|
||||
private int layerCount;
|
||||
private Integer localChunkCount = null;
|
||||
private Integer totalChunkCount = null;
|
||||
|
||||
public ArbitraryDataResource(String resourceId, ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
this.resourceId = resourceId.toLowerCase();
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
|
||||
// If identifier is a blank string, or reserved keyword "default", treat it as null
|
||||
if (identifier == null || identifier.equals("") || identifier.equals("default")) {
|
||||
identifier = null;
|
||||
}
|
||||
this.identifier = identifier;
|
||||
}
|
||||
|
||||
public ArbitraryResourceStatus getStatus(boolean quick) {
|
||||
// Calculate the chunk counts
|
||||
// Avoid this for "quick" statuses, to speed things up
|
||||
if (!quick) {
|
||||
this.calculateChunkCounts();
|
||||
}
|
||||
|
||||
if (resourceIdType != ResourceIdType.NAME) {
|
||||
// We only support statuses for resources with a name
|
||||
return new ArbitraryResourceStatus(Status.UNSUPPORTED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if the name is blocked
|
||||
if (ResourceListManager.getInstance()
|
||||
.listContains("blockedNames", this.resourceId, false)) {
|
||||
return new ArbitraryResourceStatus(Status.BLOCKED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if a build has failed
|
||||
ArbitraryDataBuildQueueItem queueItem =
|
||||
new ArbitraryDataBuildQueueItem(resourceId, resourceIdType, service, identifier);
|
||||
if (ArbitraryDataBuildManager.getInstance().isInFailedBuildsList(queueItem)) {
|
||||
return new ArbitraryResourceStatus(Status.BUILD_FAILED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Firstly check the cache to see if it's already built
|
||||
ArbitraryDataReader arbitraryDataReader = new ArbitraryDataReader(
|
||||
resourceId, resourceIdType, service, identifier);
|
||||
if (arbitraryDataReader.isCachedDataAvailable()) {
|
||||
return new ArbitraryResourceStatus(Status.READY, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if we have all data locally for this resource
|
||||
if (!this.allFilesDownloaded()) {
|
||||
if (this.isDownloading()) {
|
||||
return new ArbitraryResourceStatus(Status.DOWNLOADING, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
else if (this.isDataPotentiallyAvailable()) {
|
||||
return new ArbitraryResourceStatus(Status.PUBLISHED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
return new ArbitraryResourceStatus(Status.MISSING_DATA, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if there's a build in progress
|
||||
if (ArbitraryDataBuildManager.getInstance().isInBuildQueue(queueItem)) {
|
||||
return new ArbitraryResourceStatus(Status.BUILDING, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// We have all data locally
|
||||
return new ArbitraryResourceStatus(Status.DOWNLOADED, this.localChunkCount, this.totalChunkCount);
|
||||
}
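
// A minimal caller sketch (assumed usage, not part of the original class): query the status of a
// named resource. Passing quick=false also calculates local vs. total chunk counts, as above.
private static ArbitraryResourceStatus exampleStatusFor(String name, Service service) {
	ArbitraryDataResource resource = new ArbitraryDataResource(name, ResourceIdType.NAME, service, null);
	return resource.getStatus(false);
}
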
public ArbitraryDataTransactionMetadata getLatestTransactionMetadata() {
|
||||
this.fetchLatestTransaction();
|
||||
|
||||
if (latestTransaction != null) {
|
||||
byte[] signature = latestTransaction.getSignature();
|
||||
byte[] metadataHash = latestTransaction.getMetadataHash();
|
||||
if (metadataHash == null) {
|
||||
// This resource doesn't have metadata
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
ArbitraryDataFile metadataFile = ArbitraryDataFile.fromHash(metadataHash, signature);
|
||||
if (metadataFile.exists()) {
|
||||
ArbitraryDataTransactionMetadata transactionMetadata = new ArbitraryDataTransactionMetadata(metadataFile.getFilePath());
|
||||
transactionMetadata.read();
|
||||
return transactionMetadata;
|
||||
}
|
||||
|
||||
} catch (DataException | IOException e) {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean delete() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
byte[] hash = transactionData.getData();
|
||||
byte[] metadataHash = transactionData.getMetadataHash();
|
||||
byte[] signature = transactionData.getSignature();
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash, signature);
|
||||
arbitraryDataFile.setMetadataHash(metadataHash);
|
||||
|
||||
// Delete any chunks or complete files from each transaction
|
||||
arbitraryDataFile.deleteAll();
|
||||
}
|
||||
|
||||
// Also delete cached data for the entire resource
|
||||
this.deleteCache();
|
||||
|
||||
// Invalidate the hosted transactions cache as we have removed an item
|
||||
ArbitraryDataStorageManager.getInstance().invalidateHostedTransactionsCache();
|
||||
|
||||
return true;
|
||||
|
||||
} catch (DataException | IOException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public void deleteCache() throws IOException {
|
||||
// Don't delete anything if there's a build in progress
|
||||
ArbitraryDataBuildQueueItem queueItem =
|
||||
new ArbitraryDataBuildQueueItem(resourceId, resourceIdType, service, identifier);
|
||||
if (ArbitraryDataBuildManager.getInstance().isInBuildQueue(queueItem)) {
|
||||
return;
|
||||
}
|
||||
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
String identifier = this.identifier != null ? this.identifier : "default";
|
||||
Path cachePath = Paths.get(baseDir, "reader", this.resourceIdType.toString(), this.resourceId, this.service.toString(), identifier);
|
||||
if (cachePath.toFile().exists()) {
|
||||
boolean success = FilesystemUtils.safeDeleteDirectory(cachePath, true);
|
||||
if (success) {
|
||||
LOGGER.info("Cleared cache for resource {}", this.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean allFilesDownloaded() {
|
||||
// Use chunk counts to speed things up if we can
|
||||
if (this.localChunkCount != null && this.totalChunkCount != null &&
|
||||
this.localChunkCount >= this.totalChunkCount) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
if (!ArbitraryTransactionUtils.completeFileExists(transactionData) ||
|
||||
!ArbitraryTransactionUtils.allChunksExist(transactionData)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private void calculateChunkCounts() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
int localChunkCount = 0;
|
||||
int totalChunkCount = 0;
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
localChunkCount += ArbitraryTransactionUtils.ourChunkCount(transactionData);
|
||||
totalChunkCount += ArbitraryTransactionUtils.totalChunkCount(transactionData);
|
||||
}
|
||||
|
||||
this.localChunkCount = localChunkCount;
|
||||
this.totalChunkCount = totalChunkCount;
|
||||
|
||||
} catch (DataException e) {}
|
||||
}
|
||||
|
||||
private boolean isRateLimited() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
if (ArbitraryDataManager.getInstance().isSignatureRateLimited(transactionData.getSignature())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// No transactions were rate limited
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Best guess as to whether data might be available.
* This is only used to give an indication of progress to the user.
* @return - whether data might be available on the network
*/
|
||||
private boolean isDataPotentiallyAvailable() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
long lastRequestTime = ArbitraryDataManager.getInstance().lastRequestForSignature(transactionData.getSignature());
|
||||
// If we haven't requested yet, or requested in the last 30 seconds, there's still a
|
||||
// chance that data is on its way but hasn't arrived yet
|
||||
if (lastRequestTime == 0 || now - lastRequestTime < 30 * 1000L) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
* Best guess as to whether we are currently downloading a resource.
* This is only used to give an indication of progress to the user.
* @return - whether we are trying to download the resource
*/
|
||||
private boolean isDownloading() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
long lastRequestTime = ArbitraryDataManager.getInstance().lastRequestForSignature(transactionData.getSignature());
|
||||
// If we have requested data in the last 30 seconds, treat it as "downloading"
|
||||
if (lastRequestTime > 0 && now - lastRequestTime < 30 * 1000L) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// FUTURE: we may want to check for file hashes (including the metadata file hash) in
|
||||
// ArbitraryDataManager.arbitraryDataFileRequests and return true if one is found.
|
||||
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
private void fetchTransactions() throws DataException {
|
||||
if (this.transactions != null && !this.transactions.isEmpty()) {
|
||||
// Already fetched
|
||||
return;
|
||||
}
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Get the most recent PUT
|
||||
ArbitraryTransactionData latestPut = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.resourceId, this.service, ArbitraryTransactionData.Method.PUT, this.identifier);
|
||||
if (latestPut == null) {
|
||||
String message = String.format("Couldn't find PUT transaction for name %s, service %s and identifier %s",
|
||||
this.resourceId, this.service, this.identifierString());
|
||||
throw new DataException(message);
|
||||
}
|
||||
this.latestPutTransaction = latestPut;
|
||||
|
||||
// Load all transactions since the latest PUT
|
||||
List<ArbitraryTransactionData> transactionDataList = repository.getArbitraryRepository()
|
||||
.getArbitraryTransactions(this.resourceId, this.service, this.identifier, latestPut.getTimestamp());
|
||||
|
||||
this.transactions = transactionDataList;
|
||||
this.layerCount = transactionDataList.size();
|
||||
|
||||
} catch (DataException e) {
|
||||
LOGGER.info(String.format("Repository error when fetching transactions for resource %s: %s", this, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
private void fetchLatestTransaction() {
|
||||
if (this.latestTransaction != null) {
|
||||
// Already fetched
|
||||
return;
|
||||
}
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Get the most recent transaction
|
||||
ArbitraryTransactionData latestTransaction = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.resourceId, this.service, null, this.identifier);
|
||||
if (latestTransaction == null) {
|
||||
String message = String.format("Couldn't find transaction for name %s, service %s and identifier %s",
|
||||
this.resourceId, this.service, this.identifierString());
|
||||
throw new DataException(message);
|
||||
}
|
||||
this.latestTransaction = latestTransaction;
|
||||
|
||||
} catch (DataException e) {
|
||||
LOGGER.info(String.format("Repository error when fetching latest transaction for resource %s: %s", this, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
private String resourceIdString() {
|
||||
return resourceId != null ? resourceId : "";
|
||||
}
|
||||
|
||||
private String resourceIdTypeString() {
|
||||
return resourceIdType != null ? resourceIdType.toString() : "";
|
||||
}
|
||||
|
||||
private String serviceString() {
|
||||
return service != null ? service.toString() : "";
|
||||
}
|
||||
|
||||
private String identifierString() {
|
||||
return identifier != null ? identifier : "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("%s %s %s", this.serviceString(), this.resourceIdString(), this.identifierString());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return unique key used to identify this resource
|
||||
*/
|
||||
public String getUniqueKey() {
|
||||
return String.format("%s-%s-%s", this.service, this.resourceId, this.identifier).toLowerCase();
|
||||
}
|
||||
|
||||
public String getResourceId() {
|
||||
return this.resourceId;
|
||||
}
|
||||
|
||||
public Service getService() {
|
||||
return this.service;
|
||||
}
|
||||
|
||||
public String getIdentifier() {
|
||||
return this.identifier;
|
||||
}
|
||||
}
|
||||
@@ -1,334 +0,0 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.arbitrary.ArbitraryDataDiff.*;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.arbitrary.misc.Category;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.PaymentData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.*;
|
||||
import org.qortal.data.transaction.BaseTransactionData;
|
||||
import org.qortal.group.Group;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.transaction.ArbitraryTransaction;
|
||||
import org.qortal.transaction.Transaction;
|
||||
import org.qortal.transform.Transformer;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Random;
|
||||
|
||||
public class ArbitraryDataTransactionBuilder {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataTransactionBuilder.class);
|
||||
|
||||
// Min transaction version required
|
||||
private static final int MIN_TRANSACTION_VERSION = 5;
|
||||
|
||||
// Maximum number of PATCH layers allowed
|
||||
private static final int MAX_LAYERS = 10;
|
||||
// Maximum size difference (out of 1) allowed for PATCH transactions
|
||||
private static final double MAX_SIZE_DIFF = 0.2f;
|
||||
// Maximum proportion of files modified relative to total
|
||||
private static final double MAX_FILE_DIFF = 0.5f;
|
||||
|
||||
private final String publicKey58;
|
||||
private final Path path;
|
||||
private final String name;
|
||||
private Method method;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
private final Repository repository;
|
||||
|
||||
// Metadata
|
||||
private final String title;
|
||||
private final String description;
|
||||
private final List<String> tags;
|
||||
private final Category category;
|
||||
|
||||
private int chunkSize = ArbitraryDataFile.CHUNK_SIZE;
|
||||
|
||||
private ArbitraryTransactionData arbitraryTransactionData;
|
||||
private ArbitraryDataFile arbitraryDataFile;
|
||||
|
||||
public ArbitraryDataTransactionBuilder(Repository repository, String publicKey58, Path path, String name,
|
||||
Method method, Service service, String identifier,
|
||||
String title, String description, List<String> tags, Category category) {
|
||||
this.repository = repository;
|
||||
this.publicKey58 = publicKey58;
|
||||
this.path = path;
|
||||
this.name = name;
|
||||
this.method = method;
|
||||
this.service = service;
|
||||
|
||||
// If identifier is a blank string, or reserved keyword "default", treat it as null
|
||||
if (identifier == null || identifier.equals("") || identifier.equals("default")) {
|
||||
identifier = null;
|
||||
}
|
||||
this.identifier = identifier;
|
||||
|
||||
// Metadata (optional)
|
||||
this.title = ArbitraryDataTransactionMetadata.limitTitle(title);
|
||||
this.description = ArbitraryDataTransactionMetadata.limitDescription(description);
|
||||
this.tags = ArbitraryDataTransactionMetadata.limitTags(tags);
|
||||
this.category = category;
|
||||
}
|
||||
|
||||
public void build() throws DataException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.checkMethod();
|
||||
this.createTransaction();
|
||||
}
|
||||
finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
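
// Hypothetical end-to-end sketch (caller code assumed, not part of this class): build transaction
// data for a local path, compute its proof-of-work nonce, then read the result. Service.WEBSITE
// and the null method/identifier/metadata values are illustrative; metadata is optional here.
private static ArbitraryTransactionData exampleBuild(Repository repository, String publicKey58,
		Path path, String name) throws DataException {
	ArbitraryDataTransactionBuilder builder = new ArbitraryDataTransactionBuilder(
			repository, publicKey58, path, name, null, Service.WEBSITE, null,
			null, null, null, null);
	builder.build();        // chooses PUT or PATCH automatically when method is null
	builder.computeNonce(); // required before the transaction will validate
	return builder.getArbitraryTransactionData();
}
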
private void preExecute() {
|
||||
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
|
||||
}
|
||||
|
||||
private void checkMethod() throws DataException {
|
||||
if (this.method == null) {
|
||||
// We need to automatically determine the method
|
||||
this.method = this.determineMethodAutomatically();
|
||||
}
|
||||
}
|
||||
|
||||
private Method determineMethodAutomatically() throws DataException {
|
||||
ArbitraryDataReader reader = new ArbitraryDataReader(this.name, ResourceIdType.NAME, this.service, this.identifier);
|
||||
try {
|
||||
reader.loadSynchronously(true);
|
||||
} catch (Exception e) {
|
||||
// Catch all exceptions if the existing resource cannot be loaded the first time
|
||||
// In these cases it's simplest to just use a PUT transaction
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Get existing metadata and see if it matches the new metadata
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(this.name, ResourceIdType.NAME, this.service, this.identifier);
|
||||
ArbitraryDataTransactionMetadata existingMetadata = resource.getLatestTransactionMetadata();
|
||||
|
||||
try {
|
||||
// Check layer count
|
||||
int layerCount = reader.getLayerCount();
|
||||
if (layerCount >= MAX_LAYERS) {
|
||||
LOGGER.info("Reached maximum layer count ({} / {}) - using PUT", layerCount, MAX_LAYERS);
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Check size of differences between this layer and previous layer
|
||||
ArbitraryDataCreatePatch patch = new ArbitraryDataCreatePatch(reader.getFilePath(), this.path, reader.getLatestSignature());
|
||||
try {
|
||||
patch.create();
|
||||
}
|
||||
catch (DataException | IOException e) {
|
||||
// Handle matching states separately, as it's best to block transactions with duplicate states
|
||||
if (e.getMessage().equals("Current state matches previous state. Nothing to do.")) {
|
||||
// Only throw an exception if the metadata is also identical, as well as the data
|
||||
if (this.isMetadataEqual(existingMetadata)) {
|
||||
throw new DataException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
LOGGER.info("Caught exception when creating patch: {}", e.getMessage());
|
||||
LOGGER.info("Unable to load existing resource - using PUT to overwrite it.");
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
long diffSize = FilesystemUtils.getDirectorySize(patch.getFinalPath());
|
||||
long existingStateSize = FilesystemUtils.getDirectorySize(reader.getFilePath());
|
||||
double difference = (double) diffSize / (double) existingStateSize;
|
||||
if (difference > MAX_SIZE_DIFF) {
|
||||
LOGGER.info("Reached maximum difference ({} / {}) - using PUT", difference, MAX_SIZE_DIFF);
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Check number of modified files
|
||||
ArbitraryDataMetadataPatch metadata = patch.getMetadata();
|
||||
int totalFileCount = patch.getTotalFileCount();
|
||||
int differencesCount = metadata.getFileDifferencesCount();
|
||||
difference = (double) differencesCount / (double) totalFileCount;
|
||||
if (difference > MAX_FILE_DIFF) {
|
||||
LOGGER.info("Reached maximum file differences ({} / {}) - using PUT", difference, MAX_FILE_DIFF);
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Check the patch types
|
||||
// Limit this check to single file resources only for now
|
||||
boolean atLeastOnePatch = false;
|
||||
if (totalFileCount == 1) {
|
||||
for (ModifiedPath path : metadata.getModifiedPaths()) {
|
||||
if (path.getDiffType() != DiffType.COMPLETE_FILE) {
|
||||
atLeastOnePatch = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!atLeastOnePatch) {
|
||||
LOGGER.info("Patch consists of complete files only - using PUT");
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// State is appropriate for a PATCH transaction
|
||||
return Method.PATCH;
|
||||
}
|
||||
catch (IOException e) {
|
||||
// IMPORTANT: Don't catch DataException here, as these must be passed to the caller
|
||||
LOGGER.info("Caught exception: {}", e.getMessage());
|
||||
LOGGER.info("Unable to load existing resource - using PUT to overwrite it.");
|
||||
return Method.PUT;
|
||||
}
|
||||
}
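
// Illustrative helper (not part of the original class): the size-difference rule used above, as a
// standalone check. For example, a 30 kB patch against a 100 kB existing state gives a ratio of
// 0.3, which exceeds MAX_SIZE_DIFF (0.2), so the automatic method selection falls back to PUT.
private static boolean exampleExceedsSizeDiff(long diffSize, long existingStateSize) {
	double difference = (double) diffSize / (double) existingStateSize;
	return difference > MAX_SIZE_DIFF;
}
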
private void createTransaction() throws DataException {
|
||||
arbitraryDataFile = null;
|
||||
try {
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
throw new DataException("NTP time not synced yet");
|
||||
}
|
||||
|
||||
// Ensure that this chain supports transactions necessary for complex arbitrary data
|
||||
int transactionVersion = Transaction.getVersionByTimestamp(now);
|
||||
if (transactionVersion < MIN_TRANSACTION_VERSION) {
|
||||
throw new DataException("Transaction version unsupported on this blockchain.");
|
||||
}
|
||||
|
||||
if (publicKey58 == null || path == null) {
|
||||
throw new DataException("Missing public key or path");
|
||||
}
|
||||
byte[] creatorPublicKey = Base58.decode(publicKey58);
|
||||
final String creatorAddress = Crypto.toAddress(creatorPublicKey);
|
||||
byte[] lastReference = repository.getAccountRepository().getLastReference(creatorAddress);
|
||||
if (lastReference == null) {
|
||||
// Use a random last reference on the very first transaction for an account
|
||||
// Code copied from CrossChainResource.buildAtMessage()
|
||||
// We already require PoW on all arbitrary transactions, so no additional logic is needed
|
||||
Random random = new Random();
|
||||
lastReference = new byte[Transformer.SIGNATURE_LENGTH];
|
||||
random.nextBytes(lastReference);
|
||||
}
|
||||
|
||||
Compression compression = Compression.ZIP;
|
||||
|
||||
// FUTURE? Use zip compression for directories, or no compression for single files
|
||||
// Compression compression = (path.toFile().isDirectory()) ? Compression.ZIP : Compression.NONE;
|
||||
|
||||
ArbitraryDataWriter arbitraryDataWriter = new ArbitraryDataWriter(path, name, service, identifier, method,
|
||||
compression, title, description, tags, category);
|
||||
try {
|
||||
arbitraryDataWriter.setChunkSize(this.chunkSize);
|
||||
arbitraryDataWriter.save();
|
||||
} catch (IOException | DataException | InterruptedException | RuntimeException | MissingDataException e) {
|
||||
LOGGER.info("Unable to create arbitrary data file: {}", e.getMessage());
|
||||
throw new DataException(e.getMessage());
|
||||
}
|
||||
|
||||
// Get main file
|
||||
arbitraryDataFile = arbitraryDataWriter.getArbitraryDataFile();
|
||||
if (arbitraryDataFile == null) {
|
||||
throw new DataException("Arbitrary data file is null");
|
||||
}
|
||||
|
||||
// Get chunks metadata file
|
||||
ArbitraryDataFile metadataFile = arbitraryDataFile.getMetadataFile();
|
||||
if (metadataFile == null && arbitraryDataFile.chunkCount() > 1) {
|
||||
throw new DataException(String.format("Chunks metadata data file is null but there are %d chunks", arbitraryDataFile.chunkCount()));
|
||||
}
|
||||
|
||||
String digest58 = arbitraryDataFile.digest58();
|
||||
if (digest58 == null) {
|
||||
LOGGER.error("Unable to calculate file digest");
|
||||
throw new DataException("Unable to calculate file digest");
|
||||
}
|
||||
|
||||
final BaseTransactionData baseTransactionData = new BaseTransactionData(now, Group.NO_GROUP,
|
||||
lastReference, creatorPublicKey, 0L, null);
|
||||
final int size = (int) arbitraryDataFile.size();
|
||||
final int version = 5;
|
||||
final int nonce = 0;
|
||||
byte[] secret = arbitraryDataFile.getSecret();
|
||||
final ArbitraryTransactionData.DataType dataType = ArbitraryTransactionData.DataType.DATA_HASH;
|
||||
final byte[] digest = arbitraryDataFile.digest();
|
||||
final byte[] metadataHash = (metadataFile != null) ? metadataFile.getHash() : null;
|
||||
final List<PaymentData> payments = new ArrayList<>();
|
||||
|
||||
ArbitraryTransactionData transactionData = new ArbitraryTransactionData(baseTransactionData,
|
||||
version, service, nonce, size, name, identifier, method,
|
||||
secret, compression, digest, dataType, metadataHash, payments);
|
||||
|
||||
this.arbitraryTransactionData = transactionData;
|
||||
|
||||
} catch (DataException e) {
|
||||
if (arbitraryDataFile != null) {
|
||||
arbitraryDataFile.deleteAll();
|
||||
}
|
||||
throw(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private boolean isMetadataEqual(ArbitraryDataTransactionMetadata existingMetadata) {
|
||||
if (!Objects.equals(existingMetadata.getTitle(), this.title)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(existingMetadata.getDescription(), this.description)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(existingMetadata.getCategory(), this.category)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(existingMetadata.getTags(), this.tags)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public void computeNonce() throws DataException {
|
||||
if (this.arbitraryTransactionData == null) {
|
||||
throw new DataException("Arbitrary transaction data is required to compute nonce");
|
||||
}
|
||||
|
||||
ArbitraryTransaction transaction = (ArbitraryTransaction) Transaction.fromData(repository, this.arbitraryTransactionData);
|
||||
LOGGER.info("Computing nonce...");
|
||||
transaction.computeNonce();
|
||||
|
||||
Transaction.ValidationResult result = transaction.isValidUnconfirmed();
|
||||
if (result != Transaction.ValidationResult.OK) {
|
||||
arbitraryDataFile.deleteAll();
|
||||
throw new DataException(String.format("Arbitrary transaction invalid: %s", result));
|
||||
}
|
||||
LOGGER.info("Transaction is valid");
|
||||
}
|
||||
|
||||
public ArbitraryTransactionData getArbitraryTransactionData() {
|
||||
return this.arbitraryTransactionData;
|
||||
}
|
||||
|
||||
public ArbitraryDataFile getArbitraryDataFile() {
|
||||
return this.arbitraryDataFile;
|
||||
}
|
||||
|
||||
public void setChunkSize(int chunkSize) {
|
||||
this.chunkSize = chunkSize;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,388 +0,0 @@
|
package org.qortal.arbitrary;

import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.arbitrary.exception.MissingDataException;
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
import org.qortal.arbitrary.misc.Category;
import org.qortal.arbitrary.misc.Service;
import org.qortal.crypto.Crypto;
import org.qortal.data.transaction.ArbitraryTransactionData.*;
import org.qortal.crypto.AES;
import org.qortal.repository.DataException;
import org.qortal.arbitrary.ArbitraryDataFile.*;
import org.qortal.settings.Settings;
import org.qortal.utils.Base58;
import org.qortal.utils.FilesystemUtils;
import org.qortal.utils.ZipUtils;

import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;

public class ArbitraryDataWriter {

    private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataWriter.class);

    private Path filePath;
    private final String name;
    private final Service service;
    private final String identifier;
    private final Method method;
    private final Compression compression;

    // Metadata
    private final String title;
    private final String description;
    private final List<String> tags;
    private final Category category;

    private int chunkSize = ArbitraryDataFile.CHUNK_SIZE;

    private SecretKey aesKey;
    private ArbitraryDataFile arbitraryDataFile;

    // Intermediate paths to cleanup
    private Path workingPath;
    private Path compressedPath;
    private Path encryptedPath;

    public ArbitraryDataWriter(Path filePath, String name, Service service, String identifier, Method method, Compression compression,
                               String title, String description, List<String> tags, Category category) {
        this.filePath = filePath;
        this.name = name;
        this.service = service;
        this.method = method;
        this.compression = compression;

        // If identifier is a blank string, or reserved keyword "default", treat it as null
        if (identifier == null || identifier.equals("") || identifier.equals("default")) {
            identifier = null;
        }
        this.identifier = identifier;

        // Metadata (optional)
        this.title = ArbitraryDataTransactionMetadata.limitTitle(title);
        this.description = ArbitraryDataTransactionMetadata.limitDescription(description);
        this.tags = ArbitraryDataTransactionMetadata.limitTags(tags);
        this.category = category;
    }

    public void save() throws IOException, DataException, InterruptedException, MissingDataException {
        try {
            this.preExecute();
            this.validateService();
            this.process();
            this.compress();
            this.encrypt();
            this.split();
            this.createMetadataFile();
            this.validate();

        } finally {
            this.postExecute();
        }
    }

    private void preExecute() throws DataException {
        this.checkEnabled();

        // Enforce compression when uploading a directory
        File file = new File(this.filePath.toString());
        if (file.isDirectory() && compression == Compression.NONE) {
            throw new DataException("Unable to upload a directory without compression");
        }

        // Create temporary working directory
        this.createWorkingDirectory();
    }

    private void postExecute() throws IOException {
        this.cleanupFilesystem();
    }

    private void checkEnabled() throws DataException {
        if (!Settings.getInstance().isQdnEnabled()) {
            throw new DataException("QDN is disabled in settings");
        }
    }

    private void createWorkingDirectory() throws DataException {
        // Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
        String baseDir = Settings.getInstance().getTempDataPath();
        String identifier = Base58.encode(Crypto.digest(this.filePath.toString().getBytes()));
        Path tempDir = Paths.get(baseDir, "writer", identifier);
        try {
            Files.createDirectories(tempDir);
        } catch (IOException e) {
            throw new DataException("Unable to create temp directory");
        }
        this.workingPath = tempDir;
    }

    private void validateService() throws IOException, DataException {
        if (this.service.isValidationRequired()) {
            Service.ValidationResult result = this.service.validate(this.filePath);
            if (result != Service.ValidationResult.OK) {
                throw new DataException(String.format("Validation of %s failed: %s", this.service, result.toString()));
            }
        }
    }

    private void process() throws DataException, IOException, MissingDataException {
        switch (this.method) {

            case PUT:
                // Nothing to do
                break;

            case PATCH:
                this.processPatch();
                break;

            default:
                throw new DataException(String.format("Unknown method specified: %s", method.toString()));
        }
    }

    private void processPatch() throws DataException, IOException, MissingDataException {

        // Build the existing state using past transactions
        ArbitraryDataBuilder builder = new ArbitraryDataBuilder(this.name, this.service, this.identifier);
        builder.build();
        Path builtPath = builder.getFinalPath();

        // Obtain the latest signature, so this can be included in the patch
        byte[] latestSignature = builder.getLatestSignature();

        // Compute a diff of the latest changes on top of the previous state
        // Then use only the differences as our data payload
        ArbitraryDataCreatePatch patch = new ArbitraryDataCreatePatch(builtPath, this.filePath, latestSignature);
        patch.create();
        this.filePath = patch.getFinalPath();

        // Delete the input directory
        if (FilesystemUtils.pathInsideDataOrTempPath(builtPath)) {
            File directory = new File(builtPath.toString());
            FileUtils.deleteDirectory(directory);
        }

        // Validate the patch
        this.validatePatch();
    }

    private void validatePatch() throws DataException {
        if (this.filePath == null) {
            throw new DataException("Null path after creating patch");
        }

        File qortalMetadataDirectoryFile = Paths.get(this.filePath.toString(), ".qortal").toFile();
        if (!qortalMetadataDirectoryFile.exists()) {
            throw new DataException("Qortal metadata folder doesn't exist in patch");
        }
        if (!qortalMetadataDirectoryFile.isDirectory()) {
            throw new DataException("Qortal metadata folder isn't a directory");
        }

        File qortalPatchMetadataFile = Paths.get(this.filePath.toString(), ".qortal", "patch").toFile();
        if (!qortalPatchMetadataFile.exists()) {
            throw new DataException("Qortal patch metadata file doesn't exist in patch");
        }
        if (!qortalPatchMetadataFile.isFile()) {
            throw new DataException("Qortal patch metadata file isn't a file");
        }
    }

    private void compress() throws InterruptedException, DataException {
        // Compress the data if requested
        if (this.compression != Compression.NONE) {
            this.compressedPath = Paths.get(this.workingPath.toString(), "data.zip");
            try {

                if (this.compression == Compression.ZIP) {
                    LOGGER.info("Compressing...");
                    String enclosingFolderName = "data";
                    ZipUtils.zip(this.filePath.toString(), this.compressedPath.toString(), enclosingFolderName);
                }
                else {
                    throw new DataException(String.format("Unknown compression type specified: %s", compression.toString()));
                }
                // FUTURE: other compression types

                // Delete the input directory
                if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
                    File directory = new File(this.filePath.toString());
                    FileUtils.deleteDirectory(directory);
                }
                // Replace filePath pointer with the zipped file path
                this.filePath = this.compressedPath;

            } catch (IOException | DataException e) {
                throw new DataException("Unable to zip directory", e);
            }
        }
    }

    private void encrypt() throws DataException {
        this.encryptedPath = Paths.get(this.workingPath.toString(), "data.zip.encrypted");
        try {
            // Encrypt the file with AES
            LOGGER.info("Encrypting...");
            this.aesKey = AES.generateKey(256);
            AES.encryptFile("AES/CBC/PKCS5Padding", this.aesKey, this.filePath.toString(), this.encryptedPath.toString());

            // Delete the input file
            if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
                Files.delete(this.filePath);
            }
            // Replace filePath pointer with the encrypted file path
            this.filePath = this.encryptedPath;

        } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException | NoSuchPaddingException
                | BadPaddingException | IllegalBlockSizeException | IOException | InvalidKeyException e) {
            throw new DataException(String.format("Unable to encrypt file %s: %s", this.filePath, e.getMessage()));
        }
    }

    private void split() throws IOException, DataException {
        // We don't have a signature yet, so use null to put the file in a generic folder
        this.arbitraryDataFile = ArbitraryDataFile.fromPath(this.filePath, null);
        if (this.arbitraryDataFile == null) {
            throw new IOException("No file available when trying to split");
        }

        int chunkCount = this.arbitraryDataFile.split(this.chunkSize);
        if (chunkCount > 0) {
            LOGGER.info(String.format("Successfully split into %d chunk%s", chunkCount, (chunkCount == 1 ? "" : "s")));
        }
        else {
            throw new DataException("Unable to split file into chunks");
        }
    }

    private void createMetadataFile() throws IOException, DataException {
        // If we have at least one chunk, we need to create an index file containing their hashes
        if (this.needsMetadataFile()) {
            // Create the JSON file
            Path chunkFilePath = Paths.get(this.workingPath.toString(), "metadata.json");
            ArbitraryDataTransactionMetadata metadata = new ArbitraryDataTransactionMetadata(chunkFilePath);
            metadata.setTitle(this.title);
            metadata.setDescription(this.description);
            metadata.setTags(this.tags);
            metadata.setCategory(this.category);
            metadata.setChunks(this.arbitraryDataFile.chunkHashList());
            metadata.write();

            // Create an ArbitraryDataFile from the JSON file (we don't have a signature yet)
            ArbitraryDataFile metadataFile = ArbitraryDataFile.fromPath(chunkFilePath, null);
            this.arbitraryDataFile.setMetadataFile(metadataFile);
        }
    }

    private void validate() throws IOException, DataException {
        if (this.arbitraryDataFile == null) {
            throw new DataException("No file available when validating");
        }
        this.arbitraryDataFile.setSecret(this.aesKey.getEncoded());

        // Validate the file
        ValidationResult validationResult = this.arbitraryDataFile.isValid();
        if (validationResult != ValidationResult.OK) {
            throw new DataException(String.format("File %s failed validation: %s", this.arbitraryDataFile, validationResult));
        }
        LOGGER.info("Whole file hash is valid: {}", this.arbitraryDataFile.digest58());

        // Validate each chunk
        for (ArbitraryDataFileChunk chunk : this.arbitraryDataFile.getChunks()) {
            validationResult = chunk.isValid();
            if (validationResult != ValidationResult.OK) {
                throw new DataException(String.format("Chunk %s failed validation: %s", chunk, validationResult));
            }
        }
        LOGGER.info("Chunk hashes are valid");

        // Validate chunks metadata file
        if (this.arbitraryDataFile.chunkCount() > 1) {
            ArbitraryDataFile metadataFile = this.arbitraryDataFile.getMetadataFile();
            if (metadataFile == null || !metadataFile.exists()) {
                throw new DataException("No metadata file available, but there are multiple chunks");
            }
            // Read the file
            ArbitraryDataTransactionMetadata metadata = new ArbitraryDataTransactionMetadata(metadataFile.getFilePath());
            metadata.read();
            // Check all chunks exist
            for (byte[] chunk : this.arbitraryDataFile.chunkHashList()) {
                if (!metadata.containsChunk(chunk)) {
                    throw new DataException(String.format("Missing chunk %s in metadata file", Base58.encode(chunk)));
                }
            }

            // Check that the metadata is correct
            if (!Objects.equals(metadata.getTitle(), this.title)) {
                throw new DataException("Metadata mismatch: title");
            }
            if (!Objects.equals(metadata.getDescription(), this.description)) {
                throw new DataException("Metadata mismatch: description");
            }
            if (!Objects.equals(metadata.getTags(), this.tags)) {
                throw new DataException("Metadata mismatch: tags");
            }
            if (!Objects.equals(metadata.getCategory(), this.category)) {
                throw new DataException("Metadata mismatch: category");
            }
        }
    }

    private void cleanupFilesystem() throws IOException {
        // Clean up
        if (FilesystemUtils.pathInsideDataOrTempPath(this.compressedPath)) {
            File zippedFile = new File(this.compressedPath.toString());
            if (zippedFile.exists()) {
                zippedFile.delete();
            }
        }
        if (FilesystemUtils.pathInsideDataOrTempPath(this.encryptedPath)) {
            File encryptedFile = new File(this.encryptedPath.toString());
            if (encryptedFile.exists()) {
                encryptedFile.delete();
            }
        }
        if (FilesystemUtils.pathInsideDataOrTempPath(this.workingPath)) {
            FileUtils.deleteDirectory(new File(this.workingPath.toString()));
        }
    }

    private boolean needsMetadataFile() {
        if (this.arbitraryDataFile.chunkCount() > 1) {
            return true;
        }
        if (this.title != null || this.description != null || this.tags != null || this.category != null) {
            return true;
        }
        return false;
    }


    public ArbitraryDataFile getArbitraryDataFile() {
        return this.arbitraryDataFile;
    }

    public void setChunkSize(int chunkSize) {
        this.chunkSize = chunkSize;
    }

}
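
For reference, a minimal usage sketch of the writer class above (illustrative only, not part of this diff). It assumes the Qortal classes shown in this compare are on the classpath and that Service exposes a constant such as WEBSITE; the paths and metadata values are placeholders.

// Illustrative sketch only — not part of the diff; placeholder paths and values throughout.
import org.qortal.arbitrary.ArbitraryDataFile;
import org.qortal.arbitrary.ArbitraryDataWriter;
import org.qortal.arbitrary.misc.Service;
import org.qortal.data.transaction.ArbitraryTransactionData.Compression;
import org.qortal.data.transaction.ArbitraryTransactionData.Method;

import java.nio.file.Path;
import java.nio.file.Paths;

public class ArbitraryDataWriterExample {

    public static void main(String[] args) throws Exception {
        Path inputPath = Paths.get("/tmp/my-website"); // placeholder directory to publish

        // PUT publishes the whole directory; PATCH would publish only a diff against the previous state
        ArbitraryDataWriter writer = new ArbitraryDataWriter(
                inputPath, "MyRegisteredName", Service.WEBSITE, null,
                Method.PUT, Compression.ZIP,
                "My title", "My description", null, null);

        // Runs the full pipeline: validate service, compress, encrypt, split into chunks,
        // build the metadata file, then validate hashes
        writer.save();

        ArbitraryDataFile dataFile = writer.getArbitraryDataFile();
        System.out.println("Whole-file hash: " + dataFile.digest58());
    }
}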
@@ -1,20 +0,0 @@
package org.qortal.arbitrary.exception;

public class MissingDataException extends Exception {

    public MissingDataException() {
    }

    public MissingDataException(String message) {
        super(message);
    }

    public MissingDataException(String message, Throwable cause) {
        super(message, cause);
    }

    public MissingDataException(Throwable cause) {
        super(cause);
    }

}
@@ -1,85 +0,0 @@
package org.qortal.arbitrary.metadata;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.repository.DataException;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

/**
 * ArbitraryDataMetadata
 *
 * This is a base class to handle reading and writing JSON to the supplied filePath.
 *
 * It is not usable on its own; it must be subclassed, with two methods overridden:
 *
 * readJson() - code to unserialize the JSON file
 * buildJson() - code to serialize the JSON file
 *
 */
public class ArbitraryDataMetadata {

    protected static final Logger LOGGER = LogManager.getLogger(ArbitraryDataMetadata.class);

    protected Path filePath;

    protected String jsonString;

    public ArbitraryDataMetadata(Path filePath) {
        this.filePath = filePath;
    }

    protected void readJson() throws DataException {
        // To be overridden
    }

    protected void buildJson() {
        // To be overridden
    }


    public void read() throws IOException, DataException {
        this.loadJson();
        this.readJson();
    }

    public void write() throws IOException, DataException {
        this.buildJson();
        this.createParentDirectories();

        BufferedWriter writer = new BufferedWriter(new FileWriter(this.filePath.toString()));
        writer.write(this.jsonString);
        writer.newLine();
        writer.close();
    }


    protected void loadJson() throws IOException {
        File metadataFile = new File(this.filePath.toString());
        if (!metadataFile.exists()) {
            throw new IOException(String.format("Metadata file doesn't exist: %s", this.filePath.toString()));
        }

        this.jsonString = new String(Files.readAllBytes(this.filePath));
    }


    protected void createParentDirectories() throws DataException {
        try {
            Files.createDirectories(this.filePath.getParent());
        } catch (IOException e) {
            throw new DataException("Unable to create parent directories");
        }
    }


    public String getJsonString() {
        return this.jsonString;
    }

}
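
As the Javadoc above describes, this base class only becomes useful once readJson() and buildJson() are overridden. A minimal, hypothetical subclass might look like the sketch below; the class name and the "note" field are invented for illustration, and are not part of this diff. (The cache and patch metadata classes that follow extend ArbitraryDataQortalMetadata, a related class not shown in this compare, but they follow the same override pattern.)

// Hypothetical subclass for illustration only — not part of the diff.
import org.json.JSONObject;
import org.qortal.arbitrary.metadata.ArbitraryDataMetadata;
import org.qortal.repository.DataException;

import java.nio.file.Path;

public class ExampleMetadata extends ArbitraryDataMetadata {

    private String note;

    public ExampleMetadata(Path filePath) {
        super(filePath);
    }

    @Override
    protected void readJson() throws DataException {
        if (this.jsonString == null) {
            throw new DataException("Example JSON string is null");
        }
        // Unserialize the JSON loaded by loadJson()
        JSONObject json = new JSONObject(this.jsonString);
        this.note = json.optString("note", null);
    }

    @Override
    protected void buildJson() {
        // Serialize this object's state so write() can persist it
        JSONObject json = new JSONObject();
        json.put("note", this.note);
        this.jsonString = json.toString(2);
    }

    public void setNote(String note) {
        this.note = note;
    }

    public String getNote() {
        return this.note;
    }
}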
@@ -1,69 +0,0 @@
package org.qortal.arbitrary.metadata;

import org.json.JSONObject;
import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.nio.file.Path;

public class ArbitraryDataMetadataCache extends ArbitraryDataQortalMetadata {

    private byte[] signature;
    private long timestamp;

    public ArbitraryDataMetadataCache(Path filePath) {
        super(filePath);

    }

    @Override
    protected String fileName() {
        return "cache";
    }

    @Override
    protected void readJson() throws DataException {
        if (this.jsonString == null) {
            throw new DataException("Patch JSON string is null");
        }

        JSONObject cache = new JSONObject(this.jsonString);
        if (cache.has("signature")) {
            String sig = cache.getString("signature");
            if (sig != null) {
                this.signature = Base58.decode(sig);
            }
        }
        if (cache.has("timestamp")) {
            this.timestamp = cache.getLong("timestamp");
        }
    }

    @Override
    protected void buildJson() {
        JSONObject patch = new JSONObject();
        patch.put("signature", Base58.encode(this.signature));
        patch.put("timestamp", this.timestamp);

        this.jsonString = patch.toString(2);
        LOGGER.trace("Cache metadata: {}", this.jsonString);
    }


    public void setSignature(byte[] signature) {
        this.signature = signature;
    }

    public byte[] getSignature() {
        return this.signature;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }

    public long getTimestamp() {
        return this.timestamp;
    }

}
@@ -1,182 +0,0 @@
package org.qortal.arbitrary.metadata;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONObject;
import org.qortal.arbitrary.ArbitraryDataDiff.*;
import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.lang.reflect.Field;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class ArbitraryDataMetadataPatch extends ArbitraryDataQortalMetadata {

    private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataMetadataPatch.class);

    private List<Path> addedPaths;
    private List<ModifiedPath> modifiedPaths;
    private List<Path> removedPaths;
    private byte[] previousSignature;
    private byte[] previousHash;
    private byte[] currentHash;

    public ArbitraryDataMetadataPatch(Path filePath) {
        super(filePath);

        this.addedPaths = new ArrayList<>();
        this.modifiedPaths = new ArrayList<>();
        this.removedPaths = new ArrayList<>();
    }

    @Override
    protected String fileName() {
        return "patch";
    }

    @Override
    protected void readJson() throws DataException {
        if (this.jsonString == null) {
            throw new DataException("Patch JSON string is null");
        }

        JSONObject patch = new JSONObject(this.jsonString);
        if (patch.has("prevSig")) {
            String prevSig = patch.getString("prevSig");
            if (prevSig != null) {
                this.previousSignature = Base58.decode(prevSig);
            }
        }
        if (patch.has("prevHash")) {
            String prevHash = patch.getString("prevHash");
            if (prevHash != null) {
                this.previousHash = Base58.decode(prevHash);
            }
        }
        if (patch.has("curHash")) {
            String curHash = patch.getString("curHash");
            if (curHash != null) {
                this.currentHash = Base58.decode(curHash);
            }
        }
        if (patch.has("added")) {
            JSONArray added = (JSONArray) patch.get("added");
            if (added != null) {
                for (int i=0; i<added.length(); i++) {
                    String pathString = added.getString(i);
                    this.addedPaths.add(Paths.get(pathString));
                }
            }
        }
        if (patch.has("modified")) {
            JSONArray modified = (JSONArray) patch.get("modified");
            if (modified != null) {
                for (int i=0; i<modified.length(); i++) {
                    JSONObject jsonObject = modified.getJSONObject(i);
                    ModifiedPath modifiedPath = new ModifiedPath(jsonObject);
                    this.modifiedPaths.add(modifiedPath);
                }
            }
        }
        if (patch.has("removed")) {
            JSONArray removed = (JSONArray) patch.get("removed");
            if (removed != null) {
                for (int i=0; i<removed.length(); i++) {
                    String pathString = removed.getString(i);
                    this.removedPaths.add(Paths.get(pathString));
                }
            }
        }
    }

    @Override
    protected void buildJson() {
        JSONObject patch = new JSONObject();
        // Attempt to use a LinkedHashMap so that the order of fields is maintained
        try {
            Field changeMap = patch.getClass().getDeclaredField("map");
            changeMap.setAccessible(true);
            changeMap.set(patch, new LinkedHashMap<>());
            changeMap.setAccessible(false);
        } catch (IllegalAccessException | NoSuchFieldException e) {
            // Don't worry about failures as this is for optional ordering only
        }

        patch.put("prevSig", Base58.encode(this.previousSignature));
        patch.put("prevHash", Base58.encode(this.previousHash));
        patch.put("curHash", Base58.encode(this.currentHash));
        patch.put("added", new JSONArray(this.addedPaths));
        patch.put("removed", new JSONArray(this.removedPaths));

        JSONArray modifiedPaths = new JSONArray();
        for (ModifiedPath modifiedPath : this.modifiedPaths) {
            JSONObject modifiedPathJson = new JSONObject();
            modifiedPathJson.put("path", modifiedPath.getPath());
            modifiedPathJson.put("type", modifiedPath.getDiffType());
            modifiedPaths.put(modifiedPathJson);
        }
        patch.put("modified", modifiedPaths);

        this.jsonString = patch.toString(2);
        LOGGER.debug("Patch metadata: {}", this.jsonString);
    }

    public void setAddedPaths(List<Path> addedPaths) {
        this.addedPaths = addedPaths;
    }

    public List<Path> getAddedPaths() {
        return this.addedPaths;
    }

    public void setModifiedPaths(List<ModifiedPath> modifiedPaths) {
        this.modifiedPaths = modifiedPaths;
    }

    public List<ModifiedPath> getModifiedPaths() {
        return this.modifiedPaths;
    }

    public void setRemovedPaths(List<Path> removedPaths) {
        this.removedPaths = removedPaths;
    }

    public List<Path> getRemovedPaths() {
        return this.removedPaths;
    }

    public void setPreviousSignature(byte[] previousSignature) {
        this.previousSignature = previousSignature;
    }

    public byte[] getPreviousSignature() {
        return this.previousSignature;
    }

    public void setPreviousHash(byte[] previousHash) {
        this.previousHash = previousHash;
    }

    public byte[] getPreviousHash() {
        return this.previousHash;
    }

    public void setCurrentHash(byte[] currentHash) {
        this.currentHash = currentHash;
    }

    public byte[] getCurrentHash() {
        return this.currentHash;
    }


    public int getFileDifferencesCount() {
        return this.addedPaths.size() + this.modifiedPaths.size() + this.removedPaths.size();
    }

}
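
An illustrative round-trip with the patch metadata class above (not part of this diff). It assumes that the parent class ArbitraryDataQortalMetadata, which is not shown in this compare, exposes read() and write() in the same style as ArbitraryDataMetadata above and resolves the .qortal/patch location from the supplied path; all paths, hashes and signatures are placeholders.

// Illustrative sketch only — relies on the unshown ArbitraryDataQortalMetadata parent
// behaving like ArbitraryDataMetadata above; placeholder paths, hashes and signatures.
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;

public class PatchMetadataExample {

    public static void main(String[] args) throws Exception {
        Path patchDirectory = Paths.get("/tmp/patch-example"); // placeholder

        ArbitraryDataMetadataPatch patch = new ArbitraryDataMetadataPatch(patchDirectory);
        patch.setPreviousSignature(new byte[64]); // placeholder signature
        patch.setPreviousHash(new byte[32]);      // placeholder hashes
        patch.setCurrentHash(new byte[32]);
        patch.setAddedPaths(Collections.singletonList(Paths.get("index.html")));
        patch.write(); // serializes via buildJson()

        ArbitraryDataMetadataPatch reread = new ArbitraryDataMetadataPatch(patchDirectory);
        reread.read(); // loadJson() followed by readJson()
        System.out.println("File differences: " + reread.getFileDifferencesCount());
    }
}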
Some files were not shown because too many files have changed in this diff.