forked from Qortal/qortal
Compare commits
1481 Commits
synchroniz...lite-node
(Commit list: 1,481 commits between `synchroniz...` and `lite-node`, shown in this view only as abbreviated SHA1 hashes, from 3409086978 through 3253d9d3fb; the author, date, and commit-message columns are empty, so the full hash list is not repeated here.)
33  .github/workflows/pr-testing.yml  vendored  Normal file
@@ -0,0 +1,33 @@
name: PR testing

on:
  pull_request:
    branches: [ master ]

jobs:
  mavenTesting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-
      - name: Set up the Java JDK
        uses: actions/setup-java@v2
        with:
          java-version: '11'
          distribution: 'adopt'

      - name: Run all tests
        run: |
          mvn -B clean test -DskipTests=false --file pom.xml
          if [ -f "target/site/jacoco/index.html" ]; then echo "Total coverage: $(cat target/site/jacoco/index.html | grep -o 'Total[^%]*%' | grep -o '[0-9]*%')"; fi

      - name: Log coverage percentage
        run: |
          if [ ! -f "target/site/jacoco/index.html" ]; then echo "No coverage information available"; fi
          if [ -f "target/site/jacoco/index.html" ]; then echo "Total coverage: $(cat target/site/jacoco/index.html | grep -o 'Total[^%]*%' | grep -o '[0-9]*%')"; fi
18  .gitignore  vendored
@@ -1,6 +1,8 @@
/db*
/lists/
/bin/
/target/
/qortal-backup/
/log.txt.*
/arbitrary*
/Qortal-BTC*
@@ -14,8 +16,18 @@
/settings.json
/testnet*
/settings*.json
/testchain.json
/run-testnet.sh
/testchain*.json
/run-testnet*.sh
/.idea
/qortal.iml
*.DS_Store
.DS_Store
/src/main/resources/resources
/*.jar
/run.pid
/run.log
/WindowsInstaller/Install Files/qortal.jar
/*.7z
/tmp
/data*
/src/test/resources/arbitrary/*/.qortal/cache
apikey.txt
26  Dockerfile  Normal file
@@ -0,0 +1,26 @@
FROM maven:3-openjdk-11 as builder

WORKDIR /work
COPY ./ /work/
RUN mvn clean package

###
FROM openjdk:11

RUN useradd -r -u 1000 -g users qortal && \
    mkdir /usr/local/qortal /qortal && \
    chown 1000:100 /qortal

COPY --from=builder /work/log4j2.properties /usr/local/qortal/
COPY --from=builder /work/target/qortal*.jar /usr/local/qortal/qortal.jar

USER 1000:100

EXPOSE 12391 12392
HEALTHCHECK --start-period=5m CMD curl -sf http://127.0.0.1:12391/admin/info || exit 1

WORKDIR /qortal
VOLUME /qortal

ENTRYPOINT ["java"]
CMD ["-Djava.net.preferIPv4Stack=false", "-jar", "/usr/local/qortal/qortal.jar"]
30  TestNets.md
@@ -41,13 +41,39 @@
- Start up at least as many nodes as `minBlockchainPeers` (or adjust this value instead)
- Probably best to perform API call `DELETE /peers/known`
- Add other nodes via API call `POST /peers <peer-hostname-or-IP>`
- Add minting private key to node(s) via API call `POST /admin/mintingaccounts <minting-private-key>`
  This key must have a corresponding `REWARD_SHARE` transaction in the testnet genesis block
- Add minting private keys to nodes via API call `POST /admin/mintingaccounts <minting-private-key>`
  The keys must have corresponding `REWARD_SHARE` transactions in the testnet genesis block
- You must have at least 2 separate minting keys and two separate nodes. Assign one minting key to each node.
- Alternatively, comment out the `if (mintedLastBlock) { }` conditional in BlockMinter.java to allow for a single node and key.
- Wait for the genesis block timestamp to pass
- A node should mint block 2 approximately 60 seconds after the genesis block timestamp
- Other testnet nodes will sync *as long as there are at least `minBlockchainPeers` peers with an "up-to-date" chain*
- You can also use API call `POST /admin/forcesync <connected-peer-IP-and-port>` on stuck nodes

## Single-node testnet

A single-node testnet is possible with code modifications, for basic testing, or to more easily start a new testnet.
To do so, follow these steps:
- Comment out the `if (mintedLastBlock) { }` conditional in BlockMinter.java
- Comment out the `minBlockchainPeers` validation in Settings.validate()
- Set `minBlockchainPeers` to 0 in settings.json
- Set `Synchronizer.RECOVERY_MODE_TIMEOUT` to `0`
- All other steps should remain the same. Only a single reward-share key is needed.
- Remember to put these values back after introducing other nodes

## Fixed network

To restrict a testnet to a set of private nodes, you can use the "fixed network" feature.
This ensures that the testnet nodes only communicate with each other and not with other known peers.
To do this, add the following setting to each testnet node, substituting the IP addresses:
```
"fixedNetwork": [
  "192.168.0.101:62392",
  "192.168.0.102:62392",
  "192.168.0.103:62392"
]
```

## Dealing with stuck chain

Maybe your nodes have been offline and no-one has minted a recent testnet block.
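The testnet steps above are all driven through the node's HTTP API (`DELETE /peers/known`, `POST /peers`, `POST /admin/mintingaccounts`, `POST /admin/forcesync`). Below is a minimal Java 11 sketch of scripting those calls against a local node. It is an illustration, not part of the repository: the base URL assumes the default API port 12391 seen in the Dockerfile above, the example peer address is taken from the fixed-network snippet, plain-text request bodies are an assumption, and `<minting-private-key>` is left as the document's own placeholder.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

/** Hypothetical helper for scripting the testnet setup steps described above. */
public class TestnetSetupSketch {

    // Assumption: node API reachable locally on the default port exposed in the Dockerfile.
    private static final String BASE = "http://localhost:12391";
    private static final HttpClient CLIENT = HttpClient.newHttpClient();

    /** Sends one API call and returns the response body as a string. */
    static String call(String method, String path, String body) throws Exception {
        HttpRequest.BodyPublisher publisher = (body == null)
                ? HttpRequest.BodyPublishers.noBody()
                : HttpRequest.BodyPublishers.ofString(body);
        HttpRequest request = HttpRequest.newBuilder(URI.create(BASE + path))
                .method(method, publisher)
                .build();
        return CLIENT.send(request, HttpResponse.BodyHandlers.ofString()).body();
    }

    public static void main(String[] args) throws Exception {
        // Forget previously known peers, then add the other testnet nodes explicitly.
        call("DELETE", "/peers/known", null);
        call("POST", "/peers", "192.168.0.102:62392");

        // Register a minting key that has a REWARD_SHARE transaction in the testnet genesis block.
        call("POST", "/admin/mintingaccounts", "<minting-private-key>");
    }
}
```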
@@ -61,7 +61,7 @@ appender.rolling.type = RollingFile
appender.rolling.name = FILE
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
appender.rolling.filePattern = ${dirname:-}${filename}.%i
appender.rolling.filePattern = ./${filename}.%i
appender.rolling.policy.type = SizeBasedTriggeringPolicy
appender.rolling.policy.size = 4MB
# Set the immediate flush to true (default)
Binary file not shown.
File diff suppressed because it is too large
@@ -12,7 +12,7 @@ configured paths, or create a dummy `D:` drive with the expected layout.

Typical build procedure:

* Overwrite the `qortal.jar` file in `Install-Files\`
* Place the `qortal.jar` file in `Install-Files\`
* Open AdvancedInstaller with qortal.aip file
* If releasing a new version, change version number in:
  + "Product Information" side menu
BIN  WindowsInstaller/qortal.ico  Executable file → Normal file
Binary file not shown. (Before: 250 KiB, After: 42 KiB)
BIN  lib/com/dosse/WaifUPnP/1.1/WaifUPnP-1.1.jar  Normal file
Binary file not shown.
9  lib/com/dosse/WaifUPnP/1.1/WaifUPnP-1.1.pom  Normal file
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.dosse</groupId>
  <artifactId>WaifUPnP</artifactId>
  <version>1.1</version>
  <description>POM was created from install:install-file</description>
</project>
12  lib/com/dosse/WaifUPnP/maven-metadata-local.xml  Normal file
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata>
  <groupId>com.dosse</groupId>
  <artifactId>WaifUPnP</artifactId>
  <versioning>
    <release>1.1</release>
    <versions>
      <version>1.1</version>
    </versions>
    <lastUpdated>20220218200127</lastUpdated>
  </versioning>
</metadata>
@@ -61,7 +61,7 @@ appender.rolling.type = RollingFile
appender.rolling.name = FILE
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
appender.rolling.filePattern = ${dirname:-}${filename}.%i
appender.rolling.filePattern = ./${filename}.%i
appender.rolling.policy.type = SizeBasedTriggeringPolicy
appender.rolling.policy.size = 4MB
# Set the immediate flush to true (default)
73  pom.xml
@@ -3,28 +3,37 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>org.qortal</groupId>
  <artifactId>qortal</artifactId>
  <version>1.5.1</version>
  <version>3.2.5</version>
  <packaging>jar</packaging>
  <properties>
    <skipTests>true</skipTests>
    <altcoinj.version>bf9fb80</altcoinj.version>
    <bitcoinj.version>0.15.6</bitcoinj.version>
    <altcoinj.version>6628cfd</altcoinj.version>
    <bitcoinj.version>0.15.10</bitcoinj.version>
    <bouncycastle.version>1.64</bouncycastle.version>
    <build.timestamp>${maven.build.timestamp}</build.timestamp>
    <ciyam-at.version>1.3.8</ciyam-at.version>
    <commons-net.version>3.6</commons-net.version>
    <commons-text.version>1.8</commons-text.version>
    <commons-io.version>2.6</commons-io.version>
    <commons-compress.version>1.21</commons-compress.version>
    <commons-lang3.version>3.12.0</commons-lang3.version>
    <xz.version>1.9</xz.version>
    <dagger.version>1.2.2</dagger.version>
    <guava.version>28.1-jre</guava.version>
    <hsqldb.version>2.5.1</hsqldb.version>
    <homoglyph.version>1.2.1</homoglyph.version>
    <icu4j.version>70.1</icu4j.version>
    <upnp.version>1.1</upnp.version>
    <jersey.version>2.29.1</jersey.version>
    <jetty.version>9.4.29.v20200521</jetty.version>
    <log4j.version>2.12.1</log4j.version>
    <log4j.version>2.17.1</log4j.version>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <slf4j.version>1.7.12</slf4j.version>
    <swagger-api.version>2.0.9</swagger-api.version>
    <swagger-ui.version>3.23.8</swagger-ui.version>
    <package-info-maven-plugin.version>1.1.0</package-info-maven-plugin.version>
    <jsoup.version>1.13.1</jsoup.version>
    <java-diff-utils.version>4.10</java-diff-utils.version>
  </properties>
  <build>
    <sourceDirectory>src/main/java</sourceDirectory>
@@ -421,6 +430,12 @@
      <artifactId>AT</artifactId>
      <version>${ciyam-at.version}</version>
    </dependency>
    <!-- UPnP support -->
    <dependency>
      <groupId>com.dosse</groupId>
      <artifactId>WaifUPnP</artifactId>
      <version>${upnp.version}</version>
    </dependency>
    <!-- Bitcoin support -->
    <dependency>
      <groupId>org.bitcoinj</groupId>
@@ -429,7 +444,7 @@
    </dependency>
    <!-- For Litecoin, etc. support, requires bitcoinj -->
    <dependency>
      <groupId>com.github.jjos2372</groupId>
      <groupId>com.github.qortal</groupId>
      <artifactId>altcoinj</artifactId>
      <version>${altcoinj.version}</version>
    </dependency>
@@ -439,11 +454,36 @@
      <artifactId>json-simple</artifactId>
      <version>1.1.1</version>
    </dependency>
    <dependency>
      <groupId>org.json</groupId>
      <artifactId>json</artifactId>
      <version>20210307</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-text</artifactId>
      <version>${commons-text.version}</version>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <version>${commons-io.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-compress</artifactId>
      <version>${commons-compress.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
      <version>${commons-lang3.version}</version>
    </dependency>
    <dependency>
      <groupId>org.tukaani</groupId>
      <artifactId>xz</artifactId>
      <version>${xz.version}</version>
    </dependency>
    <!-- For bitset/bitmap compression -->
    <dependency>
      <groupId>io.druid</groupId>
@@ -530,7 +570,18 @@
    <dependency>
      <groupId>net.codebox</groupId>
      <artifactId>homoglyph</artifactId>
      <version>1.2.0</version>
      <version>${homoglyph.version}</version>
    </dependency>
    <!-- Unicode support -->
    <dependency>
      <groupId>com.ibm.icu</groupId>
      <artifactId>icu4j</artifactId>
      <version>${icu4j.version}</version>
    </dependency>
    <dependency>
      <groupId>com.ibm.icu</groupId>
      <artifactId>icu4j-charset</artifactId>
      <version>${icu4j.version}</version>
    </dependency>
    <!-- Jetty -->
    <dependency>
@@ -644,5 +695,15 @@
      <artifactId>bctls-jdk15on</artifactId>
      <version>${bouncycastle.version}</version>
    </dependency>
    <dependency>
      <groupId>org.jsoup</groupId>
      <artifactId>jsoup</artifactId>
      <version>${jsoup.version}</version>
    </dependency>
    <dependency>
      <groupId>io.github.java-diff-utils</groupId>
      <artifactId>java-diff-utils</artifactId>
      <version>${java-diff-utils.version}</version>
    </dependency>
  </dependencies>
</project>
BIN  src/.DS_Store  vendored
Binary file not shown.
BIN  src/main/.DS_Store  vendored
Binary file not shown.
@@ -7,14 +7,13 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.security.Security;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.*;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.jsse.provider.BouncyCastleJsseProvider;
import org.qortal.api.ApiKey;
import org.qortal.api.ApiRequest;
import org.qortal.controller.AutoUpdate;
import org.qortal.settings.Settings;
@@ -70,14 +69,40 @@ public class ApplyUpdate {
        String baseUri = "http://localhost:" + Settings.getInstance().getApiPort() + "/";
        LOGGER.info(() -> String.format("Shutting down node using API via %s", baseUri));

        // The /admin/stop endpoint requires an API key, which may or may not be already generated
        boolean apiKeyNewlyGenerated = false;
        ApiKey apiKey = null;
        try {
            apiKey = new ApiKey();
            if (!apiKey.generated()) {
                apiKey.generate();
                apiKeyNewlyGenerated = true;
                LOGGER.info("Generated API key");
            }
        } catch (IOException e) {
            LOGGER.info("Error loading API key: {}", e.getMessage());
        }

        // Create GET params
        Map<String, String> params = new HashMap<>();
        if (apiKey != null) {
            params.put("apiKey", apiKey.toString());
        }

        // Attempt to stop the node
        int attempt;
        for (attempt = 0; attempt < MAX_ATTEMPTS; ++attempt) {
            final int attemptForLogging = attempt;
            LOGGER.info(() -> String.format("Attempt #%d out of %d to shutdown node", attemptForLogging + 1, MAX_ATTEMPTS));
            String response = ApiRequest.perform(baseUri + "admin/stop", null);
            if (response == null)
            String response = ApiRequest.perform(baseUri + "admin/stop", params);
            if (response == null) {
                // No response - consider node shut down
                if (apiKeyNewlyGenerated) {
                    // API key was newly generated for this auto update, so we need to remove it
                    ApplyUpdate.removeGeneratedApiKey();
                }
                return true;
            }

            LOGGER.info(() -> String.format("Response from API: %s", response));

@@ -89,6 +114,11 @@ public class ApplyUpdate {
            }
        }

        if (apiKeyNewlyGenerated) {
            // API key was newly generated for this auto update, so we need to remove it
            ApplyUpdate.removeGeneratedApiKey();
        }

        if (attempt == MAX_ATTEMPTS) {
            LOGGER.error("Failed to shutdown node - giving up");
            return false;
@@ -97,6 +127,19 @@ public class ApplyUpdate {
        return true;
    }

    private static void removeGeneratedApiKey() {
        try {
            LOGGER.info("Removing newly generated API key...");

            // Delete the API key since it was only generated for this auto update
            ApiKey apiKey = new ApiKey();
            apiKey.delete();

        } catch (IOException e) {
            LOGGER.info("Error loading or deleting API key: {}", e.getMessage());
        }
    }

    private static void replaceJar() {
        // Assuming current working directory contains the JAR files
        Path realJar = Paths.get(JAR_FILENAME);
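The ApplyUpdate hunks above show that `/admin/stop` now expects an `apiKey` GET parameter: a key is generated on the fly if none exists and removed again once the node has stopped. The following is a hedged sketch of that call flow in isolation, reusing only the members visible in the diffs on this page (`ApiKey` and `ApiRequest.perform(url, params)`); the exception signature of `ApiRequest.perform` is not shown in the hunks, so treat this as an illustration rather than the upstream code.

```java
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.qortal.api.ApiKey;
import org.qortal.api.ApiRequest;

public class StopNodeSketch {

    /** Asks a local node to stop via /admin/stop, creating a temporary API key if needed. */
    public static boolean stopNode(String baseUri) throws IOException {
        ApiKey apiKey = new ApiKey();          // loads apikey.txt, or a legacy key from settings
        boolean apiKeyNewlyGenerated = false;
        if (!apiKey.generated()) {
            apiKey.generate();                 // create and persist a key just for this call
            apiKeyNewlyGenerated = true;
        }

        Map<String, String> params = new HashMap<>();
        params.put("apiKey", apiKey.toString());

        String response = ApiRequest.perform(baseUri + "admin/stop", params);

        if (apiKeyNewlyGenerated) {
            apiKey.delete();                   // don't leave behind a key the user never asked for
        }

        // As in ApplyUpdate, no response is treated as "node has shut down".
        return response == null;
    }
}
```

A caller would pass the node's API base URI, for example `stopNode("http://localhost:12391/")` when using the default port from the Dockerfile above.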
@@ -1,6 +1,7 @@
package org.qortal;

import java.security.Security;
import java.util.concurrent.TimeoutException;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -57,10 +58,10 @@ public class RepositoryMaintenance {

        LOGGER.info("Starting repository periodic maintenance. This can take a while...");
        try (final Repository repository = RepositoryManager.getRepository()) {
            repository.performPeriodicMaintenance();
            repository.performPeriodicMaintenance(null);

            LOGGER.info("Repository periodic maintenance completed");
        } catch (DataException e) {
        } catch (DataException | TimeoutException e) {
            LOGGER.error("Repository periodic maintenance failed", e);
        }
@@ -8,11 +8,13 @@ import javax.xml.bind.annotation.XmlAccessorType;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.block.BlockChain;
import org.qortal.controller.LiteNode;
import org.qortal.data.account.AccountBalanceData;
import org.qortal.data.account.AccountData;
import org.qortal.data.account.RewardShareData;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.settings.Settings;
import org.qortal.utils.Base58;

@XmlAccessorType(XmlAccessType.NONE) // Stops JAX-RS errors when unmarshalling blockchain config
@@ -59,7 +61,17 @@ public class Account {
    // Balance manipulations - assetId is 0 for QORT

    public long getConfirmedBalance(long assetId) throws DataException {
        AccountBalanceData accountBalanceData = this.repository.getAccountRepository().getBalance(this.address, assetId);
        AccountBalanceData accountBalanceData;

        if (Settings.getInstance().isLite()) {
            // Lite nodes request data from peers instead of the local db
            accountBalanceData = LiteNode.getInstance().fetchAccountBalance(this.address, assetId);
        }
        else {
            // All other node types fetch from the local db
            accountBalanceData = this.repository.getAccountRepository().getBalance(this.address, assetId);
        }

        if (accountBalanceData == null)
            return 0;

@@ -205,6 +217,12 @@ public class Account {
        return false;
    }

    /** Returns account's blockMinted (0+) or null if account not found in repository. */
    public Integer getBlocksMinted() throws DataException {
        return this.repository.getAccountRepository().getMintedBlockCount(this.address);
    }


    /** Returns whether account can build reward-shares.
     * <p>
     * To be able to create reward-shares, the account needs to pass at least one of these tests:<br>
@@ -272,7 +290,7 @@ public class Account {
    /**
     * Returns 'effective' minting level, or zero if reward-share does not exist.
     * <p>
     * For founder accounts, this returns "founderEffectiveMintingLevel" from blockchain config.
     * This is used by src/main/java/org/qortal/api/resource/AddressesResource.java to fulfil the online accounts API call.
     *
     * @param repository
     * @param rewardSharePublicKey
@@ -288,5 +306,26 @@ public class Account {
        Account rewardShareMinter = new Account(repository, rewardShareData.getMinter());
        return rewardShareMinter.getEffectiveMintingLevel();
    }
    /**
     * Returns 'effective' minting level, with a fix for the zero level.
     * <p>
     * For founder accounts, this returns "founderEffectiveMintingLevel" from blockchain config.
     *
     * @param repository
     * @param rewardSharePublicKey
     * @return 0+
     * @throws DataException
     */
    public static int getRewardShareEffectiveMintingLevelIncludingLevelZero(Repository repository, byte[] rewardSharePublicKey) throws DataException {
        // Find actual minter and get their effective minting level
        RewardShareData rewardShareData = repository.getAccountRepository().getRewardShare(rewardSharePublicKey);
        if (rewardShareData == null)
            return 0;

        else if (!rewardShareData.getMinter().equals(rewardShareData.getRecipient())) // the minter differs from the recipient, which means sponsorship
            return 0;

        Account rewardShareMinter = new Account(repository, rewardShareData.getMinter());
        return rewardShareMinter.getEffectiveMintingLevel();
    }
}
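The Javadoc in the Account hunk above mentions that this lookup backs the online-accounts API call in AddressesResource. A small hedged sketch of that usage pattern follows, using only the static method and types shown in the diff; the `org.qortal.account` package name is an assumption, since the hunks show the class body but not its package declaration.

```java
import org.qortal.account.Account;          // assumed package; only the class body is shown above
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;

public class MintingLevelSketch {

    /** Returns true if this reward-share public key should count towards online-account statistics. */
    static boolean countsForOnlineAccounts(Repository repository, byte[] rewardSharePublicKey) throws DataException {
        // Unknown keys and sponsorship shares (minter != recipient) come back as level 0.
        int level = Account.getRewardShareEffectiveMintingLevelIncludingLevelZero(repository, rewardSharePublicKey);
        return level > 0;
    }
}
```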
@@ -129,7 +129,14 @@
    // Foreign blockchain
    FOREIGN_BLOCKCHAIN_NETWORK_ISSUE(1201, 500),
    FOREIGN_BLOCKCHAIN_BALANCE_ISSUE(1202, 402),
    FOREIGN_BLOCKCHAIN_TOO_SOON(1203, 408);
    FOREIGN_BLOCKCHAIN_TOO_SOON(1203, 408),

    // Trade portal
    ORDER_SIZE_TOO_SMALL(1300, 402),

    // Data
    FILE_NOT_FOUND(1401, 404),
    NO_REPLY(1402, 404);

    private static final Map<Integer, ApiError> map = stream(ApiError.values()).collect(toMap(apiError -> apiError.code, apiError -> apiError));

@@ -157,4 +164,4 @@
        return this.status;
    }

}
}
@@ -16,4 +16,8 @@ public enum ApiExceptionFactory {
        return createException(request, apiError, null);
    }

    public ApiException createCustomException(HttpServletRequest request, ApiError apiError, String message) {
        return new ApiException(apiError.getStatus(), apiError.getCode(), message, null);
    }

}
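The new ApiError entries (ORDER_SIZE_TOO_SMALL, FILE_NOT_FOUND, NO_REPLY) pair with the createCustomException(...) factory method added just above, which lets a resource attach a free-form message to a known error code. A hedged sketch of building such an error follows; the `ApiExceptionFactory.INSTANCE` accessor and the `org.qortal.api` package for ApiError/ApiException are assumptions, as only the method itself appears in the hunks.

```java
import javax.servlet.http.HttpServletRequest;

import org.qortal.api.ApiError;            // package assumed
import org.qortal.api.ApiException;        // package assumed
import org.qortal.api.ApiExceptionFactory; // package assumed

public class DataErrorSketch {

    /** Builds a FILE_NOT_FOUND (code 1401, HTTP 404) ApiException carrying a custom message. */
    public static ApiException fileNotFound(HttpServletRequest request, String path) {
        // INSTANCE is the assumed singleton constant of the ApiExceptionFactory enum.
        return ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.FILE_NOT_FOUND,
                "No data found at path: " + path);
    }
}
```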
107  src/main/java/org/qortal/api/ApiKey.java  Normal file
@@ -0,0 +1,107 @@
package org.qortal.api;

import org.qortal.settings.Settings;
import org.qortal.utils.Base58;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.SecureRandom;

public class ApiKey {

    private String apiKey;

    public ApiKey() throws IOException {
        this.load();
    }

    public void generate() throws IOException {
        byte[] apiKey = new byte[16];
        new SecureRandom().nextBytes(apiKey);
        this.apiKey = Base58.encode(apiKey);

        this.save();
    }


    /* Filesystem */

    private Path getFilePath() {
        return Paths.get(Settings.getInstance().getApiKeyPath(), "apikey.txt");
    }

    private boolean load() throws IOException {
        Path path = this.getFilePath();
        File apiKeyFile = new File(path.toString());
        if (!apiKeyFile.exists()) {
            // Try settings - to allow legacy API keys to be supported
            return this.loadLegacyApiKey();
        }

        try {
            this.apiKey = new String(Files.readAllBytes(path));

        } catch (IOException e) {
            throw new IOException(String.format("Couldn't read contents from file %s", path.toString()));
        }

        return true;
    }

    private boolean loadLegacyApiKey() {
        String legacyApiKey = Settings.getInstance().getApiKey();
        if (legacyApiKey != null && !legacyApiKey.isEmpty()) {
            this.apiKey = Settings.getInstance().getApiKey();

            try {
                // Save it to the apikey file
                this.save();
            } catch (IOException e) {
                // Ignore failures as it will be reloaded from settings next time
            }
            return true;
        }
        return false;
    }

    public void save() throws IOException {
        if (this.apiKey == null || this.apiKey.isEmpty()) {
            throw new IllegalStateException("Unable to save a blank API key");
        }

        Path filePath = this.getFilePath();

        BufferedWriter writer = new BufferedWriter(new FileWriter(filePath.toString()));
        writer.write(this.apiKey);
        writer.close();
    }

    public void delete() throws IOException {
        this.apiKey = null;

        Path filePath = this.getFilePath();
        if (Files.exists(filePath)) {
            Files.delete(filePath);
        }
    }


    public boolean generated() {
        return (this.apiKey != null);
    }

    public boolean exists() {
        return this.getFilePath().toFile().exists();
    }

    @Override
    public String toString() {
        return this.apiKey;
    }

}
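ApiKey wraps a Base58-encoded key persisted to `apikey.txt` (falling back to a legacy key from settings). A minimal usage sketch based only on the methods shown above:

```java
import java.io.IOException;

import org.qortal.api.ApiKey;

public class ApiKeySketch {

    public static void main(String[] args) throws IOException {
        ApiKey apiKey = new ApiKey();      // constructor loads apikey.txt, or the legacy settings key

        if (!apiKey.generated()) {
            apiKey.generate();             // 16 random bytes, Base58-encoded, saved to apikey.txt
        }

        System.out.println("API key: " + apiKey);   // toString() returns the raw key string

        // delete() clears the in-memory key and removes apikey.txt, e.g. after a temporary key
        // was created just for an auto-update (see the ApplyUpdate hunks above).
        // apiKey.delete();
    }
}
```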
@@ -14,6 +14,7 @@ import java.security.SecureRandom;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;

import org.checkerframework.checker.units.qual.A;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.rewrite.handler.RedirectPatternRule;
import org.eclipse.jetty.rewrite.handler.RewriteHandler;
@@ -39,13 +40,7 @@ import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.qortal.api.resource.AnnotationPostProcessor;
import org.qortal.api.resource.ApiDefinition;
import org.qortal.api.websocket.ActiveChatsWebSocket;
import org.qortal.api.websocket.AdminStatusWebSocket;
import org.qortal.api.websocket.BlocksWebSocket;
import org.qortal.api.websocket.ChatMessagesWebSocket;
import org.qortal.api.websocket.PresenceWebSocket;
import org.qortal.api.websocket.TradeBotWebSocket;
import org.qortal.api.websocket.TradeOffersWebSocket;
import org.qortal.api.websocket.*;
import org.qortal.settings.Settings;

public class ApiService {
@@ -54,6 +49,7 @@ public class ApiService {

    private final ResourceConfig config;
    private Server server;
    private ApiKey apiKey;

    private ApiService() {
        this.config = new ResourceConfig();
@@ -74,6 +70,15 @@ public class ApiService {
        return this.config.getClasses();
    }

    public void setApiKey(ApiKey apiKey) {
        this.apiKey = apiKey;
    }

    public ApiKey getApiKey() {
        return this.apiKey;
    }


    public void start() {
        try {
            // Create API server
@@ -201,6 +206,9 @@ public class ApiService {
        context.addServlet(ChatMessagesWebSocket.class, "/websockets/chat/messages");
        context.addServlet(TradeOffersWebSocket.class, "/websockets/crosschain/tradeoffers");
        context.addServlet(TradeBotWebSocket.class, "/websockets/crosschain/tradebot");
        context.addServlet(TradePresenceWebSocket.class, "/websockets/crosschain/tradepresence");

        // Deprecated
        context.addServlet(PresenceWebSocket.class, "/websockets/presence");

        // Start server
@@ -2,7 +2,7 @@ package org.qortal.api;
|
||||
|
||||
import javax.xml.bind.annotation.adapters.XmlAdapter;
|
||||
|
||||
import org.bitcoinj.core.Base58;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
public class Base58TypeAdapter extends XmlAdapter<String, byte[]> {
|
||||
|
||||
|
||||
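The new setApiKey()/getApiKey() accessors imply the key object is injected from outside ApiService. A hedged sketch of how startup code might wire it in; the exact call site, and whether a missing key should be auto-generated, are assumptions rather than something this diff shows:

// Hypothetical bootstrap fragment, not part of this diff.
public static void wireApiKey() throws IOException {
    ApiKey apiKey = new ApiKey();       // loads apikey.txt, or falls back to the legacy settings key
    if (!apiKey.generated()) {
        // Assumption: generate a key eagerly; Security.getApiKey() would otherwise treat
        // sensitive calls as unauthorized until one exists.
        apiKey.generate();
    }

    ApiService.getInstance().setApiKey(apiKey);
}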
src/main/java/org/qortal/api/DomainMapService.java  (Normal file, 171 lines)
@@ -0,0 +1,171 @@
package org.qortal.api;

import io.swagger.v3.jaxrs2.integration.resources.OpenApiResource;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.rewrite.handler.RewriteHandler;
import org.eclipse.jetty.rewrite.handler.RewritePatternRule;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.server.handler.ErrorHandler;
import org.eclipse.jetty.server.handler.InetAccessHandler;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlets.CrossOriginFilter;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.qortal.api.resource.AnnotationPostProcessor;
import org.qortal.api.resource.ApiDefinition;
import org.qortal.settings.Settings;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyStore;
import java.security.SecureRandom;


public class DomainMapService {

    private static DomainMapService instance;

    private final ResourceConfig config;
    private Server server;

    private DomainMapService() {
        this.config = new ResourceConfig();
        this.config.packages("org.qortal.api.domainmap.resource");
        this.config.register(OpenApiResource.class);
        this.config.register(ApiDefinition.class);
        this.config.register(AnnotationPostProcessor.class);
    }

    public static DomainMapService getInstance() {
        if (instance == null)
            instance = new DomainMapService();

        return instance;
    }

    public Iterable<Class<?>> getResources() {
        return this.config.getClasses();
    }

    public void start() {
        try {
            // Create API server

            // SSL support if requested
            String keystorePathname = Settings.getInstance().getSslKeystorePathname();
            String keystorePassword = Settings.getInstance().getSslKeystorePassword();

            if (keystorePathname != null && keystorePassword != null) {
                // SSL version
                if (!Files.isReadable(Path.of(keystorePathname)))
                    throw new RuntimeException("Failed to start SSL API due to broken keystore");

                // BouncyCastle-specific SSLContext build
                SSLContext sslContext = SSLContext.getInstance("TLS", "BCJSSE");
                KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance("PKIX", "BCJSSE");

                KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType(), "BC");

                try (InputStream keystoreStream = Files.newInputStream(Paths.get(keystorePathname))) {
                    keyStore.load(keystoreStream, keystorePassword.toCharArray());
                }

                keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
                sslContext.init(keyManagerFactory.getKeyManagers(), null, new SecureRandom());

                SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
                sslContextFactory.setSslContext(sslContext);

                this.server = new Server();

                HttpConfiguration httpConfig = new HttpConfiguration();
                httpConfig.setSecureScheme("https");
                httpConfig.setSecurePort(Settings.getInstance().getDomainMapPort());

                SecureRequestCustomizer src = new SecureRequestCustomizer();
                httpConfig.addCustomizer(src);

                HttpConnectionFactory httpConnectionFactory = new HttpConnectionFactory(httpConfig);
                SslConnectionFactory sslConnectionFactory = new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString());

                ServerConnector portUnifiedConnector = new ServerConnector(this.server,
                        new DetectorConnectionFactory(sslConnectionFactory),
                        httpConnectionFactory);
                portUnifiedConnector.setHost(Settings.getInstance().getBindAddress());
                portUnifiedConnector.setPort(Settings.getInstance().getDomainMapPort());

                this.server.addConnector(portUnifiedConnector);
            } else {
                // Non-SSL
                InetAddress bindAddr = InetAddress.getByName(Settings.getInstance().getBindAddress());
                InetSocketAddress endpoint = new InetSocketAddress(bindAddr, Settings.getInstance().getDomainMapPort());
                this.server = new Server(endpoint);
            }

            // Error handler
            ErrorHandler errorHandler = new ApiErrorHandler();
            this.server.setErrorHandler(errorHandler);

            // Request logging
            if (Settings.getInstance().isDomainMapLoggingEnabled()) {
                RequestLogWriter logWriter = new RequestLogWriter("domainmap-requests.log");
                logWriter.setAppend(true);
                logWriter.setTimeZone("UTC");
                RequestLog requestLog = new CustomRequestLog(logWriter, CustomRequestLog.EXTENDED_NCSA_FORMAT);
                this.server.setRequestLog(requestLog);
            }

            // Access handler (currently no whitelist is used)
            InetAccessHandler accessHandler = new InetAccessHandler();
            this.server.setHandler(accessHandler);

            // URL rewriting
            RewriteHandler rewriteHandler = new RewriteHandler();
            accessHandler.setHandler(rewriteHandler);

            // Context
            ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
            context.setContextPath("/");
            rewriteHandler.setHandler(context);

            // Cross-origin resource sharing
            FilterHolder corsFilterHolder = new FilterHolder(CrossOriginFilter.class);
            corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*");
            corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, "GET, POST, DELETE");
            corsFilterHolder.setInitParameter(CrossOriginFilter.CHAIN_PREFLIGHT_PARAM, "false");
            context.addFilter(corsFilterHolder, "/*", null);

            // API servlet
            ServletContainer container = new ServletContainer(this.config);
            ServletHolder apiServlet = new ServletHolder(container);
            apiServlet.setInitOrder(1);
            context.addServlet(apiServlet, "/*");

            // Start server
            this.server.start();
        } catch (Exception e) {
            // Failed to start
            throw new RuntimeException("Failed to start API", e);
        }
    }

    public void stop() {
        try {
            // Stop server
            this.server.stop();
        } catch (Exception e) {
            // Failed to stop
        }

        this.server = null;
    }

}
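The domain map server is only useful when Settings supplies a domain-to-name mapping. A hedged fragment of how startup code might decide to launch it; the real call site and condition are not shown in this diff, and java.util.Map is assumed to be imported:

// Hypothetical bootstrap fragment; condition and call site are assumptions.
Map<String, String> domainMap = Settings.getInstance().getSimpleDomainMap();
if (domainMap != null && !domainMap.isEmpty()) {
    DomainMapService domainMapService = DomainMapService.getInstance();
    domainMapService.start();    // listens on getDomainMapPort(), HTTP or HTTPS as configured
    Runtime.getRuntime().addShutdownHook(new Thread(domainMapService::stop));
}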
src/main/java/org/qortal/api/GatewayService.java  (Normal file, 170 lines)
@@ -0,0 +1,170 @@
package org.qortal.api;

import io.swagger.v3.jaxrs2.integration.resources.OpenApiResource;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.rewrite.handler.RewriteHandler;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.server.handler.ErrorHandler;
import org.eclipse.jetty.server.handler.InetAccessHandler;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlets.CrossOriginFilter;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.qortal.api.resource.AnnotationPostProcessor;
import org.qortal.api.resource.ApiDefinition;
import org.qortal.settings.Settings;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyStore;
import java.security.SecureRandom;


public class GatewayService {

    private static GatewayService instance;

    private final ResourceConfig config;
    private Server server;

    private GatewayService() {
        this.config = new ResourceConfig();
        this.config.packages("org.qortal.api.gateway.resource");
        this.config.register(OpenApiResource.class);
        this.config.register(ApiDefinition.class);
        this.config.register(AnnotationPostProcessor.class);
    }

    public static GatewayService getInstance() {
        if (instance == null)
            instance = new GatewayService();

        return instance;
    }

    public Iterable<Class<?>> getResources() {
        return this.config.getClasses();
    }

    public void start() {
        try {
            // Create API server

            // SSL support if requested
            String keystorePathname = Settings.getInstance().getSslKeystorePathname();
            String keystorePassword = Settings.getInstance().getSslKeystorePassword();

            if (keystorePathname != null && keystorePassword != null) {
                // SSL version
                if (!Files.isReadable(Path.of(keystorePathname)))
                    throw new RuntimeException("Failed to start SSL API due to broken keystore");

                // BouncyCastle-specific SSLContext build
                SSLContext sslContext = SSLContext.getInstance("TLS", "BCJSSE");
                KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance("PKIX", "BCJSSE");

                KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType(), "BC");

                try (InputStream keystoreStream = Files.newInputStream(Paths.get(keystorePathname))) {
                    keyStore.load(keystoreStream, keystorePassword.toCharArray());
                }

                keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
                sslContext.init(keyManagerFactory.getKeyManagers(), null, new SecureRandom());

                SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
                sslContextFactory.setSslContext(sslContext);

                this.server = new Server();

                HttpConfiguration httpConfig = new HttpConfiguration();
                httpConfig.setSecureScheme("https");
                httpConfig.setSecurePort(Settings.getInstance().getGatewayPort());

                SecureRequestCustomizer src = new SecureRequestCustomizer();
                httpConfig.addCustomizer(src);

                HttpConnectionFactory httpConnectionFactory = new HttpConnectionFactory(httpConfig);
                SslConnectionFactory sslConnectionFactory = new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString());

                ServerConnector portUnifiedConnector = new ServerConnector(this.server,
                        new DetectorConnectionFactory(sslConnectionFactory),
                        httpConnectionFactory);
                portUnifiedConnector.setHost(Settings.getInstance().getBindAddress());
                portUnifiedConnector.setPort(Settings.getInstance().getGatewayPort());

                this.server.addConnector(portUnifiedConnector);
            } else {
                // Non-SSL
                InetAddress bindAddr = InetAddress.getByName(Settings.getInstance().getBindAddress());
                InetSocketAddress endpoint = new InetSocketAddress(bindAddr, Settings.getInstance().getGatewayPort());
                this.server = new Server(endpoint);
            }

            // Error handler
            ErrorHandler errorHandler = new ApiErrorHandler();
            this.server.setErrorHandler(errorHandler);

            // Request logging
            if (Settings.getInstance().isGatewayLoggingEnabled()) {
                RequestLogWriter logWriter = new RequestLogWriter("gateway-requests.log");
                logWriter.setAppend(true);
                logWriter.setTimeZone("UTC");
                RequestLog requestLog = new CustomRequestLog(logWriter, CustomRequestLog.EXTENDED_NCSA_FORMAT);
                this.server.setRequestLog(requestLog);
            }

            // Access handler (currently no whitelist is used)
            InetAccessHandler accessHandler = new InetAccessHandler();
            this.server.setHandler(accessHandler);

            // URL rewriting
            RewriteHandler rewriteHandler = new RewriteHandler();
            accessHandler.setHandler(rewriteHandler);

            // Context
            ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
            context.setContextPath("/");
            rewriteHandler.setHandler(context);

            // Cross-origin resource sharing
            FilterHolder corsFilterHolder = new FilterHolder(CrossOriginFilter.class);
            corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*");
            corsFilterHolder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, "GET, POST, DELETE");
            corsFilterHolder.setInitParameter(CrossOriginFilter.CHAIN_PREFLIGHT_PARAM, "false");
            context.addFilter(corsFilterHolder, "/*", null);

            // API servlet
            ServletContainer container = new ServletContainer(this.config);
            ServletHolder apiServlet = new ServletHolder(container);
            apiServlet.setInitOrder(1);
            context.addServlet(apiServlet, "/*");

            // Start server
            this.server.start();
        } catch (Exception e) {
            // Failed to start
            throw new RuntimeException("Failed to start API", e);
        }
    }

    public void stop() {
        try {
            // Stop server
            this.server.stop();
        } catch (Exception e) {
            // Failed to stop
        }

        this.server = null;
    }

}
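DomainMapService and GatewayService repeat the same BouncyCastle keystore/SSLContext bootstrap and differ only in resource package, port and log file. A hedged sketch of how that shared block could be factored into a helper; this refactor is not part of the diff, and it assumes the BC/BCJSSE providers are already registered (as the two services above also assume):

import org.eclipse.jetty.util.ssl.SslContextFactory;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyStore;
import java.security.SecureRandom;

public final class SslContextFactoryBuilder {

    private SslContextFactoryBuilder() {
    }

    // Same provider names and call sequence as the two services above.
    public static SslContextFactory.Server build(String keystorePathname, String keystorePassword) throws Exception {
        SSLContext sslContext = SSLContext.getInstance("TLS", "BCJSSE");
        KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance("PKIX", "BCJSSE");
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType(), "BC");

        try (InputStream keystoreStream = Files.newInputStream(Paths.get(keystorePathname))) {
            keyStore.load(keystoreStream, keystorePassword.toCharArray());
        }

        keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
        sslContext.init(keyManagerFactory.getKeyManagers(), null, new SecureRandom());

        SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
        sslContextFactory.setSslContext(sslContext);
        return sslContextFactory;
    }
}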
src/main/java/org/qortal/api/HTMLParser.java  (Normal file, 51 lines)
@@ -0,0 +1,51 @@
package org.qortal.api;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

public class HTMLParser {

    private static final Logger LOGGER = LogManager.getLogger(HTMLParser.class);

    private String linkPrefix;
    private byte[] data;

    public HTMLParser(String resourceId, String inPath, String prefix, boolean usePrefix, byte[] data) {
        String inPathWithoutFilename = inPath.substring(0, inPath.lastIndexOf('/'));
        this.linkPrefix = usePrefix ? String.format("%s/%s%s", prefix, resourceId, inPathWithoutFilename) : "";
        this.data = data;
    }

    public void addAdditionalHeaderTags() {
        String fileContents = new String(data);
        Document document = Jsoup.parse(fileContents);
        String baseUrl = this.linkPrefix + "/";
        Elements head = document.getElementsByTag("head");
        if (!head.isEmpty()) {
            // Add base href tag
            String baseElement = String.format("<base href=\"%s\">", baseUrl);
            head.get(0).prepend(baseElement);

            // Add meta charset tag
            String metaCharsetElement = "<meta charset=\"UTF-8\">";
            head.get(0).prepend(metaCharsetElement);

        }
        String html = document.html();
        this.data = html.getBytes();
    }

    public static boolean isHtmlFile(String path) {
        if (path.endsWith(".html") || path.endsWith(".htm")) {
            return true;
        }
        return false;
    }

    public byte[] getData() {
        return this.data;
    }
}
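A hedged usage sketch of what addAdditionalHeaderTags() produces for a tiny page; the resource name and the "/site" prefix are illustrative values, not taken from this diff:

import java.nio.charset.StandardCharsets;

public class HTMLParserExample {
    public static void main(String[] args) {
        byte[] page = "<html><head><title>demo</title></head><body>hi</body></html>"
                .getBytes(StandardCharsets.UTF_8);

        // Illustrative arguments: resource "MyWebsite" served under the "/site" prefix, inner path "/index.html".
        HTMLParser parser = new HTMLParser("MyWebsite", "/index.html", "/site", true, page);
        parser.addAdditionalHeaderTags();

        // The <head> now starts with <meta charset="UTF-8"> followed by <base href="/site/MyWebsite/">,
        // so relative links inside the page resolve against the rendered resource.
        System.out.println(new String(parser.getData(), StandardCharsets.UTF_8));
    }
}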
@@ -1,33 +1,111 @@
package org.qortal.api;

import org.qortal.arbitrary.ArbitraryDataResource;
import org.qortal.arbitrary.misc.Service;
import org.qortal.controller.arbitrary.ArbitraryDataRenderManager;
import org.qortal.settings.Settings;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;

import javax.servlet.http.HttpServletRequest;

import org.qortal.settings.Settings;

public abstract class Security {

    public static final String API_KEY_HEADER = "X-API-KEY";

    public static void checkApiCallAllowed(HttpServletRequest request) {
        String expectedApiKey = Settings.getInstance().getApiKey();
        // We may want to allow automatic authentication for local requests, if enabled in settings
        boolean localAuthBypassEnabled = Settings.getInstance().isLocalAuthBypassEnabled();
        if (localAuthBypassEnabled) {
            try {
                InetAddress remoteAddr = InetAddress.getByName(request.getRemoteAddr());
                if (remoteAddr.isLoopbackAddress()) {
                    // Request originates from loopback address, so allow it
                    return;
                }
            } catch (UnknownHostException e) {
                // Ignore failure, and fallback to API key authentication
            }
        }

        // Retrieve the API key
        ApiKey apiKey = Security.getApiKey(request);
        if (!apiKey.generated()) {
            // Not generated an API key yet, so disallow sensitive API calls
            throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "API key not generated");
        }

        // We require an API key to be passed
        String passedApiKey = request.getHeader(API_KEY_HEADER);
        if (passedApiKey == null) {
            // Try query string - this is needed to avoid a CORS preflight. See: https://stackoverflow.com/a/43881141
            passedApiKey = request.getParameter("apiKey");
        }
        if (passedApiKey == null) {
            throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Missing 'X-API-KEY' header");
        }

        if ((expectedApiKey != null && !expectedApiKey.equals(passedApiKey)) ||
                (passedApiKey != null && !passedApiKey.equals(expectedApiKey)))
            throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
        // The API keys must match
        if (!apiKey.toString().equals(passedApiKey)) {
            throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "API key invalid");
        }
    }

    InetAddress remoteAddr;
    public static void disallowLoopbackRequests(HttpServletRequest request) {
        try {
            remoteAddr = InetAddress.getByName(request.getRemoteAddr());
            InetAddress remoteAddr = InetAddress.getByName(request.getRemoteAddr());
            if (remoteAddr.isLoopbackAddress()) {
                throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Local requests not allowed");
            }
        } catch (UnknownHostException e) {
            throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
        }
    }

    if (!remoteAddr.isLoopbackAddress())
        throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
    public static void disallowLoopbackRequestsIfAuthBypassEnabled(HttpServletRequest request) {
        if (Settings.getInstance().isLocalAuthBypassEnabled()) {
            try {
                InetAddress remoteAddr = InetAddress.getByName(request.getRemoteAddr());
                if (remoteAddr.isLoopbackAddress()) {
                    throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Local requests not allowed when localAuthBypassEnabled is enabled in settings");
                }
            } catch (UnknownHostException e) {
                throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
            }
        }
    }

    public static void requirePriorAuthorization(HttpServletRequest request, String resourceId, Service service, String identifier) {
        ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, service, identifier);
        if (!ArbitraryDataRenderManager.getInstance().isAuthorized(resource)) {
            throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Call /render/authorize first");
        }
    }

    public static void requirePriorAuthorizationOrApiKey(HttpServletRequest request, String resourceId, Service service, String identifier) {
        try {
            Security.checkApiCallAllowed(request);

        } catch (ApiException e) {
            // API call wasn't allowed, but maybe it was pre-authorized
            Security.requirePriorAuthorization(request, resourceId, service, identifier);
        }
    }

    public static ApiKey getApiKey(HttpServletRequest request) {
        ApiKey apiKey = ApiService.getInstance().getApiKey();
        if (apiKey == null) {
            try {
                apiKey = new ApiKey();
            } catch (IOException e) {
                // Couldn't load API key - so we need to treat it as not generated, and therefore unauthorized
                throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.UNAUTHORIZED);
            }
            ApiService.getInstance().setApiKey(apiKey);
        }
        return apiKey;
    }

}

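The comment above notes that the key may also arrive as an "apiKey" query parameter so that browser apps can avoid a CORS preflight on the custom header. A hedged client sketch of that fallback path; the port, endpoint and key value are illustrative assumptions:

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class ApiKeyQueryParamExample {
    public static void main(String[] args) throws Exception {
        // Assumed: node on 127.0.0.1:12391, key-protected /admin/summary endpoint, illustrative key value.
        String apiKey = "2yGEvRnkYEM1kbxM8vjRw7c1TpFBoUWbD4widqVqU3A";

        URI uri = URI.create("http://127.0.0.1:12391/admin/summary?apiKey="
                + URLEncoder.encode(apiKey, StandardCharsets.UTF_8));

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(HttpRequest.newBuilder(uri).GET().build(), HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}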
@@ -0,0 +1,58 @@
package org.qortal.api.domainmap.resource;

import io.swagger.v3.oas.annotations.tags.Tag;
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
import org.qortal.arbitrary.ArbitraryDataRenderer;
import org.qortal.arbitrary.misc.Service;
import org.qortal.settings.Settings;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
import java.util.Map;


@Path("/")
@Tag(name = "Domain Map")
public class DomainMapResource {

    @Context HttpServletRequest request;
    @Context HttpServletResponse response;
    @Context ServletContext context;


    @GET
    public HttpServletResponse getIndexByDomainMap() {
        return this.getDomainMap("/");
    }

    @GET
    @Path("{path:.*}")
    public HttpServletResponse getPathByDomainMap(@PathParam("path") String inPath) {
        return this.getDomainMap(inPath);
    }

    private HttpServletResponse getDomainMap(String inPath) {
        Map<String, String> domainMap = Settings.getInstance().getSimpleDomainMap();
        if (domainMap != null && domainMap.containsKey(request.getServerName())) {
            // Build synchronously, so that we don't need to make the summary API endpoints available over
            // the domain map server. This means that there will be no loading screen, but this is potentially
            // preferred in this situation anyway (e.g. to avoid confusing search engine robots).
            return this.get(domainMap.get(request.getServerName()), ResourceIdType.NAME, Service.WEBSITE, inPath, null, "", false, false);
        }
        return ArbitraryDataRenderer.getResponse(response, 404, "Error 404: File Not Found");
    }

    private HttpServletResponse get(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
                                    String secret58, String prefix, boolean usePrefix, boolean async) {

        ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(resourceId, resourceIdType, service, inPath,
                secret58, prefix, usePrefix, async, request, response, context);
        return renderer.render();
    }

}
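getSimpleDomainMap() keys incoming requests by the HTTP Host header and maps each domain to a registered Qortal name. A hedged sketch of that mapping shape; the domain and name are invented, and the settings entry that feeds this map is not shown in this hunk:

import java.util.Map;

public class DomainMapShapeExample {
    public static void main(String[] args) {
        // Invented entries; in the node this map comes from Settings.getInstance().getSimpleDomainMap().
        Map<String, String> simpleDomainMap = Map.of(
                "www.example-site.org", "ExampleRegisteredName");

        // A request with Host: www.example-site.org would be rendered as the WEBSITE resource
        // published under "ExampleRegisteredName"; any unmapped host gets the 404 above.
        System.out.println(simpleDomainMap.get("www.example-site.org"));
    }
}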
@@ -0,0 +1,126 @@
package org.qortal.api.gateway.resource;

import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.qortal.api.Security;
import org.qortal.arbitrary.ArbitraryDataFile;
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
import org.qortal.arbitrary.ArbitraryDataReader;
import org.qortal.arbitrary.ArbitraryDataRenderer;
import org.qortal.arbitrary.ArbitraryDataResource;
import org.qortal.arbitrary.misc.Service;
import org.qortal.data.arbitrary.ArbitraryResourceStatus;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;


@Path("/")
@Tag(name = "Gateway")
public class GatewayResource {

    @Context HttpServletRequest request;
    @Context HttpServletResponse response;
    @Context ServletContext context;

    /**
     * We need to allow resource status checking (and building) via the gateway, as the node's API port
     * may not be forwarded and will almost certainly not be authenticated. Since gateways allow for
     * all resources to be loaded except those that are blocked, there is no need for authentication.
     */
    @GET
    @Path("/arbitrary/resource/status/{service}/{name}")
    public ArbitraryResourceStatus getDefaultResourceStatus(@PathParam("service") Service service,
                                                            @PathParam("name") String name,
                                                            @QueryParam("build") Boolean build) {

        return this.getStatus(service, name, null, build);
    }

    @GET
    @Path("/arbitrary/resource/status/{service}/{name}/{identifier}")
    public ArbitraryResourceStatus getResourceStatus(@PathParam("service") Service service,
                                                     @PathParam("name") String name,
                                                     @PathParam("identifier") String identifier,
                                                     @QueryParam("build") Boolean build) {

        return this.getStatus(service, name, identifier, build);
    }

    private ArbitraryResourceStatus getStatus(Service service, String name, String identifier, Boolean build) {

        // If "build=true" has been specified in the query string, build the resource before returning its status
        if (build != null && build == true) {
            ArbitraryDataReader reader = new ArbitraryDataReader(name, ArbitraryDataFile.ResourceIdType.NAME, service, null);
            try {
                if (!reader.isBuilding()) {
                    reader.loadSynchronously(false);
                }
            } catch (Exception e) {
                // No need to handle exception, as it will be reflected in the status
            }
        }

        ArbitraryDataResource resource = new ArbitraryDataResource(name, ResourceIdType.NAME, service, identifier);
        return resource.getStatus(false);
    }


    @GET
    public HttpServletResponse getRoot() {
        return ArbitraryDataRenderer.getResponse(response, 200, "");
    }


    @GET
    @Path("{name}/{path:.*}")
    @SecurityRequirement(name = "apiKey")
    public HttpServletResponse getPathByName(@PathParam("name") String name,
                                             @PathParam("path") String inPath) {
        // Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
        Security.disallowLoopbackRequests(request);
        return this.get(name, ResourceIdType.NAME, Service.WEBSITE, inPath, null, "", true, true);
    }

    @GET
    @Path("{name}")
    @SecurityRequirement(name = "apiKey")
    public HttpServletResponse getIndexByName(@PathParam("name") String name) {
        // Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
        Security.disallowLoopbackRequests(request);
        return this.get(name, ResourceIdType.NAME, Service.WEBSITE, "/", null, "", true, true);
    }


    // Optional /site alternative for backwards support

    @GET
    @Path("/site/{name}/{path:.*}")
    public HttpServletResponse getSitePathByName(@PathParam("name") String name,
                                                 @PathParam("path") String inPath) {
        // Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
        Security.disallowLoopbackRequests(request);
        return this.get(name, ResourceIdType.NAME, Service.WEBSITE, inPath, null, "/site", true, true);
    }

    @GET
    @Path("/site/{name}")
    public HttpServletResponse getSiteIndexByName(@PathParam("name") String name) {
        // Block requests from localhost, to prevent websites/apps from running javascript that fetches unvetted data
        Security.disallowLoopbackRequests(request);
        return this.get(name, ResourceIdType.NAME, Service.WEBSITE, "/", null, "/site", true, true);
    }


    private HttpServletResponse get(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
                                    String secret58, String prefix, boolean usePrefix, boolean async) {

        ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(resourceId, resourceIdType, service, inPath,
                secret58, prefix, usePrefix, async, request, response, context);
        return renderer.render();
    }

}
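Per the javadoc above, status checks on the gateway are unauthenticated, and build=true triggers a synchronous build before the status is returned. A hedged client sketch; the gateway host/port and the resource name are illustrative assumptions:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GatewayStatusExample {
    public static void main(String[] args) throws Exception {
        // Assumed: gateway reachable on this host's default HTTP port; "ExampleRegisteredName"
        // is a published WEBSITE resource.
        URI uri = URI.create("http://localhost/arbitrary/resource/status/WEBSITE/ExampleRegisteredName?build=true");

        // No X-API-KEY header is needed for gateway status checks.
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(HttpRequest.newBuilder(uri).GET().build(), HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());   // JSON-serialized ArbitraryResourceStatus
    }
}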
src/main/java/org/qortal/api/model/BlockMintingInfo.java  (Normal file, 23 lines)
@@ -0,0 +1,23 @@
package org.qortal.api.model;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import java.math.BigDecimal;
import java.math.BigInteger;

@XmlAccessorType(XmlAccessType.FIELD)
public class BlockMintingInfo {

    public byte[] minterPublicKey;
    public int minterLevel;
    public int onlineAccountsCount;
    public BigDecimal maxDistance;
    public BigInteger keyDistance;
    public double keyDistanceRatio;
    public long timestamp;
    public long timeDelta;

    public BlockMintingInfo() {
    }

}
@@ -1,61 +1,74 @@
package org.qortal.api.model;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;

import io.swagger.v3.oas.annotations.media.Schema;
import org.qortal.data.network.PeerChainTipData;
import org.qortal.data.network.PeerData;
import org.qortal.network.Handshake;
import org.qortal.network.Peer;

import io.swagger.v3.oas.annotations.media.Schema;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

@XmlAccessorType(XmlAccessType.FIELD)
public class ConnectedPeer {

    public enum Direction {
        INBOUND,
        OUTBOUND;
    }
    public Direction direction;
    public Handshake handshakeStatus;
    public Long lastPing;
    public Long connectedWhen;
    public Long peersConnectedWhen;
    public enum Direction {
        INBOUND,
        OUTBOUND;
    }

    public String address;
    public String version;
    public Direction direction;
    public Handshake handshakeStatus;
    public Long lastPing;
    public Long connectedWhen;
    public Long peersConnectedWhen;

    public String nodeId;
    public String address;
    public String version;

    public Integer lastHeight;
    @Schema(example = "base58")
    public byte[] lastBlockSignature;
    public Long lastBlockTimestamp;
    public String nodeId;

    protected ConnectedPeer() {
    }
    public Integer lastHeight;
    @Schema(example = "base58")
    public byte[] lastBlockSignature;
    public Long lastBlockTimestamp;
    public UUID connectionId;
    public String age;

    public ConnectedPeer(Peer peer) {
        this.direction = peer.isOutbound() ? Direction.OUTBOUND : Direction.INBOUND;
        this.handshakeStatus = peer.getHandshakeStatus();
        this.lastPing = peer.getLastPing();
    protected ConnectedPeer() {
    }

        PeerData peerData = peer.getPeerData();
        this.connectedWhen = peer.getConnectionTimestamp();
        this.peersConnectedWhen = peer.getPeersConnectionTimestamp();
    public ConnectedPeer(Peer peer) {
        this.direction = peer.isOutbound() ? Direction.OUTBOUND : Direction.INBOUND;
        this.handshakeStatus = peer.getHandshakeStatus();
        this.lastPing = peer.getLastPing();

        this.address = peerData.getAddress().toString();
        PeerData peerData = peer.getPeerData();
        this.connectedWhen = peer.getConnectionTimestamp();
        this.peersConnectedWhen = peer.getPeersConnectionTimestamp();

        this.version = peer.getPeersVersionString();
        this.nodeId = peer.getPeersNodeId();
        this.address = peerData.getAddress().toString();

        PeerChainTipData peerChainTipData = peer.getChainTipData();
        if (peerChainTipData != null) {
            this.lastHeight = peerChainTipData.getLastHeight();
            this.lastBlockSignature = peerChainTipData.getLastBlockSignature();
            this.lastBlockTimestamp = peerChainTipData.getLastBlockTimestamp();
        }
    }
        this.version = peer.getPeersVersionString();
        this.nodeId = peer.getPeersNodeId();
        this.connectionId = peer.getPeerConnectionId();
        if (peer.getConnectionEstablishedTime() > 0) {
            long age = (System.currentTimeMillis() - peer.getConnectionEstablishedTime());
            long minutes = TimeUnit.MILLISECONDS.toMinutes(age);
            long seconds = TimeUnit.MILLISECONDS.toSeconds(age) - TimeUnit.MINUTES.toSeconds(minutes);
            this.age = String.format("%dm %ds", minutes, seconds);
        } else {
            this.age = "connecting...";
        }

        PeerChainTipData peerChainTipData = peer.getChainTipData();
        if (peerChainTipData != null) {
            this.lastHeight = peerChainTipData.getLastHeight();
            this.lastBlockSignature = peerChainTipData.getLastBlockSignature();
            this.lastBlockTimestamp = peerChainTipData.getLastBlockTimestamp();
        }
    }

}

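The new "age" field formats connection uptime in milliseconds as minutes and seconds. The same conversion, extracted into a small standalone sketch with an illustrative input:

import java.util.concurrent.TimeUnit;

public class ConnectionAgeExample {
    // Same arithmetic as the ConnectedPeer constructor above, isolated for illustration.
    static String formatAge(long ageMillis) {
        long minutes = TimeUnit.MILLISECONDS.toMinutes(ageMillis);
        long seconds = TimeUnit.MILLISECONDS.toSeconds(ageMillis) - TimeUnit.MINUTES.toSeconds(minutes);
        return String.format("%dm %ds", minutes, seconds);
    }

    public static void main(String[] args) {
        System.out.println(formatAge(754_000L));   // prints "12m 34s"
    }
}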
@@ -0,0 +1,29 @@
package org.qortal.api.model;

import io.swagger.v3.oas.annotations.media.Schema;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;

@XmlAccessorType(XmlAccessType.FIELD)
public class CrossChainDualSecretRequest {

    @Schema(description = "Public key to match AT's trade 'partner'", example = "C6wuddsBV3HzRrXUtezE7P5MoRXp5m3mEDokRDGZB6ry")
    public byte[] partnerPublicKey;

    @Schema(description = "Qortal AT address")
    public String atAddress;

    @Schema(description = "secret-A (32 bytes)", example = "FHMzten4he9jZ4HGb4297Utj6F5g2w7serjq2EnAg2s1")
    public byte[] secretA;

    @Schema(description = "secret-B (32 bytes)", example = "EN2Bgx3BcEMtxFCewmCVSMkfZjVKYhx3KEXC5A21KBGx")
    public byte[] secretB;

    @Schema(description = "Qortal address for receiving QORT from AT")
    public String receivingAddress;

    public CrossChainDualSecretRequest() {
    }

}
@@ -8,17 +8,14 @@ import io.swagger.v3.oas.annotations.media.Schema;
@XmlAccessorType(XmlAccessType.FIELD)
public class CrossChainSecretRequest {

    @Schema(description = "Public key to match AT's trade 'partner'", example = "C6wuddsBV3HzRrXUtezE7P5MoRXp5m3mEDokRDGZB6ry")
    public byte[] partnerPublicKey;
    @Schema(description = "Private key to match AT's trade 'partner'", example = "C6wuddsBV3HzRrXUtezE7P5MoRXp5m3mEDokRDGZB6ry")
    public byte[] partnerPrivateKey;

    @Schema(description = "Qortal AT address")
    public String atAddress;

    @Schema(description = "secret-A (32 bytes)", example = "FHMzten4he9jZ4HGb4297Utj6F5g2w7serjq2EnAg2s1")
    public byte[] secretA;

    @Schema(description = "secret-B (32 bytes)", example = "EN2Bgx3BcEMtxFCewmCVSMkfZjVKYhx3KEXC5A21KBGx")
    public byte[] secretB;
    @Schema(description = "Secret (32 bytes)", example = "FHMzten4he9jZ4HGb4297Utj6F5g2w7serjq2EnAg2s1")
    public byte[] secret;

    @Schema(description = "Qortal address for receiving QORT from AT")
    public String receivingAddress;

@@ -25,6 +25,12 @@ public class CrossChainTradeSummary
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    private long foreignAmount;

    private String atAddress;

    private String sellerAddress;

    private String buyerReceivingAddress;

    protected CrossChainTradeSummary() {
        /* For JAXB */
    }
@@ -34,6 +40,9 @@ public class CrossChainTradeSummary
        this.qortAmount = crossChainTradeData.qortAmount;
        this.foreignAmount = crossChainTradeData.expectedForeignAmount;
        this.btcAmount = this.foreignAmount;
        this.sellerAddress = crossChainTradeData.qortalCreator;
        this.buyerReceivingAddress = crossChainTradeData.qortalPartnerReceivingAddress;
        this.atAddress = crossChainTradeData.qortalAtAddress;
    }

    public long getTradeTimestamp() {
@@ -48,7 +57,11 @@ public class CrossChainTradeSummary
        return this.btcAmount;
    }

    public long getForeignAmount() {
        return this.foreignAmount;
    }
    public long getForeignAmount() { return this.foreignAmount; }

    public String getAtAddress() { return this.atAddress; }

    public String getSellerAddress() { return this.sellerAddress; }

    public String getBuyerReceivingAddressAddress() { return this.buyerReceivingAddress; }
}

src/main/java/org/qortal/api/model/ListRequest.java  (Normal file, 18 lines)
@@ -0,0 +1,18 @@
package org.qortal.api.model;

import io.swagger.v3.oas.annotations.media.Schema;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import java.util.List;

@XmlAccessorType(XmlAccessType.FIELD)
public class ListRequest {

    @Schema(description = "A list of items")
    public List<String> items;

    public ListRequest() {
    }

}
@@ -12,6 +12,7 @@ public class NodeInfo
    public long buildTimestamp;
    public String nodeId;
    public boolean isTestNet;
    public String type;

    public NodeInfo() {
    }

@@ -4,6 +4,7 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;

import org.qortal.controller.Controller;
import org.qortal.controller.Synchronizer;
import org.qortal.network.Network;

@XmlAccessorType(XmlAccessType.FIELD)
@@ -22,10 +23,10 @@ public class NodeStatus
    public NodeStatus() {
        this.isMintingPossible = Controller.getInstance().isMintingPossible();

        this.syncPercent = Controller.getInstance().getSyncPercent();
        this.isSynchronizing = this.syncPercent != null;
        this.syncPercent = Synchronizer.getInstance().getSyncPercent();
        this.isSynchronizing = Synchronizer.getInstance().isSynchronizing();

        this.numberOfConnections = Network.getInstance().getHandshakedPeers().size();
        this.numberOfConnections = Network.getInstance().getImmutableHandshakedPeers().size();

        this.height = Controller.getInstance().getChainHeight();
    }

src/main/java/org/qortal/api/model/PeersSummary.java  (Normal file, 15 lines)
@@ -0,0 +1,15 @@
package org.qortal.api.model;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;

@XmlAccessorType(XmlAccessType.FIELD)
public class PeersSummary {

    public int inboundConnections;
    public int outboundConnections;

    public PeersSummary() {
    }

}
@@ -0,0 +1,29 @@
package org.qortal.api.model.crosschain;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import io.swagger.v3.oas.annotations.media.Schema;

@XmlAccessorType(XmlAccessType.FIELD)
public class DigibyteSendRequest {

    @Schema(description = "Digibyte BIP32 extended private key", example = "tprv___________________________________________________________________________________________________________")
    public String xprv58;

    @Schema(description = "Recipient's Digibyte address ('legacy' P2PKH only)", example = "1DigByteEaterAddressDontSendf59kuE")
    public String receivingAddress;

    @Schema(description = "Amount of DGB to send", type = "number")
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public long digibyteAmount;

    @Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 DGB (100 sats) per byte", example = "0.00000100", type = "number")
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public Long feePerByte;

    public DigibyteSendRequest() {
    }

}
@@ -0,0 +1,29 @@
package org.qortal.api.model.crosschain;

import io.swagger.v3.oas.annotations.media.Schema;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

@XmlAccessorType(XmlAccessType.FIELD)
public class DogecoinSendRequest {

    @Schema(description = "Dogecoin BIP32 extended private key", example = "tprv___________________________________________________________________________________________________________")
    public String xprv58;

    @Schema(description = "Recipient's Dogecoin address ('legacy' P2PKH only)", example = "DoGecoinEaterAddressDontSendhLfzKD")
    public String receivingAddress;

    @Schema(description = "Amount of DOGE to send", type = "number")
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public long dogecoinAmount;

    @Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 DOGE (100 sats) per byte", example = "0.00000100", type = "number")
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public Long feePerByte;

    public DogecoinSendRequest() {
    }

}
@@ -0,0 +1,29 @@
package org.qortal.api.model.crosschain;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import io.swagger.v3.oas.annotations.media.Schema;

@XmlAccessorType(XmlAccessType.FIELD)
public class RavencoinSendRequest {

    @Schema(description = "Ravencoin BIP32 extended private key", example = "tprv___________________________________________________________________________________________________________")
    public String xprv58;

    @Schema(description = "Recipient's Ravencoin address ('legacy' P2PKH only)", example = "1RvnCoinEaterAddressDontSendf59kuE")
    public String receivingAddress;

    @Schema(description = "Amount of RVN to send", type = "number")
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public long ravencoinAmount;

    @Schema(description = "Transaction fee per byte (optional). Default is 0.00000100 RVN (100 sats) per byte", example = "0.00000100", type = "number")
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public Long feePerByte;

    public RavencoinSendRequest() {
    }

}
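The Digibyte, Dogecoin and Ravencoin send-request models are parallel. A hedged sketch of populating one of them: the AmountTypeAdapter suggests the long amounts are denominated in the coin's smallest 1e-8 unit, which is an inference rather than something stated in this diff, and the xprv58 value is a placeholder.

public class SendRequestExample {
    public static void main(String[] args) {
        DogecoinSendRequest sendRequest = new DogecoinSendRequest();
        sendRequest.xprv58 = "tprv________________________________________";   // placeholder, never a real key
        // 'Dogecoin eater' address taken from the @Schema example above
        sendRequest.receivingAddress = "DoGecoinEaterAddressDontSendhLfzKD";
        sendRequest.dogecoinAmount = 5_00000000L;   // assumed to mean 5.00000000 DOGE in smallest units
        sendRequest.feePerByte = 100L;              // matches the documented default of 0.00000100 DOGE per byte

        // The populated object would normally be serialized to JSON and POSTed to the
        // cross-chain API; the exact endpoint is not part of these hunks.
    }
}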
@@ -12,14 +12,11 @@ import io.swagger.v3.oas.annotations.tags.Tag;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

@@ -33,11 +30,13 @@ import org.qortal.api.Security;
import org.qortal.api.model.ApiOnlineAccount;
import org.qortal.api.model.RewardShareKeyRequest;
import org.qortal.asset.Asset;
import org.qortal.controller.Controller;
import org.qortal.controller.LiteNode;
import org.qortal.controller.OnlineAccountsManager;
import org.qortal.crypto.Crypto;
import org.qortal.data.account.AccountData;
import org.qortal.data.account.RewardShareData;
import org.qortal.data.network.OnlineAccountData;
import org.qortal.data.network.OnlineAccountLevel;
import org.qortal.data.transaction.PublicizeTransactionData;
import org.qortal.data.transaction.RewardShareTransactionData;
import org.qortal.data.transaction.TransactionData;
@@ -111,18 +110,26 @@ public class AddressesResource {
        if (!Crypto.isValidAddress(address))
            throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

        byte[] lastReference = null;
        AccountData accountData;

        try (final Repository repository = RepositoryManager.getRepository()) {
            AccountData accountData = repository.getAccountRepository().getAccount(address);
            // Not found?
            if (accountData == null)
                throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);

            lastReference = accountData.getReference();
        } catch (DataException e) {
            throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
        if (Settings.getInstance().isLite()) {
            // Lite nodes request data from peers instead of the local db
            accountData = LiteNode.getInstance().fetchAccountData(address);
        }
        else {
            // All other node types request data from local db
            try (final Repository repository = RepositoryManager.getRepository()) {
                accountData = repository.getAccountRepository().getAccount(address);
            } catch (DataException e) {
                throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
            }
        }

        // Not found?
        if (accountData == null)
            throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);

        byte[] lastReference = accountData.getReference();

        if (lastReference == null || lastReference.length == 0)
            return "false";
@@ -158,7 +165,7 @@ public class AddressesResource {
    )
    @ApiErrors({ApiError.PUBLIC_KEY_NOT_FOUND, ApiError.REPOSITORY_ISSUE})
    public List<ApiOnlineAccount> getOnlineAccounts() {
        List<OnlineAccountData> onlineAccounts = Controller.getInstance().getOnlineAccounts();
        List<OnlineAccountData> onlineAccounts = OnlineAccountsManager.getInstance().getOnlineAccounts();

        // Map OnlineAccountData entries to OnlineAccount via reward-share data
        try (final Repository repository = RepositoryManager.getRepository()) {
@@ -180,6 +187,66 @@ public class AddressesResource {
        }
    }

    @GET
    @Path("/online/levels")
    @Operation(
        summary = "Return currently 'online' accounts counts, grouped by level",
        responses = {
            @ApiResponse(
                description = "online accounts",
                content = @Content(mediaType = MediaType.APPLICATION_JSON, array = @ArraySchema(schema = @Schema(implementation = ApiOnlineAccount.class)))
            )
        }
    )
    @ApiErrors({ApiError.PUBLIC_KEY_NOT_FOUND, ApiError.REPOSITORY_ISSUE})
    public List<OnlineAccountLevel> getOnlineAccountsByLevel() {
        List<OnlineAccountData> onlineAccounts = OnlineAccountsManager.getInstance().getOnlineAccounts();

        try (final Repository repository = RepositoryManager.getRepository()) {
            List<OnlineAccountLevel> onlineAccountLevels = new ArrayList<>();

            for (OnlineAccountData onlineAccountData : onlineAccounts) {
                try {
                    final int minterLevel = Account.getRewardShareEffectiveMintingLevelIncludingLevelZero(repository, onlineAccountData.getPublicKey());

                    OnlineAccountLevel onlineAccountLevel = onlineAccountLevels.stream()
                            .filter(a -> a.getLevel() == minterLevel)
                            .findFirst().orElse(null);

                    // Note: I don't think we can use the level as the List index here because there will be gaps.
                    // So we are forced to manually look up the existing item each time.
                    // There's probably a nice shorthand java way of doing this, but this approach gets the same result.

                    if (onlineAccountLevel == null) {
                        // No entry exists for this level yet, so create one
                        onlineAccountLevel = new OnlineAccountLevel(minterLevel, 1);
                        onlineAccountLevels.add(onlineAccountLevel);
                    }
                    else {
                        // Already exists - so increment the count
                        int existingCount = onlineAccountLevel.getCount();
                        onlineAccountLevel.setCount(++existingCount);

                        // Then replace the existing item
                        int index = onlineAccountLevels.indexOf(onlineAccountLevel);
                        onlineAccountLevels.set(index, onlineAccountLevel);
                    }

                } catch (DataException e) {
                    continue;
                }
            }

            // Sort by level
            onlineAccountLevels.sort(Comparator.comparingInt(OnlineAccountLevel::getLevel));

            return onlineAccountLevels;

        } catch (DataException e) {
            throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
        }
    }

    @GET
    @Path("/balance/{address}")
    @Operation(
@@ -475,7 +542,7 @@ public class AddressesResource {
    )
    @ApiErrors({ApiError.TRANSACTION_INVALID, ApiError.INVALID_DATA, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
    @SecurityRequirement(name = "apiKey")
    public String computePublicize(String rawBytes58) {
    public String computePublicize(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String rawBytes58) {
        Security.checkApiCallAllowed(request);

        try (final Repository repository = RepositoryManager.getRepository()) {

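The comment inside getOnlineAccountsByLevel() above wonders whether there is a shorter idiom for the per-level counting. A hedged sketch of the same aggregation with Collectors.groupingBy; it is not part of the diff, it assumes the minter levels have already been looked up via the same repository call as in the loop above, and it relies on the OnlineAccountLevel(level, count) constructor shown there:

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class OnlineAccountLevelGrouping {

    // minterLevels: one entry per online account, computed beforehand (DataException handling stays with the caller).
    public static List<OnlineAccountLevel> countByLevel(List<Integer> minterLevels) {
        return minterLevels.stream()
                .collect(Collectors.groupingBy(level -> level, Collectors.counting()))
                .entrySet().stream()
                .map(entry -> new OnlineAccountLevel(entry.getKey(), entry.getValue().intValue()))
                .sorted(Comparator.comparingInt(OnlineAccountLevel::getLevel))
                .collect(Collectors.toList());
    }
}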
@@ -22,32 +22,28 @@ import java.time.OffsetDateTime;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.core.LoggerContext;
|
||||
import org.apache.logging.log4j.core.appender.RollingFileAppender;
|
||||
import org.qortal.account.Account;
|
||||
import org.qortal.account.PrivateKeyAccount;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.model.ActivitySummary;
|
||||
import org.qortal.api.model.NodeInfo;
|
||||
import org.qortal.api.model.NodeStatus;
|
||||
import org.qortal.block.BlockChain;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.controller.Synchronizer.SynchronizationResult;
|
||||
import org.qortal.data.account.MintingAccountData;
|
||||
import org.qortal.data.account.RewardShareData;
|
||||
@@ -67,6 +63,8 @@ import com.google.common.collect.Lists;
|
||||
@Tag(name = "Admin")
|
||||
public class AdminResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(AdminResource.class);
|
||||
|
||||
private static final int MAX_LOG_LINES = 500;
|
||||
|
||||
@Context
|
||||
@@ -76,7 +74,8 @@ public class AdminResource {
|
||||
@Path("/unused")
|
||||
@Parameter(in = ParameterIn.PATH, name = "assetid", description = "Asset ID, 0 is native coin", schema = @Schema(type = "integer"))
|
||||
@Parameter(in = ParameterIn.PATH, name = "otherassetid", description = "Asset ID, 0 is native coin", schema = @Schema(type = "integer"))
|
||||
@Parameter(in = ParameterIn.PATH, name = "address", description = "an account address", example = "QgV4s3xnzLhVBEJxcYui4u4q11yhUHsd9v")
|
||||
@Parameter(in = ParameterIn.PATH, name = "address", description = "An account address", example = "QgV4s3xnzLhVBEJxcYui4u4q11yhUHsd9v")
|
||||
@Parameter(in = ParameterIn.PATH, name = "path", description = "Local path to folder containing the files", schema = @Schema(type = "String", defaultValue = "/Users/user/Documents/MyStaticWebsite"))
|
||||
@Parameter(in = ParameterIn.QUERY, name = "count", description = "Maximum number of entries to return, 0 means none", schema = @Schema(type = "integer", defaultValue = "20"))
|
||||
@Parameter(in = ParameterIn.QUERY, name = "limit", description = "Maximum number of entries to return, 0 means unlimited", schema = @Schema(type = "integer", defaultValue = "20"))
|
||||
@Parameter(in = ParameterIn.QUERY, name = "offset", description = "Starting entry in results, 0 is first entry", schema = @Schema(type = "integer"))
|
||||
@@ -120,10 +119,23 @@ public class AdminResource {
|
||||
nodeInfo.buildTimestamp = Controller.getInstance().getBuildTimestamp();
|
||||
nodeInfo.nodeId = Network.getInstance().getOurNodeId();
|
||||
nodeInfo.isTestNet = Settings.getInstance().isTestNet();
|
||||
nodeInfo.type = getNodeType();
|
||||
|
||||
return nodeInfo;
|
||||
}
|
||||
|
||||
private String getNodeType() {
|
||||
if (Settings.getInstance().isTopOnly()) {
|
||||
return "topOnly";
|
||||
}
|
||||
else if (Settings.getInstance().isLite()) {
|
||||
return "lite";
|
||||
}
|
||||
else {
|
||||
return "full";
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/status")
|
||||
@Operation(
|
||||
@@ -134,10 +146,7 @@ public class AdminResource {
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public NodeStatus status() {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
NodeStatus nodeStatus = new NodeStatus();
|
||||
|
||||
return nodeStatus;
|
||||
@@ -156,7 +165,7 @@ public class AdminResource {
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String shutdown() {
|
||||
public String shutdown(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
new Thread(() -> {
|
||||
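The hunks above and below repeat a single pattern: each secured endpoint gains an explicit @HeaderParam(Security.API_KEY_HEADER) parameter so the key is visible in the generated OpenAPI documentation, while actual validation still happens inside Security.checkApiCallAllowed(request). A minimal sketch of that pattern on a hypothetical endpoint follows; the "/example" path is invented for illustration, and the exact header name behind Security.API_KEY_HEADER is assumed, not taken from these hunks.

// Illustrative sketch only - not part of the diff above.
@GET
@Path("/example")
@SecurityRequirement(name = "apiKey")
public String example(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
    // The injected header value is not checked here directly;
    // Security.checkApiCallAllowed(request) performs the real validation.
    Security.checkApiCallAllowed(request);
    return "true";
}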
@@ -185,7 +194,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public ActivitySummary summary() {
public ActivitySummary summary(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

ActivitySummary summary = new ActivitySummary();
@@ -231,7 +240,7 @@ public class AdminResource {
}
)
@SecurityRequirement(name = "apiKey")
public Controller.StatsSnapshot getEngineStats() {
public Controller.StatsSnapshot getEngineStats(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

return Controller.getInstance().getStatsSnapshot();
@@ -249,9 +258,7 @@ public class AdminResource {
}
)
@ApiErrors({ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public List<MintingAccountData> getMintingAccounts() {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
List<MintingAccountData> mintingAccounts = repository.getAccountRepository().getMintingAccounts();
@@ -297,7 +304,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.REPOSITORY_ISSUE, ApiError.CANNOT_MINT})
@SecurityRequirement(name = "apiKey")
public String addMintingAccount(String seed58) {
public String addMintingAccount(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String seed58) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -320,6 +327,7 @@ public class AdminResource {

repository.getAccountRepository().save(mintingAccountData);
repository.saveChanges();
repository.exportNodeLocalData(); // after adding a new minting account, persist it to the backup MintingAccounts.json
} catch (IllegalArgumentException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY, e);
} catch (DataException e) {
@@ -350,7 +358,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String deleteMintingAccount(String key58) {
public String deleteMintingAccount(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -360,6 +368,7 @@ public class AdminResource {
return "false";

repository.saveChanges();
repository.exportNodeLocalData(); // after removing a minting account, persist the change to the backup MintingAccounts.json
} catch (IllegalArgumentException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY, e);
} catch (DataException e) {
@@ -385,6 +394,10 @@ public class AdminResource {
) @QueryParam("limit") Integer limit, @Parameter(
ref = "offset"
) @QueryParam("offset") Integer offset, @Parameter(
name = "tail",
description = "Fetch most recent log lines",
schema = @Schema(type = "boolean")
) @QueryParam("tail") Boolean tail, @Parameter(
ref = "reverse"
) @QueryParam("reverse") Boolean reverse) {
LoggerContext loggerContext = (LoggerContext) LogManager.getContext();
@@ -400,6 +413,13 @@ public class AdminResource {
if (reverse != null && reverse)
logLines = Lists.reverse(logLines);

// Tail mode - return the last X lines (where X = limit)
if (tail != null && tail) {
if (limit != null && limit > 0) {
offset = logLines.size() - limit;
}
}

// offset out of bounds?
if (offset != null && (offset < 0 || offset >= logLines.size()))
return "";
@@ -420,7 +440,7 @@ public class AdminResource {

limit = Math.min(limit, logLines.size());

logLines.subList(limit - 1, logLines.size()).clear();
logLines.subList(limit, logLines.size()).clear();

return String.join("\n", logLines);
} catch (IOException e) {
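Two things change in the log endpoint above: tail mode derives the offset of the last `limit` lines, and the trim is corrected from `limit - 1` to `limit` so exactly `limit` entries survive. A small standalone sketch of those two pieces of arithmetic, using invented sample data:

// Illustrative sketch only - not part of the diff.
import java.util.ArrayList;
import java.util.List;

public class LogTailSketch {
    public static void main(String[] args) {
        List<String> logLines = new ArrayList<>(List.of("L1", "L2", "L3", "L4", "L5"));

        // Tail mode: the last 'limit' lines start at index size - limit.
        int limit = 2;
        int offset = logLines.size() - limit; // 3, i.e. "L4" and "L5"
        System.out.println(logLines.subList(offset, logLines.size())); // [L4, L5]

        // The corrected trim: subList(limit, size()).clear() keeps exactly 'limit'
        // entries; the previous 'limit - 1' dropped one line too many.
        List<String> firstN = new ArrayList<>(logLines);
        firstN.subList(Math.min(limit, firstN.size()), firstN.size()).clear();
        System.out.println(firstN); // [L1, L2]
    }
}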
@@ -450,7 +470,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.INVALID_HEIGHT, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String orphan(String targetHeightString) {
public String orphan(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String targetHeightString) {
Security.checkApiCallAllowed(request);

try {
@@ -459,6 +479,23 @@ public class AdminResource {
if (targetHeight <= 0 || targetHeight > Controller.getInstance().getChainHeight())
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_HEIGHT);

// Make sure we're not orphaning as far back as the archived blocks
// FUTURE: we could support this by first importing earlier blocks from the archive
if (Settings.getInstance().isTopOnly() ||
Settings.getInstance().isArchiveEnabled()) {

try (final Repository repository = RepositoryManager.getRepository()) {
// Find the first unarchived block
int oldestBlock = repository.getBlockArchiveRepository().getBlockArchiveHeight();
// Add some extra blocks just in case we're currently archiving/pruning
oldestBlock += 100;
if (targetHeight <= oldestBlock) {
LOGGER.info("Unable to orphan beyond block {} because it is archived", oldestBlock);
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_HEIGHT);
}
}
}

if (BlockChain.orphan(targetHeight))
return "true";
else
@@ -492,7 +529,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String forceSync(String targetPeerAddress) {
public String forceSync(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String targetPeerAddress) {
Security.checkApiCallAllowed(request);

try {
@@ -500,7 +537,7 @@ public class AdminResource {
PeerAddress peerAddress = PeerAddress.fromString(targetPeerAddress);
InetSocketAddress resolvedAddress = peerAddress.toSocketAddress();

List<Peer> peers = Network.getInstance().getHandshakedPeers();
List<Peer> peers = Network.getInstance().getImmutableHandshakedPeers();
Peer targetPeer = peers.stream().filter(peer -> peer.getResolvedAddress().equals(resolvedAddress)).findFirst().orElse(null);

if (targetPeer == null)
@@ -514,7 +551,7 @@ public class AdminResource {
SynchronizationResult syncResult;
try {
do {
syncResult = Controller.getInstance().actuallySynchronize(targetPeer, true);
syncResult = Synchronizer.getInstance().actuallySynchronize(targetPeer, true);
} while (syncResult == SynchronizationResult.OK);
} finally {
blockchainLock.unlock();
@@ -534,27 +571,16 @@ public class AdminResource {
@Path("/repository/data")
@Operation(
summary = "Export sensitive/node-local data from repository.",
description = "Exports data to .script files on local machine"
description = "Exports data to .json files on local machine"
)
@ApiErrors({ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String exportRepository() {
public String exportRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();

blockchainLock.lockInterruptibly();

try {
repository.exportNodeLocalData(true);
return "true";
} finally {
blockchainLock.unlock();
}
} catch (InterruptedException e) {
// We couldn't lock blockchain to perform export
return "false";
repository.exportNodeLocalData();
return "true";
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -564,13 +590,13 @@ public class AdminResource {
@Path("/repository/data")
@Operation(
summary = "Import data into repository.",
description = "Imports data from file on local machine. Filename is forced to 'import.script' if apiKey is not set.",
description = "Imports data from file on local machine. Filename is forced to 'qortal-backup/TradeBotStates.json' if apiKey is not set.",
requestBody = @RequestBody(
required = true,
content = @Content(
mediaType = MediaType.TEXT_PLAIN,
schema = @Schema(
type = "string", example = "MintingAccounts.script"
type = "string", example = "qortal-backup/TradeBotStates.json"
)
)
),
@@ -583,13 +609,9 @@ public class AdminResource {
)
@ApiErrors({ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String importRepository(String filename) {
public String importRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String filename) {
Security.checkApiCallAllowed(request);

// Hard-coded because it's too dangerous to allow user-supplied filenames in weaker security contexts
if (Settings.getInstance().getApiKey() == null)
filename = "import.script";

try (final Repository repository = RepositoryManager.getRepository()) {
ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();

@@ -600,6 +622,10 @@ public class AdminResource {
repository.saveChanges();

return "true";

} catch (IOException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA, e);

} finally {
blockchainLock.unlock();
}
@@ -625,7 +651,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String checkpointRepository() {
public String checkpointRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

RepositoryManager.setRequestedCheckpoint(Boolean.TRUE);
@@ -646,7 +672,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String backupRepository() {
public String backupRepository(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -655,14 +681,16 @@ public class AdminResource {
blockchainLock.lockInterruptibly();

try {
repository.backup(true);
// Timeout if the database isn't ready for backing up after 60 seconds
long timeout = 60 * 1000L;
repository.backup(true, "backup", timeout);
repository.saveChanges();

return "true";
} finally {
blockchainLock.unlock();
}
} catch (InterruptedException e) {
} catch (InterruptedException | TimeoutException e) {
// We couldn't lock blockchain to perform backup
return "false";
} catch (DataException e) {
@@ -678,7 +706,7 @@ public class AdminResource {
)
@ApiErrors({ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public void performRepositoryMaintenance() {
public void performRepositoryMaintenance(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -687,15 +715,71 @@ public class AdminResource {
blockchainLock.lockInterruptibly();

try {
repository.performPeriodicMaintenance();
// Timeout if the database isn't ready to start after 60 seconds
long timeout = 60 * 1000L;
repository.performPeriodicMaintenance(timeout);
} finally {
blockchainLock.unlock();
}
} catch (InterruptedException e) {
// No big deal
} catch (DataException e) {
} catch (DataException | TimeoutException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}
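The backup and maintenance hunks above introduce a 60-second bound (60 * 1000L ms) and widen the catch to include TimeoutException; TimeUnit and TimeoutException were added to the imports earlier in the same file. A small self-contained sketch of that lock-then-bounded-operation idiom; the doBoundedWork method is a stand-in for repository.backup(true, "backup", timeout) / performPeriodicMaintenance(timeout) and is not from the diff.

// Illustrative sketch only - not part of the diff.
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.ReentrantLock;

public class TimeoutSketch {
    public static void main(String[] args) throws Exception {
        ReentrantLock lock = new ReentrantLock();
        long timeoutMs = TimeUnit.SECONDS.toMillis(60); // same value as the hunks' 60 * 1000L

        // Take the lock, run a bounded operation, treat a timeout like "couldn't do it".
        lock.lockInterruptibly();
        try {
            doBoundedWork(timeoutMs);
        } catch (TimeoutException e) {
            System.out.println("false");
        } finally {
            lock.unlock();
        }
    }

    // Stand-in for the repository calls that now accept a timeout.
    static void doBoundedWork(long timeoutMs) throws TimeoutException {
        // ... real code would give up after timeoutMs ...
    }
}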
@POST
@Path("/apikey/generate")
@Operation(
summary = "Generate an API key",
description = "This request is unauthenticated if no API key has been generated yet. " +
"If an API key already exists, it needs to be passed as a header and this endpoint " +
"will then generate a new key which replaces the existing one.",
responses = {
@ApiResponse(
description = "API key string",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
)
}
)
@SecurityRequirement(name = "apiKey")
public String generateApiKey(@HeaderParam(Security.API_KEY_HEADER) String apiKeyHeader) {
ApiKey apiKey = Security.getApiKey(request);

// If the API key is already generated, we need to authenticate this request
if (apiKey.generated() && apiKey.exists()) {
Security.checkApiCallAllowed(request);
}

// Not generated yet - so we are safe to generate one
// FUTURE: we may want to restrict this to local/loopback only?

try {
apiKey.generate();
} catch (IOException e) {
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.UNAUTHORIZED, "Unable to generate API key");
}

return apiKey.toString();
}

@GET
@Path("/apikey/test")
@Operation(
summary = "Test an API key",
responses = {
@ApiResponse(
description = "true if authenticated",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
)
}
)
@SecurityRequirement(name = "apiKey")
public String testApiKey(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

return "true";
}

}
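A hedged sketch of how a client might exercise the new apikey endpoints shown above. Assumptions not taken from the diff: the resource is mounted under /admin, the node's API listens on localhost:12391, and Security.API_KEY_HEADER resolves to the header name "X-API-KEY".

// Illustrative client sketch only - not part of the diff.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ApiKeySketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:12391"; // assumed default API port
        HttpClient client = HttpClient.newHttpClient();

        // Generate a key (unauthenticated only while no key exists yet).
        HttpRequest generate = HttpRequest.newBuilder(URI.create(base + "/admin/apikey/generate"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        String key = client.send(generate, HttpResponse.BodyHandlers.ofString()).body();

        // Test the key by passing it back as a header (header name assumed).
        HttpRequest test = HttpRequest.newBuilder(URI.create(base + "/admin/apikey/test"))
                .header("X-API-KEY", key)
                .GET()
                .build();
        System.out.println(client.send(test, HttpResponse.BodyHandlers.ofString()).body()); // "true" if accepted
    }
}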
File diff suppressed because it is too large
@@ -1,5 +1,6 @@
package org.qortal.api.resource;

import com.google.common.primitives.Ints;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
@@ -8,7 +9,14 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
@@ -20,18 +28,25 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

import org.qortal.account.Account;
import org.qortal.api.ApiError;
import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.model.BlockMintingInfo;
import org.qortal.api.model.BlockSignerSummary;
import org.qortal.block.Block;
import org.qortal.controller.Controller;
import org.qortal.crypto.Crypto;
import org.qortal.data.account.AccountData;
import org.qortal.data.block.BlockData;
import org.qortal.data.block.BlockSummaryData;
import org.qortal.data.transaction.TransactionData;
import org.qortal.repository.BlockArchiveReader;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.transform.TransformationException;
import org.qortal.transform.block.BlockTransformer;
import org.qortal.utils.Base58;

@Path("/blocks")
@@ -60,7 +75,8 @@ public class BlocksResource {
@ApiErrors({
ApiError.INVALID_SIGNATURE, ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockData getBlock(@PathParam("signature") String signature58) {
public BlockData getBlock(@PathParam("signature") String signature58,
@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
// Decode signature
byte[] signature;
try {
@@ -70,16 +86,80 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
// Check the database first
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

return blockData;
// Not found, so try the block archive
blockData = repository.getBlockArchiveRepository().fromSignature(signature);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

@GET
@Path("/signature/{signature}/data")
@Operation(
summary = "Fetch serialized, base58 encoded block data using base58 signature",
description = "Returns serialized data for the block that matches the given signature",
responses = {
@ApiResponse(
description = "the block data",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
)
}
)
@ApiErrors({
ApiError.INVALID_SIGNATURE, ApiError.BLOCK_UNKNOWN, ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE
})
public String getSerializedBlockData(@PathParam("signature") String signature58) {
// Decode signature
byte[] signature;
try {
signature = Base58.decode(signature58);
} catch (NumberFormatException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_SIGNATURE, e);
}

try (final Repository repository = RepositoryManager.getRepository()) {

// Check the database first
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
Block block = new Block(repository, blockData);
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bytes.write(Ints.toByteArray(block.getBlockData().getHeight()));
bytes.write(BlockTransformer.toBytes(block));
return Base58.encode(bytes.toByteArray());
}

// Not found, so try the block archive
byte[] bytes = BlockArchiveReader.getInstance().fetchSerializedBlockBytesForSignature(signature, false, repository);
if (bytes != null) {
return Base58.encode(bytes);
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
} catch (TransformationException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA, e);
} catch (DataException | IOException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

@GET
@Path("/signature/{signature}/transactions")
@Operation(
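The hunks in this file keep repeating one lookup pattern: try the main Blocks repository first, fall back to the block archive, and only raise BLOCK_UNKNOWN when both miss. A minimal sketch of that pattern as a hypothetical private helper inside the resource class (this helper is not in the diff; only the repository accessors it calls are):

// Illustrative helper only - names the pattern the hunks above repeat.
private BlockData findBlockBySignature(Repository repository, byte[] signature,
                                       boolean includeOnlineSignatures) throws DataException {
    BlockData blockData = repository.getBlockRepository().fromSignature(signature);
    if (blockData == null)
        blockData = repository.getBlockArchiveRepository().fromSignature(signature);

    if (blockData == null)
        throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

    // Online-account signatures are stripped unless explicitly requested.
    if (!includeOnlineSignatures)
        blockData.setOnlineAccountsSignatures(null);

    return blockData;
}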
@@ -117,8 +197,12 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
if (repository.getBlockRepository().getHeightFromSignature(signature) == 0)
// Check if the block exists in either the database or archive
if (repository.getBlockRepository().getHeightFromSignature(signature) == 0 &&
repository.getBlockArchiveRepository().getHeightFromSignature(signature) == 0) {
// Not found in either the database or archive
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

return repository.getBlockRepository().getTransactionsFromSignature(signature, limit, offset, reverse);
} catch (DataException e) {
@@ -147,7 +231,19 @@ public class BlocksResource {
})
public BlockData getFirstBlock() {
try (final Repository repository = RepositoryManager.getRepository()) {
return repository.getBlockRepository().fromHeight(1);
// Check the database first
BlockData blockData = repository.getBlockRepository().fromHeight(1);
if (blockData != null) {
return blockData;
}

// Try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(1);
if (blockData != null) {
return blockData;
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -172,9 +268,15 @@ public class BlocksResource {
@ApiErrors({
ApiError.REPOSITORY_ISSUE
})
public BlockData getLastBlock() {
public BlockData getLastBlock(@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
try (final Repository repository = RepositoryManager.getRepository()) {
return repository.getBlockRepository().getLastBlock();
BlockData blockData = repository.getBlockRepository().getLastBlock();

if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}

return blockData;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -209,17 +311,28 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
BlockData childBlockData = null;

// Check if block exists in database
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
return repository.getBlockRepository().fromReference(signature);
}

// Check block exists
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

BlockData childBlockData = repository.getBlockRepository().fromReference(signature);
// Not found, so try the archive
// This also checks that the parent block exists
// It will return null if either the parent or child don't exist
childBlockData = repository.getBlockArchiveRepository().fromReference(signature);

// Check child block exists
if (childBlockData == null)
if (childBlockData == null) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

// Check child block's reference matches the supplied signature
if (!Arrays.equals(childBlockData.getReference(), signature)) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

return childBlockData;
} catch (DataException e) {
@@ -285,13 +398,20 @@ public class BlocksResource {
}

try (final Repository repository = RepositoryManager.getRepository()) {
// Firstly check the database
BlockData blockData = repository.getBlockRepository().fromSignature(signature);
if (blockData != null) {
return blockData.getHeight();
}

// Check block exists
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
// Not found, so try the archive
blockData = repository.getBlockArchiveRepository().fromSignature(signature);
if (blockData != null) {
return blockData.getHeight();
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

return blockData.getHeight();
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -316,13 +436,101 @@ public class BlocksResource {
@ApiErrors({
ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockData getByHeight(@PathParam("height") int height) {
public BlockData getByHeight(@PathParam("height") int height,
@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
try (final Repository repository = RepositoryManager.getRepository()) {
// Firstly check the database
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

return blockData;
// Not found, so try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
if (blockData != null) {
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);

} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
}

@GET
@Path("/byheight/{height}/mintinginfo")
@Operation(
summary = "Fetch block minter info using block height",
description = "Returns the minter info for the block with given height",
responses = {
@ApiResponse(
description = "the block",
content = @Content(
schema = @Schema(
implementation = BlockData.class
)
)
)
}
)
@ApiErrors({
ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockMintingInfo getBlockMintingInfoByHeight(@PathParam("height") int height) {
try (final Repository repository = RepositoryManager.getRepository()) {
// Try the database
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null) {

// Not found, so try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
if (blockData == null) {

// Still not found
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}
}

Block block = new Block(repository, blockData);
BlockData parentBlockData = repository.getBlockRepository().fromSignature(blockData.getReference());
if (parentBlockData == null) {
// Parent block not found - try the archive
parentBlockData = repository.getBlockArchiveRepository().fromSignature(blockData.getReference());
if (parentBlockData == null) {

// Still not found
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}
}

int minterLevel = Account.getRewardShareEffectiveMintingLevel(repository, blockData.getMinterPublicKey());
if (minterLevel == 0)
// This may be unavailable when requesting a trimmed block
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);

BigInteger distance = block.calcKeyDistance(parentBlockData.getHeight(), parentBlockData.getSignature(), blockData.getMinterPublicKey(), minterLevel);
double ratio = new BigDecimal(distance).divide(new BigDecimal(block.MAX_DISTANCE), 40, RoundingMode.DOWN).doubleValue();
long timestamp = block.calcTimestamp(parentBlockData, blockData.getMinterPublicKey(), minterLevel);
long timeDelta = timestamp - parentBlockData.getTimestamp();

BlockMintingInfo blockMintingInfo = new BlockMintingInfo();
blockMintingInfo.minterPublicKey = blockData.getMinterPublicKey();
blockMintingInfo.minterLevel = minterLevel;
blockMintingInfo.onlineAccountsCount = blockData.getOnlineAccountsCount();
blockMintingInfo.maxDistance = new BigDecimal(block.MAX_DISTANCE);
blockMintingInfo.keyDistance = distance;
blockMintingInfo.keyDistanceRatio = ratio;
blockMintingInfo.timestamp = timestamp;
blockMintingInfo.timeDelta = timeDelta;

return blockMintingInfo;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -346,15 +554,37 @@ public class BlocksResource {
@ApiErrors({
ApiError.BLOCK_UNKNOWN, ApiError.REPOSITORY_ISSUE
})
public BlockData getByTimestamp(@PathParam("timestamp") long timestamp) {
public BlockData getByTimestamp(@PathParam("timestamp") long timestamp,
@QueryParam("includeOnlineSignatures") Boolean includeOnlineSignatures) {
try (final Repository repository = RepositoryManager.getRepository()) {
int height = repository.getBlockRepository().getHeightFromTimestamp(timestamp);
if (height == 0)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
BlockData blockData = null;

BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null)
// Try the Blocks table
int height = repository.getBlockRepository().getHeightFromTimestamp(timestamp);
if (height > 1) {
// Found match in Blocks table
blockData = repository.getBlockRepository().fromHeight(height);
if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}
return blockData;
}

// Not found in Blocks table, so try the archive
height = repository.getBlockArchiveRepository().getHeightFromTimestamp(timestamp);
if (height > 1) {
// Found match in archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
}

// Ensure block exists
if (blockData == null) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCK_UNKNOWN);
}

if (includeOnlineSignatures == null || includeOnlineSignatures == false) {
blockData.setOnlineAccountsSignatures(null);
}

return blockData;
} catch (DataException e) {
@@ -391,9 +621,14 @@ public class BlocksResource {

for (/* count already set */; count > 0; --count, ++height) {
BlockData blockData = repository.getBlockRepository().fromHeight(height);
if (blockData == null)
// Run out of blocks!
break;
if (blockData == null) {
// Not found - try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(height);
if (blockData == null) {
// Run out of blocks!
break;
}
}

blocks.add(blockData);
}
@@ -438,7 +673,29 @@ public class BlocksResource {
if (accountData == null || accountData.getPublicKey() == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.PUBLIC_KEY_NOT_FOUND);

return repository.getBlockRepository().getBlockSummariesBySigner(accountData.getPublicKey(), limit, offset, reverse);

List<BlockSummaryData> summaries = repository.getBlockRepository()
.getBlockSummariesBySigner(accountData.getPublicKey(), limit, offset, reverse);

// Add any from the archive
List<BlockSummaryData> archivedSummaries = repository.getBlockArchiveRepository()
.getBlockSummariesBySigner(accountData.getPublicKey(), limit, offset, reverse);
if (archivedSummaries != null && !archivedSummaries.isEmpty()) {
summaries.addAll(archivedSummaries);
}
else {
summaries = archivedSummaries;
}

// Sort the results (because they may have been obtained from two places)
if (reverse != null && reverse) {
summaries.sort((s1, s2) -> Integer.valueOf(s2.getHeight()).compareTo(Integer.valueOf(s1.getHeight())));
}
else {
summaries.sort(Comparator.comparing(s -> Integer.valueOf(s.getHeight())));
}

return summaries;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -474,7 +731,8 @@ public class BlocksResource {
if (!Crypto.isValidAddress(address))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

return repository.getBlockRepository().getBlockSigners(addresses, limit, offset, reverse);
// This method pulls data from both Blocks and BlockArchive, so no need to query separately
return repository.getBlockArchiveRepository().getBlockSigners(addresses, limit, offset, reverse);
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
@@ -514,7 +772,76 @@ public class BlocksResource {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

try (final Repository repository = RepositoryManager.getRepository()) {
return repository.getBlockRepository().getBlockSummaries(startHeight, endHeight, count);

/*
 * start  end   count   result
 * 10     40    null    blocks 10 to 39 (excludes end block, ignore count)
 *
 * null   null  null    blocks 1 to 50 (assume count=50, maybe start=1)
 * 30     null  null    blocks 30 to 79 (assume count=50)
 * 30     null  10      blocks 30 to 39
 *
 * null   null  50      last 50 blocks? so if max(blocks.height) is 200, then blocks 151 to 200
 * null   200   null    blocks 150 to 199 (excludes end block, assume count=50)
 * null   200   10      blocks 190 to 199 (excludes end block)
 */

List<BlockSummaryData> blockSummaries = new ArrayList<>();

// Use the latest X blocks if only a count is specified
if (startHeight == null && endHeight == null && count != null) {
BlockData chainTip = repository.getBlockRepository().getLastBlock();
startHeight = chainTip.getHeight() - count;
endHeight = chainTip.getHeight();
}

// ... otherwise default the start height to 1
if (startHeight == null && endHeight == null) {
startHeight = 1;
}

// Default the count to 50
if (count == null) {
count = 50;
}

// If both a start and end height exist, ignore the count
if (startHeight != null && endHeight != null) {
if (startHeight > 0 && endHeight > 0) {
count = Integer.MAX_VALUE;
}
}

// Derive start height from end height if missing
if (startHeight == null || startHeight == 0) {
if (endHeight != null && endHeight > 0) {
if (count != null) {
startHeight = endHeight - count;
}
}
}

for (/* count already set */; count > 0; --count, ++startHeight) {
if (endHeight != null && startHeight >= endHeight) {
break;
}
BlockData blockData = repository.getBlockRepository().fromHeight(startHeight);
if (blockData == null) {
// Not found - try the archive
blockData = repository.getBlockArchiveRepository().fromHeight(startHeight);
if (blockData == null) {
// Run out of blocks!
break;
}
}

if (blockData != null) {
BlockSummaryData blockSummaryData = new BlockSummaryData(blockData);
blockSummaries.add(blockSummaryData);
}
}

return blockSummaries;
} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
}
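The comment table and the branch logic above resolve startHeight, endHeight and count before the loop runs. A standalone restatement of that resolution, with two worked cases, makes the defaults easy to check by hand; this sketch is illustrative only and is not code from the diff.

// Illustrative sketch only - not part of the diff.
public class BlockRangeSketch {
    // Returns {startHeight, endHeight (-1 if absent), count} after applying the defaults above.
    static int[] resolve(Integer start, Integer end, Integer count, int chainHeight) {
        if (start == null && end == null && count != null) { // "last N blocks"
            start = chainHeight - count;
            end = chainHeight;
        }
        if (start == null && end == null) start = 1;          // default start
        if (count == null) count = 50;                        // default count
        if (start != null && end != null && start > 0 && end > 0)
            count = Integer.MAX_VALUE;                        // explicit range: count is ignored
        if ((start == null || start == 0) && end != null && end > 0)
            start = end - count;                              // derive start from end
        return new int[] { start == null ? 0 : start, end == null ? -1 : end, count };
    }

    public static void main(String[] args) {
        // start=10, end=40 -> {10, 40, MAX_VALUE}: the loop walks 10..39 (end excluded, count ignored)
        System.out.println(java.util.Arrays.toString(resolve(10, 40, null, 200)));
        // count=50 only, 200-block chain -> {150, 200, MAX_VALUE}: the loop collects heights 150..199
        System.out.println(java.util.Arrays.toString(resolve(null, null, 50, 200)));
    }
}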
src/main/java/org/qortal/api/resource/BootstrapResource.java (new file, 95 lines)
@@ -0,0 +1,95 @@
package org.qortal.api.resource;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.api.ApiError;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.repository.Bootstrap;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.io.IOException;


@Path("/bootstrap")
@Tag(name = "Bootstrap")
public class BootstrapResource {

private static final Logger LOGGER = LogManager.getLogger(BootstrapResource.class);

@Context
HttpServletRequest request;

@POST
@Path("/create")
@Operation(
summary = "Create bootstrap",
description = "Builds a bootstrap file for distribution",
responses = {
@ApiResponse(
description = "path to file on success, an exception on failure",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
)
}
)
@SecurityRequirement(name = "apiKey")
public String createBootstrap(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {

Bootstrap bootstrap = new Bootstrap(repository);
try {
bootstrap.checkRepositoryState();
} catch (DataException e) {
LOGGER.info("Not ready to create bootstrap: {}", e.getMessage());
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.REPOSITORY_ISSUE, e.getMessage());
}
bootstrap.validateBlockchain();
return bootstrap.create();

} catch (DataException | InterruptedException | IOException e) {
LOGGER.info("Unable to create bootstrap", e);
throw ApiExceptionFactory.INSTANCE.createCustomException(request, ApiError.REPOSITORY_ISSUE, e.getMessage());
}
}

@GET
@Path("/validate")
@Operation(
summary = "Validate blockchain",
description = "Useful to check database integrity prior to creating or after installing a bootstrap. " +
"This process is intensive and can take over an hour to run.",
responses = {
@ApiResponse(
description = "true if valid, false if invalid",
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
)
}
)
@SecurityRequirement(name = "apiKey")
public boolean validateBootstrap(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {

Bootstrap bootstrap = new Bootstrap(repository);
return bootstrap.validateCompleteBlockchain();

} catch (DataException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE);
}
}
}
@@ -13,11 +13,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

@@ -158,7 +154,7 @@ public class ChatResource {
)
@ApiErrors({ApiError.TRANSACTION_INVALID, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildChat(ChatTransactionData transactionData) {
public String buildChat(@HeaderParam(Security.API_KEY_HEADER) String apiKey, ChatTransactionData transactionData) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -206,7 +202,7 @@ public class ChatResource {
)
@ApiErrors({ApiError.TRANSACTION_INVALID, ApiError.INVALID_DATA, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
@SecurityRequirement(name = "apiKey")
public String buildChat(String rawBytes58) {
public String buildChat(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String rawBytes58) {
Security.checkApiCallAllowed(request);

try (final Repository repository = RepositoryManager.getRepository()) {
@@ -5,12 +5,14 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.Arrays;
import java.util.Random;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
@@ -22,7 +24,7 @@ import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.CrossChainBuildRequest;
import org.qortal.api.model.CrossChainSecretRequest;
import org.qortal.api.model.CrossChainDualSecretRequest;
import org.qortal.api.model.CrossChainTradeRequest;
import org.qortal.asset.Asset;
import org.qortal.crosschain.BitcoinACCTv1;
@@ -79,7 +81,8 @@ public class CrossChainBitcoinACCTv1Resource {
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_DATA, ApiError.INVALID_REFERENCE, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE})
public String buildTrade(CrossChainBuildRequest tradeRequest) {
@SecurityRequirement(name = "apiKey")
public String buildTrade(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainBuildRequest tradeRequest) {
Security.checkApiCallAllowed(request);

byte[] creatorPublicKey = tradeRequest.creatorPublicKey;
@@ -174,7 +177,8 @@ public class CrossChainBitcoinACCTv1Resource {
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
public String buildTradeMessage(CrossChainTradeRequest tradeRequest) {
@SecurityRequirement(name = "apiKey")
public String buildTradeMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainTradeRequest tradeRequest) {
Security.checkApiCallAllowed(request);

byte[] tradePublicKey = tradeRequest.tradePublicKey;
@@ -242,7 +246,7 @@ public class CrossChainBitcoinACCTv1Resource {
content = @Content(
mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(
implementation = CrossChainSecretRequest.class
implementation = CrossChainDualSecretRequest.class
)
)
),
@@ -257,7 +261,8 @@ public class CrossChainBitcoinACCTv1Resource {
}
)
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_DATA, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
public String buildRedeemMessage(CrossChainSecretRequest secretRequest) {
@SecurityRequirement(name = "apiKey")
public String buildRedeemMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainDualSecretRequest secretRequest) {
Security.checkApiCallAllowed(request);

byte[] partnerPublicKey = secretRequest.partnerPublicKey;
@@ -360,4 +365,4 @@ public class CrossChainBitcoinACCTv1Resource {
}
}

}
}
@@ -6,11 +6,13 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
@@ -23,8 +25,8 @@ import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.crosschain.BitcoinSendRequest;
import org.qortal.crosschain.Bitcoin;
import org.qortal.crosschain.BitcoinyTransaction;
import org.qortal.crosschain.ForeignBlockchainException;
import org.qortal.crosschain.SimpleTransaction;

@Path("/crosschain/btc")
@Tag(name = "Cross-Chain (Bitcoin)")
@@ -56,7 +58,8 @@ public class CrossChainBitcoinResource {
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
public String getBitcoinWalletBalance(String key58) {
@SecurityRequirement(name = "apiKey")
public String getBitcoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

Bitcoin bitcoin = Bitcoin.getInstance();
@@ -64,11 +67,16 @@ public class CrossChainBitcoinResource {
if (!bitcoin.isValidDeterministicKey(key58))
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

Long balance = bitcoin.getWalletBalance(key58);
if (balance == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
try {
Long balance = bitcoin.getWalletBalanceFromTransactions(key58);
if (balance == null)
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);

return balance.toString();
return balance.toString();

} catch (ForeignBlockchainException e) {
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
}
}

@POST
@@ -89,12 +97,13 @@ public class CrossChainBitcoinResource {
),
responses = {
@ApiResponse(
content = @Content(array = @ArraySchema( schema = @Schema( implementation = BitcoinyTransaction.class ) ) )
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
)
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
public List<BitcoinyTransaction> getBitcoinWalletTransactions(String key58) {
@SecurityRequirement(name = "apiKey")
public List<SimpleTransaction> getBitcoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
Security.checkApiCallAllowed(request);

Bitcoin bitcoin = Bitcoin.getInstance();
@@ -113,7 +122,7 @@ public class CrossChainBitcoinResource {
@Path("/send")
@Operation(
summary = "Sends BTC from hierarchical, deterministic BIP32 wallet to specific address",
description = "Currently only supports 'legacy' P2PKH Bitcoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
description = "Currently supports 'legacy' P2PKH Bitcoin addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
requestBody = @RequestBody(
required = true,
content = @Content(
@@ -130,7 +139,8 @@ public class CrossChainBitcoinResource {
}
)
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
public String sendBitcoin(BitcoinSendRequest bitcoinSendRequest) {
@SecurityRequirement(name = "apiKey")
public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, BitcoinSendRequest bitcoinSendRequest) {
Security.checkApiCallAllowed(request);

if (bitcoinSendRequest.bitcoinAmount <= 0)
@@ -164,4 +174,4 @@ public class CrossChainBitcoinResource {
return spendTransaction.getTxId().toString();
}

}
}
@@ -0,0 +1,177 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.bitcoinj.core.Transaction;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.DigibyteSendRequest;
|
||||
import org.qortal.crosschain.Digibyte;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.SimpleTransaction;
|
||||
|
||||
@Path("/crosschain/dgb")
|
||||
@Tag(name = "Cross-Chain (Digibyte)")
|
||||
public class CrossChainDigibyteResource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/walletbalance")
|
||||
@Operation(
|
||||
summary = "Returns DGB balance for hierarchical, deterministic BIP32 wallet",
|
||||
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "BIP32 'm' private/public key in base58",
|
||||
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getDigibyteWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Digibyte digibyte = Digibyte.getInstance();
|
||||
|
||||
if (!digibyte.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
Long balance = digibyte.getWalletBalanceFromTransactions(key58);
|
||||
if (balance == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
|
||||
return balance.toString();
|
||||
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/wallettransactions")
|
||||
@Operation(
|
||||
summary = "Returns transactions for hierarchical, deterministic BIP32 wallet",
|
||||
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "BIP32 'm' private/public key in base58",
|
||||
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<SimpleTransaction> getDigibyteWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Digibyte digibyte = Digibyte.getInstance();
|
||||
|
||||
if (!digibyte.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
return digibyte.getWalletTransactions(key58);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/send")
|
||||
@Operation(
|
||||
summary = "Sends DGB from hierarchical, deterministic BIP32 wallet to specific address",
|
||||
description = "Currently supports 'legacy' P2PKH Digibyte addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = DigibyteSendRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String sendDigibyte(@HeaderParam(Security.API_KEY_HEADER) String apiKey, DigibyteSendRequest digibyteSendRequest) {
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (digibyteSendRequest.digibyteAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (digibyteSendRequest.feePerByte != null && digibyteSendRequest.feePerByte <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Digibyte digibyte = Digibyte.getInstance();
|
||||
|
||||
if (!digibyte.isValidAddress(digibyteSendRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
if (!digibyte.isValidDeterministicKey(digibyteSendRequest.xprv58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
Transaction spendTransaction = digibyte.buildSpend(digibyteSendRequest.xprv58,
|
||||
digibyteSendRequest.receivingAddress,
|
||||
digibyteSendRequest.digibyteAmount,
|
||||
digibyteSendRequest.feePerByte);
|
||||
|
||||
if (spendTransaction == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE);
|
||||
|
||||
try {
|
||||
digibyte.broadcastTransaction(spendTransaction);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
|
||||
return spendTransaction.getTxId().toString();
|
||||
}
|
||||
|
||||
}
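For reference, a minimal client-side sketch of calling the wallet-balance endpoint above. The node URL (http://localhost:12391) and the API key header name (X-API-KEY, i.e. the value behind Security.API_KEY_HEADER) are assumptions, not something this diff confirms; adjust both for your node.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DigibyteBalanceExample {
	public static void main(String[] args) throws Exception {
		String node = "http://localhost:12391";           // assumed default API port
		String apiKey = "<your node API key>";            // placeholder
		String key58 = "<BIP32 'm' xpub/xprv in base58>"; // placeholder

		// The endpoint takes the raw key as a text/plain body and returns the balance in satoshis.
		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create(node + "/crosschain/dgb/walletbalance"))
				.header("Content-Type", "text/plain")
				.header("X-API-KEY", apiKey)              // assumed header name
				.POST(HttpRequest.BodyPublishers.ofString(key58))
				.build();

		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());
		System.out.println("DGB balance (satoshis): " + response.body());
	}
}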
@@ -0,0 +1,143 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.qortal.account.PrivateKeyAccount;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.CrossChainSecretRequest;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
import org.qortal.crosschain.DogecoinACCTv1;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.at.ATData;
|
||||
import org.qortal.data.crosschain.CrossChainTradeData;
|
||||
import org.qortal.group.Group;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.transaction.MessageTransaction;
|
||||
import org.qortal.transaction.Transaction.ValidationResult;
|
||||
import org.qortal.transform.Transformer;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.util.Arrays;
|
||||
|
||||
@Path("/crosschain/DogecoinACCTv1")
|
||||
@Tag(name = "Cross-Chain (DogecoinACCTv1)")
|
||||
public class CrossChainDogecoinACCTv1Resource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/redeemmessage")
|
||||
@Operation(
|
||||
summary = "Signs and broadcasts a 'redeem' MESSAGE transaction that sends secrets to AT, releasing funds to partner",
|
||||
description = "Specify address of cross-chain AT that needs to be messaged, Alice's trade private key, the 32-byte secret,<br>"
|
||||
+ "and an address for receiving QORT from AT. All of these can be found in Alice's trade bot data.<br>"
|
||||
+ "AT needs to be in 'trade' mode. Messages sent to an AT in any other mode will be ignored, but still cost fees to send!<br>"
|
||||
+ "You need to use the private key that the AT considers the trade 'partner' otherwise the MESSAGE transaction will be invalid.",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = CrossChainSecretRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_DATA, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean buildRedeemMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainSecretRequest secretRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
byte[] partnerPrivateKey = secretRequest.partnerPrivateKey;
|
||||
|
||||
if (partnerPrivateKey == null || partnerPrivateKey.length != Transformer.PRIVATE_KEY_LENGTH)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
if (secretRequest.atAddress == null || !Crypto.isValidAtAddress(secretRequest.atAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
if (secretRequest.secret == null || secretRequest.secret.length != DogecoinACCTv1.SECRET_LENGTH)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
|
||||
if (secretRequest.receivingAddress == null || !Crypto.isValidAddress(secretRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ATData atData = fetchAtDataWithChecking(repository, secretRequest.atAddress);
|
||||
CrossChainTradeData crossChainTradeData = DogecoinACCTv1.getInstance().populateTradeData(repository, atData);
|
||||
|
||||
if (crossChainTradeData.mode != AcctMode.TRADING)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
byte[] partnerPublicKey = new PrivateKeyAccount(null, partnerPrivateKey).getPublicKey();
|
||||
String partnerAddress = Crypto.toAddress(partnerPublicKey);
|
||||
|
||||
// MESSAGE must come from address that AT considers trade partner
|
||||
if (!crossChainTradeData.qortalPartnerAddress.equals(partnerAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
// Good to make MESSAGE
|
||||
|
||||
byte[] messageData = DogecoinACCTv1.buildRedeemMessage(secretRequest.secret, secretRequest.receivingAddress);
|
||||
|
||||
PrivateKeyAccount sender = new PrivateKeyAccount(repository, partnerPrivateKey);
|
||||
MessageTransaction messageTransaction = MessageTransaction.build(repository, sender, Group.NO_GROUP, secretRequest.atAddress, messageData, false, false);
|
||||
|
||||
messageTransaction.computeNonce();
|
||||
messageTransaction.sign(sender);
|
||||
|
||||
// reset repository state to prevent deadlock
|
||||
repository.discardChanges();
|
||||
ValidationResult result = messageTransaction.importAsUnconfirmed();
|
||||
|
||||
if (result != ValidationResult.OK)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSACTION_INVALID);
|
||||
|
||||
return true;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
private ATData fetchAtDataWithChecking(Repository repository, String atAddress) throws DataException {
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
// Must be correct AT - check functionality using code hash
|
||||
if (!Arrays.equals(atData.getCodeHash(), DogecoinACCTv1.CODE_BYTES_HASH))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// No point sending message to AT that's finished
|
||||
if (atData.getIsFinished())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
return atData;
|
||||
}
|
||||
|
||||
}
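A hedged sketch of invoking the /crosschain/DogecoinACCTv1/redeemmessage endpoint above. The JSON field names mirror the CrossChainSecretRequest accessors the resource reads (partnerPrivateKey, atAddress, secret, receivingAddress); encoding the byte[] fields as Base58 strings, the node URL and the X-API-KEY header name are assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DogecoinRedeemMessageExample {
	public static void main(String[] args) throws Exception {
		// Placeholders only; supply Alice's real trade bot values.
		String json = "{"
				+ "\"partnerPrivateKey\": \"<Alice's trade private key>\","
				+ "\"atAddress\": \"<cross-chain AT address>\","
				+ "\"secret\": \"<32-byte secret-A>\","
				+ "\"receivingAddress\": \"<QORT address to receive funds from the AT>\""
				+ "}";

		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:12391/crosschain/DogecoinACCTv1/redeemmessage"))
				.header("Content-Type", "application/json")
				.header("X-API-KEY", "<your node API key>") // assumed header name
				.POST(HttpRequest.BodyPublishers.ofString(json))
				.build();

		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());
		System.out.println("Redeem MESSAGE accepted: " + response.body()); // "true" when imported OK
	}
}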
@@ -0,0 +1,175 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.bitcoinj.core.Transaction;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.DogecoinSendRequest;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.Dogecoin;
|
||||
import org.qortal.crosschain.SimpleTransaction;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.util.List;
|
||||
|
||||
@Path("/crosschain/doge")
|
||||
@Tag(name = "Cross-Chain (Dogecoin)")
|
||||
public class CrossChainDogecoinResource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/walletbalance")
|
||||
@Operation(
|
||||
summary = "Returns DOGE balance for hierarchical, deterministic BIP32 wallet",
|
||||
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "BIP32 'm' private/public key in base58",
|
||||
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getDogecoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Dogecoin dogecoin = Dogecoin.getInstance();
|
||||
|
||||
if (!dogecoin.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
Long balance = dogecoin.getWalletBalanceFromTransactions(key58);
|
||||
if (balance == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
|
||||
return balance.toString();
|
||||
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/wallettransactions")
|
||||
@Operation(
|
||||
summary = "Returns transactions for hierarchical, deterministic BIP32 wallet",
|
||||
description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "BIP32 'm' private/public key in base58",
|
||||
example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<SimpleTransaction> getDogecoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
Dogecoin dogecoin = Dogecoin.getInstance();
|
||||
|
||||
if (!dogecoin.isValidDeterministicKey(key58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
try {
|
||||
return dogecoin.getWalletTransactions(key58);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/send")
|
||||
@Operation(
|
||||
summary = "Sends DOGE from hierarchical, deterministic BIP32 wallet to specific address",
|
||||
description = "Currently only supports 'legacy' P2PKH Dogecoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = DogecoinSendRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String sendDogecoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, DogecoinSendRequest dogecoinSendRequest) {
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (dogecoinSendRequest.dogecoinAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (dogecoinSendRequest.feePerByte != null && dogecoinSendRequest.feePerByte <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Dogecoin dogecoin = Dogecoin.getInstance();
|
||||
|
||||
if (!dogecoin.isValidAddress(dogecoinSendRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
if (!dogecoin.isValidDeterministicKey(dogecoinSendRequest.xprv58))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);
|
||||
|
||||
Transaction spendTransaction = dogecoin.buildSpend(dogecoinSendRequest.xprv58,
|
||||
dogecoinSendRequest.receivingAddress,
|
||||
dogecoinSendRequest.dogecoinAmount,
|
||||
dogecoinSendRequest.feePerByte);
|
||||
|
||||
if (spendTransaction == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE);
|
||||
|
||||
try {
|
||||
dogecoin.broadcastTransaction(spendTransaction);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
|
||||
}
|
||||
|
||||
return spendTransaction.getTxId().toString();
|
||||
}
|
||||
|
||||
}
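The send endpoint above takes a JSON DogecoinSendRequest. Below is a minimal sketch of the payload shape, using only the field names the handler reads (xprv58, receivingAddress, dogecoinAmount, feePerByte); treating amounts as satoshis is an assumption carried over from the balance endpoint's description, and the numbers are placeholders.

public class DogecoinSendPayloadExample {
	public static void main(String[] args) {
		// Field names mirror the DogecoinSendRequest accessors used by /crosschain/doge/send.
		// Amount/fee values are illustrative placeholders; feePerByte may be omitted entirely,
		// but when present it must be greater than zero (see the validation above).
		String payload = String.join("\n",
				"{",
				"  \"xprv58\": \"<BIP32 'm' private key, base58, 'xprv'/'tprv'>\",",
				"  \"receivingAddress\": \"<legacy P2PKH Dogecoin address>\",",
				"  \"dogecoinAmount\": 100000000,",
				"  \"feePerByte\": 1000",
				"}");

		// POST this as application/json (plus the node's API key header) in the same way as the
		// wallet-balance example earlier; the response body is the broadcast transaction hash.
		System.out.println(payload);
	}
}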
@@ -4,36 +4,40 @@ import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.List;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.bitcoinj.core.TransactionOutput;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.bitcoinj.core.*;
|
||||
import org.bitcoinj.script.Script;
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.model.CrossChainBitcoinyHTLCStatus;
|
||||
import org.qortal.crosschain.Bitcoiny;
|
||||
import org.qortal.crosschain.ForeignBlockchainException;
|
||||
import org.qortal.crosschain.SupportedBlockchain;
|
||||
import org.qortal.crosschain.BitcoinyHTLC;
|
||||
import org.qortal.crosschain.*;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.at.ATData;
|
||||
import org.qortal.data.crosschain.CrossChainTradeData;
|
||||
import org.qortal.data.crosschain.TradeBotData;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import com.google.common.hash.HashCode;
|
||||
|
||||
@Path("/crosschain/htlc")
|
||||
@Tag(name = "Cross-Chain (Hash time-locked contracts)")
|
||||
public class CrossChainHtlcResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(CrossChainHtlcResource.class);
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@@ -41,7 +45,7 @@ public class CrossChainHtlcResource {
|
||||
@Path("/address/{blockchain}/{refundPKH}/{locktime}/{redeemPKH}/{hashOfSecret}")
|
||||
@Operation(
|
||||
summary = "Returns HTLC address based on trade info",
|
||||
description = "Blockchain can be BITCOIN or LITECOIN. Public key hashes (PKH) and hash of secret should be 20 bytes (hex). Locktime is seconds since epoch.",
|
||||
description = "Public key hashes (PKH) and hash of secret should be 20 bytes (base58 encoded). Locktime is seconds since epoch.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string"))
|
||||
@@ -50,21 +54,21 @@ public class CrossChainHtlcResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_CRITERIA})
|
||||
public String deriveHtlcAddress(@PathParam("blockchain") String blockchainName,
|
||||
@PathParam("refundPKH") String refundHex,
|
||||
@PathParam("refundPKH") String refundPKH,
|
||||
@PathParam("locktime") int lockTime,
|
||||
@PathParam("redeemPKH") String redeemHex,
|
||||
@PathParam("hashOfSecret") String hashOfSecretHex) {
|
||||
@PathParam("redeemPKH") String redeemPKH,
|
||||
@PathParam("hashOfSecret") String hashOfSecret) {
|
||||
SupportedBlockchain blockchain = SupportedBlockchain.valueOf(blockchainName);
|
||||
if (blockchain == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
byte[] refunderPubKeyHash;
|
||||
byte[] redeemerPubKeyHash;
|
||||
byte[] hashOfSecret;
|
||||
byte[] decodedHashOfSecret;
|
||||
|
||||
try {
|
||||
refunderPubKeyHash = HashCode.fromString(refundHex).asBytes();
|
||||
redeemerPubKeyHash = HashCode.fromString(redeemHex).asBytes();
|
||||
refunderPubKeyHash = Base58.decode(refundPKH);
|
||||
redeemerPubKeyHash = Base58.decode(redeemPKH);
|
||||
|
||||
if (refunderPubKeyHash.length != 20 || redeemerPubKeyHash.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PUBLIC_KEY);
|
||||
@@ -73,14 +77,14 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
|
||||
try {
|
||||
hashOfSecret = HashCode.fromString(hashOfSecretHex).asBytes();
|
||||
if (hashOfSecret.length != 20)
|
||||
decodedHashOfSecret = Base58.decode(hashOfSecret);
|
||||
if (decodedHashOfSecret.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, hashOfSecret);
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, decodedHashOfSecret);
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) blockchain.getInstance();
|
||||
|
||||
@@ -91,7 +95,7 @@ public class CrossChainHtlcResource {
|
||||
@Path("/status/{blockchain}/{refundPKH}/{locktime}/{redeemPKH}/{hashOfSecret}")
|
||||
@Operation(
|
||||
summary = "Checks HTLC status",
|
||||
description = "Blockchain can be BITCOIN or LITECOIN. Public key hashes (PKH) and hash of secret should be 20 bytes (hex). Locktime is seconds since epoch.",
|
||||
description = "Public key hashes (PKH) and hash of secret should be 20 bytes (base58 encoded). Locktime is seconds since epoch.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.APPLICATION_JSON, schema = @Schema(implementation = CrossChainBitcoinyHTLCStatus.class))
|
||||
@@ -99,11 +103,13 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
public CrossChainBitcoinyHTLCStatus checkHtlcStatus(@PathParam("blockchain") String blockchainName,
|
||||
@PathParam("refundPKH") String refundHex,
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public CrossChainBitcoinyHTLCStatus checkHtlcStatus(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("blockchain") String blockchainName,
|
||||
@PathParam("refundPKH") String refundPKH,
|
||||
@PathParam("locktime") int lockTime,
|
||||
@PathParam("redeemPKH") String redeemHex,
|
||||
@PathParam("hashOfSecret") String hashOfSecretHex) {
|
||||
@PathParam("redeemPKH") String redeemPKH,
|
||||
@PathParam("hashOfSecret") String hashOfSecret) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
SupportedBlockchain blockchain = SupportedBlockchain.valueOf(blockchainName);
|
||||
@@ -112,11 +118,11 @@ public class CrossChainHtlcResource {
|
||||
|
||||
byte[] refunderPubKeyHash;
|
||||
byte[] redeemerPubKeyHash;
|
||||
byte[] hashOfSecret;
|
||||
byte[] decodedHashOfSecret;
|
||||
|
||||
try {
|
||||
refunderPubKeyHash = HashCode.fromString(refundHex).asBytes();
|
||||
redeemerPubKeyHash = HashCode.fromString(redeemHex).asBytes();
|
||||
refunderPubKeyHash = Base58.decode(refundPKH);
|
||||
redeemerPubKeyHash = Base58.decode(redeemPKH);
|
||||
|
||||
if (refunderPubKeyHash.length != 20 || redeemerPubKeyHash.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PUBLIC_KEY);
|
||||
@@ -125,14 +131,14 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
|
||||
try {
|
||||
hashOfSecret = HashCode.fromString(hashOfSecretHex).asBytes();
|
||||
if (hashOfSecret.length != 20)
|
||||
decodedHashOfSecret = Base58.decode(hashOfSecret);
|
||||
if (decodedHashOfSecret.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, hashOfSecret);
|
||||
byte[] redeemScript = BitcoinyHTLC.buildScript(refunderPubKeyHash, lockTime, redeemerPubKeyHash, decodedHashOfSecret);
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) blockchain.getInstance();
|
||||
|
||||
@@ -168,8 +174,484 @@ public class CrossChainHtlcResource {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: refund
|
||||
@POST
|
||||
@Path("/redeem/{ataddress}")
|
||||
@Operation(
|
||||
summary = "Redeems HTLC associated with supplied AT",
|
||||
description = "To be used by a QORT seller (Bob) who needs to redeem LTC/DOGE/etc proceeds that are stuck in a P2SH.<br>" +
|
||||
"This requires Bob's trade bot data to be present in the database for this AT.<br>" +
|
||||
"It will fail if the buyer has yet to redeem the QORT held in the AT.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean redeemHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("ataddress") String atAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
// TODO: redeem
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Attempt to find secret from the buyer's message to AT
|
||||
byte[] decodedSecret = acct.findSecretA(repository, crossChainTradeData);
|
||||
if (decodedSecret == null) {
|
||||
LOGGER.info(() -> String.format("Unable to find secret-A from redeem message to AT %s", atAddress));
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
TradeBotData tradeBotData = allTradeBotData.stream().filter(tradeBotDataItem -> tradeBotDataItem.getAtAddress().equals(atAddress)).findFirst().orElse(null);
|
||||
|
||||
// Search for the tradePrivateKey in the tradebot data
|
||||
byte[] decodedPrivateKey = null;
|
||||
if (tradeBotData != null)
|
||||
decodedPrivateKey = tradeBotData.getTradePrivateKey();
|
||||
|
||||
// Search for the foreign blockchain receiving address in the tradebot data
|
||||
byte[] foreignBlockchainReceivingAccountInfo = null;
|
||||
if (tradeBotData != null)
|
||||
// Use receiving address PKH from tradebot data
|
||||
foreignBlockchainReceivingAccountInfo = tradeBotData.getReceivingAccountInfo();
|
||||
|
||||
return this.doRedeemHtlc(atAddress, decodedPrivateKey, decodedSecret, foreignBlockchainReceivingAccountInfo);
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/redeemAll")
|
||||
@Operation(
|
||||
summary = "Redeems HTLC for all applicable ATs in tradebot data",
|
||||
description = "To be used by a QORT seller (Bob) who needs to redeem LTC/DOGE/etc proceeds that are stuck in P2SH transactions.<br>" +
|
||||
"This requires Bob's trade bot data to be present in the database for any ATs that need redeeming.<br>" +
|
||||
"Returns true if at least one trade is redeemed. More detail is available in the log.txt.* file.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean redeemAllHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
boolean success = false;
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
|
||||
for (TradeBotData tradeBotData : allTradeBotData) {
|
||||
String atAddress = tradeBotData.getAtAddress();
|
||||
if (atAddress == null) {
|
||||
LOGGER.info("Missing AT address in tradebot data", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
String tradeState = tradeBotData.getState();
|
||||
if (tradeState == null) {
|
||||
LOGGER.info("Missing trade state for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tradeState.startsWith("ALICE")) {
|
||||
LOGGER.info("AT {} isn't redeemable because it is a buy order", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null) {
|
||||
LOGGER.info("Couldn't find AT with address {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null) {
|
||||
LOGGER.info("Couldn't find crosschain trade data for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Attempt to find secret from the buyer's message to AT
|
||||
byte[] decodedSecret = acct.findSecretA(repository, crossChainTradeData);
|
||||
if (decodedSecret == null) {
|
||||
LOGGER.info("Unable to find secret-A from redeem message to AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Search for the tradePrivateKey in the tradebot data
|
||||
byte[] decodedPrivateKey = tradeBotData.getTradePrivateKey();
|
||||
|
||||
// Search for the foreign blockchain receiving address PKH in the tradebot data
|
||||
byte[] foreignBlockchainReceivingAccountInfo = tradeBotData.getReceivingAccountInfo();
|
||||
|
||||
try {
|
||||
LOGGER.info("Attempting to redeem P2SH balance associated with AT {}...", atAddress);
|
||||
boolean redeemed = this.doRedeemHtlc(atAddress, decodedPrivateKey, decodedSecret, foreignBlockchainReceivingAccountInfo);
|
||||
if (redeemed) {
|
||||
LOGGER.info("Redeemed P2SH balance associated with AT {}", atAddress);
|
||||
success = true;
|
||||
}
|
||||
else {
|
||||
LOGGER.info("Couldn't redeem P2SH balance associated with AT {}. Already redeemed?", atAddress);
|
||||
}
|
||||
} catch (ApiException e) {
|
||||
LOGGER.info("Couldn't redeem P2SH balance associated with AT {}. Missing data?", atAddress);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
private boolean doRedeemHtlc(String atAddress, byte[] decodedTradePrivateKey, byte[] decodedSecret,
|
||||
byte[] foreignBlockchainReceivingAccountInfo) {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Validate trade private key
|
||||
if (decodedTradePrivateKey == null || decodedTradePrivateKey.length != 32)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Validate secret
|
||||
if (decodedSecret == null || decodedSecret.length != 32)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Validate receiving address
|
||||
if (foreignBlockchainReceivingAccountInfo == null || foreignBlockchainReceivingAccountInfo.length != 20)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Make sure the receiving address isn't a QORT address, given that we can share the same field for both QORT and foreign blockchains
|
||||
if (Crypto.isValidAddress(foreignBlockchainReceivingAccountInfo))
|
||||
if (Base58.encode(foreignBlockchainReceivingAccountInfo).startsWith("Q"))
|
||||
// This is likely a QORT address, not a foreign blockchain
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
|
||||
// Use secret-A to redeem P2SH-A
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
if (bitcoiny.getClass() == Bitcoin.class) {
|
||||
LOGGER.info("Redeeming a Bitcoin HTLC is not yet supported");
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int lockTime = crossChainTradeData.lockTimeA;
|
||||
byte[] redeemScriptA = BitcoinyHTLC.buildScript(crossChainTradeData.partnerForeignPKH, lockTime, crossChainTradeData.creatorForeignPKH, crossChainTradeData.hashOfSecretA);
|
||||
String p2shAddressA = bitcoiny.deriveP2shAddress(redeemScriptA);
|
||||
LOGGER.info(String.format("Redeeming P2SH address: %s", p2shAddressA));
|
||||
|
||||
// Fee for redeem/refund is subtracted from P2SH-A balance.
|
||||
long feeTimestamp = calcFeeTimestamp(lockTime, crossChainTradeData.tradeTimeout);
|
||||
long p2shFee = bitcoiny.getP2shFee(feeTimestamp);
|
||||
long minimumAmountA = crossChainTradeData.expectedForeignAmount + p2shFee;
|
||||
BitcoinyHTLC.Status htlcStatusA = BitcoinyHTLC.determineHtlcStatus(bitcoiny.getBlockchainProvider(), p2shAddressA, minimumAmountA);
|
||||
|
||||
switch (htlcStatusA) {
|
||||
case UNFUNDED:
|
||||
case FUNDING_IN_PROGRESS:
|
||||
// P2SH-A suddenly not funded? Our best bet at this point is to hope for AT auto-refund
|
||||
return false;
|
||||
|
||||
case REDEEM_IN_PROGRESS:
|
||||
case REDEEMED:
|
||||
// Double-check that we have redeemed P2SH-A...
|
||||
return false;
|
||||
|
||||
case REFUND_IN_PROGRESS:
|
||||
case REFUNDED:
|
||||
// Wait for AT to auto-refund
|
||||
return false;
|
||||
|
||||
case FUNDED: {
|
||||
Coin redeemAmount = Coin.valueOf(crossChainTradeData.expectedForeignAmount);
|
||||
ECKey redeemKey = ECKey.fromPrivate(decodedTradePrivateKey);
|
||||
List<TransactionOutput> fundingOutputs = bitcoiny.getUnspentOutputs(p2shAddressA);
|
||||
|
||||
Transaction p2shRedeemTransaction = BitcoinyHTLC.buildRedeemTransaction(bitcoiny.getNetworkParameters(), redeemAmount, redeemKey,
|
||||
fundingOutputs, redeemScriptA, decodedSecret, foreignBlockchainReceivingAccountInfo);
|
||||
|
||||
bitcoiny.broadcastTransaction(p2shRedeemTransaction);
|
||||
LOGGER.info(String.format("P2SH address %s redeemed!", p2shAddressA));
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, e);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/refund/{ataddress}")
|
||||
@Operation(
|
||||
summary = "Refunds HTLC associated with supplied AT",
|
||||
description = "To be used by a QORT buyer (Alice) who needs to refund their LTC/DOGE/etc that is stuck in a P2SH.<br>" +
|
||||
"This requires Alice's trade bot data to be present in the database for this AT.<br>" +
|
||||
"It will fail if it's already redeemed by the seller, or if the lockTime (60 minutes) hasn't passed yet.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean refundHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("ataddress") String atAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
TradeBotData tradeBotData = allTradeBotData.stream().filter(tradeBotDataItem -> tradeBotDataItem.getAtAddress().equals(atAddress)).findFirst().orElse(null);
|
||||
if (tradeBotData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (tradeBotData.getForeignKey() == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// Determine foreign blockchain receive address for refund
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
String receiveAddress = bitcoiny.getUnusedReceiveAddress(tradeBotData.getForeignKey());
|
||||
|
||||
return this.doRefundHtlc(atAddress, receiveAddress);
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/refundAll")
|
||||
@Operation(
|
||||
summary = "Refunds HTLC for all applicable ATs in tradebot data",
|
||||
description = "To be used by a QORT buyer (Alice) who needs to refund their LTC/DOGE/etc proceeds that are stuck in P2SH transactions.<br>" +
|
||||
"This requires Alice's trade bot data to be present in the database for this AT.<br>" +
|
||||
"It will fail if it's already redeemed by the seller, or if the lockTime (60 minutes) hasn't passed yet.",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.ADDRESS_UNKNOWN})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean refundAllHtlc(@HeaderParam(Security.API_KEY_HEADER) String apiKey) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
boolean success = false;
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
|
||||
for (TradeBotData tradeBotData : allTradeBotData) {
|
||||
String atAddress = tradeBotData.getAtAddress();
|
||||
if (atAddress == null) {
|
||||
LOGGER.info("Missing AT address in tradebot data", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
String tradeState = tradeBotData.getState();
|
||||
if (tradeState == null) {
|
||||
LOGGER.info("Missing trade state for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tradeState.startsWith("BOB")) {
|
||||
LOGGER.info("AT {} isn't refundable because it is a sell order", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null) {
|
||||
LOGGER.info("Couldn't find AT with address {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null) {
|
||||
LOGGER.info("Couldn't find crosschain trade data for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tradeBotData.getForeignKey() == null) {
|
||||
LOGGER.info("Couldn't find foreign key for AT {}", atAddress);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
// Determine foreign blockchain receive address for refund
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
String receivingAddress = bitcoiny.getUnusedReceiveAddress(tradeBotData.getForeignKey());
|
||||
|
||||
LOGGER.info("Attempting to refund P2SH balance associated with AT {}...", atAddress);
|
||||
boolean refunded = this.doRefundHtlc(atAddress, receivingAddress);
|
||||
if (refunded) {
|
||||
LOGGER.info("Refunded P2SH balance associated with AT {}", atAddress);
|
||||
success = true;
|
||||
}
|
||||
else {
|
||||
LOGGER.info("Couldn't refund P2SH balance associated with AT {}. Already redeemed?", atAddress);
|
||||
}
|
||||
} catch (ApiException | ForeignBlockchainException e) {
|
||||
LOGGER.info("Couldn't refund P2SH balance associated with AT {}. Missing data?", atAddress);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
|
||||
private boolean doRefundHtlc(String atAddress, String receiveAddress) {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
ATData atData = repository.getATRepository().fromATAddress(atAddress);
|
||||
if (atData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);
|
||||
|
||||
ACCT acct = SupportedBlockchain.getAcctByCodeHash(atData.getCodeHash());
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
if (crossChainTradeData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// If the AT is "finished" then it will have a zero balance
|
||||
// In these cases we should avoid HTLC refunds if the QORT hasn't been returned to the seller
if (atData.getIsFinished() && crossChainTradeData.mode != AcctMode.REFUNDED && crossChainTradeData.mode != AcctMode.CANCELLED) {
LOGGER.info(String.format("Skipping AT %s because the QORT has already been redeemed", atAddress));
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
List<TradeBotData> allTradeBotData = repository.getCrossChainRepository().getAllTradeBotData();
|
||||
TradeBotData tradeBotData = allTradeBotData.stream().filter(tradeBotDataItem -> tradeBotDataItem.getAtAddress().equals(atAddress)).findFirst().orElse(null);
|
||||
if (tradeBotData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Bitcoiny bitcoiny = (Bitcoiny) acct.getBlockchain();
|
||||
if (bitcoiny.getClass() == Bitcoin.class) {
|
||||
LOGGER.info("Refunding a Bitcoin HTLC is not yet supported");
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int lockTime = tradeBotData.getLockTimeA();
|
||||
|
||||
// We can't refund P2SH-A until lockTime-A has passed
|
||||
if (NTP.getTime() <= lockTime * 1000L)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_TOO_SOON);
|
||||
|
||||
// We can't refund P2SH-A until median block time has passed lockTime-A (see BIP113)
|
||||
int medianBlockTime = bitcoiny.getMedianBlockTime();
|
||||
if (medianBlockTime <= lockTime)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_TOO_SOON);
|
||||
|
||||
byte[] redeemScriptA = BitcoinyHTLC.buildScript(tradeBotData.getTradeForeignPublicKeyHash(), lockTime, crossChainTradeData.creatorForeignPKH, tradeBotData.getHashOfSecret());
|
||||
String p2shAddressA = bitcoiny.deriveP2shAddress(redeemScriptA);
|
||||
LOGGER.info(String.format("Refunding P2SH address: %s", p2shAddressA));
|
||||
|
||||
// Fee for redeem/refund is subtracted from P2SH-A balance.
|
||||
long feeTimestamp = calcFeeTimestamp(lockTime, crossChainTradeData.tradeTimeout);
|
||||
long p2shFee = bitcoiny.getP2shFee(feeTimestamp);
|
||||
long minimumAmountA = crossChainTradeData.expectedForeignAmount + p2shFee;
|
||||
BitcoinyHTLC.Status htlcStatusA = BitcoinyHTLC.determineHtlcStatus(bitcoiny.getBlockchainProvider(), p2shAddressA, minimumAmountA);
|
||||
|
||||
switch (htlcStatusA) {
|
||||
case UNFUNDED:
|
||||
case FUNDING_IN_PROGRESS:
|
||||
// Still waiting for P2SH-A to be funded...
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_TOO_SOON);
|
||||
|
||||
case REDEEM_IN_PROGRESS:
|
||||
case REDEEMED:
|
||||
case REFUND_IN_PROGRESS:
|
||||
case REFUNDED:
|
||||
// Too late!
|
||||
return false;
|
||||
|
||||
case FUNDED:{
|
||||
Coin refundAmount = Coin.valueOf(crossChainTradeData.expectedForeignAmount);
|
||||
ECKey refundKey = ECKey.fromPrivate(tradeBotData.getTradePrivateKey());
|
||||
List<TransactionOutput> fundingOutputs = bitcoiny.getUnspentOutputs(p2shAddressA);
|
||||
|
||||
// Validate the destination foreign blockchain address
|
||||
Address receiving = Address.fromString(bitcoiny.getNetworkParameters(), receiveAddress);
|
||||
if (receiving.getOutputScriptType() != Script.ScriptType.P2PKH)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
Transaction p2shRefundTransaction = BitcoinyHTLC.buildRefundTransaction(bitcoiny.getNetworkParameters(), refundAmount, refundKey,
|
||||
fundingOutputs, redeemScriptA, lockTime, receiving.getHash());
|
||||
|
||||
bitcoiny.broadcastTransaction(p2shRefundTransaction);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (ForeignBlockchainException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, e);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private long calcFeeTimestamp(int lockTimeA, int tradeTimeout) {
|
||||
return (lockTimeA - tradeTimeout * 60) * 1000L;
|
||||
}
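	// A worked example for calcFeeTimestamp() above, using illustrative (assumed) numbers:
	// with lockTimeA = 1_650_000_000 seconds and tradeTimeout = 60 minutes,
	//     (1_650_000_000 - 60 * 60) * 1000L = 1_649_996_400_000 ms,
	// i.e. roughly when the trade window opened. Both doRedeemHtlc() and doRefundHtlc() feed this
	// timestamp to bitcoiny.getP2shFee(), which appears intended to pin the P2SH fee to the rate
	// that applied at the start of the trade rather than the current one.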
|
||||
|
||||
}
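A minimal sketch of driving the bulk endpoints above from a client. redeemAll (for a QORT seller, Bob) and refundAll (for a QORT buyer, Alice) both take no request body and return a plain-text boolean; the node URL and the X-API-KEY header name are assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class HtlcBulkExample {
	public static void main(String[] args) throws Exception {
		// Swap "redeemAll" for "refundAll" to attempt refunds instead of redemptions.
		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:12391/crosschain/htlc/redeemAll"))
				.header("X-API-KEY", "<your node API key>") // assumed header name
				.POST(HttpRequest.BodyPublishers.noBody())
				.build();

		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());
		System.out.println("At least one P2SH processed: " + response.body());
	}
}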
@@ -0,0 +1,148 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.qortal.account.PrivateKeyAccount;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.CrossChainSecretRequest;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
import org.qortal.crosschain.LitecoinACCTv1;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.at.ATData;
|
||||
import org.qortal.data.crosschain.CrossChainTradeData;
|
||||
import org.qortal.group.Group;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.transaction.MessageTransaction;
|
||||
import org.qortal.transaction.Transaction.ValidationResult;
|
||||
import org.qortal.transform.TransformationException;
|
||||
import org.qortal.transform.Transformer;
|
||||
import org.qortal.transform.transaction.MessageTransactionTransformer;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.util.Arrays;
|
||||
import java.util.Random;
|
||||
|
||||
@Path("/crosschain/LitecoinACCTv1")
|
||||
@Tag(name = "Cross-Chain (LitecoinACCTv1)")
|
||||
public class CrossChainLitecoinACCTv1Resource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
@POST
|
||||
@Path("/redeemmessage")
|
||||
@Operation(
|
||||
summary = "Signs and broadcasts a 'redeem' MESSAGE transaction that sends secrets to AT, releasing funds to partner",
|
||||
description = "Specify address of cross-chain AT that needs to be messaged, Alice's trade private key, the 32-byte secret,<br>"
|
||||
+ "and an address for receiving QORT from AT. All of these can be found in Alice's trade bot data.<br>"
|
||||
+ "AT needs to be in 'trade' mode. Messages sent to an AT in any other mode will be ignored, but still cost fees to send!<br>"
|
||||
+ "You need to use the private key that the AT considers the trade 'partner' otherwise the MESSAGE transaction will be invalid.",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = CrossChainSecretRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
	@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_DATA, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
	@SecurityRequirement(name = "apiKey")
	public boolean buildRedeemMessage(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainSecretRequest secretRequest) {
		Security.checkApiCallAllowed(request);

		byte[] partnerPrivateKey = secretRequest.partnerPrivateKey;

		if (partnerPrivateKey == null || partnerPrivateKey.length != Transformer.PRIVATE_KEY_LENGTH)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

		if (secretRequest.atAddress == null || !Crypto.isValidAtAddress(secretRequest.atAddress))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

		if (secretRequest.secret == null || secretRequest.secret.length != LitecoinACCTv1.SECRET_LENGTH)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);

		if (secretRequest.receivingAddress == null || !Crypto.isValidAddress(secretRequest.receivingAddress))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

		try (final Repository repository = RepositoryManager.getRepository()) {
			ATData atData = fetchAtDataWithChecking(repository, secretRequest.atAddress);
			CrossChainTradeData crossChainTradeData = LitecoinACCTv1.getInstance().populateTradeData(repository, atData);

			if (crossChainTradeData.mode != AcctMode.TRADING)
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

			byte[] partnerPublicKey = new PrivateKeyAccount(null, partnerPrivateKey).getPublicKey();
			String partnerAddress = Crypto.toAddress(partnerPublicKey);

			// MESSAGE must come from address that AT considers trade partner
			if (!crossChainTradeData.qortalPartnerAddress.equals(partnerAddress))
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

			// Good to make MESSAGE

			byte[] messageData = LitecoinACCTv1.buildRedeemMessage(secretRequest.secret, secretRequest.receivingAddress);

			PrivateKeyAccount sender = new PrivateKeyAccount(repository, partnerPrivateKey);
			MessageTransaction messageTransaction = MessageTransaction.build(repository, sender, Group.NO_GROUP, secretRequest.atAddress, messageData, false, false);

			messageTransaction.computeNonce();
			messageTransaction.sign(sender);

			// reset repository state to prevent deadlock
			repository.discardChanges();
			ValidationResult result = messageTransaction.importAsUnconfirmed();

			if (result != ValidationResult.OK)
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSACTION_INVALID);

			return true;
		} catch (DataException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
		}
	}

	private ATData fetchAtDataWithChecking(Repository repository, String atAddress) throws DataException {
		ATData atData = repository.getATRepository().fromATAddress(atAddress);
		if (atData == null)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ADDRESS_UNKNOWN);

		// Must be correct AT - check functionality using code hash
		if (!Arrays.equals(atData.getCodeHash(), LitecoinACCTv1.CODE_BYTES_HASH))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

		// No point sending message to AT that's finished
		if (atData.getIsFinished())
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

		return atData;
	}

}
@@ -6,11 +6,13 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
@@ -22,9 +24,9 @@ import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.crosschain.LitecoinSendRequest;
import org.qortal.crosschain.BitcoinyTransaction;
import org.qortal.crosschain.ForeignBlockchainException;
import org.qortal.crosschain.Litecoin;
import org.qortal.crosschain.SimpleTransaction;

@Path("/crosschain/ltc")
@Tag(name = "Cross-Chain (Litecoin)")
@@ -56,7 +58,8 @@ public class CrossChainLitecoinResource {
		}
	)
	@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
	public String getLitecoinWalletBalance(String key58) {
	@SecurityRequirement(name = "apiKey")
	public String getLitecoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
		Security.checkApiCallAllowed(request);

		Litecoin litecoin = Litecoin.getInstance();
@@ -64,11 +67,16 @@ public class CrossChainLitecoinResource {
		if (!litecoin.isValidDeterministicKey(key58))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

		Long balance = litecoin.getWalletBalance(key58);
		if (balance == null)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
		try {
			Long balance = litecoin.getWalletBalanceFromTransactions(key58);
			if (balance == null)
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);

		return balance.toString();
			return balance.toString();

		} catch (ForeignBlockchainException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
		}
	}

	@POST
@@ -89,12 +97,13 @@ public class CrossChainLitecoinResource {
		),
		responses = {
			@ApiResponse(
				content = @Content(array = @ArraySchema( schema = @Schema( implementation = BitcoinyTransaction.class ) ) )
				content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
			)
		}
	)
	@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
	public List<BitcoinyTransaction> getLitecoinWalletTransactions(String key58) {
	@SecurityRequirement(name = "apiKey")
	public List<SimpleTransaction> getLitecoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
		Security.checkApiCallAllowed(request);

		Litecoin litecoin = Litecoin.getInstance();
@@ -113,7 +122,7 @@ public class CrossChainLitecoinResource {
	@Path("/send")
	@Operation(
		summary = "Sends LTC from hierarchical, deterministic BIP32 wallet to specific address",
		description = "Currently only supports 'legacy' P2PKH Litecoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
		description = "Currently supports 'legacy' P2PKH Litecoin addresses and Native SegWit (P2WPKH) addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
		requestBody = @RequestBody(
			required = true,
			content = @Content(
@@ -130,7 +139,8 @@ public class CrossChainLitecoinResource {
		}
	)
	@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
	public String sendBitcoin(LitecoinSendRequest litecoinSendRequest) {
	@SecurityRequirement(name = "apiKey")
	public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, LitecoinSendRequest litecoinSendRequest) {
		Security.checkApiCallAllowed(request);

		if (litecoinSendRequest.litecoinAmount <= 0)
@@ -164,4 +174,4 @@ public class CrossChainLitecoinResource {
		return spendTransaction.getTxId().toString();
	}

}
}
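A minimal client-side sketch of calling the updated LTC wallet-balance endpoint above. Assumptions not visible in this diff: the node's default API port (12391), the literal API-key header name (X-API-KEY, read via Security.API_KEY_HEADER), and the /walletbalance sub-path (shown explicitly only for the Ravencoin resource below).

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class LtcWalletBalanceExample {
	public static void main(String[] args) throws Exception {
		String apiKey = "your-api-key"; // the node's local API key (assumption: taken from apikey.txt)
		String xpub = "xpub...";        // BIP32 'm' public key in base58, as the endpoint expects

		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:12393/crosschain/ltc/walletbalance".replace("12393", "12391"))) // default API port assumed
				.header("X-API-KEY", apiKey) // header name assumed; the resource reads Security.API_KEY_HEADER
				.header("Content-Type", "text/plain")
				.POST(HttpRequest.BodyPublishers.ofString(xpub))
				.build();

		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());

		// On success the body is the balance in satoshis, as a plain string
		System.out.println("LTC balance (sats): " + response.body());
	}
}

A 2xx response carries the balance as plain text; invalid keys and network problems map to INVALID_PRIVATE_KEY and FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, as declared in the annotations above.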
@@ -0,0 +1,177 @@
package org.qortal.api.resource;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;

import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

import org.bitcoinj.core.Transaction;
import org.qortal.api.ApiError;
import org.qortal.api.ApiErrors;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.Security;
import org.qortal.api.model.crosschain.RavencoinSendRequest;
import org.qortal.crosschain.Ravencoin;
import org.qortal.crosschain.ForeignBlockchainException;
import org.qortal.crosschain.SimpleTransaction;

@Path("/crosschain/rvn")
@Tag(name = "Cross-Chain (Ravencoin)")
public class CrossChainRavencoinResource {

	@Context
	HttpServletRequest request;

	@POST
	@Path("/walletbalance")
	@Operation(
		summary = "Returns RVN balance for hierarchical, deterministic BIP32 wallet",
		description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
		requestBody = @RequestBody(
			required = true,
			content = @Content(
				mediaType = MediaType.TEXT_PLAIN,
				schema = @Schema(
					type = "string",
					description = "BIP32 'm' private/public key in base58",
					example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
				)
			)
		),
		responses = {
			@ApiResponse(
				content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "balance (satoshis)"))
			)
		}
	)
	@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
	@SecurityRequirement(name = "apiKey")
	public String getRavencoinWalletBalance(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
		Security.checkApiCallAllowed(request);

		Ravencoin ravencoin = Ravencoin.getInstance();

		if (!ravencoin.isValidDeterministicKey(key58))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

		try {
			Long balance = ravencoin.getWalletBalanceFromTransactions(key58);
			if (balance == null)
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);

			return balance.toString();

		} catch (ForeignBlockchainException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
		}
	}

	@POST
	@Path("/wallettransactions")
	@Operation(
		summary = "Returns transactions for hierarchical, deterministic BIP32 wallet",
		description = "Supply BIP32 'm' private/public key in base58, starting with 'xprv'/'xpub' for mainnet, 'tprv'/'tpub' for testnet",
		requestBody = @RequestBody(
			required = true,
			content = @Content(
				mediaType = MediaType.TEXT_PLAIN,
				schema = @Schema(
					type = "string",
					description = "BIP32 'm' private/public key in base58",
					example = "tpubD6NzVbkrYhZ4XTPc4btCZ6SMgn8CxmWkj6VBVZ1tfcJfMq4UwAjZbG8U74gGSypL9XBYk2R2BLbDBe8pcEyBKM1edsGQEPKXNbEskZozeZc"
				)
			)
		),
		responses = {
			@ApiResponse(
				content = @Content(array = @ArraySchema( schema = @Schema( implementation = SimpleTransaction.class ) ) )
			)
		}
	)
	@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
	@SecurityRequirement(name = "apiKey")
	public List<SimpleTransaction> getRavencoinWalletTransactions(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String key58) {
		Security.checkApiCallAllowed(request);

		Ravencoin ravencoin = Ravencoin.getInstance();

		if (!ravencoin.isValidDeterministicKey(key58))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

		try {
			return ravencoin.getWalletTransactions(key58);
		} catch (ForeignBlockchainException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
		}
	}

	@POST
	@Path("/send")
	@Operation(
		summary = "Sends RVN from hierarchical, deterministic BIP32 wallet to specific address",
		description = "Currently only supports 'legacy' P2PKH Ravencoin addresses. Supply BIP32 'm' private key in base58, starting with 'xprv' for mainnet, 'tprv' for testnet",
		requestBody = @RequestBody(
			required = true,
			content = @Content(
				mediaType = MediaType.APPLICATION_JSON,
				schema = @Schema(
					implementation = RavencoinSendRequest.class
				)
			)
		),
		responses = {
			@ApiResponse(
				content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "string", description = "transaction hash"))
			)
		}
	)
	@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_CRITERIA, ApiError.INVALID_ADDRESS, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE})
	@SecurityRequirement(name = "apiKey")
	public String sendBitcoin(@HeaderParam(Security.API_KEY_HEADER) String apiKey, RavencoinSendRequest ravencoinSendRequest) {
		Security.checkApiCallAllowed(request);

		if (ravencoinSendRequest.ravencoinAmount <= 0)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

		if (ravencoinSendRequest.feePerByte != null && ravencoinSendRequest.feePerByte <= 0)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);

		Ravencoin ravencoin = Ravencoin.getInstance();

		if (!ravencoin.isValidAddress(ravencoinSendRequest.receivingAddress))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

		if (!ravencoin.isValidDeterministicKey(ravencoinSendRequest.xprv58))
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_PRIVATE_KEY);

		Transaction spendTransaction = ravencoin.buildSpend(ravencoinSendRequest.xprv58,
				ravencoinSendRequest.receivingAddress,
				ravencoinSendRequest.ravencoinAmount,
				ravencoinSendRequest.feePerByte);

		if (spendTransaction == null)
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE);

		try {
			ravencoin.broadcastTransaction(spendTransaction);
		} catch (ForeignBlockchainException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE);
		}

		return spendTransaction.getTxId().toString();
	}

}
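A minimal sketch of driving the new /crosschain/rvn/send endpoint, using the RavencoinSendRequest fields referenced above (xprv58, receivingAddress, ravencoinAmount, feePerByte). Assumptions: default API port 12391, API-key header literal X-API-KEY, JSON property names matching the Java field names, and satoshi-denominated amounts in line with the satoshi-denominated balance endpoint.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RvnSendExample {
	public static void main(String[] args) throws Exception {
		String apiKey = "your-api-key"; // the node's local API key (assumption: taken from apikey.txt)

		// JSON field names assumed to mirror the RavencoinSendRequest fields used in the resource above
		String json = "{"
				+ "\"xprv58\": \"tprv...\","          // BIP32 'm' private key in base58
				+ "\"receivingAddress\": \"R...\","   // legacy P2PKH Ravencoin address
				+ "\"ravencoinAmount\": 100000000,"   // must be positive
				+ "\"feePerByte\": 1125"              // optional; must be positive when present
				+ "}";

		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:12391/crosschain/rvn/send")) // default API port assumed
				.header("X-API-KEY", apiKey) // header name assumed; the resource reads Security.API_KEY_HEADER
				.header("Content-Type", "application/json")
				.POST(HttpRequest.BodyPublishers.ofString(json))
				.build();

		HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());

		// On success the body is the broadcast transaction's hash as a plain string
		System.out.println("RVN txid: " + response.body());
	}
}

A null spend transaction maps to FOREIGN_BLOCKCHAIN_BALANCE_ISSUE and broadcast failures to FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, as declared in the annotations above.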
@@ -1,5 +1,6 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import com.google.common.primitives.Longs;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
@@ -10,20 +11,11 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Random;
|
||||
import java.util.*;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
@@ -33,6 +25,7 @@ import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.api.model.CrossChainCancelRequest;
|
||||
import org.qortal.api.model.CrossChainTradeSummary;
|
||||
import org.qortal.controller.tradebot.TradeBot;
|
||||
import org.qortal.crosschain.SupportedBlockchain;
|
||||
import org.qortal.crosschain.ACCT;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
@@ -95,7 +88,7 @@ public class CrossChainResource {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
final boolean isExecutable = true;
|
||||
List<CrossChainTradeData> crossChainTradesData = new ArrayList<>();
|
||||
List<CrossChainTradeData> crossChainTrades = new ArrayList<>();
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
Map<ByteArray, Supplier<ACCT>> acctsByCodeHash = SupportedBlockchain.getFilteredAcctMap(foreignBlockchain);
|
||||
@@ -108,11 +101,29 @@ public class CrossChainResource {
|
||||
|
||||
for (ATData atData : atsData) {
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
crossChainTradesData.add(crossChainTradeData);
|
||||
if (crossChainTradeData.mode == AcctMode.OFFERING) {
|
||||
crossChainTrades.add(crossChainTradeData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return crossChainTradesData;
|
||||
// Sort the trades by timestamp
|
||||
if (reverse != null && reverse) {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(b.creationTimestamp, a.creationTimestamp));
|
||||
}
|
||||
else {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(a.creationTimestamp, b.creationTimestamp));
|
||||
}
|
||||
|
||||
if (limit != null && limit > 0) {
|
||||
// Make sure to not return more than the limit
|
||||
int upperLimit = Math.min(limit, crossChainTrades.size());
|
||||
crossChainTrades = crossChainTrades.subList(0, upperLimit);
|
||||
}
|
||||
|
||||
crossChainTrades.stream().forEach(CrossChainResource::decorateTradeDataWithPresence);
|
||||
|
||||
return crossChainTrades;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -143,7 +154,11 @@ public class CrossChainResource {
|
||||
if (acct == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
return acct.populateTradeData(repository, atData);
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atData);
|
||||
|
||||
decorateTradeDataWithPresence(crossChainTradeData);
|
||||
|
||||
return crossChainTradeData;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -195,6 +210,11 @@ public class CrossChainResource {
|
||||
|
||||
if (minimumTimestamp != null) {
|
||||
minimumFinalHeight = repository.getBlockRepository().getHeightFromTimestamp(minimumTimestamp);
|
||||
// If not found in the block repository it will return either 0 or 1
|
||||
if (minimumFinalHeight == 0 || minimumFinalHeight == 1) {
|
||||
// Try the archive
|
||||
minimumFinalHeight = repository.getBlockArchiveRepository().getHeightFromTimestamp(minimumTimestamp);
|
||||
}
|
||||
|
||||
if (minimumFinalHeight == 0)
|
||||
// We don't have any blocks since minimumTimestamp, let alone trades, so nothing to return
|
||||
@@ -222,12 +242,30 @@ public class CrossChainResource {
|
||||
|
||||
// We also need block timestamp for use as trade timestamp
|
||||
long timestamp = repository.getBlockRepository().getTimestampFromHeight(atState.getHeight());
|
||||
if (timestamp == 0) {
|
||||
// Try the archive
|
||||
timestamp = repository.getBlockArchiveRepository().getTimestampFromHeight(atState.getHeight());
|
||||
}
|
||||
|
||||
CrossChainTradeSummary crossChainTradeSummary = new CrossChainTradeSummary(crossChainTradeData, timestamp);
|
||||
crossChainTrades.add(crossChainTradeSummary);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the trades by timestamp
|
||||
if (reverse != null && reverse) {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(b.getTradeTimestamp(), a.getTradeTimestamp()));
|
||||
}
|
||||
else {
|
||||
crossChainTrades.sort((a, b) -> Longs.compare(a.getTradeTimestamp(), b.getTradeTimestamp()));
|
||||
}
|
||||
|
||||
if (limit != null && limit > 0) {
|
||||
// Make sure to not return more than the limit
|
||||
int upperLimit = Math.min(limit, crossChainTrades.size());
|
||||
crossChainTrades = crossChainTrades.subList(0, upperLimit);
|
||||
}
|
||||
|
||||
return crossChainTrades;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
@@ -255,15 +293,27 @@ public class CrossChainResource {
|
||||
description = "foreign blockchain",
|
||||
example = "LITECOIN",
|
||||
schema = @Schema(implementation = SupportedBlockchain.class)
|
||||
) @PathParam("blockchain") SupportedBlockchain foreignBlockchain) {
|
||||
) @PathParam("blockchain") SupportedBlockchain foreignBlockchain,
|
||||
@Parameter(
|
||||
description = "Maximum number of trades to include in price calculation",
|
||||
example = "10",
|
||||
schema = @Schema(type = "integer", defaultValue = "10")
|
||||
) @QueryParam("maxtrades") Integer maxtrades,
|
||||
@Parameter(
|
||||
description = "Display price in terms of foreign currency per unit QORT",
|
||||
example = "false",
|
||||
schema = @Schema(type = "boolean", defaultValue = "false")
|
||||
) @QueryParam("inverse") Boolean inverse) {
|
||||
// foreignBlockchain is required
|
||||
if (foreignBlockchain == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
// We want both a minimum of 5 trades and enough trades to span at least 4 hours
|
||||
int minimumCount = 5;
|
||||
int maximumCount = maxtrades != null ? maxtrades : 10;
|
||||
long minimumPeriod = 4 * 60 * 60 * 1000L; // ms
|
||||
Boolean isFinished = Boolean.TRUE;
|
||||
boolean useInversePrice = (inverse != null && inverse == true);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
Map<ByteArray, Supplier<ACCT>> acctsByCodeHash = SupportedBlockchain.getFilteredAcctMap(foreignBlockchain);
|
||||
@@ -271,21 +321,49 @@ public class CrossChainResource {
|
||||
long totalForeign = 0;
|
||||
long totalQort = 0;
|
||||
|
||||
Map<Long, CrossChainTradeData> reverseSortedTradeData = new TreeMap<>(Collections.reverseOrder());
|
||||
|
||||
// Collect recent AT states for each ACCT version
|
||||
for (Map.Entry<ByteArray, Supplier<ACCT>> acctInfo : acctsByCodeHash.entrySet()) {
|
||||
byte[] codeHash = acctInfo.getKey().value;
|
||||
ACCT acct = acctInfo.getValue().get();
|
||||
|
||||
List<ATStateData> atStates = repository.getATRepository().getMatchingFinalATStatesQuorum(codeHash,
|
||||
isFinished, acct.getModeByteOffset(), (long) AcctMode.REDEEMED.value, minimumCount, minimumPeriod);
|
||||
isFinished, acct.getModeByteOffset(), (long) AcctMode.REDEEMED.value, minimumCount, maximumCount, minimumPeriod);
|
||||
|
||||
for (ATStateData atState : atStates) {
|
||||
// We also need block timestamp for use as trade timestamp
|
||||
long timestamp = repository.getBlockRepository().getTimestampFromHeight(atState.getHeight());
|
||||
if (timestamp == 0) {
|
||||
// Try the archive
|
||||
timestamp = repository.getBlockArchiveRepository().getTimestampFromHeight(atState.getHeight());
|
||||
}
|
||||
|
||||
CrossChainTradeData crossChainTradeData = acct.populateTradeData(repository, atState);
|
||||
totalForeign += crossChainTradeData.expectedForeignAmount;
|
||||
totalQort += crossChainTradeData.qortAmount;
|
||||
reverseSortedTradeData.put(timestamp, crossChainTradeData);
|
||||
}
|
||||
}
|
||||
|
||||
return Amounts.scaledDivide(totalQort, totalForeign);
|
||||
// Loop through the sorted map and calculate the average price
|
||||
// Also remove elements beyond the maxtrades limit
|
||||
Set set = reverseSortedTradeData.entrySet();
|
||||
Iterator i = set.iterator();
|
||||
int index = 0;
|
||||
while (i.hasNext()) {
|
||||
Map.Entry tradeDataMap = (Map.Entry)i.next();
|
||||
CrossChainTradeData crossChainTradeData = (CrossChainTradeData) tradeDataMap.getValue();
|
||||
|
||||
if (maxtrades != null && index >= maxtrades) {
|
||||
// We've reached the limit
|
||||
break;
|
||||
}
|
||||
|
||||
totalForeign += crossChainTradeData.expectedForeignAmount;
|
||||
totalQort += crossChainTradeData.qortAmount;
|
||||
index++;
|
||||
}
|
||||
|
||||
return useInversePrice ? Amounts.scaledDivide(totalForeign, totalQort) : Amounts.scaledDivide(totalQort, totalForeign);
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
@@ -320,7 +398,7 @@ public class CrossChainResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String cancelTrade(CrossChainCancelRequest cancelRequest) {
|
||||
public String cancelTrade(@HeaderParam(Security.API_KEY_HEADER) String apiKey, CrossChainCancelRequest cancelRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
byte[] creatorPublicKey = cancelRequest.creatorPublicKey;
|
||||
@@ -415,4 +493,7 @@ public class CrossChainResource {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
private static void decorateTradeDataWithPresence(CrossChainTradeData crossChainTradeData) {
|
||||
TradeBot.getInstance().decorateTradeDataWithPresence(crossChainTradeData);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,17 +7,14 @@ import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
@@ -30,6 +27,7 @@ import org.qortal.api.Security;
|
||||
import org.qortal.api.model.crosschain.TradeBotCreateRequest;
|
||||
import org.qortal.api.model.crosschain.TradeBotRespondRequest;
|
||||
import org.qortal.asset.Asset;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.tradebot.AcctTradeBot;
|
||||
import org.qortal.controller.tradebot.TradeBot;
|
||||
import org.qortal.crosschain.ForeignBlockchain;
|
||||
@@ -68,7 +66,9 @@ public class CrossChainTradeBotResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<TradeBotData> getTradeBotStates(
|
||||
@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@Parameter(
|
||||
description = "Limit to specific blockchain",
|
||||
example = "LITECOIN",
|
||||
@@ -107,9 +107,10 @@ public class CrossChainTradeBotResource {
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.INSUFFICIENT_BALANCE, ApiError.REPOSITORY_ISSUE})
|
||||
@ApiErrors({ApiError.INVALID_PUBLIC_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.INSUFFICIENT_BALANCE, ApiError.REPOSITORY_ISSUE, ApiError.ORDER_SIZE_TOO_SMALL})
|
||||
@SuppressWarnings("deprecation")
|
||||
public String tradeBotCreator(TradeBotCreateRequest tradeBotCreateRequest) {
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String tradeBotCreator(@HeaderParam(Security.API_KEY_HEADER) String apiKey, TradeBotCreateRequest tradeBotCreateRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (tradeBotCreateRequest.foreignBlockchain == null)
|
||||
@@ -128,10 +129,16 @@ public class CrossChainTradeBotResource {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
|
||||
if (tradeBotCreateRequest.foreignAmount == null || tradeBotCreateRequest.foreignAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ORDER_SIZE_TOO_SMALL);
|
||||
|
||||
if (tradeBotCreateRequest.foreignAmount < foreignBlockchain.getMinimumOrderAmount())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ORDER_SIZE_TOO_SMALL);
|
||||
|
||||
if (tradeBotCreateRequest.qortAmount <= 0 || tradeBotCreateRequest.fundingQortAmount <= 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.ORDER_SIZE_TOO_SMALL);
|
||||
|
||||
if (!Controller.getInstance().isUpToDate())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCKCHAIN_NEEDS_SYNC);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
// Do some simple checking first
|
||||
@@ -172,7 +179,8 @@ public class CrossChainTradeBotResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_PRIVATE_KEY, ApiError.INVALID_ADDRESS, ApiError.INVALID_CRITERIA, ApiError.FOREIGN_BLOCKCHAIN_BALANCE_ISSUE, ApiError.FOREIGN_BLOCKCHAIN_NETWORK_ISSUE, ApiError.REPOSITORY_ISSUE})
|
||||
@SuppressWarnings("deprecation")
|
||||
public String tradeBotResponder(TradeBotRespondRequest tradeBotRespondRequest) {
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String tradeBotResponder(@HeaderParam(Security.API_KEY_HEADER) String apiKey, TradeBotRespondRequest tradeBotRespondRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
final String atAddress = tradeBotRespondRequest.atAddress;
|
||||
@@ -190,6 +198,9 @@ public class CrossChainTradeBotResource {
|
||||
if (tradeBotRespondRequest.receivingAddress == null || !Crypto.isValidAddress(tradeBotRespondRequest.receivingAddress))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
|
||||
if (!Controller.getInstance().isUpToDate())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCKCHAIN_NEEDS_SYNC);
|
||||
|
||||
// Extract data from cross-chain trading AT
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
ATData atData = fetchAtDataWithChecking(repository, atAddress);
|
||||
@@ -250,7 +261,8 @@ public class CrossChainTradeBotResource {
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_ADDRESS, ApiError.REPOSITORY_ISSUE})
|
||||
public String tradeBotDelete(String tradePrivateKey58) {
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String tradeBotDelete(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String tradePrivateKey58) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
final byte[] tradePrivateKey;
|
||||
@@ -283,4 +295,4 @@ public class CrossChainTradeBotResource {
|
||||
return atData;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,7 +98,15 @@ public class GroupsResource {
				ref = "reverse"
			) @QueryParam("reverse") Boolean reverse) {
		try (final Repository repository = RepositoryManager.getRepository()) {
			return repository.getGroupRepository().getAllGroups(limit, offset, reverse);
			List<GroupData> allGroupData = repository.getGroupRepository().getAllGroups(limit, offset, reverse);
			allGroupData.forEach(groupData -> {
				try {
					groupData.memberCount = repository.getGroupRepository().countGroupMembers(groupData.getGroupId());
				} catch (DataException e) {
					// Exclude memberCount for this group
				}
			});
			return allGroupData;
		} catch (DataException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
		}
@@ -150,7 +158,15 @@ public class GroupsResource {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

		try (final Repository repository = RepositoryManager.getRepository()) {
			return repository.getGroupRepository().getGroupsWithMember(member);
			List<GroupData> allGroupData = repository.getGroupRepository().getGroupsWithMember(member);
			allGroupData.forEach(groupData -> {
				try {
					groupData.memberCount = repository.getGroupRepository().countGroupMembers(groupData.getGroupId());
				} catch (DataException e) {
					// Exclude memberCount for this group
				}
			});
			return allGroupData;
		} catch (DataException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
		}
@@ -177,6 +193,7 @@ public class GroupsResource {
			if (groupData == null)
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.GROUP_UNKNOWN);

			groupData.memberCount = repository.getGroupRepository().countGroupMembers(groupId);
			return groupData;
		} catch (DataException e) {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
@@ -922,4 +939,4 @@ public class GroupsResource {
	}
}

}
}
176
src/main/java/org/qortal/api/resource/ListsResource.java
Normal file
@@ -0,0 +1,176 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.model.ListRequest;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.account.AccountData;
|
||||
import org.qortal.list.ResourceListManager;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
|
||||
@Path("/lists")
|
||||
@Tag(name = "Lists")
|
||||
public class ListsResource {
|
||||
|
||||
@Context
|
||||
HttpServletRequest request;
|
||||
|
||||
|
||||
@POST
|
||||
@Path("/{listName}")
|
||||
@Operation(
|
||||
summary = "Add items to a new or existing list",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = ListRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "Returns true if all items were processed, false if any couldn't be " +
|
||||
"processed, or an exception on failure. If false or an exception is returned, " +
|
||||
"the list will not be updated, and the request will need to be re-issued.",
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String addItemstoList(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("listName") String listName,
|
||||
ListRequest listRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (listName == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
if (listRequest == null || listRequest.items == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int successCount = 0;
|
||||
int errorCount = 0;
|
||||
|
||||
for (String item : listRequest.items) {
|
||||
|
||||
boolean success = ResourceListManager.getInstance().addToList(listName, item, false);
|
||||
if (success) {
|
||||
successCount++;
|
||||
}
|
||||
else {
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (successCount > 0 && errorCount == 0) {
|
||||
// All were successful, so save the list
|
||||
ResourceListManager.getInstance().saveList(listName);
|
||||
return "true";
|
||||
}
|
||||
else {
|
||||
// Something went wrong, so revert
|
||||
ResourceListManager.getInstance().revertList(listName);
|
||||
return "false";
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("/{listName}")
|
||||
@Operation(
|
||||
summary = "Remove one or more items from a list",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = ListRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "Returns true if all items were processed, false if any couldn't be " +
|
||||
"processed, or an exception on failure. If false or an exception is returned, " +
|
||||
"the list will not be updated, and the request will need to be re-issued.",
|
||||
content = @Content(mediaType = MediaType.TEXT_PLAIN, schema = @Schema(type = "boolean"))
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String removeItemsFromList(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("listName") String listName,
|
||||
ListRequest listRequest) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (listRequest == null || listRequest.items == null) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA);
|
||||
}
|
||||
|
||||
int successCount = 0;
|
||||
int errorCount = 0;
|
||||
|
||||
for (String address : listRequest.items) {
|
||||
|
||||
// Attempt to remove the item
|
||||
// Don't save as we will do this at the end of the process
|
||||
boolean success = ResourceListManager.getInstance().removeFromList(listName, address, false);
|
||||
if (success) {
|
||||
successCount++;
|
||||
}
|
||||
else {
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (successCount > 0 && errorCount == 0) {
|
||||
// All were successful, so save the list
|
||||
ResourceListManager.getInstance().saveList(listName);
|
||||
return "true";
|
||||
}
|
||||
else {
|
||||
// Something went wrong, so revert
|
||||
ResourceListManager.getInstance().revertList(listName);
|
||||
return "false";
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{listName}")
|
||||
@Operation(
|
||||
summary = "Fetch all items in a list",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "A JSON array of items",
|
||||
content = @Content(mediaType = MediaType.APPLICATION_JSON, array = @ArraySchema(schema = @Schema(implementation = String.class)))
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String getItemsInList(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("listName") String listName) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
return ResourceListManager.getInstance().getJSONStringForList(listName);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -26,6 +26,7 @@ import org.qortal.api.ApiErrors;
import org.qortal.api.ApiException;
import org.qortal.api.ApiExceptionFactory;
import org.qortal.api.model.NameSummary;
import org.qortal.controller.LiteNode;
import org.qortal.crypto.Crypto;
import org.qortal.data.naming.NameData;
import org.qortal.data.transaction.BuyNameTransactionData;
@@ -101,7 +102,14 @@ public class NamesResource {
			throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);

		try (final Repository repository = RepositoryManager.getRepository()) {
			List<NameData> names = repository.getNameRepository().getNamesByOwner(address, limit, offset, reverse);
			List<NameData> names;

			if (Settings.getInstance().isLite()) {
				names = LiteNode.getInstance().fetchAccountNames(address);
			}
			else {
				names = repository.getNameRepository().getNamesByOwner(address, limit, offset, reverse);
			}

			return names.stream().map(NameSummary::new).collect(Collectors.toList());
		} catch (DataException e) {
@@ -126,10 +134,18 @@ public class NamesResource {
	@ApiErrors({ApiError.NAME_UNKNOWN, ApiError.REPOSITORY_ISSUE})
	public NameData getName(@PathParam("name") String name) {
		try (final Repository repository = RepositoryManager.getRepository()) {
			NameData nameData = repository.getNameRepository().fromName(name);
			NameData nameData;

			if (nameData == null)
			if (Settings.getInstance().isLite()) {
				nameData = LiteNode.getInstance().fetchNameData(name);
			}
			else {
				nameData = repository.getNameRepository().fromName(name);
			}

			if (nameData == null) {
				throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.NAME_UNKNOWN);
			}

			return nameData;
		} catch (ApiException e) {

@@ -16,19 +16,18 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiErrors;
|
||||
import org.qortal.api.ApiException;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.apache.logging.log4j.Level;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.core.config.Configuration;
|
||||
import org.apache.logging.log4j.core.config.LoggerConfig;
|
||||
import org.apache.logging.log4j.core.LoggerContext;
|
||||
import org.qortal.api.*;
|
||||
import org.qortal.api.model.ConnectedPeer;
|
||||
import org.qortal.api.model.PeersSummary;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.controller.Synchronizer.SynchronizationResult;
|
||||
@@ -67,7 +66,7 @@ public class PeersResource {
|
||||
}
|
||||
)
|
||||
public List<ConnectedPeer> getPeers() {
|
||||
return Network.getInstance().getConnectedPeers().stream().map(ConnectedPeer::new).collect(Collectors.toList());
|
||||
return Network.getInstance().getImmutableConnectedPeers().stream().map(ConnectedPeer::new).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@GET
|
||||
@@ -133,9 +132,29 @@ public class PeersResource {
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public ExecuteProduceConsume.StatsSnapshot getEngineStats() {
|
||||
public ExecuteProduceConsume.StatsSnapshot getEngineStats(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @QueryParam("newLoggingLevel") Level newLoggingLevel) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
if (newLoggingLevel != null) {
|
||||
final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
|
||||
final Configuration config = ctx.getConfiguration();
|
||||
|
||||
String epcClassName = "org.qortal.network.Network.NetworkProcessor";
|
||||
LoggerConfig loggerConfig = config.getLoggerConfig(epcClassName);
|
||||
LoggerConfig specificConfig = loggerConfig;
|
||||
|
||||
// We need a specific configuration for this logger,
|
||||
// otherwise we would change the level of all other loggers
|
||||
// having the original configuration as parent as well
|
||||
if (!loggerConfig.getName().equals(epcClassName)) {
|
||||
specificConfig = new LoggerConfig(epcClassName, newLoggingLevel, true);
|
||||
specificConfig.setParent(loggerConfig);
|
||||
config.addLogger(epcClassName, specificConfig);
|
||||
}
|
||||
specificConfig.setLevel(newLoggingLevel);
|
||||
ctx.updateLoggers();
|
||||
}
|
||||
|
||||
return Network.getInstance().getStatsSnapshot();
|
||||
}
|
||||
|
||||
@@ -171,7 +190,7 @@ public class PeersResource {
|
||||
ApiError.INVALID_NETWORK_ADDRESS, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String addPeer(String address) {
|
||||
public String addPeer(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String address) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
final Long addedWhen = NTP.getTime();
|
||||
@@ -226,7 +245,7 @@ public class PeersResource {
|
||||
ApiError.INVALID_NETWORK_ADDRESS, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String removePeer(String address) {
|
||||
public String removePeer(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String address) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -262,7 +281,7 @@ public class PeersResource {
|
||||
ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String removeKnownPeers(String address) {
|
||||
public String removeKnownPeers(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String address) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -302,7 +321,7 @@ public class PeersResource {
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_DATA, ApiError.REPOSITORY_ISSUE})
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public List<BlockSummaryData> commonBlock(String targetPeerAddress) {
|
||||
public List<BlockSummaryData> commonBlock(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String targetPeerAddress) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
|
||||
try {
|
||||
@@ -310,7 +329,7 @@ public class PeersResource {
|
||||
PeerAddress peerAddress = PeerAddress.fromString(targetPeerAddress);
|
||||
InetSocketAddress resolvedAddress = peerAddress.toSocketAddress();
|
||||
|
||||
List<Peer> peers = Network.getInstance().getHandshakedPeers();
|
||||
List<Peer> peers = Network.getInstance().getImmutableHandshakedPeers();
|
||||
Peer targetPeer = peers.stream().filter(peer -> peer.getResolvedAddress().equals(resolvedAddress)).findFirst().orElse(null);
|
||||
|
||||
if (targetPeer == null)
|
||||
@@ -321,7 +340,7 @@ public class PeersResource {
|
||||
boolean force = true;
|
||||
List<BlockSummaryData> peerBlockSummaries = new ArrayList<>();
|
||||
|
||||
SynchronizationResult findCommonBlockResult = Synchronizer.getInstance().fetchSummariesFromCommonBlock(repository, targetPeer, ourInitialHeight, force, peerBlockSummaries);
|
||||
SynchronizationResult findCommonBlockResult = Synchronizer.getInstance().fetchSummariesFromCommonBlock(repository, targetPeer, ourInitialHeight, force, peerBlockSummaries, true);
|
||||
if (findCommonBlockResult != SynchronizationResult.OK)
|
||||
return null;
|
||||
|
||||
@@ -338,4 +357,36 @@ public class PeersResource {
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/summary")
|
||||
@Operation(
|
||||
summary = "Returns total inbound and outbound connections for connected peers",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
array = @ArraySchema(
|
||||
schema = @Schema(
|
||||
implementation = PeersSummary.class
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
public PeersSummary peersSummary() {
|
||||
PeersSummary peersSummary = new PeersSummary();
|
||||
|
||||
List<Peer> connectedPeers = Network.getInstance().getImmutableConnectedPeers().stream().collect(Collectors.toList());
|
||||
for (Peer peer : connectedPeers) {
|
||||
if (!peer.isOutbound()) {
|
||||
peersSummary.inboundConnections++;
|
||||
}
|
||||
else {
|
||||
peersSummary.outboundConnections++;
|
||||
}
|
||||
}
|
||||
return peersSummary;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
212
src/main/java/org/qortal/api/resource/RenderResource.java
Normal file
@@ -0,0 +1,212 @@
|
||||
package org.qortal.api.resource;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.ws.rs.*;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import java.io.*;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.api.ApiError;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.Security;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.arbitrary.*;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataRenderManager;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.*;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
|
||||
@Path("/render")
|
||||
@Tag(name = "Render")
|
||||
public class RenderResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(RenderResource.class);
|
||||
|
||||
@Context HttpServletRequest request;
|
||||
@Context HttpServletResponse response;
|
||||
@Context ServletContext context;
|
||||
|
||||
@POST
|
||||
@Path("/preview")
|
||||
@Operation(
|
||||
summary = "Generate preview URL based on a user-supplied path and service",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string", example = "/Users/user/Documents/MyStaticWebsite"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "a temporary URL to preview the website",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public String preview(@HeaderParam(Security.API_KEY_HEADER) String apiKey, String directoryPath) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Method method = Method.PUT;
|
||||
Compression compression = Compression.ZIP;
|
||||
|
||||
ArbitraryDataWriter arbitraryDataWriter = new ArbitraryDataWriter(Paths.get(directoryPath),
|
||||
null, Service.WEBSITE, null, method, compression,
|
||||
null, null, null, null);
|
||||
try {
|
||||
arbitraryDataWriter.save();
|
||||
} catch (IOException | DataException | InterruptedException | MissingDataException e) {
|
||||
LOGGER.info("Unable to create arbitrary data file: {}", e.getMessage());
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE);
|
||||
} catch (RuntimeException e) {
|
||||
LOGGER.info("Unable to create arbitrary data file: {}", e.getMessage());
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
}
|
||||
|
||||
ArbitraryDataFile arbitraryDataFile = arbitraryDataWriter.getArbitraryDataFile();
|
||||
if (arbitraryDataFile != null) {
|
||||
String digest58 = arbitraryDataFile.digest58();
|
||||
if (digest58 != null) {
|
||||
return "http://localhost:12393/render/hash/" + digest58 + "?secret=" + Base58.encode(arbitraryDataFile.getSecret());
|
||||
}
|
||||
}
|
||||
return "Unable to generate preview URL";
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/authorize/{resourceId}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean authorizeResource(@HeaderParam(Security.API_KEY_HEADER) String apiKey, @PathParam("resourceId") String resourceId) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Security.disallowLoopbackRequestsIfAuthBypassEnabled(request);
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, null, null);
|
||||
ArbitraryDataRenderManager.getInstance().addToAuthorizedResources(resource);
|
||||
return true;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("authorize/{service}/{resourceId}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean authorizeResource(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("service") Service service,
|
||||
@PathParam("resourceId") String resourceId) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Security.disallowLoopbackRequestsIfAuthBypassEnabled(request);
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, service, null);
|
||||
ArbitraryDataRenderManager.getInstance().addToAuthorizedResources(resource);
|
||||
return true;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("authorize/{service}/{resourceId}/{identifier}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public boolean authorizeResource(@HeaderParam(Security.API_KEY_HEADER) String apiKey,
|
||||
@PathParam("service") Service service,
|
||||
@PathParam("resourceId") String resourceId,
|
||||
@PathParam("identifier") String identifier) {
|
||||
Security.checkApiCallAllowed(request);
|
||||
Security.disallowLoopbackRequestsIfAuthBypassEnabled(request);
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(resourceId, null, service, identifier);
|
||||
ArbitraryDataRenderManager.getInstance().addToAuthorizedResources(resource);
|
||||
return true;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/signature/{signature}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexBySignature(@PathParam("signature") String signature,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, signature, Service.WEBSITE, null);
|
||||
return this.get(signature, ResourceIdType.SIGNATURE, null, "/", null, "/render/signature", true, true, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/signature/{signature}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathBySignature(@PathParam("signature") String signature, @PathParam("path") String inPath,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, signature, Service.WEBSITE, null);
|
||||
return this.get(signature, ResourceIdType.SIGNATURE, null, inPath,null, "/render/signature", true, true, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/hash/{hash}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexByHash(@PathParam("hash") String hash58, @QueryParam("secret") String secret58,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, hash58, Service.WEBSITE, null);
|
||||
return this.get(hash58, ResourceIdType.FILE_HASH, Service.WEBSITE, "/", secret58, "/render/hash", true, false, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/hash/{hash}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathByHash(@PathParam("hash") String hash58, @PathParam("path") String inPath,
|
||||
@QueryParam("secret") String secret58,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, hash58, Service.WEBSITE, null);
|
||||
return this.get(hash58, ResourceIdType.FILE_HASH, Service.WEBSITE, inPath, secret58, "/render/hash", true, false, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{service}/{name}/{path:.*}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getPathByName(@PathParam("service") Service service,
|
||||
@PathParam("name") String name,
|
||||
@PathParam("path") String inPath,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, name, service, null);
|
||||
String prefix = String.format("/render/%s", service);
|
||||
return this.get(name, ResourceIdType.NAME, service, inPath, null, prefix, true, true, theme);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{service}/{name}")
|
||||
@SecurityRequirement(name = "apiKey")
|
||||
public HttpServletResponse getIndexByName(@PathParam("service") Service service,
|
||||
@PathParam("name") String name,
|
||||
@QueryParam("theme") String theme) {
|
||||
Security.requirePriorAuthorization(request, name, service, null);
|
||||
String prefix = String.format("/render/%s", service);
|
||||
return this.get(name, ResourceIdType.NAME, service, "/", null, prefix, true, true, theme);
|
||||
}
|
||||
|
||||
|
||||
|
||||
private HttpServletResponse get(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
|
||||
String secret58, String prefix, boolean usePrefix, boolean async, String theme) {
|
||||
|
||||
ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(resourceId, resourceIdType, service, inPath,
|
||||
secret58, prefix, usePrefix, async, request, response, context);
|
||||
|
||||
if (theme != null) {
|
||||
renderer.setTheme(theme);
|
||||
}
|
||||
return renderer.render();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -9,7 +9,10 @@ import io.swagger.v3.oas.annotations.parameters.RequestBody;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
@@ -30,6 +33,8 @@ import org.qortal.api.ApiException;
|
||||
import org.qortal.api.ApiExceptionFactory;
|
||||
import org.qortal.api.model.SimpleTransactionSignRequest;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.LiteNode;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.transaction.TransactionData;
|
||||
import org.qortal.globalization.Translator;
|
||||
import org.qortal.repository.DataException;
|
||||
@@ -44,6 +49,7 @@ import org.qortal.transform.transaction.TransactionTransformer;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
import com.google.common.primitives.Bytes;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
@Path("/transactions")
|
||||
@Tag(name = "Transactions")
|
||||
@@ -348,7 +354,7 @@ public class TransactionsResource {
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<byte[]> signatures = repository.getTransactionRepository().getSignaturesMatchingCriteria(startBlock, blockLimit, txGroupId,
|
||||
txTypes, null, address, confirmationStatus, limit, offset, reverse);
|
||||
txTypes, null, null, address, confirmationStatus, limit, offset, reverse);
|
||||
|
||||
// Expand signatures to transactions
|
||||
List<TransactionData> transactions = new ArrayList<>(signatures.size());
|
||||
@@ -363,6 +369,150 @@ public class TransactionsResource {
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/address/{address}")
|
||||
@Operation(
|
||||
summary = "Returns transactions for given address",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "transactions",
|
||||
content = @Content(
|
||||
array = @ArraySchema(
|
||||
schema = @Schema(
|
||||
implementation = TransactionData.class
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.INVALID_ADDRESS, ApiError.REPOSITORY_ISSUE})
|
||||
public List<TransactionData> getAddressTransactions(@PathParam("address") String address,
|
||||
@Parameter(ref = "limit") @QueryParam("limit") Integer limit,
|
||||
@Parameter(ref = "offset") @QueryParam("offset") Integer offset,
|
||||
@Parameter(ref = "reverse") @QueryParam("reverse") Boolean reverse) {
|
||||
if (!Crypto.isValidAddress(address)) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_ADDRESS);
|
||||
}
|
||||
|
||||
if (limit == null) {
|
||||
limit = 0;
|
||||
}
|
||||
if (offset == null) {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
List<TransactionData> transactions;
|
||||
|
||||
if (Settings.getInstance().isLite()) {
|
||||
// Fetch from network
|
||||
transactions = LiteNode.getInstance().fetchAccountTransactions(address, limit, offset);
|
||||
|
||||
// Sort the data, since we can't guarantee the order that a peer sent it in
|
||||
if (reverse != null && reverse) { // guard against an omitted (null) "reverse" query param
|
||||
transactions.sort(Comparator.comparingLong(TransactionData::getTimestamp).reversed());
|
||||
} else {
|
||||
transactions.sort(Comparator.comparingLong(TransactionData::getTimestamp));
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Fetch from local db
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
List<byte[]> signatures = repository.getTransactionRepository().getSignaturesMatchingCriteria(null, null, null,
|
||||
null, null, null, address, TransactionsResource.ConfirmationStatus.CONFIRMED, limit, offset, reverse);
|
||||
|
||||
// Expand signatures to transactions
|
||||
transactions = new ArrayList<>(signatures.size());
|
||||
for (byte[] signature : signatures) {
|
||||
transactions.add(repository.getTransactionRepository().fromSignature(signature));
|
||||
}
|
||||
} catch (ApiException e) {
|
||||
throw e;
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
}
|
||||
}
|
||||
|
||||
return transactions;
|
||||
}
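For reference, a minimal sketch (not part of this changeset) of calling the endpoint above with Java 11's HttpClient. The localhost host and port 12391 are assumptions about a default node configuration, and the address is a placeholder.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class AddressTransactionsExample {
    public static void main(String[] args) throws Exception {
        String address = "REPLACE_WITH_QORTAL_ADDRESS";
        // limit/offset/reverse map directly onto the query parameters declared above
        URI uri = URI.create("http://localhost:12391/transactions/address/" + address
                + "?limit=20&offset=0&reverse=true");

        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder(uri).GET().build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());

        // On a lite node the results come from peers and are sorted locally by timestamp;
        // on a full node they come from the local repository (confirmed transactions only).
        System.out.println(response.body());
    }
}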
|
||||
|
||||
@GET
|
||||
@Path("/unitfee")
|
||||
@Operation(
|
||||
summary = "Get transaction unit fee",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "number"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({
|
||||
ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
public long getTransactionUnitFee(@QueryParam("txType") TransactionType txType,
|
||||
@QueryParam("timestamp") Long timestamp,
|
||||
@QueryParam("level") Integer accountLevel) {
|
||||
try {
|
||||
if (timestamp == null) {
|
||||
timestamp = NTP.getTime();
|
||||
}
|
||||
|
||||
Constructor<?> constructor = txType.constructor;
|
||||
Transaction transaction = (Transaction) constructor.newInstance(null, null);
|
||||
// FUTURE: add accountLevel parameter to transaction.getUnitFee() if needed
|
||||
return transaction.getUnitFee(timestamp);
|
||||
|
||||
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_CRITERIA, e);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/fee")
|
||||
@Operation(
|
||||
summary = "Get recommended fee for supplied transaction data",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
@ApiErrors({
|
||||
ApiError.INVALID_CRITERIA, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
public long getRecommendedTransactionFee(String rawInputBytes58) {
|
||||
byte[] rawInputBytes = Base58.decode(rawInputBytes58);
|
||||
if (rawInputBytes.length == 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.JSON);
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Append null signature on the end before transformation
|
||||
byte[] rawBytes = Bytes.concat(rawInputBytes, new byte[TransactionTransformer.SIGNATURE_LENGTH]);
|
||||
|
||||
TransactionData transactionData = TransactionTransformer.fromBytes(rawBytes);
|
||||
if (transactionData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
|
||||
Transaction transaction = Transaction.fromData(repository, transactionData);
|
||||
return transaction.calcRecommendedFee();
|
||||
|
||||
} catch (DataException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.REPOSITORY_ISSUE, e);
|
||||
} catch (TransformationException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSFORMATION_ERROR, e);
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/creator/{publickey}")
|
||||
@Operation(
|
||||
@@ -418,32 +568,83 @@ public class TransactionsResource {
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/sign")
|
||||
@Path("/convert")
|
||||
@Operation(
|
||||
summary = "Sign a raw, unsigned transaction",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = SimpleTransactionSignRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "raw, signed transaction encoded in Base58",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
summary = "Convert transaction bytes into bytes for signing",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string",
|
||||
description = "raw, unsigned transaction in base58 encoding",
|
||||
example = "raw transaction base58"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "raw, unsigned transaction encoded in Base58, ready for signing",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({
|
||||
ApiError.NON_PRODUCTION, ApiError.INVALID_PRIVATE_KEY, ApiError.TRANSFORMATION_ERROR
|
||||
ApiError.NON_PRODUCTION, ApiError.TRANSFORMATION_ERROR
|
||||
})
|
||||
public String convertTransactionForSigning(String rawInputBytes58) {
|
||||
byte[] rawInputBytes = Base58.decode(rawInputBytes58);
|
||||
if (rawInputBytes.length == 0)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.JSON);
|
||||
|
||||
try {
|
||||
// Append null signature on the end before transformation
|
||||
byte[] rawBytes = Bytes.concat(rawInputBytes, new byte[TransactionTransformer.SIGNATURE_LENGTH]);
|
||||
|
||||
TransactionData transactionData = TransactionTransformer.fromBytes(rawBytes);
|
||||
if (transactionData == null)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
|
||||
byte[] convertedBytes = TransactionTransformer.toBytesForSigning(transactionData);
|
||||
|
||||
return Base58.encode(convertedBytes);
|
||||
} catch (TransformationException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.TRANSFORMATION_ERROR, e);
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/sign")
|
||||
@Operation(
|
||||
summary = "Sign a raw, unsigned transaction",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.APPLICATION_JSON,
|
||||
schema = @Schema(
|
||||
implementation = SimpleTransactionSignRequest.class
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "raw, signed transaction encoded in Base58",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({
|
||||
ApiError.NON_PRODUCTION, ApiError.INVALID_PRIVATE_KEY, ApiError.TRANSFORMATION_ERROR
|
||||
})
|
||||
public String signTransaction(SimpleTransactionSignRequest signRequest) {
|
||||
if (Settings.getInstance().isApiRestricted())
|
||||
@@ -507,7 +708,10 @@ public class TransactionsResource {
|
||||
ApiError.BLOCKCHAIN_NEEDS_SYNC, ApiError.INVALID_SIGNATURE, ApiError.INVALID_DATA, ApiError.TRANSFORMATION_ERROR, ApiError.REPOSITORY_ISSUE
|
||||
})
|
||||
public String processTransaction(String rawBytes58) {
|
||||
if (!Controller.getInstance().isUpToDate())
|
||||
// Only allow a transaction to be processed if our latest block is less than 30 minutes old
|
||||
// If older than this, we should first wait until the blockchain is synced
|
||||
final Long minLatestBlockTimestamp = NTP.getTime() - (30 * 60 * 1000L);
|
||||
if (!Controller.getInstance().isUpToDate(minLatestBlockTimestamp))
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.BLOCKCHAIN_NEEDS_SYNC);
|
||||
|
||||
byte[] rawBytes = Base58.decode(rawBytes58);
|
||||
|
||||
@@ -33,7 +33,6 @@ import org.qortal.transaction.Transaction.TransactionType;
|
||||
import org.qortal.transform.Transformer;
|
||||
import org.qortal.transform.transaction.TransactionTransformer;
|
||||
import org.qortal.transform.transaction.TransactionTransformer.Transformation;
|
||||
import org.qortal.utils.BIP39;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
import com.google.common.hash.HashCode;
|
||||
@@ -195,123 +194,6 @@ public class UtilsResource {
|
||||
return Base58.encode(random);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/mnemonic")
|
||||
@Operation(
|
||||
summary = "Generate 12-word BIP39 mnemonic",
|
||||
description = "Optionally pass 16-byte, base58-encoded entropy or entropy will be internally generated.<br>"
|
||||
+ "Example entropy input: YcVfxkQb6JRzqk5kF2tNLv",
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "mnemonic",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.NON_PRODUCTION, ApiError.INVALID_DATA})
|
||||
public String getMnemonic(@QueryParam("entropy") String suppliedEntropy) {
|
||||
if (Settings.getInstance().isApiRestricted())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.NON_PRODUCTION);
|
||||
|
||||
/*
|
||||
* BIP39 word lists have 2048 entries so can be represented by 11 bits.
|
||||
* UUID (128bits) and another 4 bits gives 132 bits.
|
||||
* 132 bits, divided by 11, gives 12 words.
|
||||
*/
|
||||
byte[] entropy;
|
||||
if (suppliedEntropy != null) {
|
||||
// Use caller-supplied entropy input
|
||||
try {
|
||||
entropy = Base58.decode(suppliedEntropy);
|
||||
} catch (NumberFormatException e) {
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
}
|
||||
|
||||
// Must be 16-bytes
|
||||
if (entropy.length != 16)
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.INVALID_DATA);
|
||||
} else {
|
||||
// Generate entropy internally
|
||||
UUID uuid = UUID.randomUUID();
|
||||
|
||||
byte[] uuidMSB = Longs.toByteArray(uuid.getMostSignificantBits());
|
||||
byte[] uuidLSB = Longs.toByteArray(uuid.getLeastSignificantBits());
|
||||
entropy = Bytes.concat(uuidMSB, uuidLSB);
|
||||
}
|
||||
|
||||
// Use SHA256 to generate more bits
|
||||
byte[] hash = Crypto.digest(entropy);
|
||||
|
||||
// Append first 4 bits from hash to end. (Actually 8 bits but we only use 4).
|
||||
byte checksum = (byte) (hash[0] & 0xf0);
|
||||
entropy = Bytes.concat(entropy, new byte[] {
|
||||
checksum
|
||||
});
|
||||
|
||||
return BIP39.encode(entropy, "en");
|
||||
}
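The comment above relies on some arithmetic: a 2048-entry word list encodes 11 bits per word, and 128 bits of UUID entropy plus a 4-bit checksum gives 132 bits, which is exactly 12 words. A self-contained sketch using only the JDK, mirroring the checksum-nybble step rather than reusing the node's Crypto/BIP39 helpers:

import java.security.MessageDigest;
import java.util.UUID;

public class Bip39EntropySketch {
    public static void main(String[] args) throws Exception {
        // 128 bits of entropy from a random UUID, as in getMnemonic()
        UUID uuid = UUID.randomUUID();
        byte[] entropy = new byte[16];
        for (int i = 0; i < 8; i++) {
            entropy[i] = (byte) (uuid.getMostSignificantBits() >>> (56 - 8 * i));
            entropy[8 + i] = (byte) (uuid.getLeastSignificantBits() >>> (56 - 8 * i));
        }

        // Checksum nybble: top 4 bits of SHA-256(entropy), appended before encoding
        byte[] hash = MessageDigest.getInstance("SHA-256").digest(entropy);
        byte checksumNybble = (byte) (hash[0] & 0xf0);

        // 128 entropy bits + 4 checksum bits = 132 bits = 12 words of 11 bits each
        int totalBits = entropy.length * 8 + 4;
        System.out.println("total bits: " + totalBits + ", words: " + (totalBits / 11));
        System.out.printf("checksum nybble: 0x%02x%n", checksumNybble & 0xf0);
    }
}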
|
||||
|
||||
@POST
|
||||
@Path("/mnemonic")
|
||||
@Operation(
|
||||
summary = "Calculate binary entropy from 12-word BIP39 mnemonic",
|
||||
description = "Returns the base58-encoded binary form, or \"false\" if mnemonic is invalid.",
|
||||
requestBody = @RequestBody(
|
||||
required = true,
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
),
|
||||
responses = {
|
||||
@ApiResponse(
|
||||
description = "entropy in base58",
|
||||
content = @Content(
|
||||
mediaType = MediaType.TEXT_PLAIN,
|
||||
schema = @Schema(
|
||||
type = "string"
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@ApiErrors({ApiError.NON_PRODUCTION})
|
||||
public String fromMnemonic(String mnemonic) {
|
||||
if (Settings.getInstance().isApiRestricted())
|
||||
throw ApiExceptionFactory.INSTANCE.createException(request, ApiError.NON_PRODUCTION);
|
||||
|
||||
if (mnemonic.isEmpty())
|
||||
return "false";
|
||||
|
||||
// Strip leading/trailing whitespace if any
|
||||
mnemonic = mnemonic.trim();
|
||||
|
||||
String[] phraseWords = mnemonic.split(" ");
|
||||
if (phraseWords.length != 12)
|
||||
return "false";
|
||||
|
||||
// Convert BIP39 mnemonic to binary
|
||||
byte[] binary = BIP39.decode(phraseWords, "en");
|
||||
if (binary == null)
|
||||
return "false";
|
||||
|
||||
byte[] entropy = Arrays.copyOf(binary, 16); // 132 bits is 16.5 bytes, but we're discarding checksum nybble
|
||||
|
||||
byte checksumNybble = (byte) (binary[16] & 0xf0);
|
||||
byte[] checksum = Crypto.digest(entropy);
|
||||
if (checksumNybble != (byte) (checksum[0] & 0xf0))
|
||||
return "false";
|
||||
|
||||
return Base58.encode(entropy);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/privatekey")
|
||||
@Operation(
|
||||
|
||||
@@ -115,6 +115,9 @@ public class ChatMessagesWebSocket extends ApiWebSocket {
|
||||
}
|
||||
|
||||
private void onNotify(Session session, ChatTransactionData chatTransactionData, List<String> involvingAddresses) {
|
||||
if (chatTransactionData == null)
|
||||
return;
|
||||
|
||||
// We only want direct/non-group messages where sender/recipient match our addresses
|
||||
String recipient = chatTransactionData.getRecipient();
|
||||
if (recipient == null)
|
||||
|
||||
@@ -20,6 +20,7 @@ import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage;
|
||||
import org.eclipse.jetty.websocket.api.annotations.WebSocket;
|
||||
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.transaction.PresenceTransactionData;
|
||||
import org.qortal.data.transaction.TransactionData;
|
||||
@@ -99,13 +100,13 @@ public class PresenceWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
@Override
|
||||
public void listen(Event event) {
|
||||
// We use NewBlockEvent as a proxy for 1-minute timer
|
||||
if (!(event instanceof Controller.NewTransactionEvent) && !(event instanceof Controller.NewBlockEvent))
|
||||
// We use Synchronizer.NewChainTipEvent as a proxy for 1-minute timer
|
||||
if (!(event instanceof Controller.NewTransactionEvent) && !(event instanceof Synchronizer.NewChainTipEvent))
|
||||
return;
|
||||
|
||||
removeOldEntries();
|
||||
|
||||
if (event instanceof Controller.NewBlockEvent)
|
||||
if (event instanceof Synchronizer.NewChainTipEvent)
|
||||
// We only wanted a chance to cull old entries
|
||||
return;
|
||||
|
||||
|
||||
@@ -23,6 +23,7 @@ import org.eclipse.jetty.websocket.api.annotations.WebSocket;
|
||||
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
|
||||
import org.qortal.api.model.CrossChainOfferSummary;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.Synchronizer;
|
||||
import org.qortal.crosschain.SupportedBlockchain;
|
||||
import org.qortal.crosschain.ACCT;
|
||||
import org.qortal.crosschain.AcctMode;
|
||||
@@ -80,10 +81,10 @@ public class TradeOffersWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
@Override
|
||||
public void listen(Event event) {
|
||||
if (!(event instanceof Controller.NewBlockEvent))
|
||||
if (!(event instanceof Synchronizer.NewChainTipEvent))
|
||||
return;
|
||||
|
||||
BlockData blockData = ((Controller.NewBlockEvent) event).getBlockData();
|
||||
BlockData blockData = ((Synchronizer.NewChainTipEvent) event).getNewChainTip();
|
||||
|
||||
// Process any new info
|
||||
|
||||
|
||||
@@ -0,0 +1,137 @@
|
||||
package org.qortal.api.websocket;
|
||||
|
||||
import org.eclipse.jetty.websocket.api.Session;
|
||||
import org.eclipse.jetty.websocket.api.annotations.*;
|
||||
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.controller.tradebot.TradeBot;
|
||||
import org.qortal.data.network.TradePresenceData;
|
||||
import org.qortal.event.Event;
|
||||
import org.qortal.event.EventBus;
|
||||
import org.qortal.event.Listener;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.*;
|
||||
|
||||
@WebSocket
|
||||
@SuppressWarnings("serial")
|
||||
public class TradePresenceWebSocket extends ApiWebSocket implements Listener {
|
||||
|
||||
/** Map key is public key in base58, map value is trade presence */
|
||||
private static final Map<String, TradePresenceData> currentEntries = Collections.synchronizedMap(new HashMap<>());
|
||||
|
||||
@Override
|
||||
public void configure(WebSocketServletFactory factory) {
|
||||
factory.register(TradePresenceWebSocket.class);
|
||||
|
||||
populateCurrentInfo();
|
||||
|
||||
EventBus.INSTANCE.addListener(this::listen);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void listen(Event event) {
|
||||
// XXX - Suggest we change this to something like Synchronizer.NewChainTipEvent?
|
||||
// We use NewBlockEvent as a proxy for 1-minute timer
|
||||
if (!(event instanceof TradeBot.TradePresenceEvent) && !(event instanceof Controller.NewBlockEvent))
|
||||
return;
|
||||
|
||||
removeOldEntries();
|
||||
|
||||
if (event instanceof Controller.NewBlockEvent)
|
||||
// We only wanted a chance to cull old entries
|
||||
return;
|
||||
|
||||
TradePresenceData tradePresence = ((TradeBot.TradePresenceEvent) event).getTradePresenceData();
|
||||
|
||||
boolean somethingChanged = mergePresence(tradePresence);
|
||||
|
||||
if (!somethingChanged)
|
||||
// nothing changed
|
||||
return;
|
||||
|
||||
List<TradePresenceData> tradePresences = Collections.singletonList(tradePresence);
|
||||
|
||||
// Notify sessions
|
||||
for (Session session : getSessions()) {
|
||||
sendTradePresences(session, tradePresences);
|
||||
}
|
||||
}
|
||||
|
||||
@OnWebSocketConnect
|
||||
@Override
|
||||
public void onWebSocketConnect(Session session) {
|
||||
Map<String, List<String>> queryParams = session.getUpgradeRequest().getParameterMap();
|
||||
|
||||
List<TradePresenceData> tradePresences;
|
||||
|
||||
synchronized (currentEntries) {
|
||||
tradePresences = List.copyOf(currentEntries.values());
|
||||
}
|
||||
|
||||
if (!sendTradePresences(session, tradePresences)) {
|
||||
session.close(4002, "websocket issue");
|
||||
return;
|
||||
}
|
||||
|
||||
super.onWebSocketConnect(session);
|
||||
}
|
||||
|
||||
@OnWebSocketClose
|
||||
@Override
|
||||
public void onWebSocketClose(Session session, int statusCode, String reason) {
|
||||
// clean up
|
||||
super.onWebSocketClose(session, statusCode, reason);
|
||||
}
|
||||
|
||||
@OnWebSocketError
|
||||
public void onWebSocketError(Session session, Throwable throwable) {
|
||||
/* ignored */
|
||||
}
|
||||
|
||||
@OnWebSocketMessage
|
||||
public void onWebSocketMessage(Session session, String message) {
|
||||
/* ignored */
|
||||
}
|
||||
|
||||
private boolean sendTradePresences(Session session, List<TradePresenceData> tradePresences) {
|
||||
try {
|
||||
StringWriter stringWriter = new StringWriter();
|
||||
marshall(stringWriter, tradePresences);
|
||||
|
||||
String output = stringWriter.toString();
|
||||
session.getRemote().sendStringByFuture(output);
|
||||
} catch (IOException e) {
|
||||
// No output this time?
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static void populateCurrentInfo() {
|
||||
// We want ALL trade presences
|
||||
TradeBot.getInstance().getAllTradePresences().stream()
|
||||
.forEach(TradePresenceWebSocket::mergePresence);
|
||||
}
|
||||
|
||||
/** Merge trade presence into cache of current entries, returns true if cache was updated. */
|
||||
private static boolean mergePresence(TradePresenceData tradePresence) {
|
||||
// Put/replace for this publickey making sure we keep newest timestamp
|
||||
String pubKey58 = Base58.encode(tradePresence.getPublicKey());
|
||||
|
||||
TradePresenceData newEntry = currentEntries.compute(pubKey58, (k, v) -> v == null || v.getTimestamp() < tradePresence.getTimestamp() ? tradePresence : v);
|
||||
|
||||
return newEntry == tradePresence;
|
||||
}
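The compute-based merge above keeps whichever entry carries the newest timestamp and reports whether the cache actually changed. A stripped-down illustration with plain JDK types (not the node's own classes):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class KeepNewestSketch {
    // Key mapped to the newest timestamp seen so far for that key
    private static final Map<String, Long> newestSeen = new ConcurrentHashMap<>();

    /** Returns true only when the supplied timestamp created or replaced the cached one. */
    static boolean merge(String key, long timestamp) {
        Long kept = newestSeen.compute(key, (k, existing) ->
                existing == null || existing < timestamp ? timestamp : existing);
        return kept == timestamp; // same "did my entry win?" test as mergePresence()
    }

    public static void main(String[] args) {
        System.out.println(merge("peerA", 100)); // true: first entry wins
        System.out.println(merge("peerA", 90));  // false: older timestamp is ignored
        System.out.println(merge("peerA", 120)); // true: newer timestamp replaces
    }
}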
|
||||
|
||||
private static void removeOldEntries() {
|
||||
long now = NTP.getTime();
|
||||
|
||||
currentEntries.values().removeIf(v -> v.getTimestamp() < now);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,101 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class ArbitraryDataBuildQueueItem extends ArbitraryDataResource {
|
||||
|
||||
private final Long creationTimestamp;
|
||||
private Long buildStartTimestamp = null;
|
||||
private Long buildEndTimestamp = null;
|
||||
private Integer priority = 0;
|
||||
private boolean failed = false;
|
||||
|
||||
private static final int HIGH_PRIORITY_THRESHOLD = 5;
|
||||
|
||||
/* The maximum amount of time to spend on a single build */
|
||||
// TODO: interrupt an in-progress build
|
||||
public static long BUILD_TIMEOUT = 60*1000L; // 60 seconds
|
||||
/* The amount of time to remember that a build has failed, to avoid retries */
|
||||
public static long FAILURE_TIMEOUT = 5*60*1000L; // 5 minutes
|
||||
|
||||
public ArbitraryDataBuildQueueItem(String resourceId, ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
super(resourceId, resourceIdType, service, identifier);
|
||||
|
||||
this.creationTimestamp = NTP.getTime();
|
||||
}
|
||||
|
||||
public void prepareForBuild() {
|
||||
this.buildStartTimestamp = NTP.getTime();
|
||||
}
|
||||
|
||||
public void build() throws IOException, DataException, MissingDataException {
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
this.buildStartTimestamp = null;
|
||||
throw new DataException("NTP time hasn't synced yet");
|
||||
}
|
||||
|
||||
if (this.buildStartTimestamp == null) {
|
||||
this.buildStartTimestamp = now;
|
||||
}
|
||||
ArbitraryDataReader arbitraryDataReader =
|
||||
new ArbitraryDataReader(this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
|
||||
try {
|
||||
arbitraryDataReader.loadSynchronously(true);
|
||||
} finally {
|
||||
this.buildEndTimestamp = NTP.getTime();
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isBuilding() {
|
||||
return this.buildStartTimestamp != null;
|
||||
}
|
||||
|
||||
public boolean isQueued() {
|
||||
return this.buildStartTimestamp == null;
|
||||
}
|
||||
|
||||
public boolean hasReachedBuildTimeout(Long now) {
|
||||
if (now == null || this.creationTimestamp == null) {
|
||||
return true;
|
||||
}
|
||||
return now - this.creationTimestamp > BUILD_TIMEOUT;
|
||||
}
|
||||
|
||||
public boolean hasReachedFailureTimeout(Long now) {
|
||||
if (now == null || this.buildStartTimestamp == null) {
|
||||
return true;
|
||||
}
|
||||
return now - this.buildStartTimestamp > FAILURE_TIMEOUT;
|
||||
}
|
||||
|
||||
public Long getBuildStartTimestamp() {
|
||||
return this.buildStartTimestamp;
|
||||
}
|
||||
|
||||
public Integer getPriority() {
|
||||
if (this.priority != null) {
|
||||
return this.priority;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
public void setPriority(Integer priority) {
|
||||
this.priority = priority;
|
||||
}
|
||||
|
||||
public boolean isHighPriority() {
|
||||
return this.priority >= HIGH_PRIORITY_THRESHOLD;
|
||||
}
|
||||
|
||||
public void setFailed(boolean failed) {
|
||||
this.failed = failed;
|
||||
}
|
||||
}
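A hedged sketch of how a queue processor might drive this class. The real build queue lives elsewhere in the node and is not part of this diff, so the control flow below is illustrative only; the name is a placeholder.

import java.io.IOException;
import org.qortal.arbitrary.ArbitraryDataBuildQueueItem;
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
import org.qortal.arbitrary.exception.MissingDataException;
import org.qortal.arbitrary.misc.Service;
import org.qortal.repository.DataException;
import org.qortal.utils.NTP;

class BuildQueueSketch {
    static void buildOnce(String registeredName) {
        ArbitraryDataBuildQueueItem item =
                new ArbitraryDataBuildQueueItem(registeredName, ResourceIdType.NAME, Service.WEBSITE, null);

        Long now = NTP.getTime();
        if (item.isQueued() && !item.hasReachedBuildTimeout(now)) {
            try {
                item.prepareForBuild(); // records the build start timestamp
                item.build();           // loads the resource synchronously
            } catch (MissingDataException | IOException | DataException e) {
                item.setFailed(true);   // remembered for FAILURE_TIMEOUT to avoid hot retries
            }
        }
    }
}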
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataBuilder.java (new file, 280 lines)
@@ -0,0 +1,280 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataCache;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.Method;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class ArbitraryDataBuilder {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataBuilder.class);
|
||||
|
||||
private final String name;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
|
||||
private boolean canRequestMissingFiles;
|
||||
|
||||
private List<ArbitraryTransactionData> transactions;
|
||||
private ArbitraryTransactionData latestPutTransaction;
|
||||
private final List<Path> paths;
|
||||
private byte[] latestSignature;
|
||||
private Path finalPath;
|
||||
private int layerCount;
|
||||
|
||||
public ArbitraryDataBuilder(String name, Service service, String identifier) {
|
||||
this.name = name;
|
||||
this.service = service;
|
||||
this.identifier = identifier;
|
||||
this.paths = new ArrayList<>();
|
||||
|
||||
// By default we can request missing files
|
||||
// Callers can use setCanRequestMissingFiles(false) to prevent it
|
||||
this.canRequestMissingFiles = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process transactions, but do not build anything
|
||||
* This is useful for checking the status of a given resource
|
||||
*
|
||||
* @throws DataException
|
||||
* @throws IOException
|
||||
* @throws MissingDataException
|
||||
*/
|
||||
public void process() throws DataException, IOException, MissingDataException {
|
||||
this.fetchTransactions();
|
||||
this.validateTransactions();
|
||||
this.processTransactions();
|
||||
this.validatePaths();
|
||||
this.findLatestSignature();
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the latest state of a given resource
|
||||
*
|
||||
* @throws DataException
|
||||
* @throws IOException
|
||||
* @throws MissingDataException
|
||||
*/
|
||||
public void build() throws DataException, IOException, MissingDataException {
|
||||
this.process();
|
||||
this.buildLatestState();
|
||||
this.cacheLatestSignature();
|
||||
}
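A minimal usage sketch for the two entry points above, assuming a registered name hosting a WEBSITE resource; the name and the choice to disable missing-file requests are illustrative.

import java.io.IOException;
import java.nio.file.Path;
import org.qortal.arbitrary.ArbitraryDataBuilder;
import org.qortal.arbitrary.exception.MissingDataException;
import org.qortal.arbitrary.misc.Service;
import org.qortal.repository.DataException;

class BuilderUsageSketch {
    static Path buildLatestWebsiteState(String registeredName)
            throws DataException, IOException, MissingDataException {
        ArbitraryDataBuilder builder = new ArbitraryDataBuilder(registeredName, Service.WEBSITE, null);
        builder.setCanRequestMissingFiles(false); // status-style check: read local data only
        builder.build();                          // fetch PUT + PATCHes, apply layers, cache signature

        byte[] latestSignature = builder.getLatestSignature(); // signature the state was built up to
        int layerCount = builder.getLayerCount();              // PUT plus any PATCH layers
        return builder.getFinalPath();                         // directory holding the combined state
    }
}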
|
||||
|
||||
private void fetchTransactions() throws DataException {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Get the most recent PUT
|
||||
ArbitraryTransactionData latestPut = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.name, this.service, Method.PUT, this.identifier);
|
||||
if (latestPut == null) {
|
||||
String message = String.format("Couldn't find PUT transaction for name %s, service %s and identifier %s",
|
||||
this.name, this.service, this.identifierString());
|
||||
throw new DataException(message);
|
||||
}
|
||||
this.latestPutTransaction = latestPut;
|
||||
|
||||
// Load all transactions since the latest PUT
|
||||
List<ArbitraryTransactionData> transactionDataList = repository.getArbitraryRepository()
|
||||
.getArbitraryTransactions(this.name, this.service, this.identifier, latestPut.getTimestamp());
|
||||
|
||||
this.transactions = transactionDataList;
|
||||
this.layerCount = transactionDataList.size();
|
||||
}
|
||||
}
|
||||
|
||||
private void validateTransactions() throws DataException {
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
ArbitraryTransactionData latestPut = this.latestPutTransaction;
|
||||
|
||||
if (latestPut == null) {
|
||||
throw new DataException("Cannot PATCH without existing PUT. Deploy using PUT first.");
|
||||
}
|
||||
if (latestPut.getMethod() != Method.PUT) {
|
||||
throw new DataException("Expected PUT but received PATCH");
|
||||
}
|
||||
if (transactionDataList.size() == 0) {
|
||||
throw new DataException(String.format("No transactions found for name %s, service %s, " +
|
||||
"identifier: %s, since %d", name, service, this.identifierString(), latestPut.getTimestamp()));
|
||||
}
|
||||
|
||||
// Verify that the signature of the first transaction matches the latest PUT
|
||||
ArbitraryTransactionData firstTransaction = transactionDataList.get(0);
|
||||
if (!Arrays.equals(firstTransaction.getSignature(), latestPut.getSignature())) {
|
||||
throw new DataException("First transaction did not match latest PUT transaction");
|
||||
}
|
||||
|
||||
// Remove the first transaction, as it should be the only PUT
|
||||
transactionDataList.remove(0);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
if (transactionData == null) {
|
||||
throw new DataException("Transaction not found");
|
||||
}
|
||||
if (transactionData.getMethod() != Method.PATCH) {
|
||||
throw new DataException("Expected PATCH but received PUT");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void processTransactions() throws IOException, DataException, MissingDataException {
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
int count = 0;
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
LOGGER.trace("Found arbitrary transaction {}", Base58.encode(transactionData.getSignature()));
|
||||
count++;
|
||||
|
||||
// Build the data file, overwriting anything that was previously there
|
||||
String sig58 = Base58.encode(transactionData.getSignature());
|
||||
ArbitraryDataReader arbitraryDataReader = new ArbitraryDataReader(sig58, ResourceIdType.TRANSACTION_DATA,
|
||||
this.service, this.identifier);
|
||||
arbitraryDataReader.setTransactionData(transactionData);
|
||||
arbitraryDataReader.setCanRequestMissingFiles(this.canRequestMissingFiles);
|
||||
boolean hasMissingData = false;
|
||||
try {
|
||||
arbitraryDataReader.loadSynchronously(true);
|
||||
}
|
||||
catch (MissingDataException e) {
|
||||
hasMissingData = true;
|
||||
}
|
||||
|
||||
// Handle missing data
|
||||
if (hasMissingData) {
|
||||
if (!this.canRequestMissingFiles) {
|
||||
throw new MissingDataException("Files are missing but were not requested.");
|
||||
}
|
||||
if (count == transactionDataList.size()) {
|
||||
// This is the final transaction in the list, so we need to fail
|
||||
throw new MissingDataException("Requesting missing files. Please wait and try again.");
|
||||
}
|
||||
// There are more transactions, so we should process them to give them the opportunity to request data
|
||||
continue;
|
||||
}
|
||||
|
||||
// By this point we should have all data needed to build the layers
|
||||
Path path = arbitraryDataReader.getFilePath();
|
||||
if (path == null) {
|
||||
throw new DataException(String.format("Null path when building data from transaction %s", sig58));
|
||||
}
|
||||
if (!Files.exists(path)) {
|
||||
throw new DataException(String.format("Path doesn't exist when building data from transaction %s", sig58));
|
||||
}
|
||||
paths.add(path);
|
||||
}
|
||||
}
|
||||
|
||||
private void findLatestSignature() throws DataException {
|
||||
if (this.transactions.size() == 0) {
|
||||
throw new DataException("Unable to find latest signature from empty transaction list");
|
||||
}
|
||||
|
||||
// Find the latest signature
|
||||
ArbitraryTransactionData latestTransaction = this.transactions.get(this.transactions.size() - 1);
|
||||
if (latestTransaction == null) {
|
||||
throw new DataException("Unable to find latest signature from null transaction");
|
||||
}
|
||||
|
||||
this.latestSignature = latestTransaction.getSignature();
|
||||
}
|
||||
|
||||
private void validatePaths() throws DataException {
|
||||
if (this.paths.isEmpty()) {
|
||||
throw new DataException("No paths available from which to build latest state");
|
||||
}
|
||||
}
|
||||
|
||||
private void buildLatestState() throws IOException, DataException {
|
||||
if (this.paths.size() == 1) {
|
||||
// No patching needed
|
||||
this.finalPath = this.paths.get(0);
|
||||
return;
|
||||
}
|
||||
|
||||
Path pathBefore = this.paths.get(0);
|
||||
boolean validateAllLayers = Settings.getInstance().shouldValidateAllDataLayers();
|
||||
|
||||
// Loop from the second path onwards
|
||||
for (int i=1; i<paths.size(); i++) {
|
||||
String identifierPrefix = this.identifier != null ? String.format("[%s]", this.identifier) : "";
|
||||
LOGGER.debug(String.format("[%s][%s]%s Applying layer %d...", this.service, this.name, identifierPrefix, i));
|
||||
|
||||
// Create an instance of ArbitraryDataCombiner
|
||||
Path pathAfter = this.paths.get(i);
|
||||
byte[] signatureBefore = this.transactions.get(i-1).getSignature();
|
||||
ArbitraryDataCombiner combiner = new ArbitraryDataCombiner(pathBefore, pathAfter, signatureBefore);
|
||||
|
||||
// We only want to validate this layer's hash if it's the final layer, or if the settings
|
||||
// indicate that we should validate interim layers too
|
||||
boolean isFinalLayer = (i == paths.size() - 1);
|
||||
combiner.setShouldValidateHashes(isFinalLayer || validateAllLayers);
|
||||
|
||||
// Now combine this layer with the last, and set the output path to the "before" path for the next cycle
|
||||
combiner.combine();
|
||||
combiner.cleanup();
|
||||
pathBefore = combiner.getFinalPath();
|
||||
}
|
||||
this.finalPath = pathBefore;
|
||||
}
|
||||
|
||||
private void cacheLatestSignature() throws IOException, DataException {
|
||||
byte[] latestTransactionSignature = this.transactions.get(this.transactions.size()-1).getSignature();
|
||||
if (latestTransactionSignature == null) {
|
||||
throw new DataException("Missing latest transaction signature");
|
||||
}
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
throw new DataException("NTP time not synced yet");
|
||||
}
|
||||
|
||||
ArbitraryDataMetadataCache cache = new ArbitraryDataMetadataCache(this.finalPath);
|
||||
cache.setSignature(latestTransactionSignature);
|
||||
cache.setTimestamp(now); // reuse the null-checked timestamp fetched above
|
||||
cache.write();
|
||||
}
|
||||
|
||||
private String identifierString() {
|
||||
return identifier != null ? identifier : "";
|
||||
}
|
||||
|
||||
public Path getFinalPath() {
|
||||
return this.finalPath;
|
||||
}
|
||||
|
||||
public byte[] getLatestSignature() {
|
||||
return this.latestSignature;
|
||||
}
|
||||
|
||||
public int getLayerCount() {
|
||||
return this.layerCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the below setter to ensure that we only read existing
|
||||
* data without requesting any missing files.
|
||||
*
|
||||
* @param canRequestMissingFiles
|
||||
*/
|
||||
public void setCanRequestMissingFiles(boolean canRequestMissingFiles) {
|
||||
this.canRequestMissingFiles = canRequestMissingFiles;
|
||||
}
|
||||
|
||||
}
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataCache.java (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataCache;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataManager;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ArbitraryDataCache {
|
||||
|
||||
private final boolean overwrite;
|
||||
private final Path filePath;
|
||||
private final String resourceId;
|
||||
private final ResourceIdType resourceIdType;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
|
||||
public ArbitraryDataCache(Path filePath, boolean overwrite, String resourceId,
|
||||
ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
this.filePath = filePath;
|
||||
this.overwrite = overwrite;
|
||||
this.resourceId = resourceId;
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
this.identifier = identifier;
|
||||
}
|
||||
|
||||
public boolean isCachedDataAvailable() {
|
||||
return !this.shouldInvalidate();
|
||||
}
|
||||
|
||||
public boolean shouldInvalidate() {
|
||||
try {
|
||||
// If the user has requested an overwrite, always invalidate the cache
|
||||
if (this.overwrite) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Overwrite is false, but we still need to invalidate if no files exist
|
||||
if (!Files.exists(this.filePath) || FilesystemUtils.isDirectoryEmpty(this.filePath)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// We might want to overwrite anyway, if an updated version is available
|
||||
if (this.shouldInvalidateResource()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
// Something went wrong, so invalidate the cache just in case
|
||||
return true;
|
||||
}
|
||||
|
||||
// No need to invalidate the cache
|
||||
// Remember that it's up to date, so that we won't check again for a while
|
||||
ArbitraryDataManager.getInstance().addResourceToCache(this.getArbitraryDataResource());
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean shouldInvalidateResource() {
|
||||
switch (this.resourceIdType) {
|
||||
|
||||
case NAME:
|
||||
return this.shouldInvalidateName();
|
||||
|
||||
default:
|
||||
// Other resource ID types remain constant, so no need to invalidate
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean shouldInvalidateName() {
|
||||
// To avoid spamming the database too often, we shouldn't check sigs or invalidate when rate limited
|
||||
if (this.rateLimitInEffect()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the state's sig doesn't match the latest transaction's sig, we need to invalidate
|
||||
// This means that an updated layer is available
|
||||
return this.shouldInvalidateDueToSignatureMismatch();
|
||||
}
|
||||
|
||||
/**
|
||||
* rateLimitInEffect()
|
||||
*
|
||||
* When loading a website, we need to check the cache for every static asset loaded by the page.
|
||||
* This would involve asking the database for the latest transaction every time.
|
||||
* To reduce database load and page load times, we maintain an in-memory list to "rate limit" lookups.
|
||||
* Once a resource ID is in this in-memory list, we will avoid cache invalidations until it
|
||||
* has been present in the list for a certain amount of time.
|
||||
* Items are automatically removed from the list when a new arbitrary transaction arrives, so this
|
||||
* should not prevent updates from taking effect immediately.
|
||||
*
|
||||
* @return whether to avoid lookups for this resource due to the in-memory cache
|
||||
*/
|
||||
private boolean rateLimitInEffect() {
|
||||
return ArbitraryDataManager.getInstance().isResourceCached(this.getArbitraryDataResource());
|
||||
}
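The in-memory "rate limit" list described above lives in ArbitraryDataManager, which is not part of this diff. The sketch below is a hypothetical stand-in that captures the documented behaviour: remember a recent check, skip database lookups while it is remembered, and forget immediately when a new transaction arrives.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ResourceLookupRateLimiter {
    private static final long REMEMBER_FOR_MS = 60 * 1000L; // illustrative interval only

    private final Map<String, Long> lastCheckedAt = new ConcurrentHashMap<>();

    /** True if this resource was checked recently enough to skip another database lookup. */
    public boolean isRateLimited(String resourceKey, long now) {
        Long lastChecked = lastCheckedAt.get(resourceKey);
        return lastChecked != null && now - lastChecked < REMEMBER_FOR_MS;
    }

    /** Record that the resource was just checked against the database. */
    public void markChecked(String resourceKey, long now) {
        lastCheckedAt.put(resourceKey, now);
    }

    /** Called when a new arbitrary transaction arrives, so updates take effect immediately. */
    public void invalidate(String resourceKey) {
        lastCheckedAt.remove(resourceKey);
    }
}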
|
||||
|
||||
private boolean shouldInvalidateDueToSignatureMismatch() {
|
||||
|
||||
// Fetch the latest transaction for this name and service
|
||||
byte[] latestTransactionSig = this.fetchLatestTransactionSignature();
|
||||
|
||||
// Now fetch the transaction signature stored in the cache metadata
|
||||
byte[] cachedSig = this.fetchCachedSignature();
|
||||
|
||||
// If either are null, we should invalidate
|
||||
if (latestTransactionSig == null || cachedSig == null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if they match
|
||||
return !Arrays.equals(latestTransactionSig, cachedSig);
|
||||
}
|
||||
|
||||
private byte[] fetchLatestTransactionSignature() {
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Find latest transaction for name and service, with any method
|
||||
ArbitraryTransactionData latestTransaction = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.resourceId, this.service, null, this.identifier);
|
||||
|
||||
if (latestTransaction != null) {
|
||||
return latestTransaction.getSignature();
|
||||
}
|
||||
|
||||
} catch (DataException e) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private byte[] fetchCachedSignature() {
|
||||
try {
|
||||
// Fetch the transaction signature stored in the cache metadata
|
||||
ArbitraryDataMetadataCache cache = new ArbitraryDataMetadataCache(this.filePath);
|
||||
cache.read();
|
||||
return cache.getSignature();
|
||||
|
||||
} catch (IOException | DataException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private ArbitraryDataResource getArbitraryDataResource() {
|
||||
// TODO: pass an ArbitraryDataResource into the constructor, rather than individual components
|
||||
return new ArbitraryDataResource(this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
}
|
||||
|
||||
}
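A short usage sketch, assuming the caller already has a previously built state on disk and wants to know whether it can be served without rebuilding; the path handling is illustrative.

import java.nio.file.Path;
import org.qortal.arbitrary.ArbitraryDataCache;
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
import org.qortal.arbitrary.misc.Service;

class CacheCheckSketch {
    /** True if the previously built state at cachedStatePath can be served as-is. */
    static boolean canServeFromCache(Path cachedStatePath, String registeredName) {
        ArbitraryDataCache cache = new ArbitraryDataCache(cachedStatePath, false,
                registeredName, ResourceIdType.NAME, Service.WEBSITE, null);
        return cache.isCachedDataAvailable(); // false means rebuild via ArbitraryDataBuilder first
    }
}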
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataCombiner.java (new file, 170 lines)
@@ -0,0 +1,170 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InvalidObjectException;
|
||||
import java.nio.file.DirectoryNotEmptyException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ArbitraryDataCombiner {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataCombiner.class);
|
||||
|
||||
private final Path pathBefore;
|
||||
private final Path pathAfter;
|
||||
private final byte[] signatureBefore;
|
||||
private boolean shouldValidateHashes;
|
||||
private Path finalPath;
|
||||
private ArbitraryDataMetadataPatch metadata;
|
||||
|
||||
public ArbitraryDataCombiner(Path pathBefore, Path pathAfter, byte[] signatureBefore) {
|
||||
this.pathBefore = pathBefore;
|
||||
this.pathAfter = pathAfter;
|
||||
this.signatureBefore = signatureBefore;
|
||||
}
|
||||
|
||||
public void combine() throws IOException, DataException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.readMetadata();
|
||||
this.validatePreviousSignature();
|
||||
this.validatePreviousHash();
|
||||
this.process();
|
||||
this.validateCurrentHash();
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
public void cleanup() {
|
||||
this.cleanupPath(this.pathBefore);
|
||||
this.cleanupPath(this.pathAfter);
|
||||
}
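A hedged sketch of applying one PATCH layer with the methods above, in the same way ArbitraryDataBuilder.buildLatestState() does elsewhere in this changeset; the variable names are placeholders.

import java.io.IOException;
import java.nio.file.Path;
import org.qortal.arbitrary.ArbitraryDataCombiner;
import org.qortal.repository.DataException;

class CombinerUsageSketch {
    static Path applyLayer(Path previousStatePath, Path patchPath, byte[] previousSignature)
            throws IOException, DataException {
        ArbitraryDataCombiner combiner = new ArbitraryDataCombiner(previousStatePath, patchPath, previousSignature);
        combiner.setShouldValidateHashes(true); // validate the resulting hash for this (final) layer
        combiner.combine();                     // read patch metadata, check prev sig/hash, merge, check new hash
        combiner.cleanup();                     // delete inputs, but only if they live under data/temp paths
        return combiner.getFinalPath();         // becomes "pathBefore" for the next layer, if any
    }
}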
|
||||
|
||||
private void cleanupPath(Path path) {
|
||||
// Delete the given path, if it exists in our data/temp directory
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(path)) {
|
||||
File directory = new File(path.toString());
|
||||
try {
|
||||
FileUtils.deleteDirectory(directory);
|
||||
} catch (IOException e) {
|
||||
// This will eventually be cleaned up by a maintenance process, so log the error and continue
|
||||
LOGGER.debug("Unable to cleanup directory {}", directory.toString());
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the parent directory of the given path if it is empty (and exists in our data/temp directory)
|
||||
Path parentDirectory = path.getParent();
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(parentDirectory)) {
|
||||
try {
|
||||
Files.deleteIfExists(parentDirectory);
|
||||
} catch (DirectoryNotEmptyException e) {
|
||||
// No need to log anything
|
||||
} catch (IOException e) {
|
||||
// This will eventually be cleaned up by a maintenance process, so log the error and continue
|
||||
LOGGER.debug("Unable to cleanup parent directory {}", parentDirectory.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
if (this.pathBefore == null || this.pathAfter == null) {
|
||||
throw new DataException("No paths available to build patch");
|
||||
}
|
||||
if (!Files.exists(this.pathBefore) || !Files.exists(this.pathAfter)) {
|
||||
throw new DataException("Unable to create patch because at least one path doesn't exist");
|
||||
}
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
|
||||
}
|
||||
|
||||
private void readMetadata() throws IOException, DataException {
|
||||
this.metadata = new ArbitraryDataMetadataPatch(this.pathAfter);
|
||||
this.metadata.read();
|
||||
}
|
||||
|
||||
private void validatePreviousSignature() throws DataException {
|
||||
if (this.signatureBefore == null) {
|
||||
throw new DataException("No previous signature passed to the combiner");
|
||||
}
|
||||
|
||||
byte[] previousSignature = this.metadata.getPreviousSignature();
|
||||
if (previousSignature == null) {
|
||||
throw new DataException("Unable to extract previous signature from patch metadata");
|
||||
}
|
||||
|
||||
// Compare the signatures
|
||||
if (!Arrays.equals(previousSignature, this.signatureBefore)) {
|
||||
throw new DataException("Previous signatures do not match - transactions out of order?");
|
||||
}
|
||||
}
|
||||
|
||||
private void validatePreviousHash() throws IOException, DataException {
|
||||
if (!Settings.getInstance().shouldValidateAllDataLayers()) {
|
||||
return;
|
||||
}
|
||||
|
||||
byte[] previousHash = this.metadata.getPreviousHash();
|
||||
if (previousHash == null) {
|
||||
throw new DataException("Unable to extract previous hash from patch metadata");
|
||||
}
|
||||
|
||||
ArbitraryDataDigest digest = new ArbitraryDataDigest(this.pathBefore);
|
||||
digest.compute();
|
||||
boolean valid = digest.isHashValid(previousHash);
|
||||
if (!valid) {
|
||||
String previousHash58 = Base58.encode(previousHash);
|
||||
throw new InvalidObjectException(String.format("Previous state hash mismatch. " +
|
||||
"Patch prevHash: %s, actual: %s", previousHash58, digest.getHash58()));
|
||||
}
|
||||
}
|
||||
|
||||
private void process() throws IOException, DataException {
|
||||
ArbitraryDataMerge merge = new ArbitraryDataMerge(this.pathBefore, this.pathAfter);
|
||||
merge.compute();
|
||||
this.finalPath = merge.getMergePath();
|
||||
}
|
||||
|
||||
private void validateCurrentHash() throws IOException, DataException {
|
||||
if (!this.shouldValidateHashes) {
|
||||
return;
|
||||
}
|
||||
|
||||
byte[] currentHash = this.metadata.getCurrentHash();
|
||||
if (currentHash == null) {
|
||||
throw new DataException("Unable to extract current hash from patch metadata");
|
||||
}
|
||||
|
||||
ArbitraryDataDigest digest = new ArbitraryDataDigest(this.finalPath);
|
||||
digest.compute();
|
||||
boolean valid = digest.isHashValid(currentHash);
|
||||
if (!valid) {
|
||||
String currentHash58 = Base58.encode(currentHash);
|
||||
throw new InvalidObjectException(String.format("Current state hash mismatch. " +
|
||||
"Patch curHash: %s, actual: %s", currentHash58, digest.getHash58()));
|
||||
}
|
||||
}
|
||||
|
||||
public void setShouldValidateHashes(boolean shouldValidateHashes) {
|
||||
this.shouldValidateHashes = shouldValidateHashes;
|
||||
}
|
||||
|
||||
public Path getFinalPath() {
|
||||
return this.finalPath;
|
||||
}
|
||||
|
||||
}
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataCreatePatch.java (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.UUID;
|
||||
|
||||
public class ArbitraryDataCreatePatch {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataCreatePatch.class);
|
||||
|
||||
private final Path pathBefore;
|
||||
private Path pathAfter;
|
||||
private final byte[] previousSignature;
|
||||
|
||||
private Path finalPath;
|
||||
private int totalFileCount;
|
||||
private int fileDifferencesCount;
|
||||
private ArbitraryDataMetadataPatch metadata;
|
||||
|
||||
private Path workingPath;
|
||||
private String identifier;
|
||||
|
||||
public ArbitraryDataCreatePatch(Path pathBefore, Path pathAfter, byte[] previousSignature) {
|
||||
this.pathBefore = pathBefore;
|
||||
this.pathAfter = pathAfter;
|
||||
this.previousSignature = previousSignature;
|
||||
}
|
||||
|
||||
public void create() throws DataException, IOException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.copyFiles();
|
||||
this.process();
|
||||
|
||||
} catch (Exception e) {
|
||||
this.cleanupOnFailure();
|
||||
throw e;
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
if (this.pathBefore == null || this.pathAfter == null) {
|
||||
throw new DataException("No paths available to build patch");
|
||||
}
|
||||
if (!Files.exists(this.pathBefore) || !Files.exists(this.pathAfter)) {
|
||||
throw new DataException("Unable to create patch because at least one path doesn't exist");
|
||||
}
|
||||
|
||||
this.createRandomIdentifier();
|
||||
this.createWorkingDirectory();
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
this.cleanupWorkingPath();
|
||||
}
|
||||
|
||||
private void cleanupWorkingPath() {
|
||||
try {
|
||||
FilesystemUtils.safeDeleteDirectory(this.workingPath, true);
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Unable to cleanup working directory");
|
||||
}
|
||||
}
|
||||
|
||||
private void cleanupOnFailure() {
|
||||
try {
|
||||
FilesystemUtils.safeDeleteDirectory(this.finalPath, true);
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Unable to cleanup diff directory on failure");
|
||||
}
|
||||
}
|
||||
|
||||
private void createRandomIdentifier() {
|
||||
this.identifier = UUID.randomUUID().toString();
|
||||
}
|
||||
|
||||
private void createWorkingDirectory() throws DataException {
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
Path tempDir = Paths.get(baseDir, "patch", this.identifier);
|
||||
try {
|
||||
Files.createDirectories(tempDir);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create temp directory");
|
||||
}
|
||||
this.workingPath = tempDir;
|
||||
}
|
||||
|
||||
private void copyFiles() throws IOException {
|
||||
// When dealing with single files, we need to copy them to a container directory
|
||||
// in order for the structure to align with the previous revision and therefore
|
||||
// make comparisons possible.
|
||||
|
||||
if (this.pathAfter.toFile().isFile()) {
|
||||
// Create a "data" directory within the working directory
|
||||
Path workingDataPath = Paths.get(this.workingPath.toString(), "data");
|
||||
Files.createDirectories(workingDataPath);
|
||||
// Copy to temp directory
|
||||
// Filename is currently hardcoded to "data"
|
||||
String filename = "data"; //this.pathAfter.getFileName().toString();
|
||||
Files.copy(this.pathAfter, Paths.get(workingDataPath.toString(), filename));
|
||||
// Update pathAfter to point to the new path
|
||||
this.pathAfter = workingDataPath;
|
||||
}
|
||||
}
|
||||
|
||||
private void process() throws IOException, DataException {
|
||||
|
||||
ArbitraryDataDiff diff = new ArbitraryDataDiff(this.pathBefore, this.pathAfter, this.previousSignature);
|
||||
this.finalPath = diff.getDiffPath();
|
||||
diff.compute();
|
||||
|
||||
this.totalFileCount = diff.getTotalFileCount();
|
||||
this.metadata = diff.getMetadata();
|
||||
}
|
||||
|
||||
public Path getFinalPath() {
|
||||
return this.finalPath;
|
||||
}
|
||||
|
||||
public int getTotalFileCount() {
|
||||
return this.totalFileCount;
|
||||
}
|
||||
|
||||
public ArbitraryDataMetadataPatch getMetadata() {
|
||||
return this.metadata;
|
||||
}
|
||||
|
||||
}
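A minimal sketch of producing a patch between two local states of a resource with this class; the paths and previous signature come from the caller and are placeholders here.

import java.io.IOException;
import java.nio.file.Path;
import org.qortal.arbitrary.ArbitraryDataCreatePatch;
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
import org.qortal.repository.DataException;

class CreatePatchSketch {
    static Path createPatch(Path previousState, Path updatedState, byte[] previousSignature)
            throws DataException, IOException {
        ArbitraryDataCreatePatch patchCreator =
                new ArbitraryDataCreatePatch(previousState, updatedState, previousSignature);
        patchCreator.create(); // wraps single files in a "data" directory, then diffs the two states

        ArbitraryDataMetadataPatch metadata = patchCreator.getMetadata(); // added/modified/removed details
        int totalFiles = patchCreator.getTotalFileCount();
        return patchCreator.getFinalPath();                               // directory to publish as a PATCH
    }
}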
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataDiff.java (new file, 383 lines)
@@ -0,0 +1,383 @@
|
||||
package org.qortal.arbitrary;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONObject;
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
import org.qortal.arbitrary.patch.UnifiedDiffPatch;
import org.qortal.crypto.Crypto;
import org.qortal.repository.DataException;
import org.qortal.settings.Settings;

import java.io.*;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.*;


public class ArbitraryDataDiff {

    /** Only create a patch if both the before and after file sizes are within defined limit **/
    private static final long MAX_DIFF_FILE_SIZE = 100 * 1024L; // 100kiB


    public enum DiffType {
        COMPLETE_FILE,
        UNIFIED_DIFF
    }

    public static class ModifiedPath {
        private Path path;
        private DiffType diffType;

        public ModifiedPath(Path path, DiffType diffType) {
            this.path = path;
            this.diffType = diffType;
        }

        public ModifiedPath(JSONObject jsonObject) {
            String pathString = jsonObject.getString("path");
            if (pathString != null) {
                this.path = Paths.get(pathString);
            }

            String diffTypeString = jsonObject.getString("type");
            if (diffTypeString != null) {
                this.diffType = DiffType.valueOf(diffTypeString);
            }
        }

        public Path getPath() {
            return this.path;
        }

        public DiffType getDiffType() {
            return this.diffType;
        }

        public String toString() {
            return this.path.toString();
        }
    }

    private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataDiff.class);

    private final Path pathBefore;
    private final Path pathAfter;
    private final byte[] previousSignature;
    private byte[] previousHash;
    private byte[] currentHash;
    private Path diffPath;
    private String identifier;

    private final List<Path> addedPaths;
    private final List<ModifiedPath> modifiedPaths;
    private final List<Path> removedPaths;

    private int totalFileCount;
    private ArbitraryDataMetadataPatch metadata;

    public ArbitraryDataDiff(Path pathBefore, Path pathAfter, byte[] previousSignature) throws DataException {
        this.pathBefore = pathBefore;
        this.pathAfter = pathAfter;
        this.previousSignature = previousSignature;

        this.addedPaths = new ArrayList<>();
        this.modifiedPaths = new ArrayList<>();
        this.removedPaths = new ArrayList<>();

        this.createRandomIdentifier();
        this.createOutputDirectory();
    }

    public void compute() throws IOException, DataException {
        try {
            this.preExecute();
            this.hashPreviousState();
            this.findAddedOrModifiedFiles();
            this.findRemovedFiles();
            this.validate();
            this.hashCurrentState();
            this.writeMetadata();

        } finally {
            this.postExecute();
        }
    }

    private void preExecute() {
        LOGGER.debug("Generating diff...");
    }

    private void postExecute() {

    }

    private void createRandomIdentifier() {
        this.identifier = UUID.randomUUID().toString();
    }

    private void createOutputDirectory() throws DataException {
        // Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
        String baseDir = Settings.getInstance().getTempDataPath();
        Path tempDir = Paths.get(baseDir, "diff", this.identifier);
        try {
            Files.createDirectories(tempDir);
        } catch (IOException e) {
            throw new DataException("Unable to create temp directory");
        }
        this.diffPath = tempDir;
    }

    private void hashPreviousState() throws IOException, DataException {
        ArbitraryDataDigest digest = new ArbitraryDataDigest(this.pathBefore);
        digest.compute();
        this.previousHash = digest.getHash();
    }

    private void findAddedOrModifiedFiles() throws IOException {
        try {
            final Path pathBeforeAbsolute = this.pathBefore.toAbsolutePath();
            final Path pathAfterAbsolute = this.pathAfter.toAbsolutePath();
            final Path diffPathAbsolute = this.diffPath.toAbsolutePath();
            final ArbitraryDataDiff diff = this;

            // Check for additions or modifications
            Files.walkFileTree(this.pathAfter, new FileVisitor<>() {

                @Override
                public FileVisitResult preVisitDirectory(Path after, BasicFileAttributes attrs) {
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path afterPathAbsolute, BasicFileAttributes attrs) throws IOException {
                    Path afterPathRelative = pathAfterAbsolute.relativize(afterPathAbsolute.toAbsolutePath());
                    Path beforePathAbsolute = pathBeforeAbsolute.resolve(afterPathRelative);

                    if (afterPathRelative.startsWith(".qortal")) {
                        // Ignore the .qortal metadata folder
                        return FileVisitResult.CONTINUE;
                    }

                    boolean wasAdded = false;
                    boolean wasModified = false;

                    if (!Files.exists(beforePathAbsolute)) {
                        LOGGER.trace("File was added: {}", afterPathRelative.toString());
                        diff.addedPaths.add(afterPathRelative);
                        wasAdded = true;
                    }
                    else if (Files.size(afterPathAbsolute) != Files.size(beforePathAbsolute)) {
                        // Check file size first because it's quicker
                        LOGGER.trace("File size was modified: {}", afterPathRelative.toString());
                        wasModified = true;
                    }
                    else if (!Arrays.equals(ArbitraryDataDiff.digestFromPath(afterPathAbsolute), ArbitraryDataDiff.digestFromPath(beforePathAbsolute))) {
                        // Check hashes as a last resort
                        LOGGER.trace("File contents were modified: {}", afterPathRelative.toString());
                        wasModified = true;
                    }

                    if (wasAdded) {
                        diff.copyFilePathToBaseDir(afterPathAbsolute, diffPathAbsolute, afterPathRelative);
                    }
                    if (wasModified) {
                        try {
                            diff.pathModified(beforePathAbsolute, afterPathAbsolute, afterPathRelative, diffPathAbsolute);
                        } catch (DataException e) {
                            // We can only throw IOExceptions because we are overriding FileVisitor.visitFile()
                            throw new IOException(e);
                        }
                    }

                    // Keep a tally of the total number of files to help with decision making
                    diff.totalFileCount++;

                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFileFailed(Path file, IOException e) {
                    LOGGER.info("File visit failed: {}, error: {}", file.toString(), e.getMessage());
                    // TODO: throw exception?
                    return FileVisitResult.TERMINATE;
                }

                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException e) {
                    return FileVisitResult.CONTINUE;
                }

            });
        } catch (IOException e) {
            LOGGER.info("IOException when walking through file tree: {}", e.getMessage());
            throw(e);
        }
    }

    private void findRemovedFiles() throws IOException {
        try {
            final Path pathBeforeAbsolute = this.pathBefore.toAbsolutePath();
            final Path pathAfterAbsolute = this.pathAfter.toAbsolutePath();
            final ArbitraryDataDiff diff = this;

            // Check for removals
            Files.walkFileTree(this.pathBefore, new FileVisitor<>() {

                @Override
                public FileVisitResult preVisitDirectory(Path before, BasicFileAttributes attrs) {
                    Path directoryPathBefore = pathBeforeAbsolute.relativize(before.toAbsolutePath());
                    Path directoryPathAfter = pathAfterAbsolute.resolve(directoryPathBefore);

                    if (directoryPathBefore.startsWith(".qortal")) {
                        // Ignore the .qortal metadata folder
                        return FileVisitResult.CONTINUE;
                    }

                    if (!Files.exists(directoryPathAfter)) {
                        LOGGER.trace("Directory was removed: {}", directoryPathAfter.toString());
                        diff.removedPaths.add(directoryPathBefore);
                        // TODO: we might need to mark directories differently to files
                    }

                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path before, BasicFileAttributes attrs) {
                    Path filePathBefore = pathBeforeAbsolute.relativize(before.toAbsolutePath());
                    Path filePathAfter = pathAfterAbsolute.resolve(filePathBefore);

                    if (filePathBefore.startsWith(".qortal")) {
                        // Ignore the .qortal metadata folder
                        return FileVisitResult.CONTINUE;
                    }

                    if (!Files.exists(filePathAfter)) {
                        LOGGER.trace("File was removed: {}", filePathBefore.toString());
                        diff.removedPaths.add(filePathBefore);
                    }

                    // Keep a tally of the total number of files to help with decision making
                    diff.totalFileCount++;

                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFileFailed(Path file, IOException e) {
                    LOGGER.info("File visit failed: {}, error: {}", file.toString(), e.getMessage());
                    // TODO: throw exception?
                    return FileVisitResult.TERMINATE;
                }

                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException e) {
                    return FileVisitResult.CONTINUE;
                }

            });
        } catch (IOException e) {
            throw new IOException(String.format("IOException when walking through file tree: %s", e.getMessage()));
        }
    }

    private void validate() throws DataException {
        if (this.addedPaths.isEmpty() && this.modifiedPaths.isEmpty() && this.removedPaths.isEmpty()) {
            throw new DataException("Current state matches previous state. Nothing to do.");
        }
    }

    private void hashCurrentState() throws IOException, DataException {
        ArbitraryDataDigest digest = new ArbitraryDataDigest(this.pathAfter);
        digest.compute();
        this.currentHash = digest.getHash();
    }

    private void writeMetadata() throws IOException, DataException {
        ArbitraryDataMetadataPatch metadata = new ArbitraryDataMetadataPatch(this.diffPath);
        metadata.setAddedPaths(this.addedPaths);
        metadata.setModifiedPaths(this.modifiedPaths);
        metadata.setRemovedPaths(this.removedPaths);
        metadata.setPreviousSignature(this.previousSignature);
        metadata.setPreviousHash(this.previousHash);
        metadata.setCurrentHash(this.currentHash);
        metadata.write();
        this.metadata = metadata;
    }
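    // Editorial note (not part of this changeset): the patch metadata written here records the
    // added/modified/removed paths plus the previous signature and the before/after state hashes;
    // it is what ArbitraryDataMerge (later in this changeset) reads back in order to replay the
    // patch on top of the previous state.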

    private void pathModified(Path beforePathAbsolute, Path afterPathAbsolute, Path afterPathRelative,
                              Path destinationBasePathAbsolute) throws IOException, DataException {

        Path destination = Paths.get(destinationBasePathAbsolute.toString(), afterPathRelative.toString());
        long beforeSize = Files.size(beforePathAbsolute);
        long afterSize = Files.size(afterPathAbsolute);
        DiffType diffType;

        if (beforeSize > MAX_DIFF_FILE_SIZE || afterSize > MAX_DIFF_FILE_SIZE) {
            // Files are large, so don't attempt a diff
            this.copyFilePathToBaseDir(afterPathAbsolute, destinationBasePathAbsolute, afterPathRelative);
            diffType = DiffType.COMPLETE_FILE;
        }
        else {
            // Attempt to create patch using java-diff-utils
            UnifiedDiffPatch unifiedDiffPatch = new UnifiedDiffPatch(beforePathAbsolute, afterPathAbsolute, destination);
            unifiedDiffPatch.create();
            if (unifiedDiffPatch.isValid()) {
                diffType = DiffType.UNIFIED_DIFF;
            }
            else {
                // Diff failed validation, so copy the whole file instead
                this.copyFilePathToBaseDir(afterPathAbsolute, destinationBasePathAbsolute, afterPathRelative);
                diffType = DiffType.COMPLETE_FILE;
            }
        }

        ModifiedPath modifiedPath = new ModifiedPath(afterPathRelative, diffType);
        this.modifiedPaths.add(modifiedPath);
    }
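    // Editorial note (not part of this changeset), summarising the dispatch above:
    //   - either file larger than MAX_DIFF_FILE_SIZE (100kiB) -> store the complete "after" file (COMPLETE_FILE)
    //   - java-diff-utils patch created and passes isValid()  -> store only the unified diff (UNIFIED_DIFF)
    //   - patch fails validation                              -> fall back to the complete file (COMPLETE_FILE)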

    private void copyFilePathToBaseDir(Path source, Path base, Path relativePath) throws IOException {
        if (!Files.exists(source)) {
            throw new IOException(String.format("File not found: %s", source.toString()));
        }

        // Ensure parent folders exist in the destination
        Path dest = Paths.get(base.toString(), relativePath.toString());
        File file = new File(dest.toString());
        File parent = file.getParentFile();
        if (parent != null) {
            parent.mkdirs();
        }

        LOGGER.trace("Copying {} to {}", source, dest);
        Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);
    }


    public Path getDiffPath() {
        return this.diffPath;
    }

    public int getTotalFileCount() {
        return this.totalFileCount;
    }

    public ArbitraryDataMetadataPatch getMetadata() {
        return this.metadata;
    }


    // Utils

    private static byte[] digestFromPath(Path path) {
        try {
            return Crypto.digest(path.toFile());
        } catch (IOException e) {
            return null;
        }
    }

}
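For orientation, a caller might drive the class above roughly as follows. This is a hedged editorial sketch rather than code from the changeset: it assumes the Qortal classes are on the classpath and a node's settings are initialised (createOutputDirectory() reads the configured temp data path), and the signature bytes are a placeholder.

import org.qortal.arbitrary.ArbitraryDataDiff;
import org.qortal.repository.DataException;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class ArbitraryDataDiffSketch {
    public static void main(String[] args) throws IOException, DataException {
        // Build two tiny states to compare; in real use these are built resource directories
        Path before = Files.createTempDirectory("state-before");
        Path after = Files.createTempDirectory("state-after");
        Files.writeString(before.resolve("index.html"), "<html>v1</html>");
        Files.writeString(after.resolve("index.html"), "<html>v2</html>");
        Files.writeString(after.resolve("new.txt"), "added in v2");

        byte[] previousSignature = new byte[64]; // hypothetical placeholder signature

        ArbitraryDataDiff diff = new ArbitraryDataDiff(before, after, previousSignature);
        diff.compute(); // hash previous state, walk both trees, validate, write patch metadata
        System.out.println("Patch written to: " + diff.getDiffPath());
        System.out.println("Files considered: " + diff.getTotalFileCount());
    }
}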
src/main/java/org/qortal/arbitrary/ArbitraryDataDigest.java (new file, 73 lines)
@@ -0,0 +1,73 @@
package org.qortal.arbitrary;

import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ArbitraryDataDigest {

    private final Path path;
    private byte[] hash;

    public ArbitraryDataDigest(Path path) {
        this.path = path;
    }

    public void compute() throws IOException, DataException {
        List<Path> allPaths = Files.walk(path).filter(Files::isRegularFile).sorted().collect(Collectors.toList());
        Path basePathAbsolute = this.path.toAbsolutePath();

        MessageDigest sha256;
        try {
            sha256 = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            throw new DataException("SHA-256 hashing algorithm unavailable");
        }

        for (Path path : allPaths) {
            // We need to work with paths relative to the base path, to ensure the same hash
            // is generated on different systems
            Path relativePath = basePathAbsolute.relativize(path.toAbsolutePath());

            // Exclude Qortal folder since it can be different each time
            // We only care about hashing the actual user data
            if (relativePath.startsWith(".qortal/")) {
                continue;
            }

            // Hash path
            byte[] filePathBytes = relativePath.toString().getBytes(StandardCharsets.UTF_8);
            sha256.update(filePathBytes);

            // Hash contents
            byte[] fileContent = Files.readAllBytes(path);
            sha256.update(fileContent);
        }
        this.hash = sha256.digest();
    }

    public boolean isHashValid(byte[] hash) {
        return Arrays.equals(hash, this.hash);
    }

    public byte[] getHash() {
        return this.hash;
    }

    public String getHash58() {
        if (this.hash == null) {
            return null;
        }
        return Base58.encode(this.hash);
    }

}
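Because the digest above hashes relative paths plus file contents in sorted order and skips the .qortal folder, the same directory contents should produce the same hash on any machine. A small editorial sketch (not part of the changeset) illustrating that, assuming ArbitraryDataDigest is on the classpath:

import java.nio.file.Files;
import java.nio.file.Path;

public class ArbitraryDataDigestSketch {
    public static void main(String[] args) throws Exception {
        // Build a tiny directory tree to hash
        Path dir = Files.createTempDirectory("digest-sketch");
        Files.writeString(dir.resolve("index.html"), "<html>hello</html>");
        Files.createDirectories(dir.resolve(".qortal"));
        Files.writeString(dir.resolve(".qortal").resolve("cache"), "ignored"); // excluded from the hash

        ArbitraryDataDigest digest = new ArbitraryDataDigest(dir);
        digest.compute();
        System.out.println("State hash: " + digest.getHash58());

        // Hashing the same content again yields the same digest
        ArbitraryDataDigest again = new ArbitraryDataDigest(dir);
        again.compute();
        System.out.println("Matches: " + digest.isHashValid(again.getHash()));
    }
}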
src/main/java/org/qortal/arbitrary/ArbitraryDataFile.java (new file, 797 lines)
@@ -0,0 +1,797 @@
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static java.util.Arrays.stream;
|
||||
import static java.util.stream.Collectors.toMap;
|
||||
|
||||
|
||||
public class ArbitraryDataFile {
|
||||
|
||||
// Validation results
|
||||
public enum ValidationResult {
|
||||
OK(1),
|
||||
FILE_TOO_LARGE(10),
|
||||
FILE_NOT_FOUND(11);
|
||||
|
||||
public final int value;
|
||||
|
||||
private static final Map<Integer, ArbitraryDataFile.ValidationResult> map = stream(ArbitraryDataFile.ValidationResult.values()).collect(toMap(result -> result.value, result -> result));
|
||||
|
||||
ValidationResult(int value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile.ValidationResult valueOf(int value) {
|
||||
return map.get(value);
|
||||
}
|
||||
}
|
||||
|
||||
// Resource ID types
|
||||
public enum ResourceIdType {
|
||||
SIGNATURE,
|
||||
FILE_HASH,
|
||||
TRANSACTION_DATA,
|
||||
NAME
|
||||
}
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataFile.class);
|
||||
|
||||
public static final long MAX_FILE_SIZE = 500 * 1024 * 1024; // 500MiB
|
||||
protected static final int MAX_CHUNK_SIZE = 1 * 1024 * 1024; // 1MiB
|
||||
public static final int CHUNK_SIZE = 512 * 1024; // 0.5MiB
|
||||
public static int SHORT_DIGEST_LENGTH = 8;
|
||||
|
||||
protected Path filePath;
|
||||
protected String hash58;
|
||||
protected byte[] signature;
|
||||
private ArrayList<ArbitraryDataFileChunk> chunks;
|
||||
private byte[] secret;
|
||||
|
||||
// Metadata
|
||||
private byte[] metadataHash;
|
||||
private ArbitraryDataFile metadataFile;
|
||||
private ArbitraryDataTransactionMetadata metadata;
|
||||
|
||||
|
||||
public ArbitraryDataFile() {
|
||||
}
|
||||
|
||||
public ArbitraryDataFile(String hash58, byte[] signature) throws DataException {
|
||||
this.filePath = ArbitraryDataFile.getOutputFilePath(hash58, signature, false);
|
||||
this.chunks = new ArrayList<>();
|
||||
this.hash58 = hash58;
|
||||
this.signature = signature;
|
||||
}
|
||||
|
||||
public ArbitraryDataFile(byte[] fileContent, byte[] signature) throws DataException {
|
||||
if (fileContent == null) {
|
||||
LOGGER.error("fileContent is null");
|
||||
return;
|
||||
}
|
||||
|
||||
this.hash58 = Base58.encode(Crypto.digest(fileContent));
|
||||
this.signature = signature;
|
||||
LOGGER.trace(String.format("File digest: %s, size: %d bytes", this.hash58, fileContent.length));
|
||||
|
||||
Path outputFilePath = getOutputFilePath(this.hash58, signature, true);
|
||||
File outputFile = outputFilePath.toFile();
|
||||
try (FileOutputStream outputStream = new FileOutputStream(outputFile)) {
|
||||
outputStream.write(fileContent);
|
||||
outputStream.close();
|
||||
this.filePath = outputFilePath;
|
||||
// Verify hash
|
||||
String digest58 = this.digest58();
|
||||
if (!this.hash58.equals(digest58)) {
|
||||
LOGGER.error("Hash {} does not match file digest {} for signature: {}", this.hash58, digest58, Base58.encode(signature));
|
||||
this.delete();
|
||||
throw new DataException("Data file digest validation failed");
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to write data to file");
|
||||
}
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromHash58(String hash58, byte[] signature) throws DataException {
|
||||
return new ArbitraryDataFile(hash58, signature);
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromHash(byte[] hash, byte[] signature) throws DataException {
|
||||
if (hash == null) {
|
||||
return null;
|
||||
}
|
||||
return ArbitraryDataFile.fromHash58(Base58.encode(hash), signature);
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromPath(Path path, byte[] signature) {
|
||||
if (path == null) {
|
||||
return null;
|
||||
}
|
||||
File file = path.toFile();
|
||||
if (file.exists()) {
|
||||
try {
|
||||
byte[] digest = Crypto.digest(file);
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
|
||||
|
||||
// Copy file to data directory if needed
|
||||
if (Files.exists(path) && !arbitraryDataFile.isInBaseDirectory(path)) {
|
||||
arbitraryDataFile.copyToDataDirectory(path, signature);
|
||||
}
|
||||
// Or, if it's already in the data directory, we may need to move it
|
||||
else if (!path.equals(arbitraryDataFile.getFilePath())) {
|
||||
// Wrong path, so relocate (but don't cleanup, as the source folder may still be needed by the caller)
|
||||
Path dest = arbitraryDataFile.getFilePath();
|
||||
FilesystemUtils.moveFile(path, dest, false);
|
||||
}
|
||||
return arbitraryDataFile;
|
||||
|
||||
} catch (IOException | DataException e) {
|
||||
LOGGER.error("Couldn't compute digest for ArbitraryDataFile");
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public static ArbitraryDataFile fromFile(File file, byte[] signature) {
|
||||
return ArbitraryDataFile.fromPath(Paths.get(file.getPath()), signature);
|
||||
}
|
||||
|
||||
private Path copyToDataDirectory(Path sourcePath, byte[] signature) throws DataException {
|
||||
if (this.hash58 == null || this.filePath == null) {
|
||||
return null;
|
||||
}
|
||||
Path outputFilePath = getOutputFilePath(this.hash58, signature, true);
|
||||
sourcePath = sourcePath.toAbsolutePath();
|
||||
Path destPath = outputFilePath.toAbsolutePath();
|
||||
try {
|
||||
return Files.copy(sourcePath, destPath, StandardCopyOption.REPLACE_EXISTING);
|
||||
} catch (IOException e) {
|
||||
throw new DataException(String.format("Unable to copy file %s to data directory %s", sourcePath, destPath));
|
||||
}
|
||||
}
|
||||
|
||||
public static Path getOutputFilePath(String hash58, byte[] signature, boolean createDirectories) throws DataException {
|
||||
Path directory;
|
||||
|
||||
if (hash58 == null) {
|
||||
return null;
|
||||
}
|
||||
if (signature != null) {
|
||||
// Key by signature
|
||||
String signature58 = Base58.encode(signature);
|
||||
String sig58First2Chars = signature58.substring(0, 2).toLowerCase();
|
||||
String sig58Next2Chars = signature58.substring(2, 4).toLowerCase();
|
||||
directory = Paths.get(Settings.getInstance().getDataPath(), sig58First2Chars, sig58Next2Chars, signature58);
|
||||
}
|
||||
else {
|
||||
// Put files without signatures in a "_misc" directory, and the files will be relocated later
|
||||
String hash58First2Chars = hash58.substring(0, 2).toLowerCase();
|
||||
String hash58Next2Chars = hash58.substring(2, 4).toLowerCase();
|
||||
directory = Paths.get(Settings.getInstance().getDataPath(), "_misc", hash58First2Chars, hash58Next2Chars);
|
||||
}
|
||||
|
||||
if (createDirectories) {
|
||||
try {
|
||||
Files.createDirectories(directory);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create data subdirectory");
|
||||
}
|
||||
}
|
||||
return Paths.get(directory.toString(), hash58);
|
||||
}
|
||||
|
||||
public ValidationResult isValid() {
|
||||
try {
|
||||
// Ensure the file exists on disk
|
||||
if (!Files.exists(this.filePath)) {
|
||||
LOGGER.error("File doesn't exist at path {}", this.filePath);
|
||||
return ValidationResult.FILE_NOT_FOUND;
|
||||
}
|
||||
|
||||
// Validate the file size
|
||||
long fileSize = Files.size(this.filePath);
|
||||
if (fileSize > MAX_FILE_SIZE) {
|
||||
LOGGER.error(String.format("ArbitraryDataFile is too large: %d bytes (max size: %d bytes)", fileSize, MAX_FILE_SIZE));
|
||||
return ArbitraryDataFile.ValidationResult.FILE_TOO_LARGE;
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
return ValidationResult.FILE_NOT_FOUND;
|
||||
}
|
||||
|
||||
return ValidationResult.OK;
|
||||
}
|
||||
|
||||
public void validateFileSize(long expectedSize) throws DataException {
|
||||
// Verify that we can determine the file's size
|
||||
long fileSize = 0;
|
||||
try {
|
||||
fileSize = Files.size(this.getFilePath());
|
||||
} catch (IOException e) {
|
||||
throw new DataException(String.format("Couldn't get file size for transaction %s", Base58.encode(signature)));
|
||||
}
|
||||
|
||||
// Ensure the file's size matches the size reported by the transaction
|
||||
if (fileSize != expectedSize) {
|
||||
throw new DataException(String.format("File size mismatch for transaction %s", Base58.encode(signature)));
|
||||
}
|
||||
}
|
||||
|
||||
private void addChunk(ArbitraryDataFileChunk chunk) {
|
||||
this.chunks.add(chunk);
|
||||
}
|
||||
|
||||
private void addChunkHashes(List<byte[]> chunkHashes) throws DataException {
|
||||
if (chunkHashes == null || chunkHashes.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
for (byte[] chunkHash : chunkHashes) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
this.addChunk(chunk);
|
||||
}
|
||||
}
|
||||
|
||||
public List<byte[]> getChunkHashes() {
|
||||
List<byte[]> hashes = new ArrayList<>();
|
||||
if (this.chunks == null || this.chunks.isEmpty()) {
|
||||
return hashes;
|
||||
}
|
||||
|
||||
for (ArbitraryDataFileChunk chunkData : this.chunks) {
|
||||
hashes.add(chunkData.getHash());
|
||||
}
|
||||
|
||||
return hashes;
|
||||
}
|
||||
|
||||
public int split(int chunkSize) throws DataException {
|
||||
try {
|
||||
|
||||
File file = this.getFile();
|
||||
byte[] buffer = new byte[chunkSize];
|
||||
this.chunks = new ArrayList<>();
|
||||
|
||||
if (file != null) {
|
||||
try (FileInputStream fileInputStream = new FileInputStream(file);
|
||||
BufferedInputStream bis = new BufferedInputStream(fileInputStream)) {
|
||||
|
||||
int numberOfBytes;
|
||||
while ((numberOfBytes = bis.read(buffer)) > 0) {
|
||||
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
|
||||
out.write(buffer, 0, numberOfBytes);
|
||||
out.flush();
|
||||
|
||||
ArbitraryDataFileChunk chunk = new ArbitraryDataFileChunk(out.toByteArray(), this.signature);
|
||||
ValidationResult validationResult = chunk.isValid();
|
||||
if (validationResult == ValidationResult.OK) {
|
||||
this.chunks.add(chunk);
|
||||
} else {
|
||||
throw new DataException(String.format("Chunk %s is invalid", chunk));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new DataException("Unable to split file into chunks");
|
||||
}
|
||||
|
||||
return this.chunks.size();
|
||||
}
|
||||
|
||||
public boolean join() {
|
||||
// Ensure we have chunks
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
|
||||
// Create temporary path for joined file
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
Path tempDir = Paths.get(baseDir, "join");
|
||||
try {
|
||||
Files.createDirectories(tempDir);
|
||||
} catch (IOException e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Join the chunks
|
||||
Path outputPath = Paths.get(tempDir.toString(), this.chunks.get(0).digest58());
|
||||
File outputFile = new File(outputPath.toString());
|
||||
try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
File sourceFile = chunk.filePath.toFile();
|
||||
BufferedInputStream in = new BufferedInputStream(new FileInputStream(sourceFile));
|
||||
byte[] buffer = new byte[2048];
|
||||
int inSize;
|
||||
while ((inSize = in.read(buffer)) != -1) {
|
||||
out.write(buffer, 0, inSize);
|
||||
}
|
||||
in.close();
|
||||
}
|
||||
out.close();
|
||||
|
||||
// Copy temporary file to data directory
|
||||
this.filePath = this.copyToDataDirectory(outputPath, this.signature);
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(outputPath)) {
|
||||
Files.delete(outputPath);
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (FileNotFoundException e) {
|
||||
return false;
|
||||
} catch (IOException | DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean delete() {
|
||||
// Delete the complete file
|
||||
// ... but only if it's inside the Qortal data or temp directory
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
if (Files.exists(this.filePath)) {
|
||||
try {
|
||||
Files.delete(this.filePath);
|
||||
this.cleanupFilesystem();
|
||||
LOGGER.debug("Deleted file {}", this.filePath);
|
||||
return true;
|
||||
} catch (IOException e) {
|
||||
LOGGER.warn("Couldn't delete file at path {}", this.filePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean delete(int attempts) {
|
||||
// Keep trying to delete the data until it is deleted, or we reach 10 attempts
|
||||
for (int i=0; i<attempts; i++) {
|
||||
if (this.delete()) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
Thread.sleep(1000L);
|
||||
} catch (InterruptedException e) {
|
||||
// Fall through to exit method
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean deleteAllChunks() {
|
||||
boolean success = false;
|
||||
|
||||
// Delete the individual chunks
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
Iterator iterator = this.chunks.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
ArbitraryDataFileChunk chunk = (ArbitraryDataFileChunk) iterator.next();
|
||||
success = chunk.delete();
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
public boolean deleteMetadata() {
|
||||
if (this.metadataFile != null && this.metadataFile.exists()) {
|
||||
return this.metadataFile.delete();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean deleteAll() {
|
||||
// Delete the complete file
|
||||
boolean fileDeleted = this.delete();
|
||||
|
||||
// Delete the metadata file
|
||||
boolean metadataDeleted = this.deleteMetadata();
|
||||
|
||||
// Delete the individual chunks
|
||||
boolean chunksDeleted = this.deleteAllChunks();
|
||||
|
||||
return fileDeleted || metadataDeleted || chunksDeleted;
|
||||
}
|
||||
|
||||
protected void cleanupFilesystem() throws IOException {
|
||||
// It is essential that use a separate path reference in this method
|
||||
// as we don't want to modify this.filePath
|
||||
Path path = this.filePath;
|
||||
|
||||
FilesystemUtils.safeDeleteEmptyParentDirectories(path);
|
||||
}
|
||||
|
||||
public byte[] getBytes() {
|
||||
try {
|
||||
return Files.readAllBytes(this.filePath);
|
||||
} catch (IOException e) {
|
||||
LOGGER.error("Unable to read bytes for file");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* Helper methods */
|
||||
|
||||
private boolean isInBaseDirectory(Path filePath) {
|
||||
Path path = filePath.toAbsolutePath();
|
||||
String dataPath = Settings.getInstance().getDataPath();
|
||||
String basePath = Paths.get(dataPath).toAbsolutePath().toString();
|
||||
return path.startsWith(basePath);
|
||||
}
|
||||
|
||||
public boolean exists() {
|
||||
File file = this.filePath.toFile();
|
||||
return file.exists();
|
||||
}
|
||||
|
||||
public boolean chunkExists(byte[] hash) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
if (Arrays.equals(hash, chunk.getHash())) {
|
||||
return chunk.exists();
|
||||
}
|
||||
}
|
||||
if (Arrays.equals(hash, this.metadataHash)) {
|
||||
if (this.metadataFile != null) {
|
||||
return this.metadataFile.exists();
|
||||
}
|
||||
}
|
||||
if (Arrays.equals(this.getHash(), hash)) {
|
||||
return this.exists();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean allChunksExist() {
|
||||
try {
|
||||
if (this.metadataHash == null) {
|
||||
// We don't have any metadata so can't check if we have the chunks
|
||||
// Even if this transaction has no chunks, we don't have the file either (already checked above)
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadataFile == null) {
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(this.metadataHash, this.signature);
|
||||
}
|
||||
|
||||
// If the metadata file doesn't exist, we can't check if we have the chunks
|
||||
if (!metadataFile.getFilePath().toFile().exists()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadata == null) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
}
|
||||
|
||||
// Read the metadata
|
||||
List<byte[]> chunks = metadata.getChunks();
|
||||
|
||||
// If the chunks array is empty, then this resource has no chunks,
|
||||
// so we must return false to avoid confusing the caller.
|
||||
if (chunks.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, we need to check each chunk individually
|
||||
for (byte[] chunkHash : chunks) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
if (!chunk.exists()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so assume we don't have all the chunks
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean anyChunksExist() throws DataException {
|
||||
try {
|
||||
if (this.metadataHash == null) {
|
||||
// We don't have any metadata so can't check if we have the chunks
|
||||
// Even if this transaction has no chunks, we don't have the file either (already checked above)
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadataFile == null) {
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(this.metadataHash, this.signature);
|
||||
}
|
||||
|
||||
// If the metadata file doesn't exist, we can't check if we have any chunks
|
||||
if (!metadataFile.getFilePath().toFile().exists()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.metadata == null) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
}
|
||||
|
||||
// Read the metadata
|
||||
List<byte[]> chunks = metadata.getChunks();
|
||||
for (byte[] chunkHash : chunks) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
if (chunk.exists()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so assume we don't have all the chunks
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean allFilesExist() {
|
||||
if (this.exists()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Complete file doesn't exist, so check the chunks
|
||||
if (this.allChunksExist()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a list of file hashes for this transaction that we do not hold locally
|
||||
*
|
||||
* @return a List of chunk hashes, or null if we are unable to determine what is missing
|
||||
*/
|
||||
public List<byte[]> missingHashes() {
|
||||
List<byte[]> missingHashes = new ArrayList<>();
|
||||
try {
|
||||
if (this.metadataHash == null) {
|
||||
// We don't have any metadata so can't check if we have the chunks
|
||||
// Even if this transaction has no chunks, we don't have the file either (already checked above)
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.metadataFile == null) {
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(this.metadataHash, this.signature);
|
||||
}
|
||||
|
||||
// If the metadata file doesn't exist, we can't check if we have the chunks
|
||||
if (!metadataFile.getFilePath().toFile().exists()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.metadata == null) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
}
|
||||
|
||||
// Read the metadata
|
||||
List<byte[]> chunks = metadata.getChunks();
|
||||
for (byte[] chunkHash : chunks) {
|
||||
ArbitraryDataFileChunk chunk = ArbitraryDataFileChunk.fromHash(chunkHash, this.signature);
|
||||
if (!chunk.exists()) {
|
||||
missingHashes.add(chunkHash);
|
||||
}
|
||||
}
|
||||
|
||||
return missingHashes;
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so we can't make a sensible decision
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean containsChunk(byte[] hash) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
if (Arrays.equals(hash, chunk.getHash())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public long size() {
|
||||
try {
|
||||
return Files.size(this.filePath);
|
||||
} catch (IOException e) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
public int chunkCount() {
|
||||
return this.chunks.size();
|
||||
}
|
||||
|
||||
public List<ArbitraryDataFileChunk> getChunks() {
|
||||
return this.chunks;
|
||||
}
|
||||
|
||||
public byte[] chunkHashes() throws DataException {
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
// Return null if we only have one chunk, with the same hash as the parent
|
||||
if (Arrays.equals(this.digest(), this.chunks.get(0).digest())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
byte[] chunkHash = chunk.digest();
|
||||
if (chunkHash.length != 32) {
|
||||
LOGGER.info("Invalid chunk hash length: {}", chunkHash.length);
|
||||
throw new DataException("Invalid chunk hash length");
|
||||
}
|
||||
outputStream.write(chunk.digest());
|
||||
}
|
||||
return outputStream.toByteArray();
|
||||
} catch (IOException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public List<byte[]> chunkHashList() {
|
||||
List<byte[]> chunks = new ArrayList<>();
|
||||
|
||||
if (this.chunks != null && this.chunks.size() > 0) {
|
||||
// Return null if we only have one chunk, with the same hash as the parent
|
||||
if (Arrays.equals(this.digest(), this.chunks.get(0).digest())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
byte[] chunkHash = chunk.digest();
|
||||
if (chunkHash.length != 32) {
|
||||
LOGGER.info("Invalid chunk hash length: {}", chunkHash.length);
|
||||
throw new DataException("Invalid chunk hash length");
|
||||
}
|
||||
chunks.add(chunkHash);
|
||||
}
|
||||
return chunks;
|
||||
|
||||
} catch (DataException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private void loadMetadata() throws DataException {
|
||||
try {
|
||||
this.metadata.read();
|
||||
|
||||
} catch (DataException | IOException e) {
|
||||
throw new DataException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private File getFile() {
|
||||
File file = this.filePath.toFile();
|
||||
if (file.exists()) {
|
||||
return file;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public Path getFilePath() {
|
||||
return this.filePath;
|
||||
}
|
||||
|
||||
public byte[] digest() {
|
||||
File file = this.getFile();
|
||||
if (file != null && file.exists()) {
|
||||
try {
|
||||
return Crypto.digest(file);
|
||||
|
||||
} catch (IOException e) {
|
||||
LOGGER.error("Couldn't compute digest for ArbitraryDataFile");
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String digest58() {
|
||||
if (this.digest() != null) {
|
||||
return Base58.encode(this.digest());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String shortHash58() {
|
||||
if (this.hash58 == null) {
|
||||
return null;
|
||||
}
|
||||
return this.hash58.substring(0, Math.min(this.hash58.length(), SHORT_DIGEST_LENGTH));
|
||||
}
|
||||
|
||||
public String getHash58() {
|
||||
return this.hash58;
|
||||
}
|
||||
|
||||
public byte[] getHash() {
|
||||
return Base58.decode(this.hash58);
|
||||
}
|
||||
|
||||
public String printChunks() {
|
||||
String outputString = "";
|
||||
if (this.chunkCount() > 0) {
|
||||
for (ArbitraryDataFileChunk chunk : this.chunks) {
|
||||
if (outputString.length() > 0) {
|
||||
outputString = outputString.concat(",");
|
||||
}
|
||||
outputString = outputString.concat(chunk.digest58());
|
||||
}
|
||||
}
|
||||
return outputString;
|
||||
}
|
||||
|
||||
public void setSecret(byte[] secret) {
|
||||
this.secret = secret;
|
||||
}
|
||||
|
||||
public byte[] getSecret() {
|
||||
return this.secret;
|
||||
}
|
||||
|
||||
public byte[] getSignature() {
|
||||
return this.signature;
|
||||
}
|
||||
|
||||
public void setMetadataFile(ArbitraryDataFile metadataFile) {
|
||||
this.metadataFile = metadataFile;
|
||||
}
|
||||
|
||||
public ArbitraryDataFile getMetadataFile() {
|
||||
return this.metadataFile;
|
||||
}
|
||||
|
||||
public void setMetadataHash(byte[] hash) throws DataException {
|
||||
this.metadataHash = hash;
|
||||
|
||||
if (hash == null) {
|
||||
return;
|
||||
}
|
||||
this.metadataFile = ArbitraryDataFile.fromHash(hash, this.signature);
|
||||
if (metadataFile.exists()) {
|
||||
this.setMetadata(new ArbitraryDataTransactionMetadata(this.metadataFile.getFilePath()));
|
||||
this.addChunkHashes(this.metadata.getChunks());
|
||||
}
|
||||
}
|
||||
|
||||
public byte[] getMetadataHash() {
|
||||
return this.metadataHash;
|
||||
}
|
||||
|
||||
public void setMetadata(ArbitraryDataTransactionMetadata metadata) throws DataException {
|
||||
this.metadata = metadata;
|
||||
this.loadMetadata();
|
||||
}
|
||||
|
||||
public ArbitraryDataTransactionMetadata getMetadata() {
|
||||
return this.metadata;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.shortHash58();
|
||||
}
|
||||
}
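To show how the pieces of ArbitraryDataFile above fit together, here is a hedged editorial fragment (not part of the changeset); it assumes the imports of the class above, an initialised node (the data directory comes from Settings), and uses placeholder paths and signature bytes.

// Hedged fragment: uploadPath and signature are hypothetical placeholders.
byte[] signature = new byte[64];
Path uploadPath = Paths.get("/tmp/qdn-example/site.zip");

try {
    ArbitraryDataFile dataFile = ArbitraryDataFile.fromPath(uploadPath, signature);
    if (dataFile != null && dataFile.isValid() == ArbitraryDataFile.ValidationResult.OK) {
        int chunkCount = dataFile.split(ArbitraryDataFile.CHUNK_SIZE); // 512 KiB chunks
        System.out.println("Created " + chunkCount + " chunks: " + dataFile.printChunks());

        // Later, the same chunks can be reassembled into the complete file
        boolean joined = dataFile.join();
        System.out.println("Rejoined successfully: " + joined);
    }
} catch (DataException e) {
    e.printStackTrace();
}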
src/main/java/org/qortal/arbitrary/ArbitraryDataFileChunk.java (new file, 54 lines)
@@ -0,0 +1,54 @@
package org.qortal.arbitrary;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.io.IOException;
import java.nio.file.Files;


public class ArbitraryDataFileChunk extends ArbitraryDataFile {

    private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataFileChunk.class);

    public ArbitraryDataFileChunk(String hash58, byte[] signature) throws DataException {
        super(hash58, signature);
    }

    public ArbitraryDataFileChunk(byte[] fileContent, byte[] signature) throws DataException {
        super(fileContent, signature);
    }

    public static ArbitraryDataFileChunk fromHash58(String hash58, byte[] signature) throws DataException {
        return new ArbitraryDataFileChunk(hash58, signature);
    }

    public static ArbitraryDataFileChunk fromHash(byte[] hash, byte[] signature) throws DataException {
        return ArbitraryDataFileChunk.fromHash58(Base58.encode(hash), signature);
    }

    @Override
    public ValidationResult isValid() {
        // DataChunk validation applies here too
        ValidationResult superclassValidationResult = super.isValid();
        if (superclassValidationResult != ValidationResult.OK) {
            return superclassValidationResult;
        }

        try {
            // Validate the file size (chunks have stricter limits)
            long fileSize = Files.size(this.filePath);
            if (fileSize > MAX_CHUNK_SIZE) {
                LOGGER.error(String.format("DataFileChunk is too large: %d bytes (max chunk size: %d bytes)", fileSize, MAX_CHUNK_SIZE));
                return ValidationResult.FILE_TOO_LARGE;
            }

        } catch (IOException e) {
            return ValidationResult.FILE_NOT_FOUND;
        }

        return ValidationResult.OK;
    }
}
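To make the two size limits concrete: ArbitraryDataFile.split() cuts data into CHUNK_SIZE (512 KiB) pieces, while the override above enforces the harder MAX_CHUNK_SIZE (1 MiB) ceiling whenever a chunk is validated. A hedged editorial fragment (not part of the changeset), assuming the classes above are on the classpath and using a placeholder signature:

byte[] signature = new byte[64];       // hypothetical placeholder
byte[] content = new byte[300 * 1024]; // 300 KiB, well under both limits

try {
    ArbitraryDataFileChunk chunk = new ArbitraryDataFileChunk(content, signature);
    System.out.println("Chunk valid: " + (chunk.isValid() == ArbitraryDataFile.ValidationResult.OK));
    chunk.delete(); // remove the temporary chunk file again
} catch (DataException e) {
    e.printStackTrace();
}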
src/main/java/org/qortal/arbitrary/ArbitraryDataMerge.java (new file, 176 lines)
@@ -0,0 +1,176 @@
package org.qortal.arbitrary;

import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.arbitrary.ArbitraryDataDiff.*;
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
import org.qortal.arbitrary.patch.UnifiedDiffPatch;
import org.qortal.repository.DataException;
import org.qortal.settings.Settings;
import org.qortal.utils.FilesystemUtils;

import java.io.File;
import java.io.IOException;
import java.nio.file.*;
import java.util.List;
import java.util.UUID;

public class ArbitraryDataMerge {

    private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataMerge.class);

    private final Path pathBefore;
    private final Path pathAfter;
    private Path mergePath;
    private String identifier;
    private ArbitraryDataMetadataPatch metadata;

    public ArbitraryDataMerge(Path pathBefore, Path pathAfter) {
        this.pathBefore = pathBefore;
        this.pathAfter = pathAfter;
    }

    public void compute() throws IOException, DataException {
        try {
            this.preExecute();
            this.copyPreviousStateToMergePath();
            this.loadMetadata();
            this.applyDifferences();
            this.copyMetadata();

        } finally {
            this.postExecute();
        }
    }

    private void preExecute() throws DataException {
        this.createRandomIdentifier();
        this.createOutputDirectory();
    }

    private void postExecute() {

    }

    private void createRandomIdentifier() {
        this.identifier = UUID.randomUUID().toString();
    }

    private void createOutputDirectory() throws DataException {
        // Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
        String baseDir = Settings.getInstance().getTempDataPath();
        Path tempDir = Paths.get(baseDir, "merge", this.identifier);
        try {
            Files.createDirectories(tempDir);
        } catch (IOException e) {
            throw new DataException("Unable to create temp directory");
        }
        this.mergePath = tempDir;
    }

    private void copyPreviousStateToMergePath() throws IOException {
        ArbitraryDataMerge.copyDirPathToBaseDir(this.pathBefore, this.mergePath, Paths.get(""));
    }

    private void loadMetadata() throws IOException, DataException {
        this.metadata = new ArbitraryDataMetadataPatch(this.pathAfter);
        this.metadata.read();
    }

    private void applyDifferences() throws IOException, DataException {

        List<Path> addedPaths = this.metadata.getAddedPaths();
        for (Path path : addedPaths) {
            LOGGER.trace("File was added: {}", path.toString());
            Path filePath = Paths.get(this.pathAfter.toString(), path.toString());
            ArbitraryDataMerge.copyPathToBaseDir(filePath, this.mergePath, path);
        }

        List<ModifiedPath> modifiedPaths = this.metadata.getModifiedPaths();
        for (ModifiedPath modifiedPath : modifiedPaths) {
            LOGGER.trace("File was modified: {}", modifiedPath.toString());
            this.applyPatch(modifiedPath);
        }

        List<Path> removedPaths = this.metadata.getRemovedPaths();
        for (Path path : removedPaths) {
            LOGGER.trace("File was removed: {}", path.toString());
            ArbitraryDataMerge.deletePathInBaseDir(this.mergePath, path);
        }
    }

    private void applyPatch(ModifiedPath modifiedPath) throws IOException, DataException {
        if (modifiedPath.getDiffType() == DiffType.UNIFIED_DIFF) {
            // Create destination file from patch
            UnifiedDiffPatch unifiedDiffPatch = new UnifiedDiffPatch(pathBefore, pathAfter, mergePath);
            unifiedDiffPatch.apply(modifiedPath.getPath());
        }
        else if (modifiedPath.getDiffType() == DiffType.COMPLETE_FILE) {
            // Copy complete file
            Path filePath = Paths.get(this.pathAfter.toString(), modifiedPath.getPath().toString());
            ArbitraryDataMerge.copyPathToBaseDir(filePath, this.mergePath, modifiedPath.getPath());
        }
        else {
            throw new DataException(String.format("Unrecognized patch diff type: %s", modifiedPath.getDiffType()));
        }
    }

    private void copyMetadata() throws IOException {
        Path filePath = Paths.get(this.pathAfter.toString(), ".qortal");
        ArbitraryDataMerge.copyPathToBaseDir(filePath, this.mergePath, Paths.get(".qortal"));
    }


    private static void copyPathToBaseDir(Path source, Path base, Path relativePath) throws IOException {
        if (!Files.exists(source)) {
            throw new IOException(String.format("File not found: %s", source.toString()));
        }

        File sourceFile = source.toFile();
        Path dest = Paths.get(base.toString(), relativePath.toString());
        LOGGER.trace("Copying {} to {}", source, dest);

        if (sourceFile.isFile()) {
            Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);
        }
        else if (sourceFile.isDirectory()) {
            FilesystemUtils.copyAndReplaceDirectory(source.toString(), dest.toString());
        }
        else {
            throw new IOException(String.format("Invalid file: %s", source.toString()));
        }
    }

    private static void copyDirPathToBaseDir(Path source, Path base, Path relativePath) throws IOException {
        if (!Files.exists(source)) {
            throw new IOException(String.format("File not found: %s", source.toString()));
        }

        Path dest = Paths.get(base.toString(), relativePath.toString());
        LOGGER.trace("Copying {} to {}", source, dest);
        FilesystemUtils.copyAndReplaceDirectory(source.toString(), dest.toString());
    }

    private static void deletePathInBaseDir(Path base, Path relativePath) throws IOException {
        Path dest = Paths.get(base.toString(), relativePath.toString());
        File file = new File(dest.toString());
        if (file.exists() && file.isFile()) {
            if (FilesystemUtils.pathInsideDataOrTempPath(dest)) {
                LOGGER.trace("Deleting file {}", dest);
                Files.delete(dest);
            }
        }
        if (file.exists() && file.isDirectory()) {
            if (FilesystemUtils.pathInsideDataOrTempPath(dest)) {
                LOGGER.trace("Deleting directory {}", dest);
                FileUtils.deleteDirectory(file);
            }
        }
    }

    public Path getMergePath() {
        return this.mergePath;
    }

}
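ArbitraryDataMerge is the counterpart of ArbitraryDataDiff: given a previous built state and a patch directory, it reproduces the next state. A hedged editorial fragment (not part of the changeset), assuming an initialised node and using placeholder paths:

// Hedged fragment: both directories are hypothetical placeholders.
Path previousState = Paths.get("/tmp/qdn-example/previous-state"); // previously built state
Path patchDirectory = Paths.get("/tmp/qdn-example/diff-output");   // output of ArbitraryDataDiff

try {
    ArbitraryDataMerge merge = new ArbitraryDataMerge(previousState, patchDirectory);
    merge.compute(); // copy previous state, read patch metadata, apply adds/patches/removals
    System.out.println("Merged state: " + merge.getMergePath());
} catch (IOException | DataException e) {
    e.printStackTrace();
}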
src/main/java/org/qortal/arbitrary/ArbitraryDataReader.java (new file, 566 lines)
@@ -0,0 +1,566 @@
package org.qortal.arbitrary;

import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import org.qortal.arbitrary.exception.MissingDataException;
import org.qortal.arbitrary.misc.Service;
import org.qortal.controller.arbitrary.ArbitraryDataBuildManager;
import org.qortal.controller.arbitrary.ArbitraryDataManager;
import org.qortal.controller.arbitrary.ArbitraryDataStorageManager;
import org.qortal.crypto.AES;
import org.qortal.data.transaction.ArbitraryTransactionData;
import org.qortal.data.transaction.ArbitraryTransactionData.*;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.arbitrary.ArbitraryDataFile.*;
import org.qortal.settings.Settings;
import org.qortal.transform.Transformer;
import org.qortal.utils.ArbitraryTransactionUtils;
import org.qortal.utils.Base58;
import org.qortal.utils.FilesystemUtils;
import org.qortal.utils.ZipUtils;

import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import java.io.File;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;

public class ArbitraryDataReader {

    private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataReader.class);

    private final String resourceId;
    private final ResourceIdType resourceIdType;
    private final Service service;
    private final String identifier;
    private ArbitraryTransactionData transactionData;
    private String secret58;
    private Path filePath;
    private boolean canRequestMissingFiles;

    // Intermediate paths
    private final Path workingPath;
    private final Path uncompressedPath;

    // Stats (available for synchronous builds only)
    private int layerCount;
    private byte[] latestSignature;

    public ArbitraryDataReader(String resourceId, ResourceIdType resourceIdType, Service service, String identifier) {
        // Ensure names are always lowercase
        if (resourceIdType == ResourceIdType.NAME) {
            resourceId = resourceId.toLowerCase();
        }

        // If identifier is a blank string, or reserved keyword "default", treat it as null
        if (identifier == null || identifier.equals("") || identifier.equals("default")) {
            identifier = null;
        }

        this.resourceId = resourceId;
        this.resourceIdType = resourceIdType;
        this.service = service;
        this.identifier = identifier;

        this.workingPath = this.buildWorkingPath();
        this.uncompressedPath = Paths.get(this.workingPath.toString(), "data");

        // By default we can request missing files
        // Callers can use setCanRequestMissingFiles(false) to prevent it
        this.canRequestMissingFiles = true;
    }

    private Path buildWorkingPath() {
        // Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
        String baseDir = Settings.getInstance().getTempDataPath();
        String identifier = this.identifier != null ? this.identifier : "default";
        return Paths.get(baseDir, "reader", this.resourceIdType.toString(), this.resourceId, this.service.toString(), identifier);
    }
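    // Editorial note (not part of this changeset): with the configured temp data path this resolves to
    //   <tempDataPath>/reader/<resourceIdType>/<resourceId>/<service>/<identifier or "default">
    // so repeated reads of the same resource reuse a deterministic on-disk cache location.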
|
||||
public boolean isCachedDataAvailable() {
|
||||
// If this resource is in the build queue then we shouldn't attempt to serve
|
||||
// cached data, as it may not be fully built
|
||||
if (ArbitraryDataBuildManager.getInstance().isInBuildQueue(this.createQueueItem())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Not in the build queue - so check the cache itself
|
||||
ArbitraryDataCache cache = new ArbitraryDataCache(this.uncompressedPath, false,
|
||||
this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
if (cache.isCachedDataAvailable()) {
|
||||
this.filePath = this.uncompressedPath;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean isBuilding() {
|
||||
return ArbitraryDataBuildManager.getInstance().isInBuildQueue(this.createQueueItem());
|
||||
}
|
||||
|
||||
private ArbitraryDataBuildQueueItem createQueueItem() {
|
||||
return new ArbitraryDataBuildQueueItem(this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* loadAsynchronously
|
||||
*
|
||||
* Attempts to load the resource asynchronously
|
||||
* This adds the build task to a queue, and the result will be cached when complete
|
||||
* To check the status of the build, periodically call isCachedDataAvailable()
|
||||
* Once it returns true, you can then use getFilePath() to access the data itself.
|
||||
*
|
||||
* @param overwrite - set to true to force rebuild an existing cache
|
||||
* @return true if added or already present in queue; false if not
|
||||
*/
|
||||
public boolean loadAsynchronously(boolean overwrite, int priority) {
|
||||
ArbitraryDataCache cache = new ArbitraryDataCache(this.uncompressedPath, overwrite,
|
||||
this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
if (cache.isCachedDataAvailable()) {
|
||||
// Use cached data
|
||||
this.filePath = this.uncompressedPath;
|
||||
return true;
|
||||
}
|
||||
|
||||
ArbitraryDataBuildQueueItem item = this.createQueueItem();
|
||||
item.setPriority(priority);
|
||||
return ArbitraryDataBuildManager.getInstance().addToBuildQueue(item);
|
||||
}
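/*
 * Illustrative usage sketch for the asynchronous pattern described above (not part of this class;
 * Service.WEBSITE and the one-second polling interval are assumptions chosen for the example):
 *
 *   ArbitraryDataReader reader = new ArbitraryDataReader("myname", ResourceIdType.NAME, Service.WEBSITE, null);
 *   if (reader.loadAsynchronously(false, 10)) {
 *       while (!reader.isCachedDataAvailable()) {
 *           Thread.sleep(1000L); // poll until the build completes (a real caller should add a timeout)
 *       }
 *       Path dataPath = reader.getFilePath(); // data is ready once isCachedDataAvailable() returns true
 *   }
 */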
|
||||
|
||||
/**
|
||||
* loadSynchronously
|
||||
*
|
||||
* Attempts to load the resource synchronously
|
||||
* Warning: this can block for a long time when building or fetching complex data
|
||||
* If no exception is thrown, getFilePath() can be used to access the data as soon as this method returns
|
||||
*
|
||||
* @param overwrite - set to true to force rebuild an existing cache
|
||||
* @throws IOException
|
||||
* @throws DataException
|
||||
* @throws MissingDataException
|
||||
*/
|
||||
public void loadSynchronously(boolean overwrite) throws DataException, IOException, MissingDataException {
|
||||
try {
|
||||
ArbitraryDataCache cache = new ArbitraryDataCache(this.uncompressedPath, overwrite,
|
||||
this.resourceId, this.resourceIdType, this.service, this.identifier);
|
||||
if (cache.isCachedDataAvailable()) {
|
||||
// Use cached data
|
||||
this.filePath = this.uncompressedPath;
|
||||
return;
|
||||
}
|
||||
|
||||
this.preExecute();
|
||||
this.deleteExistingFiles();
|
||||
this.fetch();
|
||||
this.decrypt();
|
||||
this.uncompress();
|
||||
this.validate();
|
||||
|
||||
} catch (DataException e) {
|
||||
this.deleteWorkingDirectory();
|
||||
throw new DataException(e.getMessage());
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
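/*
 * Illustrative usage sketch for the synchronous path (assumption: the caller runs on a worker
 * thread, since this call can block while fetching and building; Service.WEBSITE is an example value):
 *
 *   ArbitraryDataReader reader = new ArbitraryDataReader("myname", ResourceIdType.NAME, Service.WEBSITE, null);
 *   try {
 *       reader.loadSynchronously(false);
 *       Path dataPath = reader.getFilePath(); // safe to use once no exception was thrown
 *   } catch (MissingDataException e) {
 *       // Chunks are not yet available locally; a network request may have been issued
 *   } catch (DataException | IOException e) {
 *       // Build or filesystem failure
 *   }
 */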
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
ArbitraryDataBuildManager.getInstance().setBuildInProgress(true);
|
||||
this.checkEnabled();
|
||||
this.createWorkingDirectory();
|
||||
this.createUncompressedDirectory();
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
ArbitraryDataBuildManager.getInstance().setBuildInProgress(false);
|
||||
}
|
||||
|
||||
private void checkEnabled() throws DataException {
|
||||
if (!Settings.getInstance().isQdnEnabled()) {
|
||||
throw new DataException("QDN is disabled in settings");
|
||||
}
|
||||
}
|
||||
|
||||
private void createWorkingDirectory() throws DataException {
|
||||
try {
|
||||
Files.createDirectories(this.workingPath);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create temp directory");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Working directory should only be deleted on failure, since it is currently used to
|
||||
* serve a cached version of the resource for subsequent requests.
|
||||
* @throws IOException
|
||||
*/
|
||||
private void deleteWorkingDirectory() throws IOException {
|
||||
FilesystemUtils.safeDeleteDirectory(this.workingPath, true);
|
||||
}
|
||||
|
||||
private void createUncompressedDirectory() throws DataException {
|
||||
try {
|
||||
// Create parent directory
|
||||
Files.createDirectories(this.uncompressedPath.getParent());
|
||||
// Ensure child directory doesn't already exist
|
||||
FileUtils.deleteDirectory(this.uncompressedPath.toFile());
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create uncompressed directory");
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteExistingFiles() {
|
||||
final Path uncompressedPath = this.uncompressedPath;
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(uncompressedPath)) {
|
||||
if (Files.exists(uncompressedPath)) {
|
||||
LOGGER.trace("Attempting to delete path {}", this.uncompressedPath);
|
||||
try {
|
||||
Files.walkFileTree(uncompressedPath, new SimpleFileVisitor<>() {
|
||||
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
Files.delete(file);
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException {
|
||||
// Don't delete the parent directory, as we want to leave an empty folder
|
||||
if (dir.compareTo(uncompressedPath) == 0) {
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
if (e == null) {
|
||||
Files.delete(dir);
|
||||
return FileVisitResult.CONTINUE;
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Unable to delete file or directory: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void fetch() throws DataException, IOException, MissingDataException {
|
||||
switch (resourceIdType) {
|
||||
|
||||
case FILE_HASH:
|
||||
this.fetchFromFileHash();
|
||||
break;
|
||||
|
||||
case NAME:
|
||||
this.fetchFromName();
|
||||
break;
|
||||
|
||||
case SIGNATURE:
|
||||
this.fetchFromSignature();
|
||||
break;
|
||||
|
||||
case TRANSACTION_DATA:
|
||||
this.fetchFromTransactionData(this.transactionData);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new DataException(String.format("Unknown resource ID type specified: %s", resourceIdType.toString()));
|
||||
}
|
||||
}
|
||||
|
||||
private void fetchFromFileHash() throws DataException {
|
||||
// Load data file directly from the hash (without a signature)
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash58(resourceId, null);
|
||||
// Set filePath to the location of the ArbitraryDataFile
|
||||
this.filePath = arbitraryDataFile.getFilePath();
|
||||
}
|
||||
|
||||
private void fetchFromName() throws DataException, IOException, MissingDataException {
|
||||
try {
|
||||
|
||||
// Build the existing state using past transactions
|
||||
ArbitraryDataBuilder builder = new ArbitraryDataBuilder(this.resourceId, this.service, this.identifier);
|
||||
builder.build();
|
||||
Path builtPath = builder.getFinalPath();
|
||||
if (builtPath == null) {
|
||||
throw new DataException("Unable to build path");
|
||||
}
|
||||
|
||||
// Update stats
|
||||
this.layerCount = builder.getLayerCount();
|
||||
this.latestSignature = builder.getLatestSignature();
|
||||
|
||||
// Set filePath to the builtPath
|
||||
this.filePath = builtPath;
|
||||
|
||||
} catch (InvalidObjectException e) {
|
||||
// Hash validation failed. Invalidate the cache for this name, so it can be rebuilt
|
||||
LOGGER.info("Deleting {}", this.workingPath.toString());
|
||||
FilesystemUtils.safeDeleteDirectory(this.workingPath, false);
|
||||
throw(e);
|
||||
}
|
||||
}
|
||||
|
||||
private void fetchFromSignature() throws DataException, IOException, MissingDataException {
|
||||
|
||||
// Load the full transaction data from the database so we can access the file hashes
|
||||
ArbitraryTransactionData transactionData;
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
transactionData = (ArbitraryTransactionData) repository.getTransactionRepository().fromSignature(Base58.decode(resourceId));
|
||||
}
|
||||
if (transactionData == null) {
|
||||
throw new DataException(String.format("Transaction data not found for signature %s", this.resourceId));
|
||||
}
|
||||
|
||||
this.fetchFromTransactionData(transactionData);
|
||||
}
|
||||
|
||||
private void fetchFromTransactionData(ArbitraryTransactionData transactionData) throws DataException, IOException, MissingDataException {
|
||||
if (transactionData == null) {
|
||||
throw new DataException(String.format("Transaction data not found for signature %s", this.resourceId));
|
||||
}
|
||||
|
||||
// Load hashes
|
||||
byte[] digest = transactionData.getData();
|
||||
byte[] metadataHash = transactionData.getMetadataHash();
|
||||
byte[] signature = transactionData.getSignature();
|
||||
|
||||
// Load secret
|
||||
byte[] secret = transactionData.getSecret();
|
||||
if (secret != null) {
|
||||
this.secret58 = Base58.encode(secret);
|
||||
}
|
||||
|
||||
// Load data file(s)
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(digest, signature);
|
||||
ArbitraryTransactionUtils.checkAndRelocateMiscFiles(transactionData);
|
||||
arbitraryDataFile.setMetadataHash(metadataHash);
|
||||
|
||||
if (!arbitraryDataFile.allFilesExist()) {
|
||||
if (ArbitraryDataStorageManager.getInstance().isNameBlocked(transactionData.getName())) {
|
||||
throw new DataException(
|
||||
String.format("Unable to request missing data for file %s because the name is blocked", arbitraryDataFile));
|
||||
}
|
||||
else {
|
||||
// Ask the arbitrary data manager to fetch data for this transaction
|
||||
String message;
|
||||
if (this.canRequestMissingFiles) {
|
||||
boolean requested = ArbitraryDataManager.getInstance().fetchData(transactionData);
|
||||
|
||||
if (requested) {
|
||||
message = String.format("Requested missing data for file %s", arbitraryDataFile);
|
||||
} else {
|
||||
message = String.format("Unable to reissue request for missing file %s for signature %s due to rate limit. Please try again later.", arbitraryDataFile, Base58.encode(transactionData.getSignature()));
|
||||
}
|
||||
}
|
||||
else {
|
||||
message = String.format("Missing data for file %s", arbitraryDataFile);
|
||||
}
|
||||
|
||||
// Throw a missing data exception, which allows subsequent layers to fetch data
|
||||
LOGGER.trace(message);
|
||||
throw new MissingDataException(message);
|
||||
}
|
||||
}
|
||||
|
||||
if (arbitraryDataFile.allChunksExist() && !arbitraryDataFile.exists()) {
|
||||
// We have all the chunks but not the complete file, so join them
|
||||
arbitraryDataFile.join();
|
||||
}
|
||||
|
||||
// If the complete file still doesn't exist then something went wrong
|
||||
if (!arbitraryDataFile.exists()) {
|
||||
throw new IOException(String.format("File doesn't exist: %s", arbitraryDataFile));
|
||||
}
|
||||
// Ensure the complete hash matches the joined chunks
|
||||
if (!Arrays.equals(arbitraryDataFile.digest(), digest)) {
|
||||
// Delete the invalid file
|
||||
arbitraryDataFile.delete();
|
||||
throw new DataException("Unable to validate complete file hash");
|
||||
}
|
||||
// Ensure the file's size matches the size reported by the transaction (throws a DataException if not)
|
||||
arbitraryDataFile.validateFileSize(transactionData.getSize());
|
||||
|
||||
// Set filePath to the location of the ArbitraryDataFile
|
||||
this.filePath = arbitraryDataFile.getFilePath();
|
||||
}
|
||||
|
||||
private void decrypt() throws DataException {
|
||||
try {
|
||||
// First try with explicit parameters (CBC mode with PKCS5 padding)
|
||||
this.decryptUsingAlgo("AES/CBC/PKCS5Padding");
|
||||
|
||||
} catch (DataException e) {
|
||||
// Something went wrong, so fall back to default AES params (necessary for legacy resource support)
|
||||
this.decryptUsingAlgo("AES");
|
||||
|
||||
// TODO: delete files and block this resource if privateDataEnabled is false and the second attempt fails too
|
||||
}
|
||||
}
|
||||
|
||||
private void decryptUsingAlgo(String algorithm) throws DataException {
|
||||
// Decrypt if we have the secret key.
|
||||
byte[] secret = this.secret58 != null ? Base58.decode(this.secret58) : null;
|
||||
if (secret != null && secret.length == Transformer.AES256_LENGTH) {
|
||||
try {
|
||||
Path unencryptedPath = Paths.get(this.workingPath.toString(), "zipped.zip");
|
||||
SecretKey aesKey = new SecretKeySpec(secret, 0, secret.length, algorithm);
|
||||
AES.decryptFile(algorithm, aesKey, this.filePath.toString(), unencryptedPath.toString());
|
||||
|
||||
// Replace filePath pointer with the decrypted file path
|
||||
// Don't delete the original ArbitraryDataFile, as this is handled in the cleanup phase
|
||||
this.filePath = unencryptedPath;
|
||||
|
||||
} catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException | NoSuchPaddingException
|
||||
| BadPaddingException | IllegalBlockSizeException | IOException | InvalidKeyException e) {
|
||||
throw new DataException(String.format("Unable to decrypt file at path %s: %s", this.filePath, e.getMessage()));
|
||||
}
|
||||
} else {
|
||||
// Assume it is unencrypted. This will be the case when we have built a custom path by combining
|
||||
// multiple decrypted archives into a single state.
|
||||
}
|
||||
}
|
||||
|
||||
private void uncompress() throws IOException, DataException {
|
||||
if (this.filePath == null || !Files.exists(this.filePath)) {
|
||||
throw new DataException("Can't uncompress non-existent file path");
|
||||
}
|
||||
File file = new File(this.filePath.toString());
|
||||
if (file.isDirectory()) {
|
||||
// Already a directory - nothing to uncompress
|
||||
// We still need to copy the directory to its final destination if it's not already there
|
||||
this.moveFilePathToFinalDestination();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Default to ZIP compression - this is needed for previews
|
||||
Compression compression = transactionData != null ? transactionData.getCompression() : Compression.ZIP;
|
||||
|
||||
// Handle each type of compression
|
||||
if (compression == Compression.ZIP) {
|
||||
ZipUtils.unzip(this.filePath.toString(), this.uncompressedPath.getParent().toString());
|
||||
}
|
||||
else if (compression == Compression.NONE) {
|
||||
Files.createDirectories(this.uncompressedPath);
|
||||
Path finalPath = Paths.get(this.uncompressedPath.toString(), "data");
|
||||
this.filePath.toFile().renameTo(finalPath.toFile());
|
||||
}
|
||||
else {
|
||||
throw new DataException(String.format("Unrecognized compression type: %s", transactionData.getCompression()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new DataException(String.format("Unable to unzip file: %s", e.getMessage()));
|
||||
}
|
||||
|
||||
if (!this.uncompressedPath.toFile().exists()) {
|
||||
throw new DataException(String.format("Unable to unzip file: %s", this.filePath));
|
||||
}
|
||||
|
||||
// Delete original compressed file
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
if (Files.exists(this.filePath)) {
|
||||
Files.delete(this.filePath);
|
||||
}
|
||||
}
|
||||
|
||||
// Replace filePath pointer with the uncompressed file path
|
||||
this.filePath = this.uncompressedPath;
|
||||
}
|
||||
|
||||
private void validate() throws IOException, DataException {
|
||||
if (this.service.isValidationRequired()) {
|
||||
Service.ValidationResult result = this.service.validate(this.filePath);
|
||||
if (result != Service.ValidationResult.OK) {
|
||||
throw new DataException(String.format("Validation of %s failed: %s", this.service, result.toString()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void moveFilePathToFinalDestination() throws IOException, DataException {
|
||||
if (this.filePath.compareTo(this.uncompressedPath) != 0) {
|
||||
File source = new File(this.filePath.toString());
|
||||
File dest = new File(this.uncompressedPath.toString());
|
||||
if (!source.exists()) {
|
||||
throw new DataException("Source directory doesn't exist");
|
||||
}
|
||||
// Ensure destination directory doesn't exist
|
||||
FileUtils.deleteDirectory(dest);
|
||||
// Move files to destination
|
||||
FilesystemUtils.copyAndReplaceDirectory(source.toString(), dest.toString());
|
||||
|
||||
try {
|
||||
// Delete existing
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
File directory = new File(this.filePath.toString());
|
||||
FileUtils.deleteDirectory(directory);
|
||||
}
|
||||
|
||||
// ... and its parent directory if empty
|
||||
Path parentDirectory = this.filePath.getParent();
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(parentDirectory)) {
|
||||
Files.deleteIfExists(parentDirectory);
|
||||
}
|
||||
|
||||
} catch (DirectoryNotEmptyException e) {
|
||||
// No need to log anything
|
||||
} catch (IOException e) {
|
||||
// This will eventually be cleaned up by a maintenance process, so log the error and continue
|
||||
LOGGER.debug("Unable to cleanup directories: {}", e.getMessage());
|
||||
}
|
||||
|
||||
// Finally, update filePath to point to uncompressedPath
|
||||
this.filePath = this.uncompressedPath;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void setTransactionData(ArbitraryTransactionData transactionData) {
|
||||
this.transactionData = transactionData;
|
||||
}
|
||||
|
||||
public void setSecret58(String secret58) {
|
||||
this.secret58 = secret58;
|
||||
}
|
||||
|
||||
public Path getFilePath() {
|
||||
return this.filePath;
|
||||
}
|
||||
|
||||
public int getLayerCount() {
|
||||
return this.layerCount;
|
||||
}
|
||||
|
||||
public byte[] getLatestSignature() {
|
||||
return this.latestSignature;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use this setter to ensure that we only read existing
* data without requesting any missing files.
|
||||
*
|
||||
* @param canRequestMissingFiles - whether or not fetching missing files is allowed
|
||||
*/
|
||||
public void setCanRequestMissingFiles(boolean canRequestMissingFiles) {
|
||||
this.canRequestMissingFiles = canRequestMissingFiles;
|
||||
}
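/*
 * Example (minimal sketch): reading only data that is already held locally, without
 * triggering any network fetches:
 *
 *   reader.setCanRequestMissingFiles(false);
 *   reader.loadSynchronously(false); // throws MissingDataException instead of requesting missing chunks
 */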
|
||||
|
||||
}
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataRenderer.java (new file, 219 lines)
@@ -0,0 +1,219 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import com.google.common.io.Resources;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.api.HTMLParser;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.Controller;
|
||||
import org.qortal.settings.Settings;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class ArbitraryDataRenderer {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataRenderer.class);
|
||||
|
||||
private final String resourceId;
|
||||
private final ResourceIdType resourceIdType;
|
||||
private final Service service;
|
||||
private String theme = "light";
|
||||
private String inPath;
|
||||
private final String secret58;
|
||||
private final String prefix;
|
||||
private final boolean usePrefix;
|
||||
private final boolean async;
|
||||
private final HttpServletRequest request;
|
||||
private final HttpServletResponse response;
|
||||
private final ServletContext context;
|
||||
|
||||
public ArbitraryDataRenderer(String resourceId, ResourceIdType resourceIdType, Service service, String inPath,
|
||||
String secret58, String prefix, boolean usePrefix, boolean async,
|
||||
HttpServletRequest request, HttpServletResponse response, ServletContext context) {
|
||||
|
||||
this.resourceId = resourceId;
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
this.inPath = inPath;
|
||||
this.secret58 = secret58;
|
||||
this.prefix = prefix;
|
||||
this.usePrefix = usePrefix;
|
||||
this.async = async;
|
||||
this.request = request;
|
||||
this.response = response;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
public HttpServletResponse render() {
|
||||
if (!inPath.startsWith(File.separator)) {
|
||||
inPath = File.separator + inPath;
|
||||
}
|
||||
|
||||
// Don't render data if QDN is disabled
|
||||
if (!Settings.getInstance().isQdnEnabled()) {
|
||||
return ArbitraryDataRenderer.getResponse(response, 500, "QDN is disabled in settings");
|
||||
}
|
||||
|
||||
ArbitraryDataReader arbitraryDataReader = new ArbitraryDataReader(resourceId, resourceIdType, service, null);
|
||||
arbitraryDataReader.setSecret58(secret58); // Optional, used for loading encrypted file hashes only
|
||||
try {
|
||||
if (!arbitraryDataReader.isCachedDataAvailable()) {
|
||||
// If async is requested, show a loading screen whilst build is in progress
|
||||
if (async) {
|
||||
arbitraryDataReader.loadAsynchronously(false, 10);
|
||||
return this.getLoadingResponse(service, resourceId, theme);
|
||||
}
|
||||
|
||||
// Otherwise, loop until we have data
|
||||
int attempts = 0;
|
||||
while (!Controller.isStopping()) {
|
||||
attempts++;
|
||||
if (!arbitraryDataReader.isBuilding()) {
|
||||
try {
|
||||
arbitraryDataReader.loadSynchronously(false);
|
||||
break;
|
||||
} catch (MissingDataException e) {
|
||||
if (attempts > 5) {
|
||||
// Give up after 5 attempts
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Data unavailable. Please try again later.");
|
||||
}
|
||||
}
|
||||
}
|
||||
Thread.sleep(3000L);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
LOGGER.info(String.format("Unable to load %s %s: %s", service, resourceId, e.getMessage()));
|
||||
return ArbitraryDataRenderer.getResponse(response, 500, "Error 500: Internal Server Error");
|
||||
}
|
||||
|
||||
java.nio.file.Path path = arbitraryDataReader.getFilePath();
|
||||
if (path == null) {
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Error 404: File Not Found");
|
||||
}
|
||||
String unzippedPath = path.toString();
|
||||
|
||||
try {
|
||||
String filename = this.getFilename(unzippedPath, inPath);
|
||||
String filePath = Paths.get(unzippedPath, filename).toString();
|
||||
|
||||
if (HTMLParser.isHtmlFile(filename)) {
|
||||
// HTML file - needs to be parsed
|
||||
byte[] data = Files.readAllBytes(Paths.get(filePath)); // TODO: limit file size that can be read into memory
|
||||
HTMLParser htmlParser = new HTMLParser(resourceId, inPath, prefix, usePrefix, data);
|
||||
htmlParser.addAdditionalHeaderTags();
|
||||
response.addHeader("Content-Security-Policy", "default-src 'self' 'unsafe-inline' 'unsafe-eval'; media-src 'self' blob:; img-src 'self' data: blob:;");
|
||||
response.setContentType(context.getMimeType(filename));
|
||||
response.setContentLength(htmlParser.getData().length);
|
||||
response.getOutputStream().write(htmlParser.getData());
|
||||
}
|
||||
else {
|
||||
// Regular file - can be streamed directly
|
||||
File file = new File(filePath);
|
||||
FileInputStream inputStream = new FileInputStream(file);
|
||||
response.addHeader("Content-Security-Policy", "default-src 'self'");
|
||||
response.setContentType(context.getMimeType(filename));
|
||||
int bytesRead, length = 0;
|
||||
byte[] buffer = new byte[10240];
|
||||
while ((bytesRead = inputStream.read(buffer)) != -1) {
|
||||
response.getOutputStream().write(buffer, 0, bytesRead);
|
||||
length += bytesRead;
|
||||
}
|
||||
response.setContentLength(length);
|
||||
inputStream.close();
|
||||
}
|
||||
return response;
|
||||
} catch (FileNotFoundException | NoSuchFileException e) {
|
||||
LOGGER.info("Unable to serve file: {}", e.getMessage());
|
||||
if (inPath.equals("/")) {
|
||||
// Delete the unzipped folder if no index file was found
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(unzippedPath));
|
||||
} catch (IOException ioException) {
|
||||
LOGGER.debug("Unable to delete directory: {}", unzippedPath, e);
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Unable to serve file at path {}: {}", inPath, e.getMessage());
|
||||
}
|
||||
|
||||
return ArbitraryDataRenderer.getResponse(response, 404, "Error 404: File Not Found");
|
||||
}
|
||||
|
||||
private String getFilename(String directory, String userPath) {
|
||||
if (userPath == null || userPath.endsWith("/") || userPath.equals("")) {
|
||||
// Locate index file
|
||||
List<String> indexFiles = ArbitraryDataRenderer.indexFiles();
|
||||
for (String indexFile : indexFiles) {
|
||||
Path path = Paths.get(directory, indexFile);
|
||||
if (Files.exists(path)) {
|
||||
return userPath + indexFile;
|
||||
}
|
||||
}
|
||||
}
|
||||
return userPath;
|
||||
}
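// Example (illustrative): for userPath "/" with an extracted site containing index.html, this returns
// "/index.html"; for a concrete path such as "/pages/about.html" the userPath is returned unchanged.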
|
||||
|
||||
private HttpServletResponse getLoadingResponse(Service service, String name, String theme) {
|
||||
String responseString = "";
|
||||
URL url = Resources.getResource("loading/index.html");
|
||||
try {
|
||||
responseString = Resources.toString(url, StandardCharsets.UTF_8);
|
||||
|
||||
// Replace vars
|
||||
responseString = responseString.replace("%%SERVICE%%", service.toString());
|
||||
responseString = responseString.replace("%%NAME%%", name);
|
||||
responseString = responseString.replace("%%THEME%%", theme);
|
||||
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Unable to show loading screen: {}", e.getMessage());
|
||||
}
|
||||
return ArbitraryDataRenderer.getResponse(response, 503, responseString);
|
||||
}
|
||||
|
||||
public static HttpServletResponse getResponse(HttpServletResponse response, int responseCode, String responseString) {
|
||||
try {
|
||||
byte[] responseData = responseString.getBytes();
|
||||
response.setStatus(responseCode);
|
||||
response.setContentLength(responseData.length);
|
||||
response.getOutputStream().write(responseData);
|
||||
} catch (IOException e) {
|
||||
LOGGER.info("Error writing {} response", responseCode);
|
||||
}
|
||||
return response;
|
||||
}
|
||||
|
||||
public static List<String> indexFiles() {
|
||||
List<String> indexFiles = new ArrayList<>();
|
||||
indexFiles.add("index.html");
|
||||
indexFiles.add("index.htm");
|
||||
indexFiles.add("default.html");
|
||||
indexFiles.add("default.htm");
|
||||
indexFiles.add("home.html");
|
||||
indexFiles.add("home.htm");
|
||||
return indexFiles;
|
||||
}
|
||||
|
||||
public void setTheme(String theme) {
|
||||
this.theme = theme;
|
||||
}
|
||||
|
||||
}
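/*
 * Illustrative sketch of how a caller (e.g. an API resource or servlet) might drive this renderer.
 * The request/response/context objects come from the servlet container; "mysite", the prefix and
 * Service.WEBSITE are example values, not taken from the original source:
 *
 *   ArbitraryDataRenderer renderer = new ArbitraryDataRenderer(
 *           "mysite", ResourceIdType.NAME, Service.WEBSITE, "/index.html",
 *           null, "/render/mysite", true, true, request, response, context);
 *   renderer.setTheme("dark"); // only substituted into the loading page template
 *   return renderer.render();  // serves the file, or a loading/error page
 */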
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataResource.java (new file, 407 lines)
@@ -0,0 +1,407 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataBuildManager;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataManager;
|
||||
import org.qortal.controller.arbitrary.ArbitraryDataStorageManager;
|
||||
import org.qortal.data.arbitrary.ArbitraryResourceStatus;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.list.ResourceListManager;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.repository.RepositoryManager;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.ArbitraryTransactionUtils;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.qortal.data.arbitrary.ArbitraryResourceStatus.Status;
|
||||
|
||||
public class ArbitraryDataResource {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataResource.class);
|
||||
|
||||
protected final String resourceId;
|
||||
protected final ResourceIdType resourceIdType;
|
||||
protected final Service service;
|
||||
protected final String identifier;
|
||||
|
||||
private List<ArbitraryTransactionData> transactions;
|
||||
private ArbitraryTransactionData latestPutTransaction;
|
||||
private ArbitraryTransactionData latestTransaction;
|
||||
private int layerCount;
|
||||
private Integer localChunkCount = null;
|
||||
private Integer totalChunkCount = null;
|
||||
|
||||
public ArbitraryDataResource(String resourceId, ResourceIdType resourceIdType, Service service, String identifier) {
|
||||
this.resourceId = resourceId.toLowerCase();
|
||||
this.resourceIdType = resourceIdType;
|
||||
this.service = service;
|
||||
|
||||
// If identifier is a blank string, or reserved keyword "default", treat it as null
|
||||
if (identifier == null || identifier.equals("") || identifier.equals("default")) {
|
||||
identifier = null;
|
||||
}
|
||||
this.identifier = identifier;
|
||||
}
|
||||
|
||||
public ArbitraryResourceStatus getStatus(boolean quick) {
|
||||
// Calculate the chunk counts
|
||||
// Avoid this for "quick" statuses, to speed things up
|
||||
if (!quick) {
|
||||
this.calculateChunkCounts();
|
||||
}
|
||||
|
||||
if (resourceIdType != ResourceIdType.NAME) {
|
||||
// We only support statuses for resources with a name
|
||||
return new ArbitraryResourceStatus(Status.UNSUPPORTED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if the name is blocked
|
||||
if (ResourceListManager.getInstance()
|
||||
.listContains("blockedNames", this.resourceId, false)) {
|
||||
return new ArbitraryResourceStatus(Status.BLOCKED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if a build has failed
|
||||
ArbitraryDataBuildQueueItem queueItem =
|
||||
new ArbitraryDataBuildQueueItem(resourceId, resourceIdType, service, identifier);
|
||||
if (ArbitraryDataBuildManager.getInstance().isInFailedBuildsList(queueItem)) {
|
||||
return new ArbitraryResourceStatus(Status.BUILD_FAILED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Firstly check the cache to see if it's already built
|
||||
ArbitraryDataReader arbitraryDataReader = new ArbitraryDataReader(
|
||||
resourceId, resourceIdType, service, identifier);
|
||||
if (arbitraryDataReader.isCachedDataAvailable()) {
|
||||
return new ArbitraryResourceStatus(Status.READY, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if we have all data locally for this resource
|
||||
if (!this.allFilesDownloaded()) {
|
||||
if (this.isDownloading()) {
|
||||
return new ArbitraryResourceStatus(Status.DOWNLOADING, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
else if (this.isDataPotentiallyAvailable()) {
|
||||
return new ArbitraryResourceStatus(Status.PUBLISHED, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
return new ArbitraryResourceStatus(Status.MISSING_DATA, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// Check if there's a build in progress
|
||||
if (ArbitraryDataBuildManager.getInstance().isInBuildQueue(queueItem)) {
|
||||
return new ArbitraryResourceStatus(Status.BUILDING, this.localChunkCount, this.totalChunkCount);
|
||||
}
|
||||
|
||||
// We have all data locally
|
||||
return new ArbitraryResourceStatus(Status.DOWNLOADED, this.localChunkCount, this.totalChunkCount);
|
||||
}
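/*
 * Example (minimal sketch): obtaining a quick status for a named resource, skipping the slower
 * chunk-count calculation (Service.WEBSITE is an example value):
 *
 *   ArbitraryDataResource resource = new ArbitraryDataResource("myname", ResourceIdType.NAME, Service.WEBSITE, null);
 *   ArbitraryResourceStatus status = resource.getStatus(true); // chunk counts remain null in "quick" mode
 */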
|
||||
|
||||
public ArbitraryDataTransactionMetadata getLatestTransactionMetadata() {
|
||||
this.fetchLatestTransaction();
|
||||
|
||||
if (latestTransaction != null) {
|
||||
byte[] signature = latestTransaction.getSignature();
|
||||
byte[] metadataHash = latestTransaction.getMetadataHash();
|
||||
if (metadataHash == null) {
|
||||
// This resource doesn't have metadata
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
ArbitraryDataFile metadataFile = ArbitraryDataFile.fromHash(metadataHash, signature);
|
||||
if (metadataFile.exists()) {
|
||||
ArbitraryDataTransactionMetadata transactionMetadata = new ArbitraryDataTransactionMetadata(metadataFile.getFilePath());
|
||||
transactionMetadata.read();
|
||||
return transactionMetadata;
|
||||
}
|
||||
|
||||
} catch (DataException | IOException e) {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean delete() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
byte[] hash = transactionData.getData();
|
||||
byte[] metadataHash = transactionData.getMetadataHash();
|
||||
byte[] signature = transactionData.getSignature();
|
||||
ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash, signature);
|
||||
arbitraryDataFile.setMetadataHash(metadataHash);
|
||||
|
||||
// Delete any chunks or complete files from each transaction
|
||||
arbitraryDataFile.deleteAll();
|
||||
}
|
||||
|
||||
// Also delete cached data for the entire resource
|
||||
this.deleteCache();
|
||||
|
||||
// Invalidate the hosted transactions cache as we have removed an item
|
||||
ArbitraryDataStorageManager.getInstance().invalidateHostedTransactionsCache();
|
||||
|
||||
return true;
|
||||
|
||||
} catch (DataException | IOException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public void deleteCache() throws IOException {
|
||||
// Don't delete anything if there's a build in progress
|
||||
ArbitraryDataBuildQueueItem queueItem =
|
||||
new ArbitraryDataBuildQueueItem(resourceId, resourceIdType, service, identifier);
|
||||
if (ArbitraryDataBuildManager.getInstance().isInBuildQueue(queueItem)) {
|
||||
return;
|
||||
}
|
||||
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
String identifier = this.identifier != null ? this.identifier : "default";
|
||||
Path cachePath = Paths.get(baseDir, "reader", this.resourceIdType.toString(), this.resourceId, this.service.toString(), identifier);
|
||||
if (cachePath.toFile().exists()) {
|
||||
boolean success = FilesystemUtils.safeDeleteDirectory(cachePath, true);
|
||||
if (success) {
|
||||
LOGGER.info("Cleared cache for resource {}", this.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean allFilesDownloaded() {
|
||||
// Use chunk counts to speed things up if we can
|
||||
if (this.localChunkCount != null && this.totalChunkCount != null &&
|
||||
this.localChunkCount >= this.totalChunkCount) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
if (!ArbitraryTransactionUtils.completeFileExists(transactionData) ||
|
||||
!ArbitraryTransactionUtils.allChunksExist(transactionData)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private void calculateChunkCounts() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
int localChunkCount = 0;
|
||||
int totalChunkCount = 0;
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
localChunkCount += ArbitraryTransactionUtils.ourChunkCount(transactionData);
|
||||
totalChunkCount += ArbitraryTransactionUtils.totalChunkCount(transactionData);
|
||||
}
|
||||
|
||||
this.localChunkCount = localChunkCount;
|
||||
this.totalChunkCount = totalChunkCount;
|
||||
|
||||
} catch (DataException e) {
// Unable to fetch transactions; leave the chunk counts unset (null)
}
|
||||
}
|
||||
|
||||
private boolean isRateLimited() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
if (ArbitraryDataManager.getInstance().isSignatureRateLimited(transactionData.getSignature())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Best guess as to whether data might be available
|
||||
* This is only used to give an indication to the user of progress
|
||||
* @return - whether data might be available on the network
|
||||
*/
|
||||
private boolean isDataPotentiallyAvailable() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
long lastRequestTime = ArbitraryDataManager.getInstance().lastRequestForSignature(transactionData.getSignature());
|
||||
// If we haven't requested yet, or requested in the last 30 seconds, there's still a
|
||||
// chance that data is on its way but hasn't arrived yet
|
||||
if (lastRequestTime == 0 || now - lastRequestTime < 30 * 1000L) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Best guess as to whether we are currently downloading a resource
|
||||
* This is only used to give an indication to the user of progress
|
||||
* @return - whether we are trying to download the resource
|
||||
*/
|
||||
private boolean isDownloading() {
|
||||
try {
|
||||
this.fetchTransactions();
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
List<ArbitraryTransactionData> transactionDataList = new ArrayList<>(this.transactions);
|
||||
|
||||
for (ArbitraryTransactionData transactionData : transactionDataList) {
|
||||
long lastRequestTime = ArbitraryDataManager.getInstance().lastRequestForSignature(transactionData.getSignature());
|
||||
// If we have requested data in the last 30 seconds, treat it as "downloading"
|
||||
if (lastRequestTime > 0 && now - lastRequestTime < 30 * 1000L) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// FUTURE: we may want to check for file hashes (including the metadata file hash) in
|
||||
// ArbitraryDataManager.arbitraryDataFileRequests and return true if one is found.
|
||||
|
||||
return false;
|
||||
|
||||
} catch (DataException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
private void fetchTransactions() throws DataException {
|
||||
if (this.transactions != null && !this.transactions.isEmpty()) {
|
||||
// Already fetched
|
||||
return;
|
||||
}
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Get the most recent PUT
|
||||
ArbitraryTransactionData latestPut = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.resourceId, this.service, ArbitraryTransactionData.Method.PUT, this.identifier);
|
||||
if (latestPut == null) {
|
||||
String message = String.format("Couldn't find PUT transaction for name %s, service %s and identifier %s",
|
||||
this.resourceId, this.service, this.identifierString());
|
||||
throw new DataException(message);
|
||||
}
|
||||
this.latestPutTransaction = latestPut;
|
||||
|
||||
// Load all transactions since the latest PUT
|
||||
List<ArbitraryTransactionData> transactionDataList = repository.getArbitraryRepository()
|
||||
.getArbitraryTransactions(this.resourceId, this.service, this.identifier, latestPut.getTimestamp());
|
||||
|
||||
this.transactions = transactionDataList;
|
||||
this.layerCount = transactionDataList.size();
|
||||
|
||||
} catch (DataException e) {
|
||||
LOGGER.info(String.format("Repository error when fetching transactions for resource %s: %s", this, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
private void fetchLatestTransaction() {
|
||||
if (this.latestTransaction != null) {
|
||||
// Already fetched
|
||||
return;
|
||||
}
|
||||
|
||||
try (final Repository repository = RepositoryManager.getRepository()) {
|
||||
|
||||
// Get the most recent transaction
|
||||
ArbitraryTransactionData latestTransaction = repository.getArbitraryRepository()
|
||||
.getLatestTransaction(this.resourceId, this.service, null, this.identifier);
|
||||
if (latestTransaction == null) {
|
||||
String message = String.format("Couldn't find transaction for name %s, service %s and identifier %s",
|
||||
this.resourceId, this.service, this.identifierString());
|
||||
throw new DataException(message);
|
||||
}
|
||||
this.latestTransaction = latestTransaction;
|
||||
|
||||
} catch (DataException e) {
|
||||
LOGGER.info(String.format("Repository error when fetching latest transaction for resource %s: %s", this, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
private String resourceIdString() {
|
||||
return resourceId != null ? resourceId : "";
|
||||
}
|
||||
|
||||
private String resourceIdTypeString() {
|
||||
return resourceIdType != null ? resourceIdType.toString() : "";
|
||||
}
|
||||
|
||||
private String serviceString() {
|
||||
return service != null ? service.toString() : "";
|
||||
}
|
||||
|
||||
private String identifierString() {
|
||||
return identifier != null ? identifier : "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("%s %s %s", this.serviceString(), this.resourceIdString(), this.identifierString());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return unique key used to identify this resource
|
||||
*/
|
||||
public String getUniqueKey() {
|
||||
return String.format("%s-%s-%s", this.service, this.resourceId, this.identifier).toLowerCase();
|
||||
}
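// Example (illustrative): Service.WEBSITE, resourceId "MyName" and a null identifier produce the key
// "website-myname-null" (String.format renders the null identifier as the literal string "null").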
|
||||
|
||||
public String getResourceId() {
|
||||
return this.resourceId;
|
||||
}
|
||||
|
||||
public Service getService() {
|
||||
return this.service;
|
||||
}
|
||||
|
||||
public String getIdentifier() {
|
||||
return this.identifier;
|
||||
}
|
||||
}
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataTransactionBuilder.java (new file, 334 lines)
@@ -0,0 +1,334 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.ResourceIdType;
|
||||
import org.qortal.arbitrary.ArbitraryDataDiff.*;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataMetadataPatch;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.arbitrary.misc.Category;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.PaymentData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.*;
|
||||
import org.qortal.data.transaction.BaseTransactionData;
|
||||
import org.qortal.group.Group;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.repository.Repository;
|
||||
import org.qortal.transaction.ArbitraryTransaction;
|
||||
import org.qortal.transaction.Transaction;
|
||||
import org.qortal.transform.Transformer;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
import org.qortal.utils.NTP;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Random;
|
||||
|
||||
public class ArbitraryDataTransactionBuilder {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataTransactionBuilder.class);
|
||||
|
||||
// Min transaction version required
|
||||
private static final int MIN_TRANSACTION_VERSION = 5;
|
||||
|
||||
// Maximum number of PATCH layers allowed
|
||||
private static final int MAX_LAYERS = 10;
|
||||
// Maximum size difference allowed for PATCH transactions, as a proportion (0-1) of the existing state size
|
||||
private static final double MAX_SIZE_DIFF = 0.2f;
|
||||
// Maximum proportion of files modified relative to total
|
||||
private static final double MAX_FILE_DIFF = 0.5f;
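// Worked example of the thresholds above (illustrative figures): a patch of 30 kB against an existing
// 100 kB state gives a size difference of 0.3, which exceeds MAX_SIZE_DIFF (0.2), so
// determineMethodAutomatically() falls back to a PUT rather than a PATCH. Similarly, modifying
// 3 of 4 files gives a file difference of 0.75 > MAX_FILE_DIFF (0.5), which also forces a PUT.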
|
||||
|
||||
private final String publicKey58;
|
||||
private final Path path;
|
||||
private final String name;
|
||||
private Method method;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
private final Repository repository;
|
||||
|
||||
// Metadata
|
||||
private final String title;
|
||||
private final String description;
|
||||
private final List<String> tags;
|
||||
private final Category category;
|
||||
|
||||
private int chunkSize = ArbitraryDataFile.CHUNK_SIZE;
|
||||
|
||||
private ArbitraryTransactionData arbitraryTransactionData;
|
||||
private ArbitraryDataFile arbitraryDataFile;
|
||||
|
||||
public ArbitraryDataTransactionBuilder(Repository repository, String publicKey58, Path path, String name,
|
||||
Method method, Service service, String identifier,
|
||||
String title, String description, List<String> tags, Category category) {
|
||||
this.repository = repository;
|
||||
this.publicKey58 = publicKey58;
|
||||
this.path = path;
|
||||
this.name = name;
|
||||
this.method = method;
|
||||
this.service = service;
|
||||
|
||||
// If identifier is a blank string, or reserved keyword "default", treat it as null
|
||||
if (identifier == null || identifier.equals("") || identifier.equals("default")) {
|
||||
identifier = null;
|
||||
}
|
||||
this.identifier = identifier;
|
||||
|
||||
// Metadata (optional)
|
||||
this.title = ArbitraryDataTransactionMetadata.limitTitle(title);
|
||||
this.description = ArbitraryDataTransactionMetadata.limitDescription(description);
|
||||
this.tags = ArbitraryDataTransactionMetadata.limitTags(tags);
|
||||
this.category = category;
|
||||
}
|
||||
|
||||
public void build() throws DataException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.checkMethod();
|
||||
this.createTransaction();
|
||||
}
|
||||
finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() {
|
||||
|
||||
}
|
||||
|
||||
private void postExecute() {
|
||||
|
||||
}
|
||||
|
||||
private void checkMethod() throws DataException {
|
||||
if (this.method == null) {
|
||||
// We need to automatically determine the method
|
||||
this.method = this.determineMethodAutomatically();
|
||||
}
|
||||
}
|
||||
|
||||
private Method determineMethodAutomatically() throws DataException {
|
||||
ArbitraryDataReader reader = new ArbitraryDataReader(this.name, ResourceIdType.NAME, this.service, this.identifier);
|
||||
try {
|
||||
reader.loadSynchronously(true);
|
||||
} catch (Exception e) {
|
||||
// Catch all exceptions if the existing resource cannot be loaded first time
|
||||
// In these cases it's simplest to just use a PUT transaction
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Get existing metadata and see if it matches the new metadata
|
||||
ArbitraryDataResource resource = new ArbitraryDataResource(this.name, ResourceIdType.NAME, this.service, this.identifier);
|
||||
ArbitraryDataTransactionMetadata existingMetadata = resource.getLatestTransactionMetadata();
|
||||
|
||||
try {
|
||||
// Check layer count
|
||||
int layerCount = reader.getLayerCount();
|
||||
if (layerCount >= MAX_LAYERS) {
|
||||
LOGGER.info("Reached maximum layer count ({} / {}) - using PUT", layerCount, MAX_LAYERS);
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Check size of differences between this layer and previous layer
|
||||
ArbitraryDataCreatePatch patch = new ArbitraryDataCreatePatch(reader.getFilePath(), this.path, reader.getLatestSignature());
|
||||
try {
|
||||
patch.create();
|
||||
}
|
||||
catch (DataException | IOException e) {
|
||||
// Handle matching states separately, as it's best to block transactions with duplicate states
|
||||
if (e.getMessage().equals("Current state matches previous state. Nothing to do.")) {
|
||||
// Only throw an exception if the metadata is also identical, as well as the data
|
||||
if (this.isMetadataEqual(existingMetadata)) {
|
||||
throw new DataException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
LOGGER.info("Caught exception when creating patch: {}", e.getMessage());
|
||||
LOGGER.info("Unable to load existing resource - using PUT to overwrite it.");
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
long diffSize = FilesystemUtils.getDirectorySize(patch.getFinalPath());
|
||||
long existingStateSize = FilesystemUtils.getDirectorySize(reader.getFilePath());
|
||||
double difference = (double) diffSize / (double) existingStateSize;
|
||||
if (difference > MAX_SIZE_DIFF) {
|
||||
LOGGER.info("Reached maximum difference ({} / {}) - using PUT", difference, MAX_SIZE_DIFF);
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Check number of modified files
|
||||
ArbitraryDataMetadataPatch metadata = patch.getMetadata();
|
||||
int totalFileCount = patch.getTotalFileCount();
|
||||
int differencesCount = metadata.getFileDifferencesCount();
|
||||
difference = (double) differencesCount / (double) totalFileCount;
|
||||
if (difference > MAX_FILE_DIFF) {
|
||||
LOGGER.info("Reached maximum file differences ({} / {}) - using PUT", difference, MAX_FILE_DIFF);
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// Check the patch types
|
||||
// Limit this check to single file resources only for now
|
||||
boolean atLeastOnePatch = false;
|
||||
if (totalFileCount == 1) {
|
||||
for (ModifiedPath path : metadata.getModifiedPaths()) {
|
||||
if (path.getDiffType() != DiffType.COMPLETE_FILE) {
|
||||
atLeastOnePatch = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!atLeastOnePatch) {
|
||||
LOGGER.info("Patch consists of complete files only - using PUT");
|
||||
return Method.PUT;
|
||||
}
|
||||
|
||||
// State is appropriate for a PATCH transaction
|
||||
return Method.PATCH;
|
||||
}
|
||||
catch (IOException e) {
|
||||
// IMPORTANT: Don't catch DataException here, as they must be passed to the caller
|
||||
LOGGER.info("Caught exception: {}", e.getMessage());
|
||||
LOGGER.info("Unable to load existing resource - using PUT to overwrite it.");
|
||||
return Method.PUT;
|
||||
}
|
||||
}
|
||||
|
||||
private void createTransaction() throws DataException {
|
||||
arbitraryDataFile = null;
|
||||
try {
|
||||
Long now = NTP.getTime();
|
||||
if (now == null) {
|
||||
throw new DataException("NTP time not synced yet");
|
||||
}
|
||||
|
||||
// Ensure that this chain supports transactions necessary for complex arbitrary data
|
||||
int transactionVersion = Transaction.getVersionByTimestamp(now);
|
||||
if (transactionVersion < MIN_TRANSACTION_VERSION) {
|
||||
throw new DataException("Transaction version unsupported on this blockchain.");
|
||||
}
|
||||
|
||||
if (publicKey58 == null || path == null) {
|
||||
throw new DataException("Missing public key or path");
|
||||
}
|
||||
byte[] creatorPublicKey = Base58.decode(publicKey58);
|
||||
final String creatorAddress = Crypto.toAddress(creatorPublicKey);
|
||||
byte[] lastReference = repository.getAccountRepository().getLastReference(creatorAddress);
|
||||
if (lastReference == null) {
|
||||
// Use a random last reference on the very first transaction for an account
|
||||
// Code copied from CrossChainResource.buildAtMessage()
|
||||
// We already require PoW on all arbitrary transactions, so no additional logic is needed
|
||||
Random random = new Random();
|
||||
lastReference = new byte[Transformer.SIGNATURE_LENGTH];
|
||||
random.nextBytes(lastReference);
|
||||
}
|
||||
|
||||
Compression compression = Compression.ZIP;
|
||||
|
||||
// FUTURE? Use zip compression for directories, or no compression for single files
|
||||
// Compression compression = (path.toFile().isDirectory()) ? Compression.ZIP : Compression.NONE;
|
||||
|
||||
ArbitraryDataWriter arbitraryDataWriter = new ArbitraryDataWriter(path, name, service, identifier, method,
|
||||
compression, title, description, tags, category);
|
||||
try {
|
||||
arbitraryDataWriter.setChunkSize(this.chunkSize);
|
||||
arbitraryDataWriter.save();
|
||||
} catch (IOException | DataException | InterruptedException | RuntimeException | MissingDataException e) {
|
||||
LOGGER.info("Unable to create arbitrary data file: {}", e.getMessage());
|
||||
throw new DataException(e.getMessage());
|
||||
}
|
||||
|
||||
// Get main file
|
||||
arbitraryDataFile = arbitraryDataWriter.getArbitraryDataFile();
|
||||
if (arbitraryDataFile == null) {
|
||||
throw new DataException("Arbitrary data file is null");
|
||||
}
|
||||
|
||||
// Get chunks metadata file
|
||||
ArbitraryDataFile metadataFile = arbitraryDataFile.getMetadataFile();
|
||||
if (metadataFile == null && arbitraryDataFile.chunkCount() > 1) {
|
||||
throw new DataException(String.format("Chunks metadata data file is null but there are %d chunks", arbitraryDataFile.chunkCount()));
|
||||
}
|
||||
|
||||
String digest58 = arbitraryDataFile.digest58();
|
||||
if (digest58 == null) {
|
||||
LOGGER.error("Unable to calculate file digest");
|
||||
throw new DataException("Unable to calculate file digest");
|
||||
}
|
||||
|
||||
final BaseTransactionData baseTransactionData = new BaseTransactionData(now, Group.NO_GROUP,
|
||||
lastReference, creatorPublicKey, 0L, null);
|
||||
final int size = (int) arbitraryDataFile.size();
|
||||
final int version = 5;
|
||||
final int nonce = 0;
|
||||
byte[] secret = arbitraryDataFile.getSecret();
|
||||
final ArbitraryTransactionData.DataType dataType = ArbitraryTransactionData.DataType.DATA_HASH;
|
||||
final byte[] digest = arbitraryDataFile.digest();
|
||||
final byte[] metadataHash = (metadataFile != null) ? metadataFile.getHash() : null;
|
||||
final List<PaymentData> payments = new ArrayList<>();
|
||||
|
||||
ArbitraryTransactionData transactionData = new ArbitraryTransactionData(baseTransactionData,
|
||||
version, service, nonce, size, name, identifier, method,
|
||||
secret, compression, digest, dataType, metadataHash, payments);
|
||||
|
||||
this.arbitraryTransactionData = transactionData;
|
||||
|
||||
} catch (DataException e) {
|
||||
if (arbitraryDataFile != null) {
|
||||
arbitraryDataFile.deleteAll();
|
||||
}
|
||||
throw(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private boolean isMetadataEqual(ArbitraryDataTransactionMetadata existingMetadata) {
|
||||
if (!Objects.equals(existingMetadata.getTitle(), this.title)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(existingMetadata.getDescription(), this.description)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(existingMetadata.getCategory(), this.category)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(existingMetadata.getTags(), this.tags)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public void computeNonce() throws DataException {
|
||||
if (this.arbitraryTransactionData == null) {
|
||||
throw new DataException("Arbitrary transaction data is required to compute nonce");
|
||||
}
|
||||
|
||||
ArbitraryTransaction transaction = (ArbitraryTransaction) Transaction.fromData(repository, this.arbitraryTransactionData);
|
||||
LOGGER.info("Computing nonce...");
|
||||
transaction.computeNonce();
|
||||
|
||||
Transaction.ValidationResult result = transaction.isValidUnconfirmed();
|
||||
if (result != Transaction.ValidationResult.OK) {
|
||||
arbitraryDataFile.deleteAll();
|
||||
throw new DataException(String.format("Arbitrary transaction invalid: %s", result));
|
||||
}
|
||||
LOGGER.info("Transaction is valid");
|
||||
}
|
||||
|
||||
public ArbitraryTransactionData getArbitraryTransactionData() {
|
||||
return this.arbitraryTransactionData;
|
||||
}
|
||||
|
||||
public ArbitraryDataFile getArbitraryDataFile() {
|
||||
return this.arbitraryDataFile;
|
||||
}
|
||||
|
||||
public void setChunkSize(int chunkSize) {
|
||||
this.chunkSize = chunkSize;
|
||||
}
|
||||
|
||||
}
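/*
 * Illustrative end-to-end sketch (assumptions: an open Repository, a Base58 public key and a local
 * path to publish; Service.WEBSITE and the null metadata fields are example values only):
 *
 *   ArbitraryDataTransactionBuilder builder = new ArbitraryDataTransactionBuilder(
 *           repository, publicKey58, Paths.get("/path/to/site"), "myname",
 *           null, Service.WEBSITE, null, null, null, null, null);
 *   builder.build();        // chooses PUT or PATCH automatically when method is null
 *   builder.computeNonce(); // performs PoW and validates the unconfirmed transaction
 *   ArbitraryTransactionData transactionData = builder.getArbitraryTransactionData();
 *   // The caller is then responsible for signing and broadcasting transactionData.
 */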
|
||||
src/main/java/org/qortal/arbitrary/ArbitraryDataWriter.java (new file, 388 lines)
@@ -0,0 +1,388 @@
|
||||
package org.qortal.arbitrary;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.arbitrary.exception.MissingDataException;
|
||||
import org.qortal.arbitrary.metadata.ArbitraryDataTransactionMetadata;
|
||||
import org.qortal.arbitrary.misc.Category;
|
||||
import org.qortal.arbitrary.misc.Service;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.data.transaction.ArbitraryTransactionData.*;
|
||||
import org.qortal.crypto.AES;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.arbitrary.ArbitraryDataFile.*;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.Base58;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
import org.qortal.utils.ZipUtils;
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
||||
import javax.crypto.IllegalBlockSizeException;
|
||||
import javax.crypto.NoSuchPaddingException;
|
||||
import javax.crypto.SecretKey;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.InvalidAlgorithmParameterException;
|
||||
import java.security.InvalidKeyException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ArbitraryDataWriter {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataWriter.class);
|
||||
|
||||
private Path filePath;
|
||||
private final String name;
|
||||
private final Service service;
|
||||
private final String identifier;
|
||||
private final Method method;
|
||||
private final Compression compression;
|
||||
|
||||
// Metadata
|
||||
private final String title;
|
||||
private final String description;
|
||||
private final List<String> tags;
|
||||
private final Category category;
|
||||
|
||||
private int chunkSize = ArbitraryDataFile.CHUNK_SIZE;
|
||||
|
||||
private SecretKey aesKey;
|
||||
private ArbitraryDataFile arbitraryDataFile;
|
||||
|
||||
// Intermediate paths to cleanup
|
||||
private Path workingPath;
|
||||
private Path compressedPath;
|
||||
private Path encryptedPath;
|
||||
|
||||
public ArbitraryDataWriter(Path filePath, String name, Service service, String identifier, Method method, Compression compression,
|
||||
String title, String description, List<String> tags, Category category) {
|
||||
this.filePath = filePath;
|
||||
this.name = name;
|
||||
this.service = service;
|
||||
this.method = method;
|
||||
this.compression = compression;
|
||||
|
||||
// If identifier is a blank string, or reserved keyword "default", treat it as null
|
||||
if (identifier == null || identifier.equals("") || identifier.equals("default")) {
|
||||
identifier = null;
|
||||
}
|
||||
this.identifier = identifier;
|
||||
|
||||
// Metadata (optional)
|
||||
this.title = ArbitraryDataTransactionMetadata.limitTitle(title);
|
||||
this.description = ArbitraryDataTransactionMetadata.limitDescription(description);
|
||||
this.tags = ArbitraryDataTransactionMetadata.limitTags(tags);
|
||||
this.category = category;
|
||||
}
|
||||
|
||||
public void save() throws IOException, DataException, InterruptedException, MissingDataException {
|
||||
try {
|
||||
this.preExecute();
|
||||
this.validateService();
|
||||
this.process();
|
||||
this.compress();
|
||||
this.encrypt();
|
||||
this.split();
|
||||
this.createMetadataFile();
|
||||
this.validate();
|
||||
|
||||
} finally {
|
||||
this.postExecute();
|
||||
}
|
||||
}
|
||||
|
||||
private void preExecute() throws DataException {
|
||||
this.checkEnabled();
|
||||
|
||||
// Enforce compression when uploading a directory
|
||||
File file = new File(this.filePath.toString());
|
||||
if (file.isDirectory() && compression == Compression.NONE) {
|
||||
throw new DataException("Unable to upload a directory without compression");
|
||||
}
|
||||
|
||||
// Create temporary working directory
|
||||
this.createWorkingDirectory();
|
||||
}
|
||||
|
||||
private void postExecute() throws IOException {
|
||||
this.cleanupFilesystem();
|
||||
}
|
||||
|
||||
private void checkEnabled() throws DataException {
|
||||
if (!Settings.getInstance().isQdnEnabled()) {
|
||||
throw new DataException("QDN is disabled in settings");
|
||||
}
|
||||
}
|
||||
|
||||
private void createWorkingDirectory() throws DataException {
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
String identifier = Base58.encode(Crypto.digest(this.filePath.toString().getBytes()));
|
||||
Path tempDir = Paths.get(baseDir, "writer", identifier);
|
||||
try {
|
||||
Files.createDirectories(tempDir);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create temp directory");
|
||||
}
|
||||
this.workingPath = tempDir;
|
||||
}
|
||||
|
||||
private void validateService() throws IOException, DataException {
|
||||
if (this.service.isValidationRequired()) {
|
||||
Service.ValidationResult result = this.service.validate(this.filePath);
|
||||
if (result != Service.ValidationResult.OK) {
|
||||
throw new DataException(String.format("Validation of %s failed: %s", this.service, result.toString()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void process() throws DataException, IOException, MissingDataException {
|
||||
switch (this.method) {
|
||||
|
||||
case PUT:
|
||||
// Nothing to do
|
||||
break;
|
||||
|
||||
case PATCH:
|
||||
this.processPatch();
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new DataException(String.format("Unknown method specified: %s", method.toString()));
|
||||
}
|
||||
}
|
||||
|
||||
private void processPatch() throws DataException, IOException, MissingDataException {
|
||||
|
||||
// Build the existing state using past transactions
|
||||
ArbitraryDataBuilder builder = new ArbitraryDataBuilder(this.name, this.service, this.identifier);
|
||||
builder.build();
|
||||
Path builtPath = builder.getFinalPath();
|
||||
|
||||
// Obtain the latest signature, so this can be included in the patch
|
||||
byte[] latestSignature = builder.getLatestSignature();
|
||||
|
||||
// Compute a diff of the latest changes on top of the previous state
|
||||
// Then use only the differences as our data payload
|
||||
ArbitraryDataCreatePatch patch = new ArbitraryDataCreatePatch(builtPath, this.filePath, latestSignature);
|
||||
patch.create();
|
||||
this.filePath = patch.getFinalPath();
|
||||
|
||||
// Delete the input directory
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(builtPath)) {
|
||||
File directory = new File(builtPath.toString());
|
||||
FileUtils.deleteDirectory(directory);
|
||||
}
|
||||
|
||||
// Validate the patch
|
||||
this.validatePatch();
|
||||
}
|
||||
|
||||
private void validatePatch() throws DataException {
|
||||
if (this.filePath == null) {
|
||||
throw new DataException("Null path after creating patch");
|
||||
}
|
||||
|
||||
File qortalMetadataDirectoryFile = Paths.get(this.filePath.toString(), ".qortal").toFile();
|
||||
if (!qortalMetadataDirectoryFile.exists()) {
|
||||
throw new DataException("Qortal metadata folder doesn't exist in patch");
|
||||
}
|
||||
if (!qortalMetadataDirectoryFile.isDirectory()) {
|
||||
throw new DataException("Qortal metadata folder isn't a directory");
|
||||
}
|
||||
|
||||
File qortalPatchMetadataFile = Paths.get(this.filePath.toString(), ".qortal", "patch").toFile();
|
||||
if (!qortalPatchMetadataFile.exists()) {
|
||||
throw new DataException("Qortal patch metadata file doesn't exist in patch");
|
||||
}
|
||||
if (!qortalPatchMetadataFile.isFile()) {
|
||||
throw new DataException("Qortal patch metadata file isn't a file");
|
||||
}
|
||||
}
|
||||
|
||||
private void compress() throws InterruptedException, DataException {
|
||||
// Compress the data if requested
|
||||
if (this.compression != Compression.NONE) {
|
||||
this.compressedPath = Paths.get(this.workingPath.toString(), "data.zip");
|
||||
try {
|
||||
|
||||
if (this.compression == Compression.ZIP) {
|
||||
LOGGER.info("Compressing...");
|
||||
String enclosingFolderName = "data";
|
||||
ZipUtils.zip(this.filePath.toString(), this.compressedPath.toString(), enclosingFolderName);
|
||||
}
|
||||
else {
|
||||
throw new DataException(String.format("Unknown compression type specified: %s", compression.toString()));
|
||||
}
|
||||
// FUTURE: other compression types
|
||||
|
||||
// Delete the input directory
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
File directory = new File(this.filePath.toString());
|
||||
FileUtils.deleteDirectory(directory);
|
||||
}
|
||||
// Replace filePath pointer with the zipped file path
|
||||
this.filePath = this.compressedPath;
|
||||
|
||||
} catch (IOException | DataException e) {
|
||||
throw new DataException("Unable to zip directory", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void encrypt() throws DataException {
|
||||
this.encryptedPath = Paths.get(this.workingPath.toString(), "data.zip.encrypted");
|
||||
try {
|
||||
// Encrypt the file with AES
|
||||
LOGGER.info("Encrypting...");
|
||||
this.aesKey = AES.generateKey(256);
|
||||
AES.encryptFile("AES/CBC/PKCS5Padding", this.aesKey, this.filePath.toString(), this.encryptedPath.toString());
|
||||
|
||||
// Delete the input file
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.filePath)) {
|
||||
Files.delete(this.filePath);
|
||||
}
|
||||
// Replace filePath pointer with the encrypted file path
|
||||
this.filePath = this.encryptedPath;
|
||||
|
||||
} catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException | NoSuchPaddingException
|
||||
| BadPaddingException | IllegalBlockSizeException | IOException | InvalidKeyException e) {
|
||||
throw new DataException(String.format("Unable to encrypt file %s: %s", this.filePath, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
private void split() throws IOException, DataException {
|
||||
// We don't have a signature yet, so use null to put the file in a generic folder
|
||||
this.arbitraryDataFile = ArbitraryDataFile.fromPath(this.filePath, null);
|
||||
if (this.arbitraryDataFile == null) {
|
||||
throw new IOException("No file available when trying to split");
|
||||
}
|
||||
|
||||
int chunkCount = this.arbitraryDataFile.split(this.chunkSize);
|
||||
if (chunkCount > 0) {
|
||||
LOGGER.info(String.format("Successfully split into %d chunk%s", chunkCount, (chunkCount == 1 ? "" : "s")));
|
||||
}
|
||||
else {
|
||||
throw new DataException("Unable to split file into chunks");
|
||||
}
|
||||
}
|
||||
|
||||
private void createMetadataFile() throws IOException, DataException {
|
||||
// If we have at least one chunk, we need to create an index file containing their hashes
|
||||
if (this.needsMetadataFile()) {
|
||||
// Create the JSON file
|
||||
Path chunkFilePath = Paths.get(this.workingPath.toString(), "metadata.json");
|
||||
ArbitraryDataTransactionMetadata metadata = new ArbitraryDataTransactionMetadata(chunkFilePath);
|
||||
metadata.setTitle(this.title);
|
||||
metadata.setDescription(this.description);
|
||||
metadata.setTags(this.tags);
|
||||
metadata.setCategory(this.category);
|
||||
metadata.setChunks(this.arbitraryDataFile.chunkHashList());
|
||||
metadata.write();
|
||||
|
||||
// Create an ArbitraryDataFile from the JSON file (we don't have a signature yet)
|
||||
ArbitraryDataFile metadataFile = ArbitraryDataFile.fromPath(chunkFilePath, null);
|
||||
this.arbitraryDataFile.setMetadataFile(metadataFile);
|
||||
}
|
||||
}
|
||||
|
||||
private void validate() throws IOException, DataException {
|
||||
if (this.arbitraryDataFile == null) {
|
||||
throw new DataException("No file available when validating");
|
||||
}
|
||||
this.arbitraryDataFile.setSecret(this.aesKey.getEncoded());
|
||||
|
||||
// Validate the file
|
||||
ValidationResult validationResult = this.arbitraryDataFile.isValid();
|
||||
if (validationResult != ValidationResult.OK) {
|
||||
throw new DataException(String.format("File %s failed validation: %s", this.arbitraryDataFile, validationResult));
|
||||
}
|
||||
LOGGER.info("Whole file hash is valid: {}", this.arbitraryDataFile.digest58());
|
||||
|
||||
// Validate each chunk
|
||||
for (ArbitraryDataFileChunk chunk : this.arbitraryDataFile.getChunks()) {
|
||||
validationResult = chunk.isValid();
|
||||
if (validationResult != ValidationResult.OK) {
|
||||
throw new DataException(String.format("Chunk %s failed validation: %s", chunk, validationResult));
|
||||
}
|
||||
}
|
||||
LOGGER.info("Chunk hashes are valid");
|
||||
|
||||
// Validate chunks metadata file
|
||||
if (this.arbitraryDataFile.chunkCount() > 1) {
|
||||
ArbitraryDataFile metadataFile = this.arbitraryDataFile.getMetadataFile();
|
||||
if (metadataFile == null || !metadataFile.exists()) {
|
||||
throw new DataException("No metadata file available, but there are multiple chunks");
|
||||
}
|
||||
// Read the file
|
||||
ArbitraryDataTransactionMetadata metadata = new ArbitraryDataTransactionMetadata(metadataFile.getFilePath());
|
||||
metadata.read();
|
||||
// Check all chunks exist
|
||||
for (byte[] chunk : this.arbitraryDataFile.chunkHashList()) {
|
||||
if (!metadata.containsChunk(chunk)) {
|
||||
throw new DataException(String.format("Missing chunk %s in metadata file", Base58.encode(chunk)));
|
||||
}
|
||||
}
|
||||
|
||||
// Check that the metadata is correct
|
||||
if (!Objects.equals(metadata.getTitle(), this.title)) {
|
||||
throw new DataException("Metadata mismatch: title");
|
||||
}
|
||||
if (!Objects.equals(metadata.getDescription(), this.description)) {
|
||||
throw new DataException("Metadata mismatch: description");
|
||||
}
|
||||
if (!Objects.equals(metadata.getTags(), this.tags)) {
|
||||
throw new DataException("Metadata mismatch: tags");
|
||||
}
|
||||
if (!Objects.equals(metadata.getCategory(), this.category)) {
|
||||
throw new DataException("Metadata mismatch: category");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void cleanupFilesystem() throws IOException {
|
||||
// Clean up
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.compressedPath)) {
|
||||
File zippedFile = new File(this.compressedPath.toString());
|
||||
if (zippedFile.exists()) {
|
||||
zippedFile.delete();
|
||||
}
|
||||
}
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.encryptedPath)) {
|
||||
File encryptedFile = new File(this.encryptedPath.toString());
|
||||
if (encryptedFile.exists()) {
|
||||
encryptedFile.delete();
|
||||
}
|
||||
}
|
||||
if (FilesystemUtils.pathInsideDataOrTempPath(this.workingPath)) {
|
||||
FileUtils.deleteDirectory(new File(this.workingPath.toString()));
|
||||
}
|
||||
}
|
||||
|
||||
private boolean needsMetadataFile() {
|
||||
if (this.arbitraryDataFile.chunkCount() > 1) {
|
||||
return true;
|
||||
}
|
||||
if (this.title != null || this.description != null || this.tags != null || this.category != null) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
public ArbitraryDataFile getArbitraryDataFile() {
|
||||
return this.arbitraryDataFile;
|
||||
}
|
||||
|
||||
public void setChunkSize(int chunkSize) {
|
||||
this.chunkSize = chunkSize;
|
||||
}
|
||||
|
||||
}
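Illustrative only, not part of the diff: a rough sketch of driving the save() pipeline above end to end. Every argument value is a placeholder, and the throws clause simply mirrors save().

	// Hypothetical usage -- placeholder name, folder and metadata values
	static ArbitraryDataFile publishFolder(Path folder) throws IOException, DataException, InterruptedException, MissingDataException {
		ArbitraryDataWriter writer = new ArbitraryDataWriter(folder, "MyRegisteredName", Service.WEBSITE,
				null, Method.PUT, Compression.ZIP,
				"My site", "A short description", Arrays.asList("qortal", "demo"), Category.TECHNOLOGY);
		writer.save();                        // validate -> process -> compress -> encrypt -> split -> metadata -> validate
		return writer.getArbitraryDataFile(); // chunked, encrypted payload plus its metadata file
	}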
|
||||
@@ -0,0 +1,20 @@
package org.qortal.arbitrary.exception;

public class MissingDataException extends Exception {

	public MissingDataException() {
	}

	public MissingDataException(String message) {
		super(message);
	}

	public MissingDataException(String message, Throwable cause) {
		super(message, cause);
	}

	public MissingDataException(Throwable cause) {
		super(cause);
	}

}
@@ -0,0 +1,85 @@
package org.qortal.arbitrary.metadata;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.qortal.repository.DataException;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

/**
 * ArbitraryDataMetadata
 *
 * This is a base class to handle reading and writing JSON to the supplied filePath.
 *
 * It is not usable on its own; it must be subclassed, with two methods overridden:
 *
 * readJson() - code to deserialize the JSON file
 * buildJson() - code to serialize the JSON file
 *
 */
public class ArbitraryDataMetadata {

	protected static final Logger LOGGER = LogManager.getLogger(ArbitraryDataMetadata.class);

	protected Path filePath;

	protected String jsonString;

	public ArbitraryDataMetadata(Path filePath) {
		this.filePath = filePath;
	}

	protected void readJson() throws DataException {
		// To be overridden
	}

	protected void buildJson() {
		// To be overridden
	}


	public void read() throws IOException, DataException {
		this.loadJson();
		this.readJson();
	}

	public void write() throws IOException, DataException {
		this.buildJson();
		this.createParentDirectories();

		BufferedWriter writer = new BufferedWriter(new FileWriter(this.filePath.toString()));
		writer.write(this.jsonString);
		writer.newLine();
		writer.close();
	}


	protected void loadJson() throws IOException {
		File metadataFile = new File(this.filePath.toString());
		if (!metadataFile.exists()) {
			throw new IOException(String.format("Metadata file doesn't exist: %s", this.filePath.toString()));
		}

		this.jsonString = new String(Files.readAllBytes(this.filePath));
	}


	protected void createParentDirectories() throws DataException {
		try {
			Files.createDirectories(this.filePath.getParent());
		} catch (IOException e) {
			throw new DataException("Unable to create parent directories");
		}
	}


	public String getJsonString() {
		return this.jsonString;
	}

}
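Illustrative only: a minimal subclass sketch showing the two overrides the comment above asks for. The class name and its single "version" field are invented for the example.

	// Hypothetical subclass -- name and field are made up for illustration
	public class ExampleVersionMetadata extends ArbitraryDataMetadata {
		private int version;

		public ExampleVersionMetadata(Path filePath) {
			super(filePath);
		}

		@Override
		protected void readJson() throws DataException {
			this.version = new org.json.JSONObject(this.jsonString).optInt("version");
		}

		@Override
		protected void buildJson() {
			this.jsonString = new org.json.JSONObject().put("version", this.version).toString(2);
		}
	}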
@@ -0,0 +1,69 @@
package org.qortal.arbitrary.metadata;

import org.json.JSONObject;
import org.qortal.repository.DataException;
import org.qortal.utils.Base58;

import java.nio.file.Path;

public class ArbitraryDataMetadataCache extends ArbitraryDataQortalMetadata {

	private byte[] signature;
	private long timestamp;

	public ArbitraryDataMetadataCache(Path filePath) {
		super(filePath);

	}

	@Override
	protected String fileName() {
		return "cache";
	}

	@Override
	protected void readJson() throws DataException {
		if (this.jsonString == null) {
			throw new DataException("Patch JSON string is null");
		}

		JSONObject cache = new JSONObject(this.jsonString);
		if (cache.has("signature")) {
			String sig = cache.getString("signature");
			if (sig != null) {
				this.signature = Base58.decode(sig);
			}
		}
		if (cache.has("timestamp")) {
			this.timestamp = cache.getLong("timestamp");
		}
	}

	@Override
	protected void buildJson() {
		JSONObject patch = new JSONObject();
		patch.put("signature", Base58.encode(this.signature));
		patch.put("timestamp", this.timestamp);

		this.jsonString = patch.toString(2);
		LOGGER.trace("Cache metadata: {}", this.jsonString);
	}


	public void setSignature(byte[] signature) {
		this.signature = signature;
	}

	public byte[] getSignature() {
		return this.signature;
	}

	public void setTimestamp(long timestamp) {
		this.timestamp = timestamp;
	}

	public long getTimestamp() {
		return this.timestamp;
	}

}
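Illustrative only: writing and re-reading a cache entry with the class above. The resource path and signature bytes are assumed to be supplied by the caller.

	// Hypothetical helper -- arguments assumed to come from the build/transaction layer
	static long writeAndReadCache(Path resourcePath, byte[] latestSignature) throws IOException, DataException {
		ArbitraryDataMetadataCache cache = new ArbitraryDataMetadataCache(resourcePath);
		cache.setSignature(latestSignature);
		cache.setTimestamp(System.currentTimeMillis());
		cache.write();                      // serialised to <resourcePath>/.qortal/cache via the parent class

		ArbitraryDataMetadataCache reread = new ArbitraryDataMetadataCache(resourcePath);
		reread.read();
		return reread.getTimestamp();
	}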
@@ -0,0 +1,182 @@
|
||||
package org.qortal.arbitrary.metadata;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONObject;
|
||||
import org.qortal.arbitrary.ArbitraryDataDiff.*;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
public class ArbitraryDataMetadataPatch extends ArbitraryDataQortalMetadata {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(ArbitraryDataMetadataPatch.class);
|
||||
|
||||
private List<Path> addedPaths;
|
||||
private List<ModifiedPath> modifiedPaths;
|
||||
private List<Path> removedPaths;
|
||||
private byte[] previousSignature;
|
||||
private byte[] previousHash;
|
||||
private byte[] currentHash;
|
||||
|
||||
public ArbitraryDataMetadataPatch(Path filePath) {
|
||||
super(filePath);
|
||||
|
||||
this.addedPaths = new ArrayList<>();
|
||||
this.modifiedPaths = new ArrayList<>();
|
||||
this.removedPaths = new ArrayList<>();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String fileName() {
|
||||
return "patch";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void readJson() throws DataException {
|
||||
if (this.jsonString == null) {
|
||||
throw new DataException("Patch JSON string is null");
|
||||
}
|
||||
|
||||
JSONObject patch = new JSONObject(this.jsonString);
|
||||
if (patch.has("prevSig")) {
|
||||
String prevSig = patch.getString("prevSig");
|
||||
if (prevSig != null) {
|
||||
this.previousSignature = Base58.decode(prevSig);
|
||||
}
|
||||
}
|
||||
if (patch.has("prevHash")) {
|
||||
String prevHash = patch.getString("prevHash");
|
||||
if (prevHash != null) {
|
||||
this.previousHash = Base58.decode(prevHash);
|
||||
}
|
||||
}
|
||||
if (patch.has("curHash")) {
|
||||
String curHash = patch.getString("curHash");
|
||||
if (curHash != null) {
|
||||
this.currentHash = Base58.decode(curHash);
|
||||
}
|
||||
}
|
||||
if (patch.has("added")) {
|
||||
JSONArray added = (JSONArray) patch.get("added");
|
||||
if (added != null) {
|
||||
for (int i=0; i<added.length(); i++) {
|
||||
String pathString = added.getString(i);
|
||||
this.addedPaths.add(Paths.get(pathString));
|
||||
}
|
||||
}
|
||||
}
|
||||
if (patch.has("modified")) {
|
||||
JSONArray modified = (JSONArray) patch.get("modified");
|
||||
if (modified != null) {
|
||||
for (int i=0; i<modified.length(); i++) {
|
||||
JSONObject jsonObject = modified.getJSONObject(i);
|
||||
ModifiedPath modifiedPath = new ModifiedPath(jsonObject);
|
||||
this.modifiedPaths.add(modifiedPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (patch.has("removed")) {
|
||||
JSONArray removed = (JSONArray) patch.get("removed");
|
||||
if (removed != null) {
|
||||
for (int i=0; i<removed.length(); i++) {
|
||||
String pathString = removed.getString(i);
|
||||
this.removedPaths.add(Paths.get(pathString));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void buildJson() {
|
||||
JSONObject patch = new JSONObject();
|
||||
// Attempt to use a LinkedHashMap so that the order of fields is maintained
|
||||
try {
|
||||
Field changeMap = patch.getClass().getDeclaredField("map");
|
||||
changeMap.setAccessible(true);
|
||||
changeMap.set(patch, new LinkedHashMap<>());
|
||||
changeMap.setAccessible(false);
|
||||
} catch (IllegalAccessException | NoSuchFieldException e) {
|
||||
// Don't worry about failures as this is for optional ordering only
|
||||
}
|
||||
|
||||
patch.put("prevSig", Base58.encode(this.previousSignature));
|
||||
patch.put("prevHash", Base58.encode(this.previousHash));
|
||||
patch.put("curHash", Base58.encode(this.currentHash));
|
||||
patch.put("added", new JSONArray(this.addedPaths));
|
||||
patch.put("removed", new JSONArray(this.removedPaths));
|
||||
|
||||
JSONArray modifiedPaths = new JSONArray();
|
||||
for (ModifiedPath modifiedPath : this.modifiedPaths) {
|
||||
JSONObject modifiedPathJson = new JSONObject();
|
||||
modifiedPathJson.put("path", modifiedPath.getPath());
|
||||
modifiedPathJson.put("type", modifiedPath.getDiffType());
|
||||
modifiedPaths.put(modifiedPathJson);
|
||||
}
|
||||
patch.put("modified", modifiedPaths);
|
||||
|
||||
this.jsonString = patch.toString(2);
|
||||
LOGGER.debug("Patch metadata: {}", this.jsonString);
|
||||
}
|
||||
|
||||
public void setAddedPaths(List<Path> addedPaths) {
|
||||
this.addedPaths = addedPaths;
|
||||
}
|
||||
|
||||
public List<Path> getAddedPaths() {
|
||||
return this.addedPaths;
|
||||
}
|
||||
|
||||
public void setModifiedPaths(List<ModifiedPath> modifiedPaths) {
|
||||
this.modifiedPaths = modifiedPaths;
|
||||
}
|
||||
|
||||
public List<ModifiedPath> getModifiedPaths() {
|
||||
return this.modifiedPaths;
|
||||
}
|
||||
|
||||
public void setRemovedPaths(List<Path> removedPaths) {
|
||||
this.removedPaths = removedPaths;
|
||||
}
|
||||
|
||||
public List<Path> getRemovedPaths() {
|
||||
return this.removedPaths;
|
||||
}
|
||||
|
||||
public void setPreviousSignature(byte[] previousSignature) {
|
||||
this.previousSignature = previousSignature;
|
||||
}
|
||||
|
||||
public byte[] getPreviousSignature() {
|
||||
return this.previousSignature;
|
||||
}
|
||||
|
||||
public void setPreviousHash(byte[] previousHash) {
|
||||
this.previousHash = previousHash;
|
||||
}
|
||||
|
||||
public byte[] getPreviousHash() {
|
||||
return this.previousHash;
|
||||
}
|
||||
|
||||
public void setCurrentHash(byte[] currentHash) {
|
||||
this.currentHash = currentHash;
|
||||
}
|
||||
|
||||
public byte[] getCurrentHash() {
|
||||
return this.currentHash;
|
||||
}
|
||||
|
||||
|
||||
public int getFileDifferencesCount() {
|
||||
return this.addedPaths.size() + this.modifiedPaths.size() + this.removedPaths.size();
|
||||
}
|
||||
|
||||
}
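Illustrative only: recording a simple patch summary with the class above. The digests and signature are assumed to be produced by the diff stage, and the added file name is invented.

	// Hypothetical helper -- byte[] inputs assumed to be supplied by the diff computation
	static void writePatchSummary(Path patchedPath, byte[] prevSig, byte[] prevHash, byte[] curHash) throws IOException, DataException {
		ArbitraryDataMetadataPatch patchMeta = new ArbitraryDataMetadataPatch(patchedPath);
		patchMeta.setPreviousSignature(prevSig);
		patchMeta.setPreviousHash(prevHash);
		patchMeta.setCurrentHash(curHash);
		patchMeta.setAddedPaths(Arrays.asList(Paths.get("index.html"))); // invented example path
		patchMeta.write();                                               // writes .qortal/patch inside patchedPath
	}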
|
||||
@@ -0,0 +1,102 @@
|
||||
package org.qortal.arbitrary.metadata;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.repository.DataException;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
|
||||
/**
|
||||
* ArbitraryDataQortalMetadata
|
||||
*
|
||||
* This is a base class to handle reading and writing JSON to a .qortal folder
|
||||
* within the supplied filePath. This is used when storing data against an existing
|
||||
* arbitrary data file structure.
|
||||
*
|
||||
* It is not usable on its own; it must be subclassed, with three methods overridden:
|
||||
*
|
||||
* fileName() - the file name to use within the .qortal folder
|
||||
* readJson() - code to unserialize the JSON file
|
||||
* buildJson() - code to serialize the JSON file
|
||||
*
|
||||
*/
|
||||
public class ArbitraryDataQortalMetadata extends ArbitraryDataMetadata {
|
||||
|
||||
protected static final Logger LOGGER = LogManager.getLogger(ArbitraryDataQortalMetadata.class);
|
||||
|
||||
protected Path filePath;
|
||||
protected Path qortalDirectoryPath;
|
||||
|
||||
protected String jsonString;
|
||||
|
||||
public ArbitraryDataQortalMetadata(Path filePath) {
|
||||
super(filePath);
|
||||
|
||||
this.qortalDirectoryPath = Paths.get(filePath.toString(), ".qortal");
|
||||
}
|
||||
|
||||
protected String fileName() {
|
||||
// To be overridden
|
||||
return null;
|
||||
}
|
||||
|
||||
protected void readJson() throws DataException {
|
||||
// To be overridden
|
||||
}
|
||||
|
||||
protected void buildJson() {
|
||||
// To be overridden
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void read() throws IOException, DataException {
|
||||
this.loadJson();
|
||||
this.readJson();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write() throws IOException, DataException {
|
||||
this.buildJson();
|
||||
this.createParentDirectories();
|
||||
this.createQortalDirectory();
|
||||
|
||||
Path patchPath = Paths.get(this.qortalDirectoryPath.toString(), this.fileName());
|
||||
BufferedWriter writer = new BufferedWriter(new FileWriter(patchPath.toString()));
|
||||
writer.write(this.jsonString);
|
||||
writer.newLine();
|
||||
writer.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void loadJson() throws IOException {
|
||||
Path path = Paths.get(this.qortalDirectoryPath.toString(), this.fileName());
|
||||
File patchFile = new File(path.toString());
|
||||
if (!patchFile.exists()) {
|
||||
throw new IOException(String.format("Patch file doesn't exist: %s", path.toString()));
|
||||
}
|
||||
|
||||
this.jsonString = new String(Files.readAllBytes(path));
|
||||
}
|
||||
|
||||
|
||||
protected void createQortalDirectory() throws DataException {
|
||||
try {
|
||||
Files.createDirectories(this.qortalDirectoryPath);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create .qortal directory");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public String getJsonString() {
|
||||
return this.jsonString;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,225 @@
|
||||
package org.qortal.arbitrary.metadata;
|
||||
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONObject;
|
||||
import org.qortal.arbitrary.misc.Category;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.utils.Base58;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
public class ArbitraryDataTransactionMetadata extends ArbitraryDataMetadata {
|
||||
|
||||
private List<byte[]> chunks;
|
||||
private String title;
|
||||
private String description;
|
||||
private List<String> tags;
|
||||
private Category category;
|
||||
|
||||
private static int MAX_TITLE_LENGTH = 80;
|
||||
private static int MAX_DESCRIPTION_LENGTH = 500;
|
||||
private static int MAX_TAG_LENGTH = 20;
|
||||
private static int MAX_TAGS_COUNT = 5;
|
||||
|
||||
public ArbitraryDataTransactionMetadata(Path filePath) {
|
||||
super(filePath);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void readJson() throws DataException {
|
||||
if (this.jsonString == null) {
|
||||
throw new DataException("Transaction metadata JSON string is null");
|
||||
}
|
||||
|
||||
JSONObject metadata = new JSONObject(this.jsonString);
|
||||
|
||||
if (metadata.has("title")) {
|
||||
this.title = metadata.getString("title");
|
||||
}
|
||||
|
||||
if (metadata.has("description")) {
|
||||
this.description = metadata.getString("description");
|
||||
}
|
||||
|
||||
List<String> tagsList = new ArrayList<>();
|
||||
if (metadata.has("tags")) {
|
||||
JSONArray tags = metadata.getJSONArray("tags");
|
||||
if (tags != null) {
|
||||
for (int i=0; i<tags.length(); i++) {
|
||||
String tag = tags.getString(i);
|
||||
if (tag != null) {
|
||||
tagsList.add(tag);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.tags = tagsList;
|
||||
}
|
||||
|
||||
if (metadata.has("category")) {
|
||||
this.category = Category.uncategorizedValueOf(metadata.getString("category"));
|
||||
}
|
||||
|
||||
List<byte[]> chunksList = new ArrayList<>();
|
||||
if (metadata.has("chunks")) {
|
||||
JSONArray chunks = metadata.getJSONArray("chunks");
|
||||
if (chunks != null) {
|
||||
for (int i=0; i<chunks.length(); i++) {
|
||||
String chunk = chunks.getString(i);
|
||||
if (chunk != null) {
|
||||
chunksList.add(Base58.decode(chunk));
|
||||
}
|
||||
}
|
||||
}
|
||||
this.chunks = chunksList;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void buildJson() {
|
||||
JSONObject outer = new JSONObject();
|
||||
|
||||
if (this.title != null && !this.title.isEmpty()) {
|
||||
outer.put("title", this.title);
|
||||
}
|
||||
|
||||
if (this.description != null && !this.description.isEmpty()) {
|
||||
outer.put("description", this.description);
|
||||
}
|
||||
|
||||
JSONArray tags = new JSONArray();
|
||||
if (this.tags != null) {
|
||||
for (String tag : this.tags) {
|
||||
tags.put(tag);
|
||||
}
|
||||
outer.put("tags", tags);
|
||||
}
|
||||
|
||||
if (this.category != null) {
|
||||
outer.put("category", this.category.toString());
|
||||
}
|
||||
|
||||
JSONArray chunks = new JSONArray();
|
||||
if (this.chunks != null) {
|
||||
for (byte[] chunk : this.chunks) {
|
||||
chunks.put(Base58.encode(chunk));
|
||||
}
|
||||
}
|
||||
outer.put("chunks", chunks);
|
||||
|
||||
this.jsonString = outer.toString(2);
|
||||
LOGGER.trace("Transaction metadata: {}", this.jsonString);
|
||||
}
|
||||
|
||||
|
||||
public void setChunks(List<byte[]> chunks) {
|
||||
this.chunks = chunks;
|
||||
}
|
||||
|
||||
public List<byte[]> getChunks() {
|
||||
return this.chunks;
|
||||
}
|
||||
|
||||
public void setTitle(String title) {
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return this.title;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return this.description;
|
||||
}
|
||||
|
||||
public void setTags(List<String> tags) {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public List<String> getTags() {
|
||||
return this.tags;
|
||||
}
|
||||
|
||||
public void setCategory(Category category) {
|
||||
this.category = category;
|
||||
}
|
||||
|
||||
public Category getCategory() {
|
||||
return this.category;
|
||||
}
|
||||
|
||||
public boolean containsChunk(byte[] chunk) {
|
||||
for (byte[] c : this.chunks) {
|
||||
if (Arrays.equals(c, chunk)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// Static helper methods
|
||||
|
||||
public static String limitTitle(String title) {
|
||||
if (title == null) {
|
||||
return null;
|
||||
}
|
||||
if (title.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return title.substring(0, Math.min(title.length(), MAX_TITLE_LENGTH));
|
||||
}
|
||||
|
||||
public static String limitDescription(String description) {
|
||||
if (description == null) {
|
||||
return null;
|
||||
}
|
||||
if (description.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return description.substring(0, Math.min(description.length(), MAX_DESCRIPTION_LENGTH));
|
||||
}
|
||||
|
||||
public static List<String> limitTags(List<String> tags) {
|
||||
if (tags == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Ensure tags list is mutable
|
||||
List<String> mutableTags = new ArrayList<>(tags);
|
||||
|
||||
int tagCount = mutableTags.size();
|
||||
if (tagCount == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Remove tags over the limit
|
||||
// This is cleaner than truncating, which results in malformed tags
|
||||
// Also remove tags that are empty
|
||||
Iterator iterator = mutableTags.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
String tag = (String) iterator.next();
|
||||
if (tag == null || tag.length() > MAX_TAG_LENGTH || tag.isEmpty()) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
|
||||
// Limit the total number of tags
|
||||
if (tagCount > MAX_TAGS_COUNT) {
|
||||
mutableTags = mutableTags.subList(0, MAX_TAGS_COUNT);
|
||||
}
|
||||
|
||||
return mutableTags;
|
||||
}
|
||||
|
||||
}
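Illustrative only: the static limit helpers above normalise user-supplied metadata before it is written; the input strings are invented.

	// Hypothetical inputs, showing the trimming behaviour of the helpers above
	String title = ArbitraryDataTransactionMetadata.limitTitle("A user-supplied title that may be far longer than eighty characters ..."); // capped at 80 chars
	List<String> tags = ArbitraryDataTransactionMetadata.limitTags(
			Arrays.asList("qortal", "", "this-tag-is-well-over-twenty-characters-long"));   // blank and over-length tags dropped, at most 5 kept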
|
||||
src/main/java/org/qortal/arbitrary/misc/Category.java (new file, 81 lines)
@@ -0,0 +1,81 @@
package org.qortal.arbitrary.misc;

public enum Category {
	ART("Art and Design"),
	AUTOMOTIVE("Automotive"),
	BEAUTY("Beauty"),
	BOOKS("Books and Reference"),
	BUSINESS("Business"),
	COMMUNICATIONS("Communications"),
	CRYPTOCURRENCY("Cryptocurrency and Blockchain"),
	CULTURE("Culture"),
	DATING("Dating"),
	DESIGN("Design"),
	ENTERTAINMENT("Entertainment"),
	EVENTS("Events"),
	FAITH("Faith and Religion"),
	FASHION("Fashion"),
	FINANCE("Finance"),
	FOOD("Food and Drink"),
	GAMING("Gaming"),
	GEOGRAPHY("Geography"),
	HEALTH("Health"),
	HISTORY("History"),
	HOME("Home"),
	KNOWLEDGE("Knowledge Share"),
	LANGUAGE("Language"),
	LIFESTYLE("Lifestyle"),
	MANUFACTURING("Manufacturing"),
	MAPS("Maps and Navigation"),
	MUSIC("Music"),
	NEWS("News"),
	OTHER("Other"),
	PETS("Pets"),
	PHILOSOPHY("Philosophy"),
	PHOTOGRAPHY("Photography"),
	POLITICS("Politics"),
	PRODUCE("Products and Services"),
	PRODUCTIVITY("Productivity"),
	PSYCHOLOGY("Psychology"),
	QORTAL("Qortal"),
	SCIENCE("Science"),
	SELF_CARE("Self Care"),
	SELF_SUFFICIENCY("Self-Sufficiency and Homesteading"),
	SHOPPING("Shopping"),
	SOCIAL("Social"),
	SOFTWARE("Software"),
	SPIRITUALITY("Spirituality"),
	SPORTS("Sports"),
	STORYTELLING("Storytelling"),
	TECHNOLOGY("Technology"),
	TOOLS("Tools"),
	TRAVEL("Travel"),
	UNCATEGORIZED("Uncategorized"),
	VIDEO("Video"),
	WEATHER("Weather");

	private final String name;

	Category(String name) {
		this.name = name;
	}

	public String getName() {
		return this.name;
	}

	/**
	 * Same as valueOf() but with fallback to UNCATEGORIZED if there's no match
	 * @param name
	 * @return a Category (using UNCATEGORIZED if no match found)
	 */
	public static Category uncategorizedValueOf(String name) {
		try {
			return Category.valueOf(name);
		}
		catch (IllegalArgumentException e) {
			return Category.UNCATEGORIZED;
		}
	}

}
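Illustrative only: the lenient lookup above never throws for unrecognised input.

	Category music = Category.uncategorizedValueOf("MUSIC");            // MUSIC
	Category unknown = Category.uncategorizedValueOf("NOT_A_CATEGORY"); // falls back to UNCATEGORIZED
	String label = music.getName();                                     // "Music"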
src/main/java/org/qortal/arbitrary/misc/Service.java (new file, 131 lines)
@@ -0,0 +1,131 @@
package org.qortal.arbitrary.misc;

import org.json.JSONObject;
import org.qortal.arbitrary.ArbitraryDataRenderer;
import org.qortal.transaction.Transaction;
import org.qortal.utils.FilesystemUtils;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static java.util.Arrays.stream;
import static java.util.stream.Collectors.toMap;

public enum Service {
	AUTO_UPDATE(1, false, null, null),
	ARBITRARY_DATA(100, false, null, null),
	WEBSITE(200, true, null, null) {
		@Override
		public ValidationResult validate(Path path) {
			// Custom validation function to require an index HTML file in the root directory
			List<String> fileNames = ArbitraryDataRenderer.indexFiles();
			String[] files = path.toFile().list();
			if (files != null) {
				for (String file : files) {
					Path fileName = Paths.get(file).getFileName();
					if (fileName != null && fileNames.contains(fileName.toString())) {
						return ValidationResult.OK;
					}
				}
			}
			return ValidationResult.MISSING_INDEX_FILE;
		}
	},
	GIT_REPOSITORY(300, false, null, null),
	IMAGE(400, true, 10*1024*1024L, null),
	THUMBNAIL(410, true, 500*1024L, null),
	VIDEO(500, false, null, null),
	AUDIO(600, false, null, null),
	BLOG(700, false, null, null),
	BLOG_POST(777, false, null, null),
	BLOG_COMMENT(778, false, null, null),
	DOCUMENT(800, false, null, null),
	LIST(900, true, null, null),
	PLAYLIST(910, true, null, null),
	APP(1000, false, null, null),
	METADATA(1100, false, null, null),
	QORTAL_METADATA(1111, true, 10*1024L, Arrays.asList("title", "description", "tags"));

	public final int value;
	private final boolean requiresValidation;
	private final Long maxSize;
	private final List<String> requiredKeys;

	private static final Map<Integer, Service> map = stream(Service.values())
			.collect(toMap(service -> service.value, service -> service));

	Service(int value, boolean requiresValidation, Long maxSize, List<String> requiredKeys) {
		this.value = value;
		this.requiresValidation = requiresValidation;
		this.maxSize = maxSize;
		this.requiredKeys = requiredKeys;
	}

	public ValidationResult validate(Path path) throws IOException {
		if (!this.isValidationRequired()) {
			return ValidationResult.OK;
		}

		byte[] data = FilesystemUtils.getSingleFileContents(path);
		long size = FilesystemUtils.getDirectorySize(path);

		// Validate max size if needed
		if (this.maxSize != null) {
			if (size > this.maxSize) {
				return ValidationResult.EXCEEDS_SIZE_LIMIT;
			}
		}

		// Validate required keys if needed
		if (this.requiredKeys != null) {
			if (data == null) {
				return ValidationResult.MISSING_KEYS;
			}
			JSONObject json = Service.toJsonObject(data);
			for (String key : this.requiredKeys) {
				if (!json.has(key)) {
					return ValidationResult.MISSING_KEYS;
				}
			}
		}

		// Validation passed
		return ValidationResult.OK;
	}

	public boolean isValidationRequired() {
		return this.requiresValidation;
	}

	public static Service valueOf(int value) {
		return map.get(value);
	}

	public static JSONObject toJsonObject(byte[] data) {
		String dataString = new String(data);
		return new JSONObject(dataString);
	}

	public enum ValidationResult {
		OK(1),
		MISSING_KEYS(2),
		EXCEEDS_SIZE_LIMIT(3),
		MISSING_INDEX_FILE(4);

		public final int value;

		private static final Map<Integer, Transaction.ValidationResult> map = stream(Transaction.ValidationResult.values()).collect(toMap(result -> result.value, result -> result));

		ValidationResult(int value) {
			this.value = value;
		}

		public static Transaction.ValidationResult valueOf(int value) {
			return map.get(value);
		}
	}
}
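Illustrative only: a sketch of calling the validation above for a website folder; the path is a placeholder supplied by the caller.

	// Hypothetical helper -- path supplied by the caller
	static boolean looksLikeValidWebsite(Path sitePath) throws IOException {
		Service.ValidationResult result = Service.WEBSITE.validate(sitePath);
		return result == Service.ValidationResult.OK; // MISSING_INDEX_FILE when no index file sits in the root
	}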
src/main/java/org/qortal/arbitrary/patch/UnifiedDiffPatch.java (new file, 229 lines)
@@ -0,0 +1,229 @@
|
||||
package org.qortal.arbitrary.patch;
|
||||
|
||||
import com.github.difflib.DiffUtils;
|
||||
import com.github.difflib.UnifiedDiffUtils;
|
||||
import com.github.difflib.patch.Patch;
|
||||
import com.github.difflib.patch.PatchFailedException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.qortal.crypto.Crypto;
|
||||
import org.qortal.repository.DataException;
|
||||
import org.qortal.settings.Settings;
|
||||
import org.qortal.utils.FilesystemUtils;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class UnifiedDiffPatch {
|
||||
|
||||
private static final Logger LOGGER = LogManager.getLogger(UnifiedDiffPatch.class);
|
||||
|
||||
private final Path before;
|
||||
private final Path after;
|
||||
private final Path destination;
|
||||
|
||||
private String identifier;
|
||||
private Path validationPath;
|
||||
|
||||
public UnifiedDiffPatch(Path before, Path after, Path destination) {
|
||||
this.before = before;
|
||||
this.after = after;
|
||||
this.destination = destination;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a patch based on the differences in path "after"
|
||||
* compared with base path "before", outputting the patch
|
||||
* to the "destination" path.
|
||||
*
|
||||
* @throws IOException
|
||||
*/
|
||||
public void create() throws IOException {
|
||||
if (!Files.exists(before)) {
|
||||
throw new IOException(String.format("File not found (before): %s", before.toString()));
|
||||
}
|
||||
if (!Files.exists(after)) {
|
||||
throw new IOException(String.format("File not found (after): %s", after.toString()));
|
||||
}
|
||||
|
||||
// Ensure parent folders exist in the destination
|
||||
File file = new File(destination.toString());
|
||||
File parent = file.getParentFile();
|
||||
if (parent != null) {
|
||||
parent.mkdirs();
|
||||
}
|
||||
|
||||
// Delete an existing file if it exists
|
||||
File destFile = destination.toFile();
|
||||
if (destFile.exists() && destFile.isFile()) {
|
||||
Files.delete(destination);
|
||||
}
|
||||
|
||||
// Load the two files into memory
|
||||
List<String> original = FileUtils.readLines(before.toFile(), StandardCharsets.UTF_8);
|
||||
List<String> revised = FileUtils.readLines(after.toFile(), StandardCharsets.UTF_8);
|
||||
|
||||
// Check if the original file ends with a newline
|
||||
boolean endsWithNewline = FilesystemUtils.fileEndsWithNewline(before);
|
||||
|
||||
// Generate diff information
|
||||
Patch<String> diff = DiffUtils.diff(original, revised);
|
||||
|
||||
// Generate unified diff format
|
||||
String originalFileName = before.getFileName().toString();
|
||||
String revisedFileName = after.getFileName().toString();
|
||||
List<String> unifiedDiff = UnifiedDiffUtils.generateUnifiedDiff(originalFileName, revisedFileName, original, diff, 0);
|
||||
|
||||
// Write the diff to the destination directory
|
||||
FileWriter fileWriter = new FileWriter(destination.toString(), true);
|
||||
BufferedWriter writer = new BufferedWriter(fileWriter);
|
||||
for (int i=0; i<unifiedDiff.size(); i++) {
|
||||
String line = unifiedDiff.get(i);
|
||||
writer.append(line);
|
||||
// Add a newline if this isn't the last line, or the original ended with a newline
|
||||
if (i < unifiedDiff.size()-1 || endsWithNewline) {
|
||||
writer.newLine();
|
||||
}
|
||||
}
|
||||
writer.flush();
|
||||
writer.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the patch at the "destination" path to ensure
|
||||
* it works correctly and is smaller than the original file
|
||||
*
|
||||
* @return true if valid, false if invalid
|
||||
*/
|
||||
public boolean isValid() throws DataException {
|
||||
this.createRandomIdentifier();
|
||||
this.createTempValidationDirectory();
|
||||
|
||||
// Merge the patch with the original path
|
||||
Path tempPath = Paths.get(this.validationPath.toString(), this.identifier);
|
||||
|
||||
try {
|
||||
UnifiedDiffPatch unifiedDiffPatch = new UnifiedDiffPatch(before, destination, tempPath);
|
||||
unifiedDiffPatch.apply(null);
|
||||
|
||||
byte[] inputDigest = Crypto.digest(after.toFile());
|
||||
byte[] outputDigest = Crypto.digest(tempPath.toFile());
|
||||
if (Arrays.equals(inputDigest, outputDigest)) {
|
||||
// Patch is valid, but we might want to reject if it's larger than the original file
|
||||
long originalSize = Files.size(after);
|
||||
long patchSize = Files.size(destination);
|
||||
if (patchSize < originalSize) {
|
||||
// Patch file is smaller than the original file size, so treat it as valid
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
LOGGER.info("Checksum mismatch when verifying patch for file {}", destination.toString());
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
catch (IOException e) {
|
||||
LOGGER.info("Failed to compute merge for file {}: {}", destination.toString(), e.getMessage());
|
||||
}
|
||||
finally {
|
||||
try {
|
||||
Files.delete(tempPath);
|
||||
} catch (IOException e) {
|
||||
// Not important - will be cleaned up later
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a patch at path "after" on top of base path "before",
|
||||
* outputting the combined results to the "destination" path.
|
||||
* If before and after are directories, a relative path suffix
|
||||
* can be used to specify the file within these folder structures.
|
||||
*
|
||||
* @param pathSuffix - a file path to append to the base paths, or null if the base paths are already files
|
||||
* @throws IOException
|
||||
*/
|
||||
public void apply(Path pathSuffix) throws IOException, DataException {
|
||||
Path originalPath = this.before;
|
||||
Path patchPath = this.after;
|
||||
Path mergePath = this.destination;
|
||||
|
||||
// If a path has been supplied, we need to append it to the base paths
|
||||
if (pathSuffix != null) {
|
||||
originalPath = Paths.get(this.before.toString(), pathSuffix.toString());
|
||||
patchPath = Paths.get(this.after.toString(), pathSuffix.toString());
|
||||
mergePath = Paths.get(this.destination.toString(), pathSuffix.toString());
|
||||
}
|
||||
|
||||
if (!patchPath.toFile().exists()) {
|
||||
throw new DataException("Patch file doesn't exist, but its path was included in modifiedPaths");
|
||||
}
|
||||
|
||||
// Delete an existing file, as we are starting from a duplicate of pathBefore
|
||||
File destFile = mergePath.toFile();
|
||||
if (destFile.exists() && destFile.isFile()) {
|
||||
Files.delete(mergePath);
|
||||
}
|
||||
|
||||
List<String> originalContents = FileUtils.readLines(originalPath.toFile(), StandardCharsets.UTF_8);
|
||||
List<String> patchContents = FileUtils.readLines(patchPath.toFile(), StandardCharsets.UTF_8);
|
||||
|
||||
// Check if the patch file (and therefore the original file) ends with a newline
|
||||
boolean endsWithNewline = FilesystemUtils.fileEndsWithNewline(patchPath);
|
||||
|
||||
// At first, parse the unified diff file and get the patch
|
||||
Patch<String> patch = UnifiedDiffUtils.parseUnifiedDiff(patchContents);
|
||||
|
||||
// Then apply the computed patch to the given text
|
||||
try {
|
||||
List<String> patchedContents = DiffUtils.patch(originalContents, patch);
|
||||
|
||||
// Write the patched file to the merge directory
|
||||
FileWriter fileWriter = new FileWriter(mergePath.toString(), true);
|
||||
BufferedWriter writer = new BufferedWriter(fileWriter);
|
||||
for (int i=0; i<patchedContents.size(); i++) {
|
||||
String line = patchedContents.get(i);
|
||||
writer.append(line);
|
||||
// Add a newline if this isn't the last line, or the original ended with a newline
|
||||
if (i < patchedContents.size()-1 || endsWithNewline) {
|
||||
writer.newLine();
|
||||
}
|
||||
}
|
||||
writer.flush();
|
||||
writer.close();
|
||||
|
||||
} catch (PatchFailedException e) {
|
||||
throw new DataException(String.format("Failed to apply patch for path %s: %s", pathSuffix, e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
private void createRandomIdentifier() {
|
||||
this.identifier = UUID.randomUUID().toString();
|
||||
}
|
||||
|
||||
private void createTempValidationDirectory() throws DataException {
|
||||
// Use the user-specified temp dir, as it is deterministic, and is more likely to be located on reusable storage hardware
|
||||
String baseDir = Settings.getInstance().getTempDataPath();
|
||||
Path tempDir = Paths.get(baseDir, "diff", "validate");
|
||||
try {
|
||||
Files.createDirectories(tempDir);
|
||||
} catch (IOException e) {
|
||||
throw new DataException("Unable to create temp directory");
|
||||
}
|
||||
this.validationPath = tempDir;
|
||||
}
|
||||
|
||||
}
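Illustrative only: producing a unified diff for one file and re-applying it with the class above; all four paths are placeholders supplied by the caller.

	// Hypothetical helper -- paths assumed to point at single files
	static void diffAndReapply(Path before, Path after, Path patchFile, Path rebuilt) throws IOException, DataException {
		UnifiedDiffPatch diff = new UnifiedDiffPatch(before, after, patchFile);
		diff.create();                                    // writes unified-diff text to patchFile
		if (diff.isValid()) {                             // re-applies internally, compares digests, and requires the patch to be smaller
			new UnifiedDiffPatch(before, patchFile, rebuilt).apply(null);
		}
	}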
|
||||
@@ -1,5 +1,7 @@
|
||||
package org.qortal.at;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.ciyam.at.MachineState;
|
||||
@@ -56,12 +58,12 @@ public class AT {
|
||||
|
||||
this.atData = new ATData(atAddress, creatorPublicKey, creation, machineState.version, assetId, codeBytes, codeHash,
|
||||
machineState.isSleeping(), machineState.getSleepUntilHeight(), machineState.isFinished(), machineState.hadFatalError(),
|
||||
machineState.isFrozen(), machineState.getFrozenBalance());
|
||||
machineState.isFrozen(), machineState.getFrozenBalance(), null);
|
||||
|
||||
byte[] stateData = machineState.toBytes();
|
||||
byte[] stateHash = Crypto.digest(stateData);
|
||||
|
||||
this.atStateData = new ATStateData(atAddress, height, stateData, stateHash, 0L, true);
|
||||
this.atStateData = new ATStateData(atAddress, height, stateData, stateHash, 0L, true, null);
|
||||
}
|
||||
|
||||
// Getters / setters
|
||||
@@ -84,13 +86,28 @@ public class AT {
|
||||
this.repository.getATRepository().delete(this.atData.getATAddress());
|
||||
}
|
||||
|
||||
/**
|
||||
* Potentially execute AT.
|
||||
* <p>
|
||||
* Note that sleep-until-message support might set/reset
|
||||
* sleep-related flags/values.
|
||||
* <p>
|
||||
* {@link #getATStateData()} will return null if nothing happened.
|
||||
* <p>
|
||||
* @param blockHeight
|
||||
* @param blockTimestamp
|
||||
* @return AT-generated transactions, possibly empty
|
||||
* @throws DataException
|
||||
*/
|
||||
public List<AtTransaction> run(int blockHeight, long blockTimestamp) throws DataException {
|
||||
String atAddress = this.atData.getATAddress();
|
||||
|
||||
QortalATAPI api = new QortalATAPI(repository, this.atData, blockTimestamp);
|
||||
QortalAtLoggerFactory loggerFactory = QortalAtLoggerFactory.getInstance();
|
||||
|
||||
byte[] codeBytes = this.atData.getCodeBytes();
|
||||
if (!api.willExecute(blockHeight))
|
||||
// this.atStateData will be null
|
||||
return Collections.emptyList();
|
||||
|
||||
// Fetch latest ATStateData for this AT
|
||||
ATStateData latestAtStateData = this.repository.getATRepository().getLatestATState(atAddress);
|
||||
@@ -100,8 +117,10 @@ public class AT {
|
||||
throw new IllegalStateException("No previous AT state data found");
|
||||
|
||||
// [Re]create AT machine state using AT state data or from scratch as applicable
|
||||
byte[] codeBytes = this.atData.getCodeBytes();
|
||||
MachineState state = MachineState.fromBytes(api, loggerFactory, latestAtStateData.getStateData(), codeBytes);
|
||||
try {
|
||||
api.preExecute(state);
|
||||
state.execute();
|
||||
} catch (Exception e) {
|
||||
throw new DataException(String.format("Uncaught exception while running AT '%s'", atAddress), e);
|
||||
@@ -109,9 +128,18 @@ public class AT {
|
||||
|
||||
byte[] stateData = state.toBytes();
|
||||
byte[] stateHash = Crypto.digest(stateData);
|
||||
long atFees = api.calcFinalFees(state);
|
||||
|
||||
this.atStateData = new ATStateData(atAddress, blockHeight, stateData, stateHash, atFees, false);
|
||||
// Nothing happened?
|
||||
if (state.getSteps() == 0 && Arrays.equals(stateHash, latestAtStateData.getStateHash()))
|
||||
// We currently want to execute frozen ATs, to maintain backwards support.
|
||||
if (state.isFrozen() == false)
|
||||
// this.atStateData will be null
|
||||
return Collections.emptyList();
|
||||
|
||||
long atFees = api.calcFinalFees(state);
|
||||
Long sleepUntilMessageTimestamp = this.atData.getSleepUntilMessageTimestamp();
|
||||
|
||||
this.atStateData = new ATStateData(atAddress, blockHeight, stateData, stateHash, atFees, false, sleepUntilMessageTimestamp);
|
||||
|
||||
return api.getTransactions();
|
||||
}
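Illustrative only: how block processing might call the reworked run() above; obtaining the AT instance and handling exceptions are left out.

	// Hypothetical helper -- the AT instance is assumed to be loaded elsewhere
	static List<AtTransaction> executeAt(AT at, int blockHeight, long blockTimestamp) throws DataException {
		List<AtTransaction> generated = at.run(blockHeight, blockTimestamp);
		// getATStateData() remains null when willExecute() declined or nothing changed this block
		return generated;
	}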
|
||||
@@ -130,6 +158,10 @@ public class AT {
|
||||
this.atData.setHadFatalError(state.hadFatalError());
|
||||
this.atData.setIsFrozen(state.isFrozen());
|
||||
this.atData.setFrozenBalance(state.getFrozenBalance());
|
||||
|
||||
// Special sleep-until-message support
|
||||
this.atData.setSleepUntilMessageTimestamp(this.atStateData.getSleepUntilMessageTimestamp());
|
||||
|
||||
this.repository.getATRepository().save(this.atData);
|
||||
}
|
||||
|
||||
@@ -157,6 +189,10 @@ public class AT {
|
||||
this.atData.setHadFatalError(state.hadFatalError());
|
||||
this.atData.setIsFrozen(state.isFrozen());
|
||||
this.atData.setFrozenBalance(state.getFrozenBalance());
|
||||
|
||||
// Special sleep-until-message support
|
||||
this.atData.setSleepUntilMessageTimestamp(previousStateData.getSleepUntilMessageTimestamp());
|
||||
|
||||
this.repository.getATRepository().save(this.atData);
|
||||
}
|
||||
|
||||
|
||||
@@ -32,6 +32,7 @@ import org.qortal.group.Group;
import org.qortal.repository.ATRepository;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.ATRepository.NextTransactionInfo;
import org.qortal.transaction.AtTransaction;
import org.qortal.transaction.Transaction.TransactionType;
import org.qortal.utils.Base58;
@@ -74,8 +75,45 @@ public class QortalATAPI extends API {
    return this.transactions;
}

public long calcFinalFees(MachineState state) {
    return state.getSteps() * this.ciyamAtSettings.feePerStep;
public boolean willExecute(int blockHeight) throws DataException {
    // Sleep-until-message/height checking
    Long sleepUntilMessageTimestamp = this.atData.getSleepUntilMessageTimestamp();

    if (sleepUntilMessageTimestamp != null) {
        // Quicker to check height, if sleep-until-height also active
        Integer sleepUntilHeight = this.atData.getSleepUntilHeight();

        boolean wakeDueToHeight = sleepUntilHeight != null && sleepUntilHeight != 0 && blockHeight >= sleepUntilHeight;

        boolean wakeDueToMessage = false;
        if (!wakeDueToHeight) {
            // No avoiding asking repository
            Timestamp previousTxTimestamp = new Timestamp(sleepUntilMessageTimestamp);
            NextTransactionInfo nextTransactionInfo = this.repository.getATRepository().findNextTransaction(this.atData.getATAddress(),
                    previousTxTimestamp.blockHeight,
                    previousTxTimestamp.transactionSequence);

            wakeDueToMessage = nextTransactionInfo != null;
        }

        // Can we skip?
        if (!wakeDueToHeight && !wakeDueToMessage)
            return false;
    }

    return true;
}

public void preExecute(MachineState state) {
    // Sleep-until-message/height checking
    Long sleepUntilMessageTimestamp = this.atData.getSleepUntilMessageTimestamp();

    if (sleepUntilMessageTimestamp != null) {
        // We've passed checks, so clear sleep-related flags/values
        this.setIsSleeping(state, false);
        this.setSleepUntilHeight(state, 0);
        this.atData.setSleepUntilMessageTimestamp(null);
    }
}

// Inherited methods from CIYAM AT API
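
The new willExecute() above is the wake-up gate for a sleeping AT: it returns true if the optional sleep-until-height has been reached, or if the AT repository can find a transaction addressed to the AT after the stored sleep-until-message timestamp; otherwise the AT is skipped for this block. A condensed sketch of that decision, with the repository lookup reduced to a boolean parameter (names here are illustrative, not from the commit):

    // Illustrative sketch only: the wake-up decision made by QortalATAPI.willExecute().
    class WakeDecisionSketch {
        static boolean shouldWake(Integer sleepUntilHeight, int blockHeight, boolean messageArrivedSinceSleep) {
            // The height check is cheap, so the real code only queries the repository for a
            // newer MESSAGE transaction when the height alone doesn't wake the AT.
            boolean wakeDueToHeight = sleepUntilHeight != null && sleepUntilHeight != 0 && blockHeight >= sleepUntilHeight;
            return wakeDueToHeight || messageArrivedSinceSleep;
        }
    }
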
@@ -412,6 +450,10 @@ public class QortalATAPI extends API {

// Utility methods

public long calcFinalFees(MachineState state) {
    return state.getSteps() * this.ciyamAtSettings.feePerStep;
}

/** Returns partial transaction signature, used to verify we're operating on the same transaction and not naively using block height & sequence. */
public static byte[] partialSignature(byte[] fullSignature) {
    return Arrays.copyOfRange(fullSignature, 8, 32);
@@ -460,6 +502,15 @@ public class QortalATAPI extends API {
    }
}

/*package*/ void sleepUntilMessageOrHeight(MachineState state, long txTimestamp, Long sleepUntilHeight) {
    this.setIsSleeping(state, true);

    this.atData.setSleepUntilMessageTimestamp(txTimestamp);

    if (sleepUntilHeight != null)
        this.setSleepUntilHeight(state, sleepUntilHeight.intValue());
}

/** Returns AT's account */
/* package */ Account getATAccount() {
    return new Account(this.repository, this.atData.getATAddress());
@@ -500,7 +551,7 @@ public class QortalATAPI extends API {
 * <p>
 * Otherwise, assume B is a public key.
 */
private Account getAccountFromB(MachineState state) {
/*package*/ Account getAccountFromB(MachineState state) {
    byte[] bBytes = this.getB(state);

    if ((bBytes[0] == Crypto.ADDRESS_VERSION || bBytes[0] == Crypto.AT_ADDRESS_VERSION)

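calcFinalFees() above charges an AT in proportion to the number of machine steps it executed: fees = steps * feePerStep, with feePerStep taken from the CIYAM AT settings in the blockchain config. A tiny worked example with purely hypothetical numbers (the real feePerStep value is not part of this diff):

    // Hypothetical values, for illustration only.
    class FeeSketch {
        public static void main(String[] args) {
            long steps = 1_000;               // steps the AT executed this block
            long feePerStep = 10_000;         // assumed value, NOT the real Qortal setting
            long atFees = steps * feePerStep; // fees scale linearly with executed steps
            System.out.println(atFees);       // prints 10000000
        }
    }
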
@@ -10,9 +10,11 @@ import org.ciyam.at.ExecutionException;
import org.ciyam.at.FunctionData;
import org.ciyam.at.IllegalFunctionCodeException;
import org.ciyam.at.MachineState;
import org.qortal.account.Account;
import org.qortal.crosschain.Bitcoin;
import org.qortal.crypto.Crypto;
import org.qortal.data.transaction.TransactionData;
import org.qortal.repository.DataException;
import org.qortal.settings.Settings;

/**
@@ -84,6 +86,43 @@ public enum QortalFunctionCode {
        api.setB(state, bBytes);
    }
},
/**
 * Sleep AT until a new message arrives after 'tx-timestamp'.<br>
 * <tt>0x0503 tx-timestamp</tt>
 */
SLEEP_UNTIL_MESSAGE(0x0503, 1, false) {
    @Override
    protected void postCheckExecute(FunctionData functionData, MachineState state, short rawFunctionCode) throws ExecutionException {
        if (functionData.value1 <= 0)
            return;

        long txTimestamp = functionData.value1;

        QortalATAPI api = (QortalATAPI) state.getAPI();
        api.sleepUntilMessageOrHeight(state, txTimestamp, null);
    }
},
/**
 * Sleep AT until a new message arrives, after 'tx-timestamp', or height reached.<br>
 * <tt>0x0504 tx-timestamp height</tt>
 */
SLEEP_UNTIL_MESSAGE_OR_HEIGHT(0x0504, 2, false) {
    @Override
    protected void postCheckExecute(FunctionData functionData, MachineState state, short rawFunctionCode) throws ExecutionException {
        if (functionData.value1 <= 0)
            return;

        long txTimestamp = functionData.value1;

        if (functionData.value2 <= 0)
            return;

        long sleepUntilHeight = functionData.value2;

        QortalATAPI api = (QortalATAPI) state.getAPI();
        api.sleepUntilMessageOrHeight(state, txTimestamp, sleepUntilHeight);
    }
},
/**
 * Convert address in B to 20-byte value in LSB of B1, and all of B2 & B3.<br>
 * <tt>0x0510</tt>
@@ -123,6 +162,68 @@ public enum QortalFunctionCode {
    protected void postCheckExecute(FunctionData functionData, MachineState state, short rawFunctionCode) throws ExecutionException {
        convertAddressInB(Crypto.ADDRESS_VERSION, state);
    }
},
/**
 * Returns account level of account in B.<br>
 * <tt>0x0520</tt><br>
 * B should contain either Qortal address or public key,<br>
 * e.g. as a result of calling function {@link org.ciyam.at.FunctionCode#PUT_ADDRESS_FROM_TX_IN_A_INTO_B}</code>.
 * <p></p>
 * Returns account level, or -1 if account unknown.
 * <p></p>
 * @see QortalATAPI#getAccountFromB(MachineState)
 */
GET_ACCOUNT_LEVEL_FROM_ACCOUNT_IN_B(0x0520, 0, true) {
    @Override
    protected void postCheckExecute(FunctionData functionData, MachineState state, short rawFunctionCode) throws ExecutionException {
        QortalATAPI api = (QortalATAPI) state.getAPI();
        Account account = api.getAccountFromB(state);

        Integer accountLevel = null;

        if (account != null) {
            try {
                accountLevel = account.getLevel();
            } catch (DataException e) {
                throw new RuntimeException("AT API unable to fetch account level?", e);
            }
        }

        functionData.returnValue = accountLevel != null
                ? accountLevel.longValue()
                : -1;
    }
},
/**
 * Returns account's minted block count of account in B.<br>
 * <tt>0x0521</tt><br>
 * B should contain either Qortal address or public key,<br>
 * e.g. as a result of calling function {@link org.ciyam.at.FunctionCode#PUT_ADDRESS_FROM_TX_IN_A_INTO_B}</code>.
 * <p></p>
 * Returns account's minted block count, or -1 if account unknown.
 * <p></p>
 * @see QortalATAPI#getAccountFromB(MachineState)
 */
GET_BLOCKS_MINTED_FROM_ACCOUNT_IN_B(0x0521, 0, true) {
    @Override
    protected void postCheckExecute(FunctionData functionData, MachineState state, short rawFunctionCode) throws ExecutionException {
        QortalATAPI api = (QortalATAPI) state.getAPI();
        Account account = api.getAccountFromB(state);

        Integer blocksMinted = null;

        if (account != null) {
            try {
                blocksMinted = account.getBlocksMinted();
            } catch (DataException e) {
                throw new RuntimeException("AT API unable to fetch account's minted block count?", e);
            }
        }

        functionData.returnValue = blocksMinted != null
                ? blocksMinted.longValue()
                : -1;
    }
};

public final short value;

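Taken together with the QortalATAPI changes earlier in this diff, the SLEEP_UNTIL_MESSAGE (0x0503) and SLEEP_UNTIL_MESSAGE_OR_HEIGHT (0x0504) opcodes form a simple life cycle: the opcode records the sleep request via sleepUntilMessageOrHeight(), willExecute() then skips the AT on later blocks until a wake condition holds, and preExecute() clears the sleep state just before the AT resumes. A minimal driver sketch of that flow (the wrapper class and method are assumptions; only the willExecute/preExecute/execute calls come from this diff):

    // Illustrative sketch only; assumed to live alongside QortalATAPI.
    import org.ciyam.at.MachineState;
    import org.qortal.repository.DataException;

    class SleepWakeSketch {
        void runAtForBlock(QortalATAPI api, MachineState state, int blockHeight) throws DataException {
            // Still sleeping? willExecute() stays false until a message arrives after the stored
            // sleep-until-message timestamp, or the optional sleep-until-height is reached.
            if (!api.willExecute(blockHeight))
                return; // no new AT state and no AT transactions for this block

            try {
                api.preExecute(state); // clears isSleeping, sleepUntilHeight and sleepUntilMessageTimestamp
                state.execute();       // AT resumes normally
            } catch (Exception e) {
                throw new DataException("Uncaught exception while running AT", e);
            }
        }
    }
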
@@ -8,13 +8,7 @@ import java.math.BigInteger;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.logging.log4j.Level;
@@ -28,7 +22,7 @@ import org.qortal.asset.Asset;
import org.qortal.at.AT;
import org.qortal.block.BlockChain.BlockTimingByHeight;
import org.qortal.block.BlockChain.AccountLevelShareBin;
import org.qortal.controller.Controller;
import org.qortal.controller.OnlineAccountsManager;
import org.qortal.crypto.Crypto;
import org.qortal.data.account.AccountBalanceData;
import org.qortal.data.account.AccountData;
@@ -232,7 +226,7 @@ public class Block {

// Other useful constants

private static final BigInteger MAX_DISTANCE;
public static final BigInteger MAX_DISTANCE;
static {
    byte[] maxValue = new byte[Transformer.PUBLIC_KEY_LENGTH];
    Arrays.fill(maxValue, (byte) 0xFF);
@@ -320,7 +314,7 @@ public class Block {
byte[] reference = parentBlockData.getSignature();

// Fetch our list of online accounts
List<OnlineAccountData> onlineAccounts = Controller.getInstance().getOnlineAccounts();
List<OnlineAccountData> onlineAccounts = OnlineAccountsManager.getInstance().getOnlineAccounts();
if (onlineAccounts.isEmpty()) {
    LOGGER.error("No online accounts - not even our own?");
    return null;
@@ -333,6 +327,11 @@ public class Block {
    onlineAccountsTimestamp = onlineAccountData.getTimestamp();
}

// Load sorted list of reward share public keys into memory, so that the indexes can be obtained.
// This is up to 100x faster than querying each index separately. For 4150 reward share keys, it
// was taking around 5000ms to query individually, vs 50ms using this approach.
List<byte[]> allRewardSharePublicKeys = repository.getAccountRepository().getRewardSharePublicKeys();

// Map using index into sorted list of reward-shares as key
Map<Integer, OnlineAccountData> indexedOnlineAccounts = new HashMap<>();
for (OnlineAccountData onlineAccountData : onlineAccounts) {
@@ -340,7 +339,7 @@ public class Block {
    if (onlineAccountData.getTimestamp() != onlineAccountsTimestamp)
        continue;

    Integer accountIndex = repository.getAccountRepository().getRewardShareIndex(onlineAccountData.getPublicKey());
    Integer accountIndex = getRewardShareIndex(onlineAccountData.getPublicKey(), allRewardSharePublicKeys);
    if (accountIndex == null)
        // Online account (reward-share) with current timestamp but reward-share cancelled
        continue;

@@ -476,6 +475,16 @@ public class Block {
    return this.minter;
}


public void setRepository(Repository repository) throws DataException {
    this.repository = repository;

    for (Transaction transaction : this.getTransactions()) {
        transaction.setRepository(repository);
    }
}


// More information

/**
@@ -524,8 +533,10 @@ public class Block {
long nonAtTransactionCount = transactionsData.stream().filter(transactionData -> transactionData.getType() != TransactionType.AT).count();

// The number of non-AT transactions fetched from repository should correspond with Block's transactionCount
if (nonAtTransactionCount != this.blockData.getTransactionCount())
if (nonAtTransactionCount != this.blockData.getTransactionCount()) {
    LOGGER.error(() -> String.format("Block's transactions from repository (%d) do not match block's transaction count (%d)", nonAtTransactionCount, this.blockData.getTransactionCount()));
    throw new IllegalStateException("Block's transactions from repository do not match block's transaction count");
}

this.transactions = new ArrayList<>();

@@ -831,7 +842,7 @@ public class Block {
    if (NTP.getTime() >= BlockChain.getInstance().getCalcChainWeightTimestamp() && parentHeight >= maxHeight)
        break;
}
LOGGER.debug(String.format("Chain weight calculation was based on %d blocks", blockCount));
LOGGER.trace(String.format("Chain weight calculation was based on %d blocks", blockCount));

return cumulativeWeight;
}
@@ -976,10 +987,10 @@ public class Block {
byte[] onlineTimestampBytes = Longs.toByteArray(onlineTimestamp);

// If this block is much older than current online timestamp, then there's no point checking current online accounts
List<OnlineAccountData> currentOnlineAccounts = onlineTimestamp < NTP.getTime() - Controller.ONLINE_TIMESTAMP_MODULUS
List<OnlineAccountData> currentOnlineAccounts = onlineTimestamp < NTP.getTime() - OnlineAccountsManager.ONLINE_TIMESTAMP_MODULUS
        ? null
        : Controller.getInstance().getOnlineAccounts();
List<OnlineAccountData> latestBlocksOnlineAccounts = Controller.getInstance().getLatestBlocksOnlineAccounts();
        : OnlineAccountsManager.getInstance().getOnlineAccounts();
List<OnlineAccountData> latestBlocksOnlineAccounts = OnlineAccountsManager.getInstance().getLatestBlocksOnlineAccounts();

// Extract online accounts' timestamp signatures from block data
List<byte[]> onlineAccountsSignatures = BlockTransformer.decodeTimestampSignatures(this.blockData.getOnlineAccountsSignatures());
@@ -1092,9 +1103,14 @@ public class Block {
// Create repository savepoint here so we can rollback to it after testing transactions
repository.setSavepoint();

if (this.blockData.getHeight() == 212937)
if (this.blockData.getHeight() == 212937) {
    // Apply fix for block 212937 but fix will be rolled back before we exit method
    Block212937.processFix(this);
}
else if (InvalidNameRegistrationBlocks.isAffectedBlock(this.blockData.getHeight())) {
    // Apply fix for affected name registration blocks, but fix will be rolled back before we exit method
    InvalidNameRegistrationBlocks.processFix(this);
}

for (Transaction transaction : this.getTransactions()) {
    TransactionData transactionData = transaction.getTransactionData();
@@ -1133,7 +1149,7 @@ public class Block {
    // Check transaction can even be processed
    validationResult = transaction.isProcessable();
    if (validationResult != Transaction.ValidationResult.OK) {
        LOGGER.debug(String.format("Error during transaction validation, tx %s: %s", Base58.encode(transactionData.getSignature()), validationResult.name()));
        LOGGER.info(String.format("Error during transaction validation, tx %s: %s", Base58.encode(transactionData.getSignature()), validationResult.name()));
        return ValidationResult.TRANSACTION_INVALID;
    }

@@ -1247,12 +1263,13 @@ public class Block {
for (ATData atData : executableATs) {
    AT at = new AT(this.repository, atData);
    List<AtTransaction> atTransactions = at.run(this.blockData.getHeight(), this.blockData.getTimestamp());
    ATStateData atStateData = at.getATStateData();
    // Didn't execute? (e.g. sleeping)
    if (atStateData == null)
        continue;

    allAtTransactions.addAll(atTransactions);

    ATStateData atStateData = at.getATStateData();
    this.ourAtStates.add(atStateData);

    this.ourAtFees += atStateData.getFees();
}

@@ -1281,6 +1298,21 @@ public class Block {
    return mintingAccount.canMint();
}

/**
 * Pre-process block, and its transactions.
 * This allows for any database integrity checks prior to validation.
 * This is called before isValid() and process()
 *
 * @throws DataException
 */
public void preProcess() throws DataException {
    List<Transaction> blocksTransactions = this.getTransactions();

    for (Transaction transaction : blocksTransactions) {
        transaction.preProcess();
    }
}

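The new preProcess() above runs before validation: its javadoc states it is called before isValid() and process(), giving transactions a chance to perform database integrity checks first. A sketch of the assumed calling order from the block-processing side (the caller shown here is an assumption, not part of this diff):

    // Illustrative ordering only: preProcess -> isValid -> process.
    import org.qortal.block.Block;
    import org.qortal.repository.DataException;

    class BlockApplySketch {
        void applyBlock(Block block) throws DataException {
            block.preProcess();                              // database integrity checks, before validation
            Block.ValidationResult result = block.isValid();
            if (result == Block.ValidationResult.OK)
                block.process();                             // add the block and its transactions to the chain
        }
    }
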
/**
 * Process block, and its transactions, adding them to the blockchain.
 *
@@ -1336,7 +1368,7 @@ public class Block {
    postBlockTidy();

    // Give Controller our cached, valid online accounts data (if any) to help reduce CPU load for next block
    Controller.getInstance().pushLatestBlocksOnlineAccounts(this.cachedValidOnlineAccounts);
    OnlineAccountsManager.getInstance().pushLatestBlocksOnlineAccounts(this.cachedValidOnlineAccounts);

    // Log some debugging info relating to the block weight calculation
    this.logDebugInfo();
@@ -1555,7 +1587,7 @@ public class Block {
    postBlockTidy();

    // Remove any cached, valid online accounts data from Controller
    Controller.getInstance().popLatestBlocksOnlineAccounts();
    OnlineAccountsManager.getInstance().popLatestBlocksOnlineAccounts();
}

protected void orphanTransactionsFromBlock() throws DataException {
@@ -1996,8 +2028,32 @@ public class Block {
    this.repository.getAccountRepository().tidy();
}

// Utils

/**
 * Find index of rewardSharePublicKey in list of rewardSharePublicKeys
 *
 * @param rewardSharePublicKey - the key to query
 * @param rewardSharePublicKeys - a sorted list of keys
 * @return - the index of the key, or null if not found
 */
private static Integer getRewardShareIndex(byte[] rewardSharePublicKey, List<byte[]> rewardSharePublicKeys) {
    int index = 0;
    for (byte[] publicKey : rewardSharePublicKeys) {
        if (Arrays.equals(rewardSharePublicKey, publicKey)) {
            return index;
        }
        index++;
    }
    return null;
}

private void logDebugInfo() {
    try {
        // Avoid calculations if possible. We have to check against INFO here, since Level.isMoreSpecificThan() confusingly uses <= rather than just <
        if (LOGGER.getLevel().isMoreSpecificThan(Level.INFO))
            return;

        if (this.repository == null || this.getMinter() == null || this.getBlockData() == null)
            return;

@@ -2007,9 +2063,10 @@ public class Block {
        LOGGER.debug(String.format("Timestamp: %d", this.getBlockData().getTimestamp()));
        LOGGER.debug(String.format("Minter level: %d", minterLevel));
        LOGGER.debug(String.format("Online accounts: %d", this.getBlockData().getOnlineAccountsCount()));
        LOGGER.debug(String.format("AT count: %d", this.getBlockData().getATCount()));

        BlockSummaryData blockSummaryData = new BlockSummaryData(this.getBlockData());
        if (this.getParent() == null || this.getParent().getSignature() == null || blockSummaryData == null)
        if (this.getParent() == null || this.getParent().getSignature() == null || blockSummaryData == null || minterLevel == 0)
            return;

        blockSummaryData.setMinterLevel(minterLevel);

@@ -4,10 +4,7 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.locks.ReentrantLock;

import javax.xml.bind.JAXBContext;
@@ -27,11 +24,9 @@ import org.eclipse.persistence.jaxb.UnmarshallerProperties;
import org.qortal.controller.Controller;
import org.qortal.data.block.BlockData;
import org.qortal.network.Network;
import org.qortal.repository.BlockRepository;
import org.qortal.repository.DataException;
import org.qortal.repository.Repository;
import org.qortal.repository.RepositoryManager;
import org.qortal.repository.*;
import org.qortal.settings.Settings;
import org.qortal.utils.Base58;
import org.qortal.utils.StringLongMapXmlAdapter;

/**
@@ -73,9 +68,20 @@ public class BlockChain {
    atFindNextTransactionFix,
    newBlockSigHeight,
    shareBinFix,
    calcChainWeightTimestamp;
    calcChainWeightTimestamp,
    transactionV5Timestamp,
    transactionV6Timestamp;
}

// Custom transaction fees
/** Unit fees by transaction timestamp */
public static class UnitFeesByTimestamp {
    public long timestamp;
    @XmlJavaTypeAdapter(value = org.qortal.api.AmountTypeAdapter.class)
    public long fee;
}
private List<UnitFeesByTimestamp> nameRegistrationUnitFees;

/** Map of which blockchain features are enabled when (height/timestamp) */
@XmlJavaTypeAdapter(StringLongMapXmlAdapter.class)
private Map<String, Long> featureTriggers;
@@ -146,7 +152,8 @@ public class BlockChain {
}
private List<BlockTimingByHeight> blockTimingsByHeight;

private int minAccountLevelToMint = 1;
private int minAccountLevelToMint;
private int minAccountLevelForBlockSubmissions;
private int minAccountLevelToRewardShare;
private int maxRewardSharesPerMintingAccount;
private int founderEffectiveMintingLevel;
@@ -349,6 +356,10 @@ public class BlockChain {
    return this.minAccountLevelToMint;
}

public int getMinAccountLevelForBlockSubmissions() {
    return this.minAccountLevelForBlockSubmissions;
}

public int getMinAccountLevelToRewardShare() {
    return this.minAccountLevelToRewardShare;
}
@@ -391,6 +402,14 @@ public class BlockChain {
    return this.featureTriggers.get(FeatureTrigger.calcChainWeightTimestamp.name()).longValue();
}

public long getTransactionV5Timestamp() {
    return this.featureTriggers.get(FeatureTrigger.transactionV5Timestamp.name()).longValue();
}

public long getTransactionV6Timestamp() {
    return this.featureTriggers.get(FeatureTrigger.transactionV6Timestamp.name()).longValue();
}

// More complex getters for aspects that change by height or timestamp

public long getRewardAtHeight(int ourHeight) {
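
The new transactionV5Timestamp / transactionV6Timestamp feature triggers above follow the existing pattern: a named entry in the featureTriggers map of the blockchain config, exposed through a getter and compared against a transaction or block timestamp to decide when the feature activates. A hedged sketch of how such a getter is typically consumed (the caller shown here is an assumption, not quoted from this diff):

    // Illustrative consumer of a feature-trigger getter.
    import org.qortal.block.BlockChain;

    class FeatureTriggerSketch {
        static boolean isTransactionV5(long transactionTimestamp) {
            // The feature activates for transactions timestamped at or after the configured trigger.
            return transactionTimestamp >= BlockChain.getInstance().getTransactionV5Timestamp();
        }
    }
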
@@ -410,6 +429,15 @@ public class BlockChain {
    throw new IllegalStateException(String.format("No block timing info available for height %d", ourHeight));
}

public long getNameRegistrationUnitFeeAtTimestamp(long ourTimestamp) {
    for (int i = nameRegistrationUnitFees.size() - 1; i >= 0; --i)
        if (nameRegistrationUnitFees.get(i).timestamp <= ourTimestamp)
            return nameRegistrationUnitFees.get(i).fee;

    // Default to system-wide unit fee
    return this.getUnitFee();
}

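getNameRegistrationUnitFeeAtTimestamp() above walks the configured nameRegistrationUnitFees list from newest to oldest and returns the fee of the most recent entry whose timestamp is not after the given timestamp, falling back to the system-wide unit fee when no entry applies. A small worked illustration with purely hypothetical values:

    // Hypothetical fee schedule, for illustration only:
    //   { timestamp: 1_000, fee: 5 }, { timestamp: 2_000, fee: 10 }
    // Lookups then behave as follows:
    //   getNameRegistrationUnitFeeAtTimestamp(  500) -> getUnitFee()  (before any configured entry)
    //   getNameRegistrationUnitFeeAtTimestamp(1_500) -> 5
    //   getNameRegistrationUnitFeeAtTimestamp(2_500) -> 10
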
/** Validate blockchain config read from JSON */
private void validateConfig() {
    if (this.genesisInfo == null)
@@ -506,29 +534,105 @@ public class BlockChain {
 * @throws SQLException
 */
public static void validate() throws DataException {
    // Check first block is Genesis Block
    if (!isGenesisBlockValid())
        rebuildBlockchain();

    boolean isTopOnly = Settings.getInstance().isTopOnly();
    boolean archiveEnabled = Settings.getInstance().isArchiveEnabled();
    boolean canBootstrap = Settings.getInstance().getBootstrap();
    boolean needsArchiveRebuild = false;
    BlockData chainTip;

    try (final Repository repository = RepositoryManager.getRepository()) {
        chainTip = repository.getBlockRepository().getLastBlock();

        // Ensure archive is (at least partially) intact, and force a bootstrap if it isn't
        if (!isTopOnly && archiveEnabled && canBootstrap) {
            needsArchiveRebuild = (repository.getBlockArchiveRepository().fromHeight(2) == null);
            if (needsArchiveRebuild) {
                LOGGER.info("Couldn't retrieve block 2 from archive. Bootstrapping...");

                // If there are minting accounts, make sure to back them up
                // Don't backup if there are no minting accounts, as this can cause problems
                if (!repository.getAccountRepository().getMintingAccounts().isEmpty()) {
                    Controller.getInstance().exportRepositoryData();
                }
            }
        }
    }

    boolean hasBlocks = (chainTip != null && chainTip.getHeight() > 1);

    if (isTopOnly && hasBlocks) {
        // Top-only mode is enabled and we have blocks, so it's possible that the genesis block has been pruned
        // It's best not to validate it, and there's no real need to
    } else {
        // Check first block is Genesis Block
        if (!isGenesisBlockValid() || needsArchiveRebuild) {
            try {
                rebuildBlockchain();

            } catch (InterruptedException e) {
                throw new DataException(String.format("Interrupted when trying to rebuild blockchain: %s", e.getMessage()));
            }
        }
    }

    // We need to create a new connection, as the previous repository and its connections may have been
    // closed by rebuildBlockchain() if a bootstrap was applied
    try (final Repository repository = RepositoryManager.getRepository()) {
        repository.checkConsistency();

        int startHeight = Math.max(repository.getBlockRepository().getBlockchainHeight() - 1440, 1);
        // Set the number of blocks to validate based on the pruned state of the chain
        // If pruned, subtract an extra 10 to allow room for error
        int blocksToValidate = (isTopOnly || archiveEnabled) ? Settings.getInstance().getPruneBlockLimit() - 10 : 1440;

        int startHeight = Math.max(repository.getBlockRepository().getBlockchainHeight() - blocksToValidate, 1);
        BlockData detachedBlockData = repository.getBlockRepository().getDetachedBlockSignature(startHeight);

        if (detachedBlockData != null) {
            LOGGER.error(String.format("Block %d's reference does not match any block's signature", detachedBlockData.getHeight()));
            LOGGER.error(String.format("Block %d's reference does not match any block's signature",
                    detachedBlockData.getHeight()));
            LOGGER.error(String.format("Your chain may be invalid and you should consider bootstrapping" +
                    " or re-syncing from genesis."));
        }
    }
}

// Wait for blockchain lock (whereas orphan() only tries to get lock)
ReentrantLock blockchainLock = Controller.getInstance().getBlockchainLock();
blockchainLock.lock();
try {
    LOGGER.info(String.format("Orphaning back to block %d", detachedBlockData.getHeight() - 1));
    orphan(detachedBlockData.getHeight() - 1);
} finally {
    blockchainLock.unlock();
/**
 * More thorough blockchain validation method. Useful for validating bootstraps.
 * A DataException is thrown if anything is invalid.
 *
 * @throws DataException
 */
public static void validateAllBlocks() throws DataException {
    try (final Repository repository = RepositoryManager.getRepository()) {
        BlockData chainTip = repository.getBlockRepository().getLastBlock();
        final int chainTipHeight = chainTip.getHeight();
        final int oldestBlock = 1; // TODO: increase if in pruning mode
        byte[] lastReference = null;

        for (int height = chainTipHeight; height > oldestBlock; height--) {
            BlockData blockData = repository.getBlockRepository().fromHeight(height);
            if (blockData == null) {
                blockData = repository.getBlockArchiveRepository().fromHeight(height);
            }

            if (blockData == null) {
                String error = String.format("Missing block at height %d", height);
                LOGGER.error(error);
                throw new DataException(error);
            }

            if (height != chainTipHeight) {
                // Check reference
                if (!Arrays.equals(blockData.getSignature(), lastReference)) {
                    String error = String.format("Invalid reference for block at height %d: %s (should be %s)",
                            height, Base58.encode(blockData.getReference()), Base58.encode(lastReference));
                    LOGGER.error(error);
                    throw new DataException(error);
                }
            }

            lastReference = blockData.getReference();
        }
    }
}
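
validateAllBlocks() above walks from the chain tip down towards genesis and enforces the chain-linking invariant: each block's reference must equal the signature of the block immediately below it (lastReference holds the reference of the block processed on the previous, higher iteration). A compact restatement of the invariant being checked (class and helper names are hypothetical):

    // Illustrative restatement of the invariant checked above.
    import java.util.Arrays;
    import org.qortal.data.block.BlockData;

    class ChainLinkSketch {
        static boolean referencesParent(BlockData child, BlockData parent) {
            // The child block's reference must equal its parent's signature.
            return Arrays.equals(child.getReference(), parent.getSignature());
        }
    }
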
@@ -551,7 +655,15 @@ public class BlockChain {
    }
}

private static void rebuildBlockchain() throws DataException {
private static void rebuildBlockchain() throws DataException, InterruptedException {
    boolean shouldBootstrap = Settings.getInstance().getBootstrap();
    if (shouldBootstrap) {
        // Settings indicate that we should apply a bootstrap rather than rebuilding and syncing from genesis
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.startImport();
        return;
    }

    // (Re)build repository
    if (!RepositoryManager.wasPristineAtOpen())
        RepositoryManager.rebuild();

Some files were not shown because too many files have changed in this diff.