Mirror of https://github.com/haveno-dex/haveno.git, synced 2025-03-26 17:19:04 +00:00

Commit 678dfc7887: automatically cancel offers with duplicate key images
Parent commit: 3cde880b1c
2 changed files with 25 additions and 0 deletions
core/src/main/java/haveno/core
@@ -265,6 +265,7 @@ public class CoreOffersService {
                if (!seenKeyImages.add(keyImage)) {
                    for (Offer offer2 : offers) {
                        if (offer == offer2) continue;
                        if (offer2.getOfferPayload().getReserveTxKeyImages() == null) continue;
                        if (offer2.getOfferPayload().getReserveTxKeyImages().contains(keyImage)) {
                            log.warn("Key image {} belongs to multiple offers, seen in offer {} and {}", keyImage, offer.getId(), offer2.getId());
                            duplicateFundedOffers.add(offer2);
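The check above relies on the Set.add contract: add returns false when the element is already present, so a false return for a key image means an earlier offer already reserved it. A tiny, self-contained illustration of that idiom (the key-image strings below are made up, not Haveno data):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SeenKeyImagesSketch {
    public static void main(String[] args) {
        Set<String> seenKeyImages = new HashSet<>();
        // add() returns true for the first occurrence and false for any repeat
        for (String keyImage : List.of("kiA", "kiB", "kiA")) {
            if (!seenKeyImages.add(keyImage)) {
                System.out.println("duplicate key image: " + keyImage); // printed for the second "kiA"
            }
        }
    }
}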
@@ -97,6 +97,7 @@ import haveno.network.p2p.peers.PeerManager;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -888,6 +889,7 @@ public class OpenOfferManager implements PeerManager.Listener, DecryptedDirectMe
        List<String> errorMessages = new ArrayList<String>();
        synchronized (processOffersLock) {
            List<OpenOffer> openOffers = getOpenOffers();
            removeOffersWithDuplicateKeyImages(openOffers);
            for (OpenOffer pendingOffer : openOffers) {
                if (pendingOffer.getState() != OpenOffer.State.PENDING) continue;
                if (skipOffersWithTooManyAttempts && pendingOffer.getNumProcessingAttempts() > NUM_ATTEMPTS_THRESHOLD) continue; // skip offers with too many attempts
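The hunk above wires the new cleanup into the pending-offer loop: duplicate-funded offers are culled from the list before any pending offer is processed. As a rough sketch of that ordering only (the SketchOffer type, its fields, and the threshold value are stand-ins, not the OpenOfferManager API):

import java.util.List;
import java.util.Set;

class PendingOfferLoopSketch {

    // simplified stand-in for an open offer (assumed fields, for illustration only)
    static class SketchOffer {
        final String id;
        final String state;
        final int attempts;
        SketchOffer(String id, String state, int attempts) {
            this.id = id; this.state = state; this.attempts = attempts;
        }
    }

    static final int NUM_ATTEMPTS_THRESHOLD = 5; // assumed value, not taken from the source

    static void process(List<SketchOffer> openOffers, Set<String> duplicateFundedIds) {
        // 1) cull duplicate-funded offers first (openOffers must be a mutable list)
        openOffers.removeIf(o -> duplicateFundedIds.contains(o.id));

        // 2) then walk only the remaining PENDING offers under the attempt threshold
        for (SketchOffer offer : openOffers) {
            if (!"PENDING".equals(offer.state)) continue;
            if (offer.attempts > NUM_ATTEMPTS_THRESHOLD) continue; // skip offers with too many attempts
            System.out.println("processing " + offer.id);
        }
    }
}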
@@ -919,6 +921,28 @@ public class OpenOfferManager implements PeerManager.Listener, DecryptedDirectMe
        }, THREAD_ID);
    }

    private void removeOffersWithDuplicateKeyImages(List<OpenOffer> openOffers) {

        // collect offers with duplicate key images
        Set<String> keyImages = new HashSet<>();
        Set<OpenOffer> offersToRemove = new HashSet<>();
        for (OpenOffer openOffer : openOffers) {
            if (openOffer.getOffer().getOfferPayload().getReserveTxKeyImages() == null) continue;
            if (Collections.disjoint(keyImages, openOffer.getOffer().getOfferPayload().getReserveTxKeyImages())) {
                keyImages.addAll(openOffer.getOffer().getOfferPayload().getReserveTxKeyImages());
            } else {
                offersToRemove.add(openOffer);
            }
        }

        // remove offers with duplicate key images
        for (OpenOffer offerToRemove : offersToRemove) {
            log.warn("Removing open offer which has duplicate key images with other open offers: {}", offerToRemove.getId());
            doCancelOffer(offerToRemove);
            openOffers.remove(offerToRemove);
        }
    }

    private void processPendingOffer(List<OpenOffer> openOffers, OpenOffer openOffer, TransactionResultHandler resultHandler, ErrorMessageHandler errorMessageHandler) {

        // skip if already processing
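removeOffersWithDuplicateKeyImages flags an offer as soon as its key-image list overlaps any key image already collected from earlier offers, which is exactly what Collections.disjoint tests. A minimal, self-contained sketch of that check over plain string key images (offer IDs and key images below are invented for illustration):

import java.util.*;

public class DisjointCheckSketch {
    public static void main(String[] args) {
        // hypothetical open offers and their reserve tx key images (assumed data)
        Map<String, List<String>> offers = new LinkedHashMap<>();
        offers.put("offer-1", List.of("kiA", "kiB"));
        offers.put("offer-2", List.of("kiC"));
        offers.put("offer-3", List.of("kiB", "kiD")); // reuses kiB from offer-1

        Set<String> keyImages = new HashSet<>();
        Set<String> offersToRemove = new HashSet<>();
        for (Map.Entry<String, List<String>> offer : offers.entrySet()) {
            // disjoint() is true when the offer shares no key image with those seen so far
            if (Collections.disjoint(keyImages, offer.getValue())) {
                keyImages.addAll(offer.getValue());
            } else {
                offersToRemove.add(offer.getKey());
            }
        }
        System.out.println("offers to cancel: " + offersToRemove); // prints [offer-3]
    }
}

With this first-seen-wins rule, the earliest offer that reserved a key image is kept and any later offer reusing it is the one cancelled.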