Mirror of https://github.com/bitcoin/bitcoin.git
Compare commits: d67631f5c0 ... 570b245d1a (16 commits)
Commits in this range:
570b245d1a
65714c162c
a4eee6d50b
47e713ea5a
8fd09d622d
47f9565c15
b420376aef
af6cffa36d
33e6538b30
fa48be3ba4
aaaa45399c
cccc1f4e91
cd7872ca54
c7e2b9e264
fa58f40b89
fadf12a56c
38 changed files with 212 additions and 275 deletions
@@ -635,12 +635,6 @@ void SetupServerArgs(ArgsManager& argsman, bool can_listen_ipc)
     argsman.AddArg("-dustrelayfee=<amt>", strprintf("Fee rate (in %s/kvB) used to define dust, the value of an output such that it will cost more than its value in fees at this fee rate to spend it. (default: %s)", CURRENCY_UNIT, FormatMoney(DUST_RELAY_TX_FEE)), ArgsManager::ALLOW_ANY | ArgsManager::DEBUG_ONLY, OptionsCategory::NODE_RELAY);
     argsman.AddArg("-acceptstalefeeestimates", strprintf("Read fee estimates even if they are stale (%sdefault: %u) fee estimates are considered stale if they are %s hours old", "regtest only; ", DEFAULT_ACCEPT_STALE_FEE_ESTIMATES, Ticks<std::chrono::hours>(MAX_FILE_AGE)), ArgsManager::ALLOW_ANY | ArgsManager::DEBUG_ONLY, OptionsCategory::DEBUG_TEST);
     argsman.AddArg("-bytespersigop", strprintf("Equivalent bytes per sigop in transactions for relay and mining (default: %u)", DEFAULT_BYTES_PER_SIGOP), ArgsManager::ALLOW_ANY, OptionsCategory::NODE_RELAY);
-    argsman.AddArg("-datacarrier", strprintf("Relay and mine data carrier transactions (default: %u)", DEFAULT_ACCEPT_DATACARRIER), ArgsManager::ALLOW_ANY, OptionsCategory::NODE_RELAY);
-    argsman.AddArg("-datacarriersize",
-                   strprintf("Relay and mine transactions whose data-carrying raw scriptPubKey "
-                             "is of this size or less (default: %u)",
-                             MAX_OP_RETURN_RELAY),
-                   ArgsManager::ALLOW_ANY, OptionsCategory::NODE_RELAY);
     argsman.AddArg("-permitbaremultisig", strprintf("Relay transactions creating non-P2SH multisig outputs (default: %u)", DEFAULT_PERMIT_BAREMULTISIG), ArgsManager::ALLOW_ANY,
                    OptionsCategory::NODE_RELAY);
     argsman.AddArg("-minrelaytxfee=<amt>", strprintf("Fees (in %s/kvB) smaller than this are considered zero fee for relaying, mining and transaction creation (default: %s)",

@@ -43,14 +43,6 @@ struct MemPoolOptions {
     /** A fee rate smaller than this is considered zero fee (for relaying, mining and transaction creation) */
     CFeeRate min_relay_feerate{DEFAULT_MIN_RELAY_TX_FEE};
     CFeeRate dust_relay_feerate{DUST_RELAY_TX_FEE};
-    /**
-     * A data carrying output is an unspendable output containing data. The script
-     * type is designated as TxoutType::NULL_DATA.
-     *
-     * Maximum size of TxoutType::NULL_DATA scripts that this node considers standard.
-     * If nullopt, any size is nonstandard.
-     */
-    std::optional<unsigned> max_datacarrier_bytes{DEFAULT_ACCEPT_DATACARRIER ? std::optional{MAX_OP_RETURN_RELAY} : std::nullopt};
     bool permit_bare_multisig{DEFAULT_PERMIT_BAREMULTISIG};
     bool require_standard{true};
     bool persist_v1_dat{DEFAULT_PERSIST_V1_DAT};

@@ -81,12 +81,6 @@ util::Result<void> ApplyArgsManOptions(const ArgsManager& argsman, const CChainP
 
     mempool_opts.permit_bare_multisig = argsman.GetBoolArg("-permitbaremultisig", DEFAULT_PERMIT_BAREMULTISIG);
 
-    if (argsman.GetBoolArg("-datacarrier", DEFAULT_ACCEPT_DATACARRIER)) {
-        mempool_opts.max_datacarrier_bytes = argsman.GetIntArg("-datacarriersize", MAX_OP_RETURN_RELAY);
-    } else {
-        mempool_opts.max_datacarrier_bytes = std::nullopt;
-    }
-
     mempool_opts.require_standard = !argsman.GetBoolArg("-acceptnonstdtxn", DEFAULT_ACCEPT_NON_STD_TXN);
     if (!chainparams.IsTestChain() && !mempool_opts.require_standard) {
         return util::Error{Untranslated(strprintf("acceptnonstdtxn is not currently supported for %s chain", chainparams.GetChainTypeString()))};

@@ -76,7 +76,7 @@ std::vector<uint32_t> GetDust(const CTransaction& tx, CFeeRate dust_relay_rate)
     return dust_outputs;
 }
 
-bool IsStandard(const CScript& scriptPubKey, const std::optional<unsigned>& max_datacarrier_bytes, TxoutType& whichType)
+bool IsStandard(const CScript& scriptPubKey, TxoutType& whichType)
 {
     std::vector<std::vector<unsigned char> > vSolutions;
     whichType = Solver(scriptPubKey, vSolutions);

@@ -91,16 +91,12 @@ bool IsStandard(const CScript& scriptPubKey, const std::optional<unsigned>& max_
             return false;
         if (m < 1 || m > n)
             return false;
-    } else if (whichType == TxoutType::NULL_DATA) {
-        if (!max_datacarrier_bytes || scriptPubKey.size() > *max_datacarrier_bytes) {
-            return false;
-        }
     }
 
     return true;
 }
 
-bool IsStandardTx(const CTransaction& tx, const std::optional<unsigned>& max_datacarrier_bytes, bool permit_bare_multisig, const CFeeRate& dust_relay_fee, std::string& reason)
+bool IsStandardTx(const CTransaction& tx, bool permit_bare_multisig, const CFeeRate& dust_relay_fee, std::string& reason)
 {
     if (tx.version > TX_MAX_STANDARD_VERSION || tx.version < 1) {
         reason = "version";

@@ -137,17 +133,14 @@ bool IsStandardTx(const CTransaction& tx, const std::optional<unsigned>& max_dat
         }
     }
 
-    unsigned int nDataOut = 0;
     TxoutType whichType;
     for (const CTxOut& txout : tx.vout) {
-        if (!::IsStandard(txout.scriptPubKey, max_datacarrier_bytes, whichType)) {
+        if (!::IsStandard(txout.scriptPubKey, whichType)) {
             reason = "scriptpubkey";
             return false;
         }
 
-        if (whichType == TxoutType::NULL_DATA)
-            nDataOut++;
-        else if ((whichType == TxoutType::MULTISIG) && (!permit_bare_multisig)) {
+        if ((whichType == TxoutType::MULTISIG) && (!permit_bare_multisig)) {
             reason = "bare-multisig";
             return false;
         }

@@ -159,12 +152,6 @@ bool IsStandardTx(const CTransaction& tx, const std::optional<unsigned>& max_dat
         return false;
     }
 
-    // only one OP_RETURN txout is permitted
-    if (nDataOut > 1) {
-        reason = "multi-op-return";
-        return false;
-    }
-
     return true;
 }
 

@@ -70,13 +70,6 @@ static constexpr unsigned int DEFAULT_ANCESTOR_SIZE_LIMIT_KVB{101};
 static constexpr unsigned int DEFAULT_DESCENDANT_LIMIT{25};
 /** Default for -limitdescendantsize, maximum kilobytes of in-mempool descendants */
 static constexpr unsigned int DEFAULT_DESCENDANT_SIZE_LIMIT_KVB{101};
-/** Default for -datacarrier */
-static const bool DEFAULT_ACCEPT_DATACARRIER = true;
-/**
- * Default setting for -datacarriersize. 80 bytes of data, +1 for OP_RETURN,
- * +2 for the pushdata opcodes.
- */
-static const unsigned int MAX_OP_RETURN_RELAY = 83;
 /**
  * An extra transaction can be added to a package, as long as it only has one
  * ancestor and is no larger than this. Not really any reason to make this

@@ -136,7 +129,7 @@ CAmount GetDustThreshold(const CTxOut& txout, const CFeeRate& dustRelayFee);
 
 bool IsDust(const CTxOut& txout, const CFeeRate& dustRelayFee);
 
-bool IsStandard(const CScript& scriptPubKey, const std::optional<unsigned>& max_datacarrier_bytes, TxoutType& whichType);
+bool IsStandard(const CScript& scriptPubKey, TxoutType& whichType);
 
 /** Get the vout index numbers of all dust outputs */
 std::vector<uint32_t> GetDust(const CTransaction& tx, CFeeRate dust_relay_rate);

@@ -150,7 +143,7 @@ static constexpr decltype(CTransaction::version) TX_MAX_STANDARD_VERSION{3};
  * Check for standard transaction types
  * @return True if all outputs (scriptPubKeys) use only standard transaction forms
  */
-bool IsStandardTx(const CTransaction& tx, const std::optional<unsigned>& max_datacarrier_bytes, bool permit_bare_multisig, const CFeeRate& dust_relay_fee, std::string& reason);
+bool IsStandardTx(const CTransaction& tx, bool permit_bare_multisig, const CFeeRate& dust_relay_fee, std::string& reason);
 /**
  * Check for standard transaction types
  * @param[in] mapInputs Map of previous transactions that have outputs we're spending

@@ -151,12 +151,12 @@ FUZZ_TARGET(key, .init = initialize_key)
     assert(fillable_signing_provider_pub.HaveKey(pubkey.GetID()));
 
     TxoutType which_type_tx_pubkey;
-    const bool is_standard_tx_pubkey = IsStandard(tx_pubkey_script, std::nullopt, which_type_tx_pubkey);
+    const bool is_standard_tx_pubkey = IsStandard(tx_pubkey_script, which_type_tx_pubkey);
     assert(is_standard_tx_pubkey);
     assert(which_type_tx_pubkey == TxoutType::PUBKEY);
 
     TxoutType which_type_tx_multisig;
-    const bool is_standard_tx_multisig = IsStandard(tx_multisig_script, std::nullopt, which_type_tx_multisig);
+    const bool is_standard_tx_multisig = IsStandard(tx_multisig_script, which_type_tx_multisig);
     assert(is_standard_tx_multisig);
     assert(which_type_tx_multisig == TxoutType::MULTISIG);
 

@@ -53,7 +53,7 @@ FUZZ_TARGET(script, .init = initialize_script)
     }
 
     TxoutType which_type;
-    bool is_standard_ret = IsStandard(script, std::nullopt, which_type);
+    bool is_standard_ret = IsStandard(script, which_type);
     if (!is_standard_ret) {
         assert(which_type == TxoutType::NONSTANDARD ||
                which_type == TxoutType::NULL_DATA ||

@@ -61,8 +61,8 @@ FUZZ_TARGET(transaction, .init = initialize_transaction)
 
     const CFeeRate dust_relay_fee{DUST_RELAY_TX_FEE};
     std::string reason;
-    const bool is_standard_with_permit_bare_multisig = IsStandardTx(tx, std::nullopt, /* permit_bare_multisig= */ true, dust_relay_fee, reason);
-    const bool is_standard_without_permit_bare_multisig = IsStandardTx(tx, std::nullopt, /* permit_bare_multisig= */ false, dust_relay_fee, reason);
+    const bool is_standard_with_permit_bare_multisig = IsStandardTx(tx, /* permit_bare_multisig= */ true, dust_relay_fee, reason);
+    const bool is_standard_without_permit_bare_multisig = IsStandardTx(tx, /* permit_bare_multisig= */ false, dust_relay_fee, reason);
     if (is_standard_without_permit_bare_multisig) {
         assert(is_standard_with_permit_bare_multisig);
     }

@@ -144,7 +144,7 @@ BOOST_AUTO_TEST_CASE(multisig_IsStandard)
 
     const auto is_standard{[](const CScript& spk) {
         TxoutType type;
-        bool res{::IsStandard(spk, std::nullopt, type)};
+        bool res{::IsStandard(spk, type)};
         if (res) {
             BOOST_CHECK_EQUAL(type, TxoutType::MULTISIG);
         }

@@ -21,13 +21,13 @@
 // Helpers:
 static bool IsStandardTx(const CTransaction& tx, bool permit_bare_multisig, std::string& reason)
 {
-    return IsStandardTx(tx, std::nullopt, permit_bare_multisig, CFeeRate{DUST_RELAY_TX_FEE}, reason);
+    return IsStandardTx(tx, permit_bare_multisig, CFeeRate{DUST_RELAY_TX_FEE}, reason);
 }
 
 static bool IsStandardTx(const CTransaction& tx, std::string& reason)
 {
-    return IsStandardTx(tx, std::nullopt, /*permit_bare_multisig=*/true, CFeeRate{DUST_RELAY_TX_FEE}, reason) &&
-           IsStandardTx(tx, std::nullopt, /*permit_bare_multisig=*/false, CFeeRate{DUST_RELAY_TX_FEE}, reason);
+    return IsStandardTx(tx, /*permit_bare_multisig=*/true, CFeeRate{DUST_RELAY_TX_FEE}, reason) &&
+           IsStandardTx(tx, /*permit_bare_multisig=*/false, CFeeRate{DUST_RELAY_TX_FEE}, reason);
 }
 
 static std::vector<unsigned char> Serialize(const CScript& s)

@@ -799,12 +799,12 @@ BOOST_AUTO_TEST_CASE(test_IsStandard)
 
     constexpr auto CheckIsStandard = [](const auto& t) {
         std::string reason;
-        BOOST_CHECK(IsStandardTx(CTransaction{t}, MAX_OP_RETURN_RELAY, g_bare_multi, g_dust, reason));
+        BOOST_CHECK(IsStandardTx(CTransaction{t}, g_bare_multi, g_dust, reason));
         BOOST_CHECK(reason.empty());
     };
     constexpr auto CheckIsNotStandard = [](const auto& t, const std::string& reason_in) {
         std::string reason;
-        BOOST_CHECK(!IsStandardTx(CTransaction{t}, MAX_OP_RETURN_RELAY, g_bare_multi, g_dust, reason));
+        BOOST_CHECK(!IsStandardTx(CTransaction{t}, g_bare_multi, g_dust, reason));
         BOOST_CHECK_EQUAL(reason_in, reason);
     };
 

@@ -858,15 +858,13 @@ BOOST_AUTO_TEST_CASE(test_IsStandard)
     t.vout[0].scriptPubKey = CScript() << OP_1;
     CheckIsNotStandard(t, "scriptpubkey");
 
-    // MAX_OP_RETURN_RELAY-byte TxoutType::NULL_DATA (standard)
-    t.vout[0].scriptPubKey = CScript() << OP_RETURN << "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef3804678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38"_hex;
-    BOOST_CHECK_EQUAL(MAX_OP_RETURN_RELAY, t.vout[0].scriptPubKey.size());
-    CheckIsStandard(t);
-
-    // MAX_OP_RETURN_RELAY+1-byte TxoutType::NULL_DATA (non-standard)
+    // TxoutType::NULL_DATA
+    //
+    // Until v30 OP_RETURN was limited to 83 bytes (80 bytes of data, +1 for
+    // OP_RETURN, +2 for the pushdata opcodes). Here we test with 84 bytes.
     t.vout[0].scriptPubKey = CScript() << OP_RETURN << "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef3804678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef3800"_hex;
-    BOOST_CHECK_EQUAL(MAX_OP_RETURN_RELAY + 1, t.vout[0].scriptPubKey.size());
-    CheckIsNotStandard(t, "scriptpubkey");
+    BOOST_CHECK_EQUAL(84, t.vout[0].scriptPubKey.size());
+    CheckIsStandard(t);
 
     // Data payload can be encoded in any way...
     t.vout[0].scriptPubKey = CScript() << OP_RETURN << ""_hex;

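For reference, a small sketch in plain Python (not Bitcoin Core code) of the size arithmetic behind the 83-byte limit mentioned in the comment above: a data push of 76 to 255 bytes is encoded as OP_PUSHDATA1 plus a one-byte length, so an OP_RETURN script carrying N bytes of data occupies 1 + 2 + N bytes.

    # Sketch only: standard script opcode values, OP_PUSHDATA1 push encoding assumed.
    OP_RETURN = 0x6a
    OP_PUSHDATA1 = 0x4c

    def null_data_script(data: bytes) -> bytes:
        assert 76 <= len(data) <= 255  # only the OP_PUSHDATA1 case is covered here
        return bytes([OP_RETURN, OP_PUSHDATA1, len(data)]) + data

    assert len(null_data_script(b"\x00" * 80)) == 83  # the old MAX_OP_RETURN_RELAY
    assert len(null_data_script(b"\x00" * 81)) == 84  # the size checked by the updated test
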
@@ -888,21 +886,21 @@
     t.vout[0].scriptPubKey = CScript() << OP_RETURN;
     CheckIsStandard(t);
 
-    // Only one TxoutType::NULL_DATA permitted in all cases
+    // Multiple TxoutType::NULL_DATA outputs are permitted
     t.vout.resize(2);
     t.vout[0].scriptPubKey = CScript() << OP_RETURN << "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38"_hex;
     t.vout[0].nValue = 0;
     t.vout[1].scriptPubKey = CScript() << OP_RETURN << "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38"_hex;
     t.vout[1].nValue = 0;
-    CheckIsNotStandard(t, "multi-op-return");
+    CheckIsStandard(t);
 
     t.vout[0].scriptPubKey = CScript() << OP_RETURN << "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38"_hex;
     t.vout[1].scriptPubKey = CScript() << OP_RETURN;
-    CheckIsNotStandard(t, "multi-op-return");
+    CheckIsStandard(t);
 
     t.vout[0].scriptPubKey = CScript() << OP_RETURN;
     t.vout[1].scriptPubKey = CScript() << OP_RETURN;
-    CheckIsNotStandard(t, "multi-op-return");
+    CheckIsStandard(t);
 
     // Check large scriptSig (non-standard if size is >1650 bytes)
     t.vout.resize(1);

@@ -790,7 +790,7 @@ bool MemPoolAccept::PreChecks(ATMPArgs& args, Workspace& ws)
 
     // Rather not work on nonstandard transactions (unless -testnet/-regtest)
     std::string reason;
-    if (m_pool.m_opts.require_standard && !IsStandardTx(tx, m_pool.m_opts.max_datacarrier_bytes, m_pool.m_opts.permit_bare_multisig, m_pool.m_opts.dust_relay_feerate, reason)) {
+    if (m_pool.m_opts.require_standard && !IsStandardTx(tx, m_pool.m_opts.permit_bare_multisig, m_pool.m_opts.dust_relay_feerate, reason)) {
         return state.Invalid(TxValidationResult::TX_NOT_STANDARD, reason);
     }
 

@@ -23,7 +23,6 @@ class BlocksXORTest(BitcoinTestFramework):
         self.extra_args = [[
             '-blocksxor=1',
             '-fastprune=1', # use smaller block files
-            '-datacarriersize=100000', # needed to pad transaction with MiniWallet
         ]]
 
     def run_test(self):

@@ -51,7 +51,6 @@ class MaxUploadTest(BitcoinTestFramework):
         self.num_nodes = 1
         self.extra_args = [[
             f"-maxuploadtarget={UPLOAD_TARGET_MB}M",
-            "-datacarriersize=100000",
         ]]
         self.supports_cli = False
 

@@ -9,6 +9,7 @@ from decimal import Decimal
 import math
 
 from test_framework.test_framework import BitcoinTestFramework
+from test_framework.blocktools import MAX_STANDARD_TX_WEIGHT
 from test_framework.messages import (
     MAX_BIP125_RBF_SEQUENCE,
     COIN,

@@ -326,11 +327,50 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
             result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}],
             rawtxs=[tx.serialize().hex()],
         )
 
+        # OP_RETURN followed by non-push
         tx = tx_from_hex(raw_tx_reference)
-        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
+        tx.vout[0].scriptPubKey = CScript([OP_RETURN, OP_HASH160])
         tx.vout = [tx.vout[0]] * 2
         self.check_mempool_result(
-            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}],
+            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}],
+            rawtxs=[tx.serialize().hex()],
+        )
+
+        # Multiple OP_RETURN and more than 83 bytes are standard since v30
+        tx = tx_from_hex(raw_tx_reference)
+        tx.vout.append(CTxOut(0, CScript([OP_RETURN, b'\xff'])))
+        tx.vout.append(CTxOut(0, CScript([OP_RETURN, b'\xff' * 90])))
+
+        self.check_mempool_result(
+            result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': Decimal('0.05')}}],
+            rawtxs=[tx.serialize().hex()],
+            maxfeerate=0
+        )
+
+        self.log.info("A transaction with several OP_RETURN outputs.")
+        tx = tx_from_hex(raw_tx_reference)
+        op_return_count = 42
+        tx.vout[0].nValue = int(tx.vout[0].nValue / op_return_count)
+        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
+        tx.vout = [tx.vout[0]] * op_return_count
+        self.check_mempool_result(
+            result_expected=[{"txid": tx.rehash(), "allowed": True, "vsize": tx.get_vsize(), "fees": {"base": Decimal("0.05000026")}}],
+            rawtxs=[tx.serialize().hex()],
+        )
+
+        self.log.info("A transaction with an OP_RETUN output that bumps into the max standardness tx size.")
+        tx = tx_from_hex(raw_tx_reference)
+        tx.vout[0].scriptPubKey = CScript([OP_RETURN])
+        data_len = int(MAX_STANDARD_TX_WEIGHT / 4) - tx.get_vsize() - 5 - 4 # -5 for PUSHDATA4 and -4 for script size
+        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b"\xff" * (data_len)])
+        self.check_mempool_result(
+            result_expected=[{"txid": tx.rehash(), "allowed": True, "vsize": tx.get_vsize(), "fees": {"base": Decimal("0.1") - Decimal("0.05")}}],
+            rawtxs=[tx.serialize().hex()],
+        )
+        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b"\xff" * (data_len + 1)])
+        self.check_mempool_result(
+            result_expected=[{"txid": tx.rehash(), "allowed": False, "reject-reason": "tx-size"}],
             rawtxs=[tx.serialize().hex()],
         )
 

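As a reading aid for the data_len line in the hunk above, a rough sketch of the byte accounting with a hypothetical starting vsize: the payload equals the vsize budget implied by the standard weight limit, minus 5 bytes for the OP_PUSHDATA4 push (opcode plus 4-byte length) and minus 4 bytes because the scriptPubKey length prefix grows from a 1-byte to a 5-byte compactSize.

    # Sketch only; base_vsize is a hypothetical stand-in for tx.get_vsize().
    MAX_STANDARD_TX_WEIGHT = 400_000  # standardness weight cap assumed here
    WITNESS_SCALE_FACTOR = 4
    base_vsize = 100                  # hypothetical vsize of the tx before padding
    data_len = MAX_STANDARD_TX_WEIGHT // WITNESS_SCALE_FACTOR - base_vsize - 5 - 4
    assert data_len == 99_891
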
@@ -1,91 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2020-2022 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test datacarrier functionality"""
-from test_framework.messages import (
-    CTxOut,
-    MAX_OP_RETURN_RELAY,
-)
-from test_framework.script import (
-    CScript,
-    OP_RETURN,
-)
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.test_node import TestNode
-from test_framework.util import assert_raises_rpc_error
-from test_framework.wallet import MiniWallet
-
-from random import randbytes
-
-
-class DataCarrierTest(BitcoinTestFramework):
-    def set_test_params(self):
-        self.num_nodes = 4
-        self.extra_args = [
-            [],
-            ["-datacarrier=0"],
-            ["-datacarrier=1", f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"],
-            ["-datacarrier=1", "-datacarriersize=2"],
-        ]
-
-    def test_null_data_transaction(self, node: TestNode, data, success: bool) -> None:
-        tx = self.wallet.create_self_transfer(fee_rate=0)["tx"]
-        data = [] if data is None else [data]
-        tx.vout.append(CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN] + data)))
-        tx.vout[0].nValue -= tx.get_vsize() # simply pay 1sat/vbyte fee
-
-        tx_hex = tx.serialize().hex()
-
-        if success:
-            self.wallet.sendrawtransaction(from_node=node, tx_hex=tx_hex)
-            assert tx.rehash() in node.getrawmempool(True), f'{tx_hex} not in mempool'
-        else:
-            assert_raises_rpc_error(-26, "scriptpubkey", self.wallet.sendrawtransaction, from_node=node, tx_hex=tx_hex)
-
-    def run_test(self):
-        self.wallet = MiniWallet(self.nodes[0])
-
-        # By default, only 80 bytes are used for data (+1 for OP_RETURN, +2 for the pushdata opcodes).
-        default_size_data = randbytes(MAX_OP_RETURN_RELAY - 3)
-        too_long_data = randbytes(MAX_OP_RETURN_RELAY - 2)
-        small_data = randbytes(MAX_OP_RETURN_RELAY - 4)
-        one_byte = randbytes(1)
-        zero_bytes = randbytes(0)
-
-        self.log.info("Testing null data transaction with default -datacarrier and -datacarriersize values.")
-        self.test_null_data_transaction(node=self.nodes[0], data=default_size_data, success=True)
-
-        self.log.info("Testing a null data transaction larger than allowed by the default -datacarriersize value.")
-        self.test_null_data_transaction(node=self.nodes[0], data=too_long_data, success=False)
-
-        self.log.info("Testing a null data transaction with -datacarrier=false.")
-        self.test_null_data_transaction(node=self.nodes[1], data=default_size_data, success=False)
-
-        self.log.info("Testing a null data transaction with a size larger than accepted by -datacarriersize.")
-        self.test_null_data_transaction(node=self.nodes[2], data=default_size_data, success=False)
-
-        self.log.info("Testing a null data transaction with a size smaller than accepted by -datacarriersize.")
-        self.test_null_data_transaction(node=self.nodes[2], data=small_data, success=True)
-
-        self.log.info("Testing a null data transaction with no data.")
-        self.test_null_data_transaction(node=self.nodes[0], data=None, success=True)
-        self.test_null_data_transaction(node=self.nodes[1], data=None, success=False)
-        self.test_null_data_transaction(node=self.nodes[2], data=None, success=True)
-        self.test_null_data_transaction(node=self.nodes[3], data=None, success=True)
-
-        self.log.info("Testing a null data transaction with zero bytes of data.")
-        self.test_null_data_transaction(node=self.nodes[0], data=zero_bytes, success=True)
-        self.test_null_data_transaction(node=self.nodes[1], data=zero_bytes, success=False)
-        self.test_null_data_transaction(node=self.nodes[2], data=zero_bytes, success=True)
-        self.test_null_data_transaction(node=self.nodes[3], data=zero_bytes, success=True)
-
-        self.log.info("Testing a null data transaction with one byte of data.")
-        self.test_null_data_transaction(node=self.nodes[0], data=one_byte, success=True)
-        self.test_null_data_transaction(node=self.nodes[1], data=one_byte, success=False)
-        self.test_null_data_transaction(node=self.nodes[2], data=one_byte, success=True)
-        self.test_null_data_transaction(node=self.nodes[3], data=one_byte, success=False)
-
-
-if __name__ == '__main__':
-    DataCarrierTest(__file__).main()

@@ -29,7 +29,6 @@ class MempoolLimitTest(BitcoinTestFramework):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [[
-            "-datacarriersize=100000",
             "-maxmempool=5",
         ]]
         self.supports_cli = False

@@ -52,8 +52,7 @@ class MempoolPackageLimitsTest(BitcoinTestFramework):
         self.test_anc_count_limits_2()
         self.test_anc_count_limits_bushy()
 
-        # The node will accept (nonstandard) extra large OP_RETURN outputs
-        self.restart_node(0, extra_args=["-datacarriersize=100000"])
+        self.restart_node(0)
         self.test_anc_size_limits()
         self.test_desc_size_limits()
 

@@ -33,7 +33,6 @@ class PackageRBFTest(BitcoinTestFramework):
         self.setup_clean_chain = True
         # Required for fill_mempool()
         self.extra_args = [[
-            "-datacarriersize=100000",
             "-maxmempool=5",
         ]] * self.num_nodes
 

@@ -44,8 +44,6 @@ MAX_PUBKEYS_PER_MULTISIG = 20
 class BytesPerSigOpTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 1
-        # allow large datacarrier output to pad transactions
-        self.extra_args = [['-datacarriersize=100000']]
 
     def create_p2wsh_spending_tx(self, witness_script, output_script):
         """Create a 1-input-1-output P2WSH spending transaction with only the

@@ -139,7 +137,7 @@
         self.log.info("Test a overly-large sigops-vbyte hits package limits")
         # Make a 2-transaction package which fails vbyte checks even though
         # separately they would work.
-        self.restart_node(0, extra_args=["-bytespersigop=5000","-permitbaremultisig=1"] + self.extra_args[0])
+        self.restart_node(0, extra_args=["-bytespersigop=5000","-permitbaremultisig=1"])
 
         def create_bare_multisig_tx(utxo_to_spend=None):
             _, pubkey = generate_keypair()

@@ -185,7 +183,7 @@
         else:
             bytespersigop_parameter = f"-bytespersigop={bytes_per_sigop}"
         self.log.info(f"Test sigops limit setting {bytespersigop_parameter}...")
-        self.restart_node(0, extra_args=[bytespersigop_parameter] + self.extra_args[0])
+        self.restart_node(0, extra_args=[bytespersigop_parameter])
 
         for num_sigops in (69, 101, 142, 183, 222):
             self.test_sigops_limit(bytes_per_sigop, num_sigops)

@@ -49,7 +49,7 @@ class MempoolTRUC(BitcoinTestFramework):
         assert_equal(len(txids), len(mempool_contents))
         assert all([txid in txids for txid in mempool_contents])
 
-    @cleanup(extra_args=["-datacarriersize=20000"])
+    @cleanup()
     def test_truc_max_vsize(self):
         node = self.nodes[0]
         self.log.info("Test TRUC-specific maximum transaction vsize")

@@ -63,7 +63,7 @@
         tx_v2_heavy = self.wallet.send_self_transfer(from_node=node, target_vsize=TRUC_MAX_VSIZE + 1, version=2)
         self.check_mempool([tx_v2_heavy["txid"]])
 
-    @cleanup(extra_args=["-datacarriersize=1000"])
+    @cleanup()
     def test_truc_acceptance(self):
         node = self.nodes[0]
         self.log.info("Test a child of a TRUC transaction cannot be more than 1000vB")

@@ -160,7 +160,7 @@
         self.check_mempool([tx_v3_bip125_rbf_v2["txid"], tx_v3_parent["txid"], tx_v3_child["txid"]])
 
 
-    @cleanup(extra_args=["-datacarriersize=40000"])
+    @cleanup()
     def test_truc_reorg(self):
         node = self.nodes[0]
         self.log.info("Test that, during a reorg, TRUC rules are not enforced")

@@ -182,7 +182,7 @@
         node.reconsiderblock(block[0])
 
 
-    @cleanup(extra_args=["-limitdescendantsize=10", "-datacarriersize=40000"])
+    @cleanup(extra_args=["-limitdescendantsize=10"])
     def test_nondefault_package_limits(self):
         """
         Max standard tx size + TRUC rules imply the ancestor/descendant rules (at their default

@@ -215,7 +215,7 @@
         self.generate(node, 1)
 
         self.log.info("Test that a decreased limitancestorsize also applies to v3 parent")
-        self.restart_node(0, extra_args=["-limitancestorsize=10", "-datacarriersize=40000"])
+        self.restart_node(0, extra_args=["-limitancestorsize=10"])
         tx_v3_parent_large2 = self.wallet.send_self_transfer(
             from_node=node,
             target_vsize=parent_target_vsize,

@@ -235,7 +235,7 @@
         assert_raises_rpc_error(-26, "too-long-mempool-chain, exceeds ancestor size limit", node.sendrawtransaction, tx_v3_child_large2["hex"])
         self.check_mempool([tx_v3_parent_large2["txid"]])
 
-    @cleanup(extra_args=["-datacarriersize=1000"])
+    @cleanup()
     def test_truc_ancestors_package(self):
         self.log.info("Test that TRUC ancestor limits are checked within the package")
         node = self.nodes[0]

@@ -384,7 +384,7 @@
         assert_equal(result_package_cpfp["tx-results"][tx_sibling_3['wtxid']]['error'], expected_error_cpfp)
 
 
-    @cleanup(extra_args=["-datacarriersize=1000"])
+    @cleanup()
     def test_truc_package_inheritance(self):
         self.log.info("Test that TRUC inheritance is checked within package")
         node = self.nodes[0]

@@ -186,6 +186,9 @@ class MiningTest(BitcoinTestFramework):
         assert tx_below_min_feerate['txid'] not in block_template_txids
         assert tx_below_min_feerate['txid'] not in block_txids
 
+        # Restart node to clear mempool for the next test
+        self.restart_node(0)
+
     def test_timewarp(self):
         self.log.info("Test timewarp attack mitigation (BIP94)")
         node = self.nodes[0]

@@ -279,11 +282,9 @@
     def test_block_max_weight(self):
         self.log.info("Testing default and custom -blockmaxweight startup options.")
 
-        # Restart the node to allow large transactions
         LARGE_TXS_COUNT = 10
         LARGE_VSIZE = int(((MAX_BLOCK_WEIGHT - DEFAULT_BLOCK_RESERVED_WEIGHT) / WITNESS_SCALE_FACTOR) / LARGE_TXS_COUNT)
         HIGH_FEERATE = Decimal("0.0003")
-        self.restart_node(0, extra_args=[f"-datacarriersize={LARGE_VSIZE}"])
 
         # Ensure the mempool is empty
         assert_equal(len(self.nodes[0].getrawmempool()), 0)

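For orientation, the LARGE_VSIZE expression above works out as follows in a small sketch; MAX_BLOCK_WEIGHT and WITNESS_SCALE_FACTOR are the usual values, while the reserved-weight default is an assumption to be checked against the framework constants.

    # Sketch only; DEFAULT_BLOCK_RESERVED_WEIGHT is an assumed value.
    MAX_BLOCK_WEIGHT = 4_000_000
    DEFAULT_BLOCK_RESERVED_WEIGHT = 8_000  # assumption, taken from the framework defaults
    WITNESS_SCALE_FACTOR = 4
    LARGE_TXS_COUNT = 10
    LARGE_VSIZE = int(((MAX_BLOCK_WEIGHT - DEFAULT_BLOCK_RESERVED_WEIGHT) / WITNESS_SCALE_FACTOR) / LARGE_TXS_COUNT)
    assert LARGE_VSIZE == 99_800
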
@@ -311,7 +312,7 @@
         # Test block template creation with custom -blockmaxweight
         custom_block_weight = MAX_BLOCK_WEIGHT - 2000
         # Reducing the weight by 2000 units will prevent 1 large transaction from fitting into the block.
-        self.restart_node(0, extra_args=[f"-datacarriersize={LARGE_VSIZE}", f"-blockmaxweight={custom_block_weight}"])
+        self.restart_node(0, extra_args=[f"-blockmaxweight={custom_block_weight}"])
 
         self.log.info("Testing the block template with custom -blockmaxweight to include 9 large and 2 normal transactions.")
         self.verify_block_template(

@@ -321,7 +322,7 @@
 
         # Ensure the block weight does not exceed the maximum
         self.log.info(f"Testing that the block weight will never exceed {MAX_BLOCK_WEIGHT - DEFAULT_BLOCK_RESERVED_WEIGHT}.")
-        self.restart_node(0, extra_args=[f"-datacarriersize={LARGE_VSIZE}", f"-blockmaxweight={MAX_BLOCK_WEIGHT}"])
+        self.restart_node(0, extra_args=[f"-blockmaxweight={MAX_BLOCK_WEIGHT}"])
         self.log.info("Sending 2 additional normal transactions to fill the mempool to the maximum block weight.")
         self.send_transactions(utxos[LARGE_TXS_COUNT + 2:], NORMAL_FEERATE, NORMAL_VSIZE)
         self.log.info(f"Testing that the mempool's weight matches the maximum block weight: {MAX_BLOCK_WEIGHT}.")

@@ -335,7 +336,7 @@
 
         self.log.info("Test -blockreservedweight startup option.")
         # Lowering the -blockreservedweight by 4000 will allow for two more transactions.
-        self.restart_node(0, extra_args=[f"-datacarriersize={LARGE_VSIZE}", "-blockreservedweight=4000"])
+        self.restart_node(0, extra_args=["-blockreservedweight=4000"])
         self.verify_block_template(
             expected_tx_count=12,
             expected_weight=MAX_BLOCK_WEIGHT - 4000,

@@ -27,7 +27,6 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
         self.num_nodes = 1
         self.extra_args = [[
             "-printpriority=1",
-            "-datacarriersize=100000",
         ]] * self.num_nodes
         self.supports_cli = False
 

@@ -41,7 +41,6 @@ class PackageRelayTest(BitcoinTestFramework):
         # hugely speeds up the test, as it involves multiple hops of tx relay.
         self.noban_tx_relay = True
         self.extra_args = [[
-            "-datacarriersize=100000",
             "-maxmempool=5",
         ]] * self.num_nodes
         self.supports_cli = False

@@ -59,7 +59,6 @@ class PackageRelayTest(BitcoinTestFramework):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [[
-            "-datacarriersize=100000",
             "-maxmempool=5",
         ]]
         self.supports_cli = False

@@ -68,7 +68,7 @@ class ConnectionType(Enum):
 class TxDownloadTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
-        self.extra_args= [['-datacarriersize=100000', '-maxmempool=5', '-persistmempool=0']] * self.num_nodes
+        self.extra_args= [['-maxmempool=5', '-persistmempool=0']] * self.num_nodes
 
     def test_tx_requests(self):
         self.log.info("Test that we request transactions from all our peers, eventually")

@@ -438,7 +438,6 @@ class RPCPackagesTest(BitcoinTestFramework):
         # but child is too high fee
         # Lower mempool limit to make it easier to fill_mempool
         self.restart_node(0, extra_args=[
-            "-datacarriersize=100000",
             "-maxmempool=5",
             "-persistmempool=0",
         ])

@@ -467,7 +466,7 @@
         assert parent["txid"] not in node.getrawmempool()
         assert child["txid"] not in node.getrawmempool()
 
-        # Reset maxmempool, datacarriersize, reset dynamic mempool minimum feerate, and empty mempool.
+        # Reset maxmempool, reset dynamic mempool minimum feerate, and empty mempool.
         self.restart_node(0)
         self.wallet.rescan_utxos()
 

@@ -111,7 +111,7 @@ class PSBTTest(BitcoinTestFramework):
         # Mine a transaction that credits the offline address
         offline_addr = offline_node.getnewaddress(address_type="bech32m")
         online_addr = w2.getnewaddress(address_type="bech32m")
-        wonline.importaddress(offline_addr, "", False)
+        wonline.importaddress(offline_addr, label="", rescan=False)
         mining_wallet = mining_node.get_wallet_rpc(self.default_wallet_name)
         mining_wallet.sendtoaddress(address=offline_addr, amount=1.0)
         self.generate(mining_node, nblocks=1, sync_fun=lambda: self.sync_all([online_node, mining_node]))

@@ -312,9 +312,9 @@
         wmulti = self.nodes[2].get_wallet_rpc('wmulti')
 
         # Create all the addresses
-        p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
-        p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
-        p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
+        p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], label="", address_type="legacy")["address"]
+        p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], label="", address_type="bech32")["address"]
+        p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], label="", address_type="p2sh-segwit")["address"]
         p2wpkh = self.nodes[1].getnewaddress("", "bech32")
         p2pkh = self.nodes[1].getnewaddress("", "legacy")
         p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit")

@@ -41,8 +41,8 @@ def fill_mempool(test_framework, node, *, tx_sync_fun=None):
     """Fill mempool until eviction.
 
     Allows for simpler testing of scenarios with floating mempoolminfee > minrelay
-    Requires -datacarriersize=100000 and -maxmempool=5 and assumes -minrelaytxfee
-    is 1 sat/vbyte.
+    Requires -maxmempool=5 and assumes -minrelaytxfee is 1 sat/vbyte.
+
     To avoid unintentional tx dependencies, the mempool filling txs are created with a
     tagged ephemeral miniwallet instance.
     """

@@ -73,9 +73,6 @@ WITNESS_SCALE_FACTOR = 4
 DEFAULT_ANCESTOR_LIMIT = 25 # default max number of in-mempool ancestors
 DEFAULT_DESCENDANT_LIMIT = 25 # default max number of in-mempool descendants
 
-# Default setting for -datacarriersize. 80 bytes of data, +1 for OP_RETURN, +2 for the pushdata opcodes.
-MAX_OP_RETURN_RELAY = 83
-
 DEFAULT_MEMPOOL_EXPIRY_HOURS = 336 # hours
 
 MAGIC_BYTES = {

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright (c) 2017-2022 The Bitcoin Core developers
+# Copyright (c) 2017-present The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Class for bitcoind node under test"""

@@ -209,7 +209,7 @@ class TestNode():
     def __getattr__(self, name):
         """Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
         if self.use_cli:
-            return getattr(RPCOverloadWrapper(self.cli, True), name)
+            return getattr(RPCOverloadWrapper(self.cli), name)
         else:
             assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
             return getattr(RPCOverloadWrapper(self.rpc), name)

@@ -374,7 +374,7 @@ class TestNode():
 
     def get_wallet_rpc(self, wallet_name):
         if self.use_cli:
-            return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True)
+            return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)))
         else:
             assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
             wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))

@@ -925,17 +925,13 @@ class TestNodeCLI():
         return cli_stdout.rstrip("\n")

 class RPCOverloadWrapper():
-    def __init__(self, rpc, cli=False):
+    def __init__(self, rpc):
         self.rpc = rpc
-        self.is_cli = cli

     def __getattr__(self, name):
         return getattr(self.rpc, name)

-    def createwallet_passthrough(self, *args, **kwargs):
-        return self.__getattr__("createwallet")(*args, **kwargs)
-
-    def importprivkey(self, privkey, label=None, rescan=None):
+    def importprivkey(self, privkey, *, label=None, rescan=None):
         wallet_info = self.getwalletinfo()
         if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
             return self.__getattr__('importprivkey')(privkey, label, rescan)
@@ -943,13 +939,13 @@ class RPCOverloadWrapper():
         req = [{
             'desc': desc,
             'timestamp': 0 if rescan else 'now',
-            'label': label if label else ''
+            'label': label if label else '',
         }]
         import_res = self.importdescriptors(req)
         if not import_res[0]['success']:
             raise JSONRPCException(import_res[0]['error'])

-    def addmultisigaddress(self, nrequired, keys, label=None, address_type=None):
+    def addmultisigaddress(self, nrequired, keys, *, label=None, address_type=None):
         wallet_info = self.getwalletinfo()
         if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
             return self.__getattr__('addmultisigaddress')(nrequired, keys, label, address_type)
@@ -957,14 +953,14 @@ class RPCOverloadWrapper():
         req = [{
             'desc': cms['descriptor'],
             'timestamp': 0,
-            'label': label if label else ''
+            'label': label if label else '',
         }]
         import_res = self.importdescriptors(req)
         if not import_res[0]['success']:
             raise JSONRPCException(import_res[0]['error'])
         return cms

-    def importpubkey(self, pubkey, label=None, rescan=None):
+    def importpubkey(self, pubkey, *, label=None, rescan=None):
         wallet_info = self.getwalletinfo()
         if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
             return self.__getattr__('importpubkey')(pubkey, label, rescan)
@@ -972,13 +968,13 @@ class RPCOverloadWrapper():
         req = [{
             'desc': desc,
             'timestamp': 0 if rescan else 'now',
-            'label': label if label else ''
+            'label': label if label else '',
         }]
         import_res = self.importdescriptors(req)
         if not import_res[0]['success']:
             raise JSONRPCException(import_res[0]['error'])

-    def importaddress(self, address, label=None, rescan=None, p2sh=None):
+    def importaddress(self, address, *, label=None, rescan=None, p2sh=None):
         wallet_info = self.getwalletinfo()
         if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
             return self.__getattr__('importaddress')(address, label, rescan, p2sh)
@@ -992,13 +988,13 @@ class RPCOverloadWrapper():
         reqs = [{
             'desc': desc,
             'timestamp': 0 if rescan else 'now',
-            'label': label if label else ''
+            'label': label if label else '',
         }]
         if is_hex and p2sh:
             reqs.append({
                 'desc': descsum_create('p2sh(raw(' + address + '))'),
                 'timestamp': 0 if rescan else 'now',
-                'label': label if label else ''
+                'label': label if label else '',
             })
         import_res = self.importdescriptors(reqs)
         for res in import_res:
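The hunks above drop the CLI flag from RPCOverloadWrapper and make the legacy import overloads keyword-only, so call sites have to spell out label/rescan explicitly. Below is a minimal sketch of that wrapper pattern, using made-up stand-ins (FakeRPC, Wrapper, a simplified descriptor string) rather than the framework classes themselves:

# Minimal sketch of the overload pattern used by RPCOverloadWrapper above.
# FakeRPC is a made-up stand-in for the real RPC/CLI connection; it only
# illustrates how __getattr__ forwarding and keyword-only overloads interact.
class FakeRPC:
    def getwalletinfo(self):
        return {"descriptors": True}

    def importdescriptors(self, reqs):
        return [{"success": True} for _ in reqs]


class Wrapper:
    def __init__(self, rpc):
        self.rpc = rpc

    def __getattr__(self, name):
        # Only reached for names not defined on Wrapper itself.
        return getattr(self.rpc, name)

    def importprivkey(self, privkey, *, label=None, rescan=None):
        # Keyword-only parameters force explicit call sites, e.g.
        # wrapper.importprivkey(key, label="", rescan=True)
        req = [{
            'desc': f"wpkh({privkey})",  # simplified; the real overload builds a checksummed descriptor
            'timestamp': 0 if rescan else 'now',
            'label': label if label else '',
        }]
        return self.importdescriptors(req)


wrapper = Wrapper(FakeRPC())
print(wrapper.importprivkey("dummykey", label="", rescan=True))  # [{'success': True}]
print(wrapper.getwalletinfo())  # forwarded to FakeRPC via __getattr__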
@@ -338,7 +338,6 @@ BASE_SCRIPTS = [
     'feature_unsupported_utxo_db.py',
     'feature_logging.py',
     'feature_anchors.py',
-    'mempool_datacarrier.py',
     'feature_coinstatsindex.py',
     'wallet_orphanedreward.py',
     'wallet_timelock.py',
@@ -344,7 +344,7 @@ class AddressTypeTest(BitcoinTestFramework):
         self.test_address(3, self.nodes[3].getrawchangeaddress(), multisig=False, typ='bech32')

         self.log.info('test invalid address type arguments')
-        assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].addmultisigaddress, 2, [compressed_1, compressed_2], None, '')
+        assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].addmultisigaddress, 2, [compressed_1, compressed_2], address_type="")
         assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getnewaddress, None, '')
         assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getrawchangeaddress, '')
         assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].getrawchangeaddress, 'bech23')
@@ -26,11 +26,12 @@ from test_framework.util import (
     assert_raises_rpc_error,
 )

+LAST_KEYPOOL_INDEX = 9 # Index of the last derived address with the keypool size of 10

 class BackwardsCompatibilityTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
-        self.num_nodes = 10
+        self.num_nodes = 8
         # Add new version after each release:
         self.extra_args = [
             ["-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # Pre-release: use to mine blocks. noban for immediate tx relay
@@ -38,11 +39,9 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
             ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v25.0
             ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v24.0.1
             ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v23.0
-            ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v22.0
+            ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1", f"-keypool={LAST_KEYPOOL_INDEX + 1}"], # v22.0
             ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v0.21.0
             ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v0.20.1
-            ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=noban@127.0.0.1"], # v0.19.1
-            ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-whitelist=127.0.0.1"], # v0.18.1
         ]
         self.wallet_names = [self.default_wallet_name]

@@ -60,8 +59,6 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
             220000,
             210000,
             200100,
-            190100,
-            180100,
         ])

         self.start_nodes()
@@ -85,21 +82,98 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
         node_major, _, _ = self.split_version(node)
         return node_major >= major

+    def test_v22_inactivehdchain_path(self):
+        self.log.info("Testing inactive hd chain bad derivation path cleanup")
+        # 0.21.x and 22.x would both produce bad derivation paths when topping up an inactive hd chain
+        # Make sure that this is being automatically cleaned up by migration
+        node_master = self.nodes[1]
+        node_v22 = self.nodes[self.num_nodes - 5]
+        wallet_name = "bad_deriv_path"
+        node_v22.createwallet(wallet_name=wallet_name, descriptors=False)
+        bad_deriv_wallet = node_v22.get_wallet_rpc(wallet_name)
+
+        # Make a dump of the wallet to get an unused address
+        dump_path = node_v22.wallets_path / f"{wallet_name}.dump"
+        bad_deriv_wallet.dumpwallet(dump_path)
+        addr = None
+        seed = None
+        with open(dump_path, encoding="utf8") as f:
+            for line in f:
+                if f"hdkeypath=m/0'/0'/{LAST_KEYPOOL_INDEX}'" in line:
+                    addr = line.split(" ")[4].split("=")[1]
+                elif " hdseed=1 " in line:
+                    seed = line.split(" ")[0]
+        assert addr is not None
+        assert seed is not None
+        # Rotate seed and unload
+        bad_deriv_wallet.sethdseed()
+        bad_deriv_wallet.unloadwallet()
+        # Receive at addr to trigger inactive chain topup on next load
+        self.nodes[0].sendtoaddress(addr, 1)
+        self.generate(self.nodes[0], 1, sync_fun=self.no_op)
+        self.sync_all(nodes=[self.nodes[0], node_master, node_v22])
+        node_v22.loadwallet(wallet_name)
+
+        # Dump again to find bad hd keypath
+        bad_deriv_path = f"m/0'/0'/{LAST_KEYPOOL_INDEX}'/0'/0'/{LAST_KEYPOOL_INDEX + 1}'"
+        good_deriv_path = f"m/0h/0h/{LAST_KEYPOOL_INDEX + 1}h"
+        os.unlink(dump_path)
+        bad_deriv_wallet.dumpwallet(dump_path)
+        bad_path_addr = None
+        with open(dump_path, encoding="utf8") as f:
+            for line in f:
+                if f"hdkeypath={bad_deriv_path}" in line:
+                    bad_path_addr = line.split(" ")[4].split("=")[1]
+        assert bad_path_addr is not None
+        assert_equal(bad_deriv_wallet.getaddressinfo(bad_path_addr)["hdkeypath"], bad_deriv_path)
+
+        # Verify that this bad derivation path addr is actually at m/0'/0'/10' by making a new wallet with the same seed but larger keypool
+        node_v22.createwallet(wallet_name="path_verify", descriptors=False, blank=True)
+        verify_wallet = node_v22.get_wallet_rpc("path_verify")
+        verify_wallet.sethdseed(True, seed)
+        # Bad addr is after keypool, so need to generate it by refilling
+        verify_wallet.keypoolrefill(LAST_KEYPOOL_INDEX + 2)
+        assert_equal(verify_wallet.getaddressinfo(bad_path_addr)["hdkeypath"], good_deriv_path.replace("h", "'"))
+
+        # Migrate with master
+        # Since all keymeta records are now deleted after migration, the derivation path
+        # should now be correct as it is derived on-the-fly from the inactive hd chain's descriptor
+        backup_path = node_v22.wallets_path / f"{wallet_name}.bak"
+        bad_deriv_wallet.backupwallet(backup_path)
+        wallet_dir_master = os.path.join(node_master.wallets_path, wallet_name)
+        os.makedirs(wallet_dir_master, exist_ok=True)
+        shutil.copy(backup_path, os.path.join(wallet_dir_master, "wallet.dat"))
+        node_master.migratewallet(wallet_name)
+        bad_deriv_wallet_master = node_master.get_wallet_rpc(wallet_name)
+        assert_equal(bad_deriv_wallet_master.getaddressinfo(bad_path_addr)["hdkeypath"], good_deriv_path)
+        bad_deriv_wallet_master.unloadwallet()
+
+        # If we have sqlite3, verify that there are no keymeta records
+        try:
+            import sqlite3
+            wallet_db = node_master.wallets_path / wallet_name / "wallet.dat"
+            conn = sqlite3.connect(wallet_db)
+            with conn:
+                # Retrieve all records that have the "keymeta" prefix. The remaining key data varies for each record.
+                keymeta_rec = conn.execute("SELECT value FROM main where key >= x'076b65796d657461' AND key < x'076b65796d657462'").fetchone()
+                assert_equal(keymeta_rec, None)
+            conn.close()
+        except ImportError:
+            self.log.warning("sqlite3 module not available, skipping lack of keymeta records check")
+
     def run_test(self):
         node_miner = self.nodes[0]
         node_master = self.nodes[1]
-        node_v21 = self.nodes[self.num_nodes - 4]
-        node_v18 = self.nodes[self.num_nodes - 1]
+        node_v21 = self.nodes[self.num_nodes - 2]
+        node_v20 = self.nodes[self.num_nodes - 1] # bdb only

         legacy_nodes = self.nodes[2:] # Nodes that support legacy wallets
-        legacy_only_nodes = self.nodes[-3:] # Nodes that only support legacy wallets
-        descriptors_nodes = self.nodes[2:-3] # Nodes that support descriptor wallets
+        descriptors_nodes = self.nodes[2:-1] # Nodes that support descriptor wallets

         self.generatetoaddress(node_miner, COINBASE_MATURITY + 1, node_miner.getnewaddress())

         # Sanity check the test framework:
-        res = node_v18.getblockchaininfo()
-        assert_equal(res['blocks'], COINBASE_MATURITY + 1)
+        assert_equal(node_v20.getblockchaininfo()["blocks"], COINBASE_MATURITY + 1)

         self.log.info("Test wallet backwards compatibility...")
         # Create a number of wallets and open them in older versions:
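Two magic values in the new test read more easily with a worked example: the "bad" keypath the buggy topup produced is the hdseed origin path glued onto the real derivation path, and the hex bounds in the SQL query select every record whose key starts with the length-prefixed string "keymeta". The snippet below is illustrative only and independent of the test framework:

# Worked example of the constants used in test_v22_inactivehdchain_path (illustrative only).
LAST_KEYPOOL_INDEX = 9

bad_deriv_path = f"m/0'/0'/{LAST_KEYPOOL_INDEX}'/0'/0'/{LAST_KEYPOOL_INDEX + 1}'"
good_deriv_path = f"m/0h/0h/{LAST_KEYPOOL_INDEX + 1}h"
print(bad_deriv_path)   # m/0'/0'/9'/0'/0'/10'
print(good_deriv_path)  # m/0h/0h/10h

# The wallet DB key begins with the record type as a length-prefixed string:
# 0x07 is the length byte, followed by the ASCII bytes of "keymeta". The upper
# bound bumps the final byte ('a' -> 'b') to form a half-open prefix range.
lower = bytes.fromhex("076b65796d657461")
upper = bytes.fromhex("076b65796d657462")
assert lower[0] == 7 and lower[1:].decode() == "keymeta"
assert upper == lower[:-1] + b"b"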
@@ -206,13 +280,11 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
         )

         # Check that descriptor wallets don't work on legacy only nodes
-        self.log.info("Test descriptor wallet incompatibility on:")
-        for node in legacy_only_nodes:
-            self.log.info(f"- {node.version}")
-            # Descriptor wallets appear to be corrupted wallets to old software
-            assert self.major_version_less_than(node, 21)
-            for wallet_name in ["w1", "w2", "w3"]:
-                assert_raises_rpc_error(-4, "Wallet file verification failed: wallet.dat corrupt, salvage failed", node.loadwallet, wallet_name)
+        self.log.info("Test descriptor wallet incompatibility on v0.20")
+        # Descriptor wallets appear to be corrupted wallets to old software
+        assert self.major_version_equals(node_v20, 20)
+        for wallet_name in ["w1", "w2", "w3"]:
+            assert_raises_rpc_error(-4, "Wallet file verification failed: wallet.dat corrupt, salvage failed", node_v20.loadwallet, wallet_name)

         # w1 cannot be opened by 0.21 since it contains a taproot descriptor
         self.log.info("Test that 0.21 cannot open wallet containing tr() descriptors")
@@ -308,5 +380,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
         # Legacy wallets are no longer supported. Trying to load these should result in an error
         assert_raises_rpc_error(-18, "The wallet appears to be a Legacy wallet, please use the wallet migration tool (migratewallet RPC)", node_master.restorewallet, wallet_name, backup_path)

+        self.test_v22_inactivehdchain_path()
+
 if __name__ == '__main__':
     BackwardsCompatibilityTest(__file__).main()
@@ -192,7 +192,7 @@ class RawTransactionsTest(BitcoinTestFramework):
         watchonly_address = self.nodes[0].getnewaddress()
         watchonly_pubkey = self.nodes[0].getaddressinfo(watchonly_address)["pubkey"]
         self.watchonly_amount = Decimal(200)
-        wwatch.importpubkey(watchonly_pubkey, "", True)
+        wwatch.importpubkey(watchonly_pubkey, label="", rescan=True)
         self.watchonly_utxo = self.create_outpoints(self.nodes[0], outputs=[{watchonly_address: self.watchonly_amount}])[0]

         # Lock UTXO so nodes[0] doesn't accidentally spend it
@@ -49,7 +49,7 @@ class WalletLabelsTest(BitcoinTestFramework):
         assert_equal(response[0]['error']['message'], "Invalid label name")

         for rpc_call in rpc_calls:
-            assert_raises_rpc_error(-11, "Invalid label name", *rpc_call, "*")
+            assert_raises_rpc_error(-11, "Invalid label name", *rpc_call, label="*")

     def run_test(self):
         # Check that there's no UTXO on the node
@@ -24,34 +24,7 @@ SHA256_SUMS = {
     "d86fc90824a85c38b25c8488115178d5785dbc975f5ff674f9f5716bc8ad6e65": {"tag": "v0.14.3", "tarball": "bitcoin-0.14.3-arm-linux-gnueabihf.tar.gz"},
     "1b0a7408c050e3d09a8be8e21e183ef7ee570385dc41216698cc3ab392a484e7": {"tag": "v0.14.3", "tarball": "bitcoin-0.14.3-osx64.tar.gz"},
     "706e0472dbc933ed2757650d54cbcd780fd3829ebf8f609b32780c7eedebdbc9": {"tag": "v0.14.3", "tarball": "bitcoin-0.14.3-x86_64-linux-gnu.tar.gz"},
-    #
-    "d40f18b4e43c6e6370ef7db9131f584fbb137276ec2e3dba67a4b267f81cb644": {"tag": "v0.15.2", "tarball": "bitcoin-0.15.2-aarch64-linux-gnu.tar.gz"},
-    "54fb877a148a6ad189a1e1ab1ff8b11181e58ff2aaf430da55b3fd46ae549a6b": {"tag": "v0.15.2", "tarball": "bitcoin-0.15.2-arm-linux-gnueabihf.tar.gz"},
-    "87e9340ff3d382d543b2b69112376077f0c8b4f7450d372e83b68f5a1e22b2df": {"tag": "v0.15.2", "tarball": "bitcoin-0.15.2-osx64.tar.gz"},
-    "566be44190fd76daa01f13d428939dadfb8e3daacefc8fa17f433cad28f73bd5": {"tag": "v0.15.2", "tarball": "bitcoin-0.15.2-x86_64-linux-gnu.tar.gz"},
-    #
-    "0768c6c15caffbaca6524824c9563b42c24f70633c681c2744649158aa3fd484": {"tag": "v0.16.3", "tarball": "bitcoin-0.16.3-aarch64-linux-gnu.tar.gz"},
-    "fb2818069854a6ad20ea03b28b55dbd35d8b1f7d453e90b83eace5d0098a2a87": {"tag": "v0.16.3", "tarball": "bitcoin-0.16.3-arm-linux-gnueabihf.tar.gz"},
-    "78c3bff3b619a19aed575961ea43cc9e142959218835cf51aede7f0b764fc25d": {"tag": "v0.16.3", "tarball": "bitcoin-0.16.3-osx64.tar.gz"},
-    "5d422a9d544742bc0df12427383f9c2517433ce7b58cf672b9a9b17c2ef51e4f": {"tag": "v0.16.3", "tarball": "bitcoin-0.16.3-x86_64-linux-gnu.tar.gz"},
-    #
-    "5a6b35d1a348a402f2d2d6ab5aed653a1a1f13bc63aaaf51605e3501b0733b7a": {"tag": "v0.17.2", "tarball": "bitcoin-0.17.2-aarch64-linux-gnu.tar.gz"},
-    "d1913a5d19c8e8da4a67d1bd5205d03c8614dfd2e02bba2fe3087476643a729e": {"tag": "v0.17.2", "tarball": "bitcoin-0.17.2-arm-linux-gnueabihf.tar.gz"},
-    "a783ba20706dbfd5b47fbedf42165fce70fbbc7d78003305d964f6b3da14887f": {"tag": "v0.17.2", "tarball": "bitcoin-0.17.2-osx64.tar.gz"},
-    "943f9362b9f11130177839116f48f809d83478b4c28591d486ee9a7e35179da6": {"tag": "v0.17.2", "tarball": "bitcoin-0.17.2-x86_64-linux-gnu.tar.gz"},
-    #
-    "88f343af72803b851c7da13874cc5525026b0b55e63e1b5e1298390c4688adc6": {"tag": "v0.18.1", "tarball": "bitcoin-0.18.1-aarch64-linux-gnu.tar.gz"},
-    "cc7d483e4b20c5dabd4dcaf304965214cf4934bcc029ca99cbc9af00d3771a1f": {"tag": "v0.18.1", "tarball": "bitcoin-0.18.1-arm-linux-gnueabihf.tar.gz"},
-    "b7bbcee7a7540f711b171d6981f939ca8482005fde22689bc016596d80548bb1": {"tag": "v0.18.1", "tarball": "bitcoin-0.18.1-osx64.tar.gz"},
-    "425ee5ec631ae8da71ebc1c3f5c0269c627cf459379b9b030f047107a28e3ef8": {"tag": "v0.18.1", "tarball": "bitcoin-0.18.1-riscv64-linux-gnu.tar.gz"},
-    "600d1db5e751fa85903e935a01a74f5cc57e1e7473c15fd3e17ed21e202cfe5a": {"tag": "v0.18.1", "tarball": "bitcoin-0.18.1-x86_64-linux-gnu.tar.gz"},
-    #
-    "3a80431717842672df682bdb619e66523b59541483297772a7969413be3502ff": {"tag": "v0.19.1", "tarball": "bitcoin-0.19.1-aarch64-linux-gnu.tar.gz"},
-    "657f28213823d240dd3324d14829702f9ad6f0710f8bdd1c379cb3c447197f48": {"tag": "v0.19.1", "tarball": "bitcoin-0.19.1-arm-linux-gnueabihf.tar.gz"},
-    "1ae1b87de26487075cd2fd22e0d4ead87d969bd55c44f2f1d873ecdc6147ebb3": {"tag": "v0.19.1", "tarball": "bitcoin-0.19.1-osx64.tar.gz"},
-    "aa7a9563b48aa79252c8e7b6a41c07a5441bd9f14c5e4562cc72720ea6cb0ee5": {"tag": "v0.19.1", "tarball": "bitcoin-0.19.1-riscv64-linux-gnu.tar.gz"},
-    "5fcac9416e486d4960e1a946145566350ca670f9aaba99de6542080851122e4c": {"tag": "v0.19.1", "tarball": "bitcoin-0.19.1-x86_64-linux-gnu.tar.gz"},
-    #
     "60c93e3462c303eb080be7cf623f1a7684b37fd47a018ad3848bc23e13c84e1c": {"tag": "v0.20.1", "tarball": "bitcoin-0.20.1-aarch64-linux-gnu.tar.gz"},
     "55b577e0fb306fb429d4be6c9316607753e8543e5946b542d75d876a2f08654c": {"tag": "v0.20.1", "tarball": "bitcoin-0.20.1-arm-linux-gnueabihf.tar.gz"},
     "b9024dde373ea7dad707363e07ec7e265383204127539ae0c234bff3a61da0d1": {"tag": "v0.20.1", "tarball": "bitcoin-0.20.1-osx64.tar.gz"},
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # Copyright 2014 BitPay Inc.
-# Copyright 2016-2017 The Bitcoin Core developers
+# Copyright 2016-present The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test framework for bitcoin utils.
@@ -155,15 +155,16 @@ def bctest(testDir, testObj, buildenv):

     if "error_txt" in testObj:
         want_error = testObj["error_txt"]
-        # Compare error text
-        # TODO: ideally, we'd compare the strings exactly and also assert
-        # That stderr is empty if no errors are expected. However, bitcoin-tx
-        # emits DISPLAY errors when running as a windows application on
-        # linux through wine. Just assert that the expected error text appears
-        # somewhere in stderr.
+        # A partial match instead of an exact match makes writing tests easier
+        # and should be sufficient.
         if want_error not in res.stderr:
             logging.error(f"Error mismatch:\nExpected: {want_error}\nReceived: {res.stderr.rstrip()}\nres: {str(res)}")
             raise Exception
+    else:
+        if res.stderr:
+            logging.error(f"Unexpected error received: {res.stderr.rstrip()}\nres: {str(res)}")
+            raise Exception
+

 def parse_output(a, fmt):
     """Parse the output according to specified format.
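The last hunk replaces the old TODO with a simpler contract: an expected error only has to appear somewhere in stderr, and any stderr output at all fails the test when no error is expected. A condensed sketch of that logic, using a hypothetical check_stderr helper rather than the real bctest code:

# Hypothetical helper condensing the stderr handling introduced above.
def check_stderr(stderr, want_error=None):
    if want_error is not None:
        # Partial match: the expected text only needs to appear somewhere in stderr.
        if want_error not in stderr:
            raise AssertionError(f"Error mismatch:\nExpected: {want_error}\nReceived: {stderr.rstrip()}")
    elif stderr:
        # No error expected, so any stderr output is treated as a failure.
        raise AssertionError(f"Unexpected error received: {stderr.rstrip()}")


check_stderr("error code: -8\nerror message: Invalid parameter\n", want_error="Invalid parameter")  # passes
check_stderr("")  # passes: nothing expected, nothing received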