mirror of
https://github.com/bitcoin/bitcoin.git
synced 2025-04-29 06:49:38 -04:00
Merge bitcoin/bitcoin#31250: wallet: Disable creating and loading legacy wallets
Some checks are pending
CI / test each commit (push) Waiting to run
CI / macOS 14 native, arm64, no depends, sqlite only, gui (push) Waiting to run
CI / macOS 14 native, arm64, fuzz (push) Waiting to run
CI / Windows native, VS 2022 (push) Waiting to run
CI / Windows native, fuzz, VS 2022 (push) Waiting to run
CI / Linux->Windows cross, no tests (push) Waiting to run
CI / Windows, test cross-built (push) Blocked by required conditions
CI / ASan + LSan + UBSan + integer, no depends, USDT (push) Waiting to run
Some checks are pending
CI / test each commit (push) Waiting to run
CI / macOS 14 native, arm64, no depends, sqlite only, gui (push) Waiting to run
CI / macOS 14 native, arm64, fuzz (push) Waiting to run
CI / Windows native, VS 2022 (push) Waiting to run
CI / Windows native, fuzz, VS 2022 (push) Waiting to run
CI / Linux->Windows cross, no tests (push) Waiting to run
CI / Windows, test cross-built (push) Blocked by required conditions
CI / ASan + LSan + UBSan + integer, no depends, USDT (push) Waiting to run
17bb63f9f9
wallet: Disallow loading legacy wallets (Ava Chow)9f04e02ffa
wallet: Disallow creating legacy wallets (Ava Chow)6b247279b7
wallet: Disallow legacy wallet creation from the wallet tool (Ava Chow)5e93b1fd6c
bench: Remove WalletLoadingLegacy benchmark (Ava Chow)56f959d829
wallet: Remove wallettool salvage (Ava Chow)7a41c939f0
wallet: Remove -format and bdb from wallet tool's createfromdump (Ava Chow)c847dee148
test: remove legacy wallet functional tests (Ava Chow)20a9173717
test: Remove legacy wallet tests from wallet_reindex.py (Ava Chow)446d480cb2
test: Remove legacy wallet tests from wallet_backwards_compatibility.py (Ava Chow)aff80298d0
test: wallet_signer.py bdb will be removed (Ava Chow)f94f9399ac
test: Remove legacy wallet unit tests (Ava Chow)d9ac9dbd8e
tests, gui: Use descriptors watchonly wallet for watchonly test (Ava Chow) Pull request description: To prepare for the deletion of legacy wallet code, disable creating or loading new legacy wallets. Tests for the legacy wallet specifically are deleted. Split from https://github.com/bitcoin/bitcoin/pull/28710 ACKs for top commit: Sjors: re-ACK17bb63f9f9
pablomartin4btc: re-ACK17bb63f9f9
laanwj: re-ACK17bb63f9f9
Tree-SHA512: d7a86df1f71f12451b335f22f7c3f0394166ac3f8f5b81f6bbf0321026e2e8ed621576656c371d70e202df1be4410b2b1c1acb5d5f0c341e7b67aaa0ac792e7c
This commit is contained in:
commit
80e6ad9e30
115 changed files with 540 additions and 5610 deletions
|
@ -4,7 +4,6 @@
|
|||
|
||||
#include <addresstype.h>
|
||||
#include <bench/bench.h>
|
||||
#include <bitcoin-build-config.h> // IWYU pragma: keep
|
||||
#include <key.h>
|
||||
#include <key_io.h>
|
||||
#include <script/descriptor.h>
|
||||
|
@ -26,7 +25,7 @@
|
|||
#include <utility>
|
||||
|
||||
namespace wallet {
|
||||
static void WalletIsMine(benchmark::Bench& bench, bool legacy_wallet, int num_combo = 0)
|
||||
static void WalletIsMine(benchmark::Bench& bench, int num_combo = 0)
|
||||
{
|
||||
const auto test_setup = MakeNoLogFileContext<TestingSetup>();
|
||||
|
||||
|
@ -36,16 +35,13 @@ static void WalletIsMine(benchmark::Bench& bench, bool legacy_wallet, int num_co
|
|||
|
||||
// Setup the wallet
|
||||
// Loading the wallet will also create it
|
||||
uint64_t create_flags = 0;
|
||||
if (!legacy_wallet) {
|
||||
create_flags = WALLET_FLAG_DESCRIPTORS;
|
||||
}
|
||||
uint64_t create_flags = WALLET_FLAG_DESCRIPTORS;
|
||||
auto database = CreateMockableWalletDatabase();
|
||||
auto wallet = TestLoadWallet(std::move(database), context, create_flags);
|
||||
|
||||
// For a descriptor wallet, fill with num_combo combo descriptors with random keys
|
||||
// This benchmarks a non-HD wallet migrated to descriptors
|
||||
if (!legacy_wallet && num_combo > 0) {
|
||||
if (num_combo > 0) {
|
||||
LOCK(wallet->cs_wallet);
|
||||
for (int i = 0; i < num_combo; ++i) {
|
||||
CKey key;
|
||||
|
@ -70,13 +66,8 @@ static void WalletIsMine(benchmark::Bench& bench, bool legacy_wallet, int num_co
|
|||
TestUnloadWallet(std::move(wallet));
|
||||
}
|
||||
|
||||
#ifdef USE_BDB
|
||||
static void WalletIsMineLegacy(benchmark::Bench& bench) { WalletIsMine(bench, /*legacy_wallet=*/true); }
|
||||
BENCHMARK(WalletIsMineLegacy, benchmark::PriorityLevel::LOW);
|
||||
#endif
|
||||
|
||||
static void WalletIsMineDescriptors(benchmark::Bench& bench) { WalletIsMine(bench, /*legacy_wallet=*/false); }
|
||||
static void WalletIsMineMigratedDescriptors(benchmark::Bench& bench) { WalletIsMine(bench, /*legacy_wallet=*/false, /*num_combo=*/2000); }
|
||||
static void WalletIsMineDescriptors(benchmark::Bench& bench) { WalletIsMine(bench); }
|
||||
static void WalletIsMineMigratedDescriptors(benchmark::Bench& bench) { WalletIsMine(bench, /*num_combo=*/2000); }
|
||||
BENCHMARK(WalletIsMineDescriptors, benchmark::PriorityLevel::LOW);
|
||||
BENCHMARK(WalletIsMineMigratedDescriptors, benchmark::PriorityLevel::LOW);
|
||||
} // namespace wallet
|
||||
|
|
|
@ -4,7 +4,6 @@
|
|||
|
||||
#include <addresstype.h>
|
||||
#include <bench/bench.h>
|
||||
#include <bitcoin-build-config.h> // IWYU pragma: keep
|
||||
#include <consensus/amount.h>
|
||||
#include <outputtype.h>
|
||||
#include <primitives/transaction.h>
|
||||
|
@ -32,7 +31,7 @@ static void AddTx(CWallet& wallet)
|
|||
wallet.AddToWallet(MakeTransactionRef(mtx), TxStateInactive{});
|
||||
}
|
||||
|
||||
static void WalletLoading(benchmark::Bench& bench, bool legacy_wallet)
|
||||
static void WalletLoadingDescriptors(benchmark::Bench& bench)
|
||||
{
|
||||
const auto test_setup = MakeNoLogFileContext<TestingSetup>();
|
||||
|
||||
|
@ -42,10 +41,7 @@ static void WalletLoading(benchmark::Bench& bench, bool legacy_wallet)
|
|||
|
||||
// Setup the wallet
|
||||
// Loading the wallet will also create it
|
||||
uint64_t create_flags = 0;
|
||||
if (!legacy_wallet) {
|
||||
create_flags = WALLET_FLAG_DESCRIPTORS;
|
||||
}
|
||||
uint64_t create_flags = WALLET_FLAG_DESCRIPTORS;
|
||||
auto database = CreateMockableWalletDatabase();
|
||||
auto wallet = TestLoadWallet(std::move(database), context, create_flags);
|
||||
|
||||
|
@ -68,11 +64,5 @@ static void WalletLoading(benchmark::Bench& bench, bool legacy_wallet)
|
|||
});
|
||||
}
|
||||
|
||||
#ifdef USE_BDB
|
||||
static void WalletLoadingLegacy(benchmark::Bench& bench) { WalletLoading(bench, /*legacy_wallet=*/true); }
|
||||
BENCHMARK(WalletLoadingLegacy, benchmark::PriorityLevel::HIGH);
|
||||
#endif
|
||||
|
||||
static void WalletLoadingDescriptors(benchmark::Bench& bench) { WalletLoading(bench, /*legacy_wallet=*/false); }
|
||||
BENCHMARK(WalletLoadingDescriptors, benchmark::PriorityLevel::HIGH);
|
||||
} // namespace wallet
|
||||
|
|
|
@ -40,13 +40,11 @@ static void SetupWalletToolArgs(ArgsManager& argsman)
|
|||
argsman.AddArg("-debug=<category>", "Output debugging information (default: 0).", ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
|
||||
argsman.AddArg("-descriptors", "Create descriptors wallet. Only for 'create'", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
||||
argsman.AddArg("-legacy", "Create legacy wallet. Only for 'create'", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
||||
argsman.AddArg("-format=<format>", "The format of the wallet file to create. Either \"bdb\" or \"sqlite\". Only used with 'createfromdump'", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
||||
argsman.AddArg("-printtoconsole", "Send trace/debug info to console (default: 1 when no -debug is true, 0 otherwise).", ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
|
||||
argsman.AddArg("-withinternalbdb", "Use the internal Berkeley DB parser when dumping a Berkeley DB wallet file (default: false)", ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
|
||||
|
||||
argsman.AddCommand("info", "Get wallet info");
|
||||
argsman.AddCommand("create", "Create new wallet file");
|
||||
argsman.AddCommand("salvage", "Attempt to recover private keys from a corrupt wallet. Warning: 'salvage' is experimental.");
|
||||
argsman.AddCommand("dump", "Print out all of the wallet key-value records");
|
||||
argsman.AddCommand("createfromdump", "Create new wallet file from dumped records");
|
||||
}
|
||||
|
|
|
@ -189,36 +189,28 @@ void SyncUpWallet(const std::shared_ptr<CWallet>& wallet, interfaces::Node& node
|
|||
QVERIFY(result.last_failed_block.IsNull());
|
||||
}
|
||||
|
||||
std::shared_ptr<CWallet> SetupLegacyWatchOnlyWallet(interfaces::Node& node, TestChain100Setup& test)
|
||||
{
|
||||
std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(node.context()->chain.get(), "", CreateMockableWalletDatabase());
|
||||
wallet->LoadWallet();
|
||||
{
|
||||
LOCK(wallet->cs_wallet);
|
||||
wallet->SetWalletFlag(WALLET_FLAG_DISABLE_PRIVATE_KEYS);
|
||||
wallet->SetupLegacyScriptPubKeyMan();
|
||||
// Add watched key
|
||||
CPubKey pubKey = test.coinbaseKey.GetPubKey();
|
||||
bool import_keys = wallet->ImportPubKeys({{pubKey.GetID(), false}}, {{pubKey.GetID(), pubKey}} , /*key_origins=*/{}, /*add_keypool=*/false, /*timestamp=*/1);
|
||||
assert(import_keys);
|
||||
wallet->SetLastBlockProcessed(105, WITH_LOCK(node.context()->chainman->GetMutex(), return node.context()->chainman->ActiveChain().Tip()->GetBlockHash()));
|
||||
}
|
||||
SyncUpWallet(wallet, node);
|
||||
return wallet;
|
||||
}
|
||||
|
||||
std::shared_ptr<CWallet> SetupDescriptorsWallet(interfaces::Node& node, TestChain100Setup& test)
|
||||
std::shared_ptr<CWallet> SetupDescriptorsWallet(interfaces::Node& node, TestChain100Setup& test, bool watch_only = false)
|
||||
{
|
||||
std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(node.context()->chain.get(), "", CreateMockableWalletDatabase());
|
||||
wallet->LoadWallet();
|
||||
LOCK(wallet->cs_wallet);
|
||||
wallet->SetWalletFlag(WALLET_FLAG_DESCRIPTORS);
|
||||
if (watch_only) {
|
||||
wallet->SetWalletFlag(WALLET_FLAG_DISABLE_PRIVATE_KEYS);
|
||||
} else {
|
||||
wallet->SetupDescriptorScriptPubKeyMans();
|
||||
}
|
||||
|
||||
// Add the coinbase key
|
||||
FlatSigningProvider provider;
|
||||
std::string error;
|
||||
auto descs = Parse("combo(" + EncodeSecret(test.coinbaseKey) + ")", provider, error, /* require_checksum=*/ false);
|
||||
std::string key_str;
|
||||
if (watch_only) {
|
||||
key_str = HexStr(test.coinbaseKey.GetPubKey());
|
||||
} else {
|
||||
key_str = EncodeSecret(test.coinbaseKey);
|
||||
}
|
||||
auto descs = Parse("combo(" + key_str + ")", provider, error, /* require_checksum=*/ false);
|
||||
assert(!descs.empty());
|
||||
assert(descs.size() == 1);
|
||||
auto& desc = descs.at(0);
|
||||
|
@ -398,7 +390,7 @@ void TestGUI(interfaces::Node& node, const std::shared_ptr<CWallet>& wallet)
|
|||
|
||||
void TestGUIWatchOnly(interfaces::Node& node, TestChain100Setup& test)
|
||||
{
|
||||
const std::shared_ptr<CWallet>& wallet = SetupLegacyWatchOnlyWallet(node, test);
|
||||
const std::shared_ptr<CWallet>& wallet = SetupDescriptorsWallet(node, test, /*watch_only=*/true);
|
||||
|
||||
// Create widgets and init models
|
||||
std::unique_ptr<const PlatformStyle> platformStyle(PlatformStyle::instantiate("other"));
|
||||
|
@ -410,7 +402,7 @@ void TestGUIWatchOnly(interfaces::Node& node, TestChain100Setup& test)
|
|||
// Update walletModel cached balance which will trigger an update for the 'labelBalance' QLabel.
|
||||
walletModel.pollBalanceChanged();
|
||||
// Check balance in send dialog
|
||||
CompareBalance(walletModel, walletModel.wallet().getBalances().watch_only_balance,
|
||||
CompareBalance(walletModel, walletModel.wallet().getBalances().balance,
|
||||
sendCoinsDialog.findChild<QLabel*>("labelBalance"));
|
||||
|
||||
// Set change address
|
||||
|
|
|
@ -46,6 +46,6 @@ target_link_libraries(bitcoin_wallet
|
|||
)
|
||||
|
||||
if(USE_BDB)
|
||||
target_sources(bitcoin_wallet PRIVATE bdb.cpp salvage.cpp)
|
||||
target_sources(bitcoin_wallet PRIVATE bdb.cpp)
|
||||
target_link_libraries(bitcoin_wallet PUBLIC BerkeleyDB::BerkeleyDB)
|
||||
endif()
|
||||
|
|
|
@ -182,7 +182,6 @@ public:
|
|||
};
|
||||
|
||||
enum class DatabaseFormat {
|
||||
BERKELEY,
|
||||
SQLITE,
|
||||
BERKELEY_RO,
|
||||
BERKELEY_SWAP,
|
||||
|
|
|
@ -175,26 +175,14 @@ bool CreateFromDump(const ArgsManager& args, const std::string& name, const fs::
|
|||
dump_file.close();
|
||||
return false;
|
||||
}
|
||||
// Get the data file format with format_value as the default
|
||||
std::string file_format = args.GetArg("-format", format_value);
|
||||
if (file_format.empty()) {
|
||||
error = _("No wallet file format provided. To use createfromdump, -format=<format> must be provided.");
|
||||
// Make sure that the dump was created from a sqlite database only as that is the only
|
||||
// type of database that we still support.
|
||||
// Other formats such as BDB should not be loaded into a sqlite database since they also
|
||||
// use a different type of wallet entirely which is no longer compatible with this software.
|
||||
if (format_value != "sqlite") {
|
||||
error = strprintf(_("Error: Dumpfile specifies an unsupported database format (%s). Only sqlite database dumps are supported"), format_value);
|
||||
return false;
|
||||
}
|
||||
DatabaseFormat data_format;
|
||||
if (file_format == "bdb") {
|
||||
data_format = DatabaseFormat::BERKELEY;
|
||||
} else if (file_format == "sqlite") {
|
||||
data_format = DatabaseFormat::SQLITE;
|
||||
} else if (file_format == "bdb_swap") {
|
||||
data_format = DatabaseFormat::BERKELEY_SWAP;
|
||||
} else {
|
||||
error = strprintf(_("Unknown wallet file format \"%s\" provided. Please provide one of \"bdb\" or \"sqlite\"."), file_format);
|
||||
return false;
|
||||
}
|
||||
if (file_format != format_value) {
|
||||
warnings.push_back(strprintf(_("Warning: Dumpfile wallet format \"%s\" does not match command line specified format \"%s\"."), format_value, file_format));
|
||||
}
|
||||
std::string format_hasher_line = strprintf("%s,%s\n", format_key, format_value);
|
||||
hasher << std::span{format_hasher_line};
|
||||
|
||||
|
@ -202,7 +190,7 @@ bool CreateFromDump(const ArgsManager& args, const std::string& name, const fs::
|
|||
DatabaseStatus status;
|
||||
ReadDatabaseArgs(args, options);
|
||||
options.require_create = true;
|
||||
options.require_format = data_format;
|
||||
options.require_format = DatabaseFormat::SQLITE;
|
||||
std::unique_ptr<WalletDatabase> database = MakeDatabase(wallet_path, options, status, error);
|
||||
if (!database) return false;
|
||||
|
||||
|
|
|
@ -98,11 +98,6 @@ void WalletInit::AddWalletOptions(ArgsManager& argsman) const
|
|||
|
||||
bool WalletInit::ParameterInteraction() const
|
||||
{
|
||||
#ifdef USE_BDB
|
||||
if (!BerkeleyDatabaseSanityCheck()) {
|
||||
return InitError(Untranslated("A version conflict was detected between the run-time BerkeleyDB library and the one used during compilation."));
|
||||
}
|
||||
#endif
|
||||
if (gArgs.GetBoolArg("-disablewallet", DEFAULT_DISABLE_WALLET)) {
|
||||
for (const std::string& wallet : gArgs.GetArgs("-wallet")) {
|
||||
LogPrintf("%s: parameter interaction: -disablewallet -> ignoring -wallet=%s\n", __func__, wallet);
|
||||
|
|
|
@ -355,9 +355,7 @@ static RPCHelpMan createwallet()
|
|||
{"blank", RPCArg::Type::BOOL, RPCArg::Default{false}, "Create a blank wallet. A blank wallet has no keys or HD seed. One can be set using sethdseed."},
|
||||
{"passphrase", RPCArg::Type::STR, RPCArg::Optional::OMITTED, "Encrypt the wallet with this passphrase."},
|
||||
{"avoid_reuse", RPCArg::Type::BOOL, RPCArg::Default{false}, "Keep track of coin reuse, and treat dirty and clean coins differently with privacy considerations in mind."},
|
||||
{"descriptors", RPCArg::Type::BOOL, RPCArg::Default{true}, "Create a native descriptor wallet. The wallet will use descriptors internally to handle address creation."
|
||||
" Setting to \"false\" will create a legacy wallet; This is only possible with the -deprecatedrpc=create_bdb setting because, the legacy wallet type is being deprecated and"
|
||||
" support for creating and opening legacy wallets will be removed in the future."},
|
||||
{"descriptors", RPCArg::Type::BOOL, RPCArg::Default{true}, "If set, must be \"true\""},
|
||||
{"load_on_startup", RPCArg::Type::BOOL, RPCArg::Optional::OMITTED, "Save wallet name to persistent settings and load on startup. True to add wallet to startup list, false to remove, null to leave unchanged."},
|
||||
{"external_signer", RPCArg::Type::BOOL, RPCArg::Default{false}, "Use an external signer such as a hardware wallet. Requires -signer to be configured. Wallet creation will fail if keys cannot be fetched. Requires disable_private_keys and descriptors set to true."},
|
||||
},
|
||||
|
@ -402,13 +400,9 @@ static RPCHelpMan createwallet()
|
|||
if (!request.params[4].isNull() && request.params[4].get_bool()) {
|
||||
flags |= WALLET_FLAG_AVOID_REUSE;
|
||||
}
|
||||
if (self.Arg<bool>("descriptors")) {
|
||||
flags |= WALLET_FLAG_DESCRIPTORS;
|
||||
} else {
|
||||
if (!context.chain->rpcEnableDeprecated("create_bdb")) {
|
||||
throw JSONRPCError(RPC_WALLET_ERROR, "BDB wallet creation is deprecated and will be removed in a future release."
|
||||
" In this release it can be re-enabled temporarily with the -deprecatedrpc=create_bdb setting.");
|
||||
}
|
||||
if (!self.Arg<bool>("descriptors")) {
|
||||
throw JSONRPCError(RPC_WALLET_ERROR, "descriptors argument must be set to \"true\"; it is no longer possible to create a legacy wallet.");
|
||||
}
|
||||
if (!request.params[7].isNull() && request.params[7].get_bool()) {
|
||||
#ifdef ENABLE_EXTERNAL_SIGNER
|
||||
|
@ -418,12 +412,6 @@ static RPCHelpMan createwallet()
|
|||
#endif
|
||||
}
|
||||
|
||||
#ifndef USE_BDB
|
||||
if (!(flags & WALLET_FLAG_DESCRIPTORS)) {
|
||||
throw JSONRPCError(RPC_WALLET_ERROR, "Compiled without bdb support (required for legacy wallets)");
|
||||
}
|
||||
#endif
|
||||
|
||||
DatabaseOptions options;
|
||||
DatabaseStatus status;
|
||||
ReadDatabaseArgs(*context.args, options);
|
||||
|
|
|
@ -1,221 +0,0 @@
|
|||
// Copyright (c) 2009-2010 Satoshi Nakamoto
|
||||
// Copyright (c) 2009-present The Bitcoin Core developers
|
||||
// Distributed under the MIT software license, see the accompanying
|
||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#include <streams.h>
|
||||
#include <util/fs.h>
|
||||
#include <util/translation.h>
|
||||
#include <wallet/bdb.h>
|
||||
#include <wallet/salvage.h>
|
||||
#include <wallet/wallet.h>
|
||||
#include <wallet/walletdb.h>
|
||||
|
||||
#include <db_cxx.h>
|
||||
|
||||
namespace wallet {
|
||||
/* End of headers, beginning of key/value data */
|
||||
static const char *HEADER_END = "HEADER=END";
|
||||
/* End of key/value data */
|
||||
static const char *DATA_END = "DATA=END";
|
||||
typedef std::pair<std::vector<unsigned char>, std::vector<unsigned char> > KeyValPair;
|
||||
|
||||
class DummyCursor : public DatabaseCursor
|
||||
{
|
||||
Status Next(DataStream& key, DataStream& value) override { return Status::FAIL; }
|
||||
};
|
||||
|
||||
/** RAII class that provides access to a DummyDatabase. Never fails. */
|
||||
class DummyBatch : public DatabaseBatch
|
||||
{
|
||||
private:
|
||||
bool ReadKey(DataStream&& key, DataStream& value) override { return true; }
|
||||
bool WriteKey(DataStream&& key, DataStream&& value, bool overwrite=true) override { return true; }
|
||||
bool EraseKey(DataStream&& key) override { return true; }
|
||||
bool HasKey(DataStream&& key) override { return true; }
|
||||
bool ErasePrefix(std::span<const std::byte> prefix) override { return true; }
|
||||
|
||||
public:
|
||||
void Flush() override {}
|
||||
void Close() override {}
|
||||
|
||||
std::unique_ptr<DatabaseCursor> GetNewCursor() override { return std::make_unique<DummyCursor>(); }
|
||||
std::unique_ptr<DatabaseCursor> GetNewPrefixCursor(std::span<const std::byte> prefix) override { return GetNewCursor(); }
|
||||
bool TxnBegin() override { return true; }
|
||||
bool TxnCommit() override { return true; }
|
||||
bool TxnAbort() override { return true; }
|
||||
bool HasActiveTxn() override { return false; }
|
||||
};
|
||||
|
||||
/** A dummy WalletDatabase that does nothing and never fails. Only used by salvage.
|
||||
**/
|
||||
class DummyDatabase : public WalletDatabase
|
||||
{
|
||||
public:
|
||||
void Open() override {};
|
||||
void AddRef() override {}
|
||||
void RemoveRef() override {}
|
||||
bool Rewrite(const char* pszSkip=nullptr) override { return true; }
|
||||
bool Backup(const std::string& strDest) const override { return true; }
|
||||
void Close() override {}
|
||||
void Flush() override {}
|
||||
bool PeriodicFlush() override { return true; }
|
||||
void IncrementUpdateCounter() override { ++nUpdateCounter; }
|
||||
void ReloadDbEnv() override {}
|
||||
std::string Filename() override { return "dummy"; }
|
||||
std::string Format() override { return "dummy"; }
|
||||
std::unique_ptr<DatabaseBatch> MakeBatch(bool flush_on_close = true) override { return std::make_unique<DummyBatch>(); }
|
||||
};
|
||||
|
||||
bool RecoverDatabaseFile(const ArgsManager& args, const fs::path& file_path, bilingual_str& error, std::vector<bilingual_str>& warnings)
|
||||
{
|
||||
DatabaseOptions options;
|
||||
DatabaseStatus status;
|
||||
ReadDatabaseArgs(args, options);
|
||||
options.require_existing = true;
|
||||
options.verify = false;
|
||||
options.require_format = DatabaseFormat::BERKELEY;
|
||||
std::unique_ptr<WalletDatabase> database = MakeDatabase(file_path, options, status, error);
|
||||
if (!database) return false;
|
||||
|
||||
BerkeleyDatabase& berkeley_database = static_cast<BerkeleyDatabase&>(*database);
|
||||
std::string filename = berkeley_database.Filename();
|
||||
std::shared_ptr<BerkeleyEnvironment> env = berkeley_database.env;
|
||||
|
||||
if (!env->Open(error)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Recovery procedure:
|
||||
// move wallet file to walletfilename.timestamp.bak
|
||||
// Call Salvage with fAggressive=true to
|
||||
// get as much data as possible.
|
||||
// Rewrite salvaged data to fresh wallet file
|
||||
// Rescan so any missing transactions will be
|
||||
// found.
|
||||
int64_t now = GetTime();
|
||||
std::string newFilename = strprintf("%s.%d.bak", filename, now);
|
||||
|
||||
int result = env->dbenv->dbrename(nullptr, filename.c_str(), nullptr,
|
||||
newFilename.c_str(), DB_AUTO_COMMIT);
|
||||
if (result != 0)
|
||||
{
|
||||
error = Untranslated(strprintf("Failed to rename %s to %s", filename, newFilename));
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Salvage data from a file. The DB_AGGRESSIVE flag is being used (see berkeley DB->verify() method documentation).
|
||||
* key/value pairs are appended to salvagedData which are then written out to a new wallet file.
|
||||
* NOTE: reads the entire database into memory, so cannot be used
|
||||
* for huge databases.
|
||||
*/
|
||||
std::vector<KeyValPair> salvagedData;
|
||||
|
||||
std::stringstream strDump;
|
||||
|
||||
Db db(env->dbenv.get(), 0);
|
||||
result = db.verify(newFilename.c_str(), nullptr, &strDump, DB_SALVAGE | DB_AGGRESSIVE);
|
||||
if (result == DB_VERIFY_BAD) {
|
||||
warnings.emplace_back(Untranslated("Salvage: Database salvage found errors, all data may not be recoverable."));
|
||||
}
|
||||
if (result != 0 && result != DB_VERIFY_BAD) {
|
||||
error = Untranslated(strprintf("Salvage: Database salvage failed with result %d.", result));
|
||||
return false;
|
||||
}
|
||||
|
||||
// Format of bdb dump is ascii lines:
|
||||
// header lines...
|
||||
// HEADER=END
|
||||
// hexadecimal key
|
||||
// hexadecimal value
|
||||
// ... repeated
|
||||
// DATA=END
|
||||
|
||||
std::string strLine;
|
||||
while (!strDump.eof() && strLine != HEADER_END)
|
||||
getline(strDump, strLine); // Skip past header
|
||||
|
||||
std::string keyHex, valueHex;
|
||||
while (!strDump.eof() && keyHex != DATA_END) {
|
||||
getline(strDump, keyHex);
|
||||
if (keyHex != DATA_END) {
|
||||
if (strDump.eof())
|
||||
break;
|
||||
getline(strDump, valueHex);
|
||||
if (valueHex == DATA_END) {
|
||||
warnings.emplace_back(Untranslated("Salvage: WARNING: Number of keys in data does not match number of values."));
|
||||
break;
|
||||
}
|
||||
salvagedData.emplace_back(ParseHex(keyHex), ParseHex(valueHex));
|
||||
}
|
||||
}
|
||||
|
||||
bool fSuccess;
|
||||
if (keyHex != DATA_END) {
|
||||
warnings.emplace_back(Untranslated("Salvage: WARNING: Unexpected end of file while reading salvage output."));
|
||||
fSuccess = false;
|
||||
} else {
|
||||
fSuccess = (result == 0);
|
||||
}
|
||||
|
||||
if (salvagedData.empty())
|
||||
{
|
||||
error = Untranslated(strprintf("Salvage(aggressive) found no records in %s.", newFilename));
|
||||
return false;
|
||||
}
|
||||
|
||||
std::unique_ptr<Db> pdbCopy = std::make_unique<Db>(env->dbenv.get(), 0);
|
||||
int ret = pdbCopy->open(nullptr, // Txn pointer
|
||||
filename.c_str(), // Filename
|
||||
"main", // Logical db name
|
||||
DB_BTREE, // Database type
|
||||
DB_CREATE, // Flags
|
||||
0);
|
||||
if (ret > 0) {
|
||||
error = Untranslated(strprintf("Cannot create database file %s", filename));
|
||||
pdbCopy->close(0);
|
||||
return false;
|
||||
}
|
||||
|
||||
DbTxn* ptxn = env->TxnBegin(DB_TXN_WRITE_NOSYNC);
|
||||
CWallet dummyWallet(nullptr, "", std::make_unique<DummyDatabase>());
|
||||
for (KeyValPair& row : salvagedData)
|
||||
{
|
||||
/* Filter for only private key type KV pairs to be added to the salvaged wallet */
|
||||
DataStream ssKey{row.first};
|
||||
DataStream ssValue(row.second);
|
||||
std::string strType, strErr;
|
||||
|
||||
// We only care about KEY, MASTER_KEY, CRYPTED_KEY, and HDCHAIN types
|
||||
ssKey >> strType;
|
||||
bool fReadOK = false;
|
||||
if (strType == DBKeys::KEY) {
|
||||
fReadOK = LoadKey(&dummyWallet, ssKey, ssValue, strErr);
|
||||
} else if (strType == DBKeys::CRYPTED_KEY) {
|
||||
fReadOK = LoadCryptedKey(&dummyWallet, ssKey, ssValue, strErr);
|
||||
} else if (strType == DBKeys::MASTER_KEY) {
|
||||
fReadOK = LoadEncryptionKey(&dummyWallet, ssKey, ssValue, strErr);
|
||||
} else if (strType == DBKeys::HDCHAIN) {
|
||||
fReadOK = LoadHDChain(&dummyWallet, ssValue, strErr);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!fReadOK)
|
||||
{
|
||||
warnings.push_back(Untranslated(strprintf("WARNING: WalletBatch::Recover skipping %s: %s", strType, strErr)));
|
||||
continue;
|
||||
}
|
||||
Dbt datKey(row.first.data(), row.first.size());
|
||||
Dbt datValue(row.second.data(), row.second.size());
|
||||
int ret2 = pdbCopy->put(ptxn, &datKey, &datValue, DB_NOOVERWRITE);
|
||||
if (ret2 > 0)
|
||||
fSuccess = false;
|
||||
}
|
||||
ptxn->commit(0);
|
||||
pdbCopy->close(0);
|
||||
|
||||
return fSuccess;
|
||||
}
|
||||
} // namespace wallet
|
|
@ -1,19 +0,0 @@
|
|||
// Copyright (c) 2009-2010 Satoshi Nakamoto
|
||||
// Copyright (c) 2009-2021 The Bitcoin Core developers
|
||||
// Distributed under the MIT software license, see the accompanying
|
||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#ifndef BITCOIN_WALLET_SALVAGE_H
|
||||
#define BITCOIN_WALLET_SALVAGE_H
|
||||
|
||||
#include <streams.h>
|
||||
#include <util/fs.h>
|
||||
|
||||
class ArgsManager;
|
||||
struct bilingual_str;
|
||||
|
||||
namespace wallet {
|
||||
bool RecoverDatabaseFile(const ArgsManager& args, const fs::path& file_path, bilingual_str& error, std::vector<bilingual_str>& warnings);
|
||||
} // namespace wallet
|
||||
|
||||
#endif // BITCOIN_WALLET_SALVAGE_H
|
|
@ -2,17 +2,12 @@
|
|||
// Distributed under the MIT software license, see the accompanying
|
||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#include <bitcoin-build-config.h> // IWYU pragma: keep
|
||||
|
||||
#include <boost/test/unit_test.hpp>
|
||||
|
||||
#include <test/util/setup_common.h>
|
||||
#include <util/check.h>
|
||||
#include <util/fs.h>
|
||||
#include <util/translation.h>
|
||||
#ifdef USE_BDB
|
||||
#include <wallet/bdb.h>
|
||||
#endif
|
||||
#include <wallet/sqlite.h>
|
||||
#include <wallet/migrate.h>
|
||||
#include <wallet/test/util.h>
|
||||
|
@ -60,82 +55,13 @@ static void CheckPrefix(DatabaseBatch& batch, std::span<const std::byte> prefix,
|
|||
|
||||
BOOST_FIXTURE_TEST_SUITE(db_tests, BasicTestingSetup)
|
||||
|
||||
#ifdef USE_BDB
|
||||
static std::shared_ptr<BerkeleyEnvironment> GetWalletEnv(const fs::path& path, fs::path& database_filename)
|
||||
{
|
||||
fs::path data_file = BDBDataFile(path);
|
||||
database_filename = data_file.filename();
|
||||
return GetBerkeleyEnv(data_file.parent_path(), false);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(getwalletenv_file)
|
||||
{
|
||||
fs::path test_name = "test_name.dat";
|
||||
const fs::path datadir = m_args.GetDataDirNet();
|
||||
fs::path file_path = datadir / test_name;
|
||||
std::ofstream f{file_path};
|
||||
f.close();
|
||||
|
||||
fs::path filename;
|
||||
std::shared_ptr<BerkeleyEnvironment> env = GetWalletEnv(file_path, filename);
|
||||
BOOST_CHECK_EQUAL(filename, test_name);
|
||||
BOOST_CHECK_EQUAL(env->Directory(), datadir);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(getwalletenv_directory)
|
||||
{
|
||||
fs::path expected_name = "wallet.dat";
|
||||
const fs::path datadir = m_args.GetDataDirNet();
|
||||
|
||||
fs::path filename;
|
||||
std::shared_ptr<BerkeleyEnvironment> env = GetWalletEnv(datadir, filename);
|
||||
BOOST_CHECK_EQUAL(filename, expected_name);
|
||||
BOOST_CHECK_EQUAL(env->Directory(), datadir);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(getwalletenv_g_dbenvs_multiple)
|
||||
{
|
||||
fs::path datadir = m_args.GetDataDirNet() / "1";
|
||||
fs::path datadir_2 = m_args.GetDataDirNet() / "2";
|
||||
fs::path filename;
|
||||
|
||||
std::shared_ptr<BerkeleyEnvironment> env_1 = GetWalletEnv(datadir, filename);
|
||||
std::shared_ptr<BerkeleyEnvironment> env_2 = GetWalletEnv(datadir, filename);
|
||||
std::shared_ptr<BerkeleyEnvironment> env_3 = GetWalletEnv(datadir_2, filename);
|
||||
|
||||
BOOST_CHECK(env_1 == env_2);
|
||||
BOOST_CHECK(env_2 != env_3);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(getwalletenv_g_dbenvs_free_instance)
|
||||
{
|
||||
fs::path datadir = gArgs.GetDataDirNet() / "1";
|
||||
fs::path datadir_2 = gArgs.GetDataDirNet() / "2";
|
||||
fs::path filename;
|
||||
|
||||
std::shared_ptr <BerkeleyEnvironment> env_1_a = GetWalletEnv(datadir, filename);
|
||||
std::shared_ptr <BerkeleyEnvironment> env_2_a = GetWalletEnv(datadir_2, filename);
|
||||
env_1_a.reset();
|
||||
|
||||
std::shared_ptr<BerkeleyEnvironment> env_1_b = GetWalletEnv(datadir, filename);
|
||||
std::shared_ptr<BerkeleyEnvironment> env_2_b = GetWalletEnv(datadir_2, filename);
|
||||
|
||||
BOOST_CHECK(env_1_a != env_1_b);
|
||||
BOOST_CHECK(env_2_a == env_2_b);
|
||||
}
|
||||
#endif
|
||||
|
||||
static std::vector<std::unique_ptr<WalletDatabase>> TestDatabases(const fs::path& path_root)
|
||||
{
|
||||
std::vector<std::unique_ptr<WalletDatabase>> dbs;
|
||||
DatabaseOptions options;
|
||||
DatabaseStatus status;
|
||||
bilingual_str error;
|
||||
#ifdef USE_BDB
|
||||
dbs.emplace_back(MakeBerkeleyDatabase(path_root / "bdb", options, status, error));
|
||||
// Needs BDB to make the DB to read
|
||||
dbs.emplace_back(std::make_unique<BerkeleyRODatabase>(BDBDataFile(path_root / "bdb"), /*open=*/false));
|
||||
#endif
|
||||
// Unable to test BerkeleyRO since we cannot create a new BDB database to open
|
||||
dbs.emplace_back(MakeSQLiteDatabase(path_root / "sqlite", options, status, error));
|
||||
dbs.emplace_back(CreateMockableWalletDatabase());
|
||||
return dbs;
|
||||
|
@ -148,17 +74,12 @@ BOOST_AUTO_TEST_CASE(db_cursor_prefix_range_test)
|
|||
std::vector<std::string> prefixes = {"", "FIRST", "SECOND", "P\xfe\xff", "P\xff\x01", "\xff\xff"};
|
||||
|
||||
std::unique_ptr<DatabaseBatch> handler = Assert(database)->MakeBatch();
|
||||
if (dynamic_cast<BerkeleyRODatabase*>(database.get())) {
|
||||
// For BerkeleyRO, open the file now. This must happen after BDB has written to the file
|
||||
database->Open();
|
||||
} else {
|
||||
// Write elements to it if not berkeleyro
|
||||
// Write elements to it
|
||||
for (unsigned int i = 0; i < 10; i++) {
|
||||
for (const auto& prefix : prefixes) {
|
||||
BOOST_CHECK(handler->Write(std::make_pair(prefix, i), i));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now read all the items by prefix and verify that each element gets parsed correctly
|
||||
for (const auto& prefix : prefixes) {
|
||||
|
@ -206,15 +127,10 @@ BOOST_AUTO_TEST_CASE(db_cursor_prefix_byte_test)
|
|||
for (const auto& database : TestDatabases(m_path_root)) {
|
||||
std::unique_ptr<DatabaseBatch> batch = database->MakeBatch();
|
||||
|
||||
if (dynamic_cast<BerkeleyRODatabase*>(database.get())) {
|
||||
// For BerkeleyRO, open the file now. This must happen after BDB has written to the file
|
||||
database->Open();
|
||||
} else {
|
||||
// Write elements to it if not berkeleyro
|
||||
for (const auto& [k, v] : {e, p, ps, f, fs, ff, ffs}) {
|
||||
batch->Write(std::span{k}, std::span{v});
|
||||
}
|
||||
}
|
||||
|
||||
CheckPrefix(*batch, StringBytes(""), {e, p, ps, f, fs, ff, ffs});
|
||||
CheckPrefix(*batch, StringBytes("prefix"), {p, ps});
|
||||
|
@ -231,10 +147,6 @@ BOOST_AUTO_TEST_CASE(db_availability_after_write_error)
|
|||
// To simulate the behavior, record overwrites are disallowed, and the test verifies
|
||||
// that the database remains active after failing to store an existing record.
|
||||
for (const auto& database : TestDatabases(m_path_root)) {
|
||||
if (dynamic_cast<BerkeleyRODatabase*>(database.get())) {
|
||||
// Skip this test if BerkeleyRO
|
||||
continue;
|
||||
}
|
||||
// Write original record
|
||||
std::unique_ptr<DatabaseBatch> batch = database->MakeBatch();
|
||||
std::string key = "key";
|
||||
|
|
|
@ -5,8 +5,6 @@
|
|||
#ifndef BITCOIN_WALLET_TEST_UTIL_H
|
||||
#define BITCOIN_WALLET_TEST_UTIL_H
|
||||
|
||||
#include <bitcoin-build-config.h> // IWYU pragma: keep
|
||||
|
||||
#include <addresstype.h>
|
||||
#include <wallet/db.h>
|
||||
#include <wallet/scriptpubkeyman.h>
|
||||
|
@ -28,9 +26,6 @@ struct WalletContext;
|
|||
|
||||
static const DatabaseFormat DATABASE_FORMATS[] = {
|
||||
DatabaseFormat::SQLITE,
|
||||
#ifdef USE_BDB
|
||||
DatabaseFormat::BERKELEY,
|
||||
#endif
|
||||
};
|
||||
|
||||
const std::string ADDRESS_BCRT1_UNSPENDABLE = "bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj";
|
||||
|
|
|
@ -196,141 +196,6 @@ BOOST_FIXTURE_TEST_CASE(scan_for_wallet_transactions, TestChain100Setup)
|
|||
}
|
||||
}
|
||||
|
||||
BOOST_FIXTURE_TEST_CASE(importmulti_rescan, TestChain100Setup)
|
||||
{
|
||||
// Cap last block file size, and mine new block in a new block file.
|
||||
CBlockIndex* oldTip = WITH_LOCK(Assert(m_node.chainman)->GetMutex(), return m_node.chainman->ActiveChain().Tip());
|
||||
WITH_LOCK(::cs_main, m_node.chainman->m_blockman.GetBlockFileInfo(oldTip->GetBlockPos().nFile)->nSize = MAX_BLOCKFILE_SIZE);
|
||||
CreateAndProcessBlock({}, GetScriptForRawPubKey(coinbaseKey.GetPubKey()));
|
||||
CBlockIndex* newTip = WITH_LOCK(Assert(m_node.chainman)->GetMutex(), return m_node.chainman->ActiveChain().Tip());
|
||||
|
||||
// Prune the older block file.
|
||||
int file_number;
|
||||
{
|
||||
LOCK(cs_main);
|
||||
file_number = oldTip->GetBlockPos().nFile;
|
||||
Assert(m_node.chainman)->m_blockman.PruneOneBlockFile(file_number);
|
||||
}
|
||||
m_node.chainman->m_blockman.UnlinkPrunedFiles({file_number});
|
||||
|
||||
// Verify importmulti RPC returns failure for a key whose creation time is
|
||||
// before the missing block, and success for a key whose creation time is
|
||||
// after.
|
||||
{
|
||||
const std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(m_node.chain.get(), "", CreateMockableWalletDatabase());
|
||||
wallet->SetupLegacyScriptPubKeyMan();
|
||||
WITH_LOCK(wallet->cs_wallet, wallet->SetLastBlockProcessed(newTip->nHeight, newTip->GetBlockHash()));
|
||||
WalletContext context;
|
||||
context.args = &m_args;
|
||||
AddWallet(context, wallet);
|
||||
UniValue keys;
|
||||
keys.setArray();
|
||||
UniValue key;
|
||||
key.setObject();
|
||||
key.pushKV("scriptPubKey", HexStr(GetScriptForRawPubKey(coinbaseKey.GetPubKey())));
|
||||
key.pushKV("timestamp", 0);
|
||||
key.pushKV("internal", UniValue(true));
|
||||
keys.push_back(key);
|
||||
key.clear();
|
||||
key.setObject();
|
||||
CKey futureKey = GenerateRandomKey();
|
||||
key.pushKV("scriptPubKey", HexStr(GetScriptForRawPubKey(futureKey.GetPubKey())));
|
||||
key.pushKV("timestamp", newTip->GetBlockTimeMax() + TIMESTAMP_WINDOW + 1);
|
||||
key.pushKV("internal", UniValue(true));
|
||||
keys.push_back(std::move(key));
|
||||
JSONRPCRequest request;
|
||||
request.context = &context;
|
||||
request.params.setArray();
|
||||
request.params.push_back(std::move(keys));
|
||||
|
||||
UniValue response = importmulti().HandleRequest(request);
|
||||
BOOST_CHECK_EQUAL(response.write(),
|
||||
strprintf("[{\"success\":false,\"error\":{\"code\":-1,\"message\":\"Rescan failed for key with creation "
|
||||
"timestamp %d. There was an error reading a block from time %d, which is after or within %d "
|
||||
"seconds of key creation, and could contain transactions pertaining to the key. As a result, "
|
||||
"transactions and coins using this key may not appear in the wallet. This error could be caused "
|
||||
"by pruning or data corruption (see bitcoind log for details) and could be dealt with by "
|
||||
"downloading and rescanning the relevant blocks (see -reindex option and rescanblockchain "
|
||||
"RPC).\"}},{\"success\":true}]",
|
||||
0, oldTip->GetBlockTimeMax(), TIMESTAMP_WINDOW));
|
||||
RemoveWallet(context, wallet, /* load_on_start= */ std::nullopt);
|
||||
}
|
||||
}
|
||||
|
||||
// Verify importwallet RPC starts rescan at earliest block with timestamp
|
||||
// greater or equal than key birthday. Previously there was a bug where
|
||||
// importwallet RPC would start the scan at the latest block with timestamp less
|
||||
// than or equal to key birthday.
|
||||
BOOST_FIXTURE_TEST_CASE(importwallet_rescan, TestChain100Setup)
|
||||
{
|
||||
// Create two blocks with same timestamp to verify that importwallet rescan
|
||||
// will pick up both blocks, not just the first.
|
||||
const int64_t BLOCK_TIME = WITH_LOCK(Assert(m_node.chainman)->GetMutex(), return m_node.chainman->ActiveChain().Tip()->GetBlockTimeMax() + 5);
|
||||
SetMockTime(BLOCK_TIME);
|
||||
m_coinbase_txns.emplace_back(CreateAndProcessBlock({}, GetScriptForRawPubKey(coinbaseKey.GetPubKey())).vtx[0]);
|
||||
m_coinbase_txns.emplace_back(CreateAndProcessBlock({}, GetScriptForRawPubKey(coinbaseKey.GetPubKey())).vtx[0]);
|
||||
|
||||
// Set key birthday to block time increased by the timestamp window, so
|
||||
// rescan will start at the block time.
|
||||
const int64_t KEY_TIME = BLOCK_TIME + TIMESTAMP_WINDOW;
|
||||
SetMockTime(KEY_TIME);
|
||||
m_coinbase_txns.emplace_back(CreateAndProcessBlock({}, GetScriptForRawPubKey(coinbaseKey.GetPubKey())).vtx[0]);
|
||||
|
||||
std::string backup_file = fs::PathToString(m_args.GetDataDirNet() / "wallet.backup");
|
||||
|
||||
// Import key into wallet and call dumpwallet to create backup file.
|
||||
{
|
||||
WalletContext context;
|
||||
context.args = &m_args;
|
||||
const std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(m_node.chain.get(), "", CreateMockableWalletDatabase());
|
||||
{
|
||||
auto spk_man = wallet->GetOrCreateLegacyScriptPubKeyMan();
|
||||
LOCK2(wallet->cs_wallet, spk_man->cs_KeyStore);
|
||||
spk_man->mapKeyMetadata[coinbaseKey.GetPubKey().GetID()].nCreateTime = KEY_TIME;
|
||||
spk_man->AddKeyPubKey(coinbaseKey, coinbaseKey.GetPubKey());
|
||||
|
||||
AddWallet(context, wallet);
|
||||
LOCK(Assert(m_node.chainman)->GetMutex());
|
||||
wallet->SetLastBlockProcessed(m_node.chainman->ActiveChain().Height(), m_node.chainman->ActiveChain().Tip()->GetBlockHash());
|
||||
}
|
||||
JSONRPCRequest request;
|
||||
request.context = &context;
|
||||
request.params.setArray();
|
||||
request.params.push_back(backup_file);
|
||||
|
||||
wallet::dumpwallet().HandleRequest(request);
|
||||
RemoveWallet(context, wallet, /* load_on_start= */ std::nullopt);
|
||||
}
|
||||
|
||||
// Call importwallet RPC and verify all blocks with timestamps >= BLOCK_TIME
|
||||
// were scanned, and no prior blocks were scanned.
|
||||
{
|
||||
const std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(m_node.chain.get(), "", CreateMockableWalletDatabase());
|
||||
LOCK(wallet->cs_wallet);
|
||||
wallet->SetupLegacyScriptPubKeyMan();
|
||||
|
||||
WalletContext context;
|
||||
context.args = &m_args;
|
||||
JSONRPCRequest request;
|
||||
request.context = &context;
|
||||
request.params.setArray();
|
||||
request.params.push_back(backup_file);
|
||||
AddWallet(context, wallet);
|
||||
LOCK(Assert(m_node.chainman)->GetMutex());
|
||||
wallet->SetLastBlockProcessed(m_node.chainman->ActiveChain().Height(), m_node.chainman->ActiveChain().Tip()->GetBlockHash());
|
||||
wallet::importwallet().HandleRequest(request);
|
||||
RemoveWallet(context, wallet, /* load_on_start= */ std::nullopt);
|
||||
|
||||
BOOST_CHECK_EQUAL(wallet->mapWallet.size(), 3U);
|
||||
BOOST_CHECK_EQUAL(m_coinbase_txns.size(), 103U);
|
||||
for (size_t i = 0; i < m_coinbase_txns.size(); ++i) {
|
||||
bool found = wallet->GetWalletTx(m_coinbase_txns[i]->GetHash());
|
||||
bool expected = i >= 100;
|
||||
BOOST_CHECK_EQUAL(found, expected);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This test verifies that wallet settings can be added and removed
|
||||
// concurrently, ensuring no race conditions occur during either process.
|
||||
BOOST_FIXTURE_TEST_CASE(write_wallet_settings_concurrently, TestingSetup)
|
||||
|
@ -496,87 +361,6 @@ BOOST_FIXTURE_TEST_CASE(LoadReceiveRequests, TestingSetup)
|
|||
}
|
||||
}
|
||||
|
||||
// Test some watch-only LegacyScriptPubKeyMan methods by the procedure of loading (LoadWatchOnly),
|
||||
// checking (HaveWatchOnly), getting (GetWatchPubKey) and removing (RemoveWatchOnly) a
|
||||
// given PubKey, resp. its corresponding P2PK Script. Results of the impact on
|
||||
// the address -> PubKey map is dependent on whether the PubKey is a point on the curve
|
||||
static void TestWatchOnlyPubKey(LegacyScriptPubKeyMan* spk_man, const CPubKey& add_pubkey)
|
||||
{
|
||||
CScript p2pk = GetScriptForRawPubKey(add_pubkey);
|
||||
CKeyID add_address = add_pubkey.GetID();
|
||||
CPubKey found_pubkey;
|
||||
LOCK(spk_man->cs_KeyStore);
|
||||
|
||||
// all Scripts (i.e. also all PubKeys) are added to the general watch-only set
|
||||
BOOST_CHECK(!spk_man->HaveWatchOnly(p2pk));
|
||||
spk_man->LoadWatchOnly(p2pk);
|
||||
BOOST_CHECK(spk_man->HaveWatchOnly(p2pk));
|
||||
|
||||
// only PubKeys on the curve shall be added to the watch-only address -> PubKey map
|
||||
bool is_pubkey_fully_valid = add_pubkey.IsFullyValid();
|
||||
if (is_pubkey_fully_valid) {
|
||||
BOOST_CHECK(spk_man->GetWatchPubKey(add_address, found_pubkey));
|
||||
BOOST_CHECK(found_pubkey == add_pubkey);
|
||||
} else {
|
||||
BOOST_CHECK(!spk_man->GetWatchPubKey(add_address, found_pubkey));
|
||||
BOOST_CHECK(found_pubkey == CPubKey()); // passed key is unchanged
|
||||
}
|
||||
|
||||
spk_man->RemoveWatchOnly(p2pk);
|
||||
BOOST_CHECK(!spk_man->HaveWatchOnly(p2pk));
|
||||
|
||||
if (is_pubkey_fully_valid) {
|
||||
BOOST_CHECK(!spk_man->GetWatchPubKey(add_address, found_pubkey));
|
||||
BOOST_CHECK(found_pubkey == add_pubkey); // passed key is unchanged
|
||||
}
|
||||
}
|
||||
|
||||
// Cryptographically invalidate a PubKey whilst keeping length and first byte
|
||||
static void PollutePubKey(CPubKey& pubkey)
|
||||
{
|
||||
assert(pubkey.size() >= 1);
|
||||
std::vector<unsigned char> pubkey_raw;
|
||||
pubkey_raw.push_back(pubkey[0]);
|
||||
pubkey_raw.insert(pubkey_raw.end(), pubkey.size() - 1, 0);
|
||||
pubkey = CPubKey(pubkey_raw);
|
||||
assert(!pubkey.IsFullyValid());
|
||||
assert(pubkey.IsValid());
|
||||
}
|
||||
|
||||
// Test watch-only logic for PubKeys
|
||||
BOOST_AUTO_TEST_CASE(WatchOnlyPubKeys)
|
||||
{
|
||||
CKey key;
|
||||
CPubKey pubkey;
|
||||
LegacyScriptPubKeyMan* spk_man = m_wallet.GetOrCreateLegacyScriptPubKeyMan();
|
||||
|
||||
BOOST_CHECK(!spk_man->HaveWatchOnly());
|
||||
|
||||
// uncompressed valid PubKey
|
||||
key.MakeNewKey(false);
|
||||
pubkey = key.GetPubKey();
|
||||
assert(!pubkey.IsCompressed());
|
||||
TestWatchOnlyPubKey(spk_man, pubkey);
|
||||
|
||||
// uncompressed cryptographically invalid PubKey
|
||||
PollutePubKey(pubkey);
|
||||
TestWatchOnlyPubKey(spk_man, pubkey);
|
||||
|
||||
// compressed valid PubKey
|
||||
key.MakeNewKey(true);
|
||||
pubkey = key.GetPubKey();
|
||||
assert(pubkey.IsCompressed());
|
||||
TestWatchOnlyPubKey(spk_man, pubkey);
|
||||
|
||||
// compressed cryptographically invalid PubKey
|
||||
PollutePubKey(pubkey);
|
||||
TestWatchOnlyPubKey(spk_man, pubkey);
|
||||
|
||||
// invalid empty PubKey
|
||||
pubkey = CPubKey();
|
||||
TestWatchOnlyPubKey(spk_man, pubkey);
|
||||
}
|
||||
|
||||
class ListCoinsTestingSetup : public TestChain100Setup
|
||||
{
|
||||
public:
|
||||
|
@ -718,15 +502,6 @@ BOOST_FIXTURE_TEST_CASE(BasicOutputTypesTest, ListCoinsTest)
|
|||
}
|
||||
|
||||
BOOST_FIXTURE_TEST_CASE(wallet_disableprivkeys, TestChain100Setup)
|
||||
{
|
||||
{
|
||||
const std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(m_node.chain.get(), "", CreateMockableWalletDatabase());
|
||||
wallet->SetupLegacyScriptPubKeyMan();
|
||||
wallet->SetMinVersion(FEATURE_LATEST);
|
||||
wallet->SetWalletFlag(WALLET_FLAG_DISABLE_PRIVATE_KEYS);
|
||||
BOOST_CHECK(!wallet->TopUpKeyPool(1000));
|
||||
BOOST_CHECK(!wallet->GetNewDestination(OutputType::BECH32, ""));
|
||||
}
|
||||
{
|
||||
const std::shared_ptr<CWallet> wallet = std::make_shared<CWallet>(m_node.chain.get(), "", CreateMockableWalletDatabase());
|
||||
LOCK(wallet->cs_wallet);
|
||||
|
@ -735,7 +510,6 @@ BOOST_FIXTURE_TEST_CASE(wallet_disableprivkeys, TestChain100Setup)
|
|||
wallet->SetWalletFlag(WALLET_FLAG_DISABLE_PRIVATE_KEYS);
|
||||
BOOST_CHECK(!wallet->GetNewDestination(OutputType::BECH32, ""));
|
||||
}
|
||||
}
|
||||
|
||||
// Explicit calculation which is used to test the wallet constant
|
||||
// We get the same virtual size due to rounding(weight/4) for both use_max_sig values
|
||||
|
|
|
@ -84,132 +84,5 @@ BOOST_FIXTURE_TEST_CASE(wallet_load_descriptors, TestingSetup)
|
|||
}
|
||||
}
|
||||
|
||||
bool HasAnyRecordOfType(WalletDatabase& db, const std::string& key)
|
||||
{
|
||||
std::unique_ptr<DatabaseBatch> batch = db.MakeBatch(false);
|
||||
BOOST_CHECK(batch);
|
||||
std::unique_ptr<DatabaseCursor> cursor = batch->GetNewCursor();
|
||||
BOOST_CHECK(cursor);
|
||||
while (true) {
|
||||
DataStream ssKey{};
|
||||
DataStream ssValue{};
|
||||
DatabaseCursor::Status status = cursor->Next(ssKey, ssValue);
|
||||
assert(status != DatabaseCursor::Status::FAIL);
|
||||
if (status == DatabaseCursor::Status::DONE) break;
|
||||
std::string type;
|
||||
ssKey >> type;
|
||||
if (type == key) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
SerializeData MakeSerializeData(const Args&... args)
|
||||
{
|
||||
DataStream s{};
|
||||
SerializeMany(s, args...);
|
||||
return {s.begin(), s.end()};
|
||||
}
|
||||
|
||||
|
||||
BOOST_FIXTURE_TEST_CASE(wallet_load_ckey, TestingSetup)
|
||||
{
|
||||
SerializeData ckey_record_key;
|
||||
SerializeData ckey_record_value;
|
||||
MockableData records;
|
||||
|
||||
{
|
||||
// Context setup.
|
||||
// Create and encrypt legacy wallet
|
||||
std::shared_ptr<CWallet> wallet(new CWallet(m_node.chain.get(), "", CreateMockableWalletDatabase()));
|
||||
LOCK(wallet->cs_wallet);
|
||||
auto legacy_spkm = wallet->GetOrCreateLegacyScriptPubKeyMan();
|
||||
BOOST_CHECK(legacy_spkm->SetupGeneration(true));
|
||||
|
||||
// Retrieve a key
|
||||
CTxDestination dest = *Assert(legacy_spkm->GetNewDestination(OutputType::LEGACY));
|
||||
CKeyID key_id = GetKeyForDestination(*legacy_spkm, dest);
|
||||
CKey first_key;
|
||||
BOOST_CHECK(legacy_spkm->GetKey(key_id, first_key));
|
||||
|
||||
// Encrypt the wallet
|
||||
BOOST_CHECK(wallet->EncryptWallet("encrypt"));
|
||||
wallet->Flush();
|
||||
|
||||
// Store a copy of all the records
|
||||
records = GetMockableDatabase(*wallet).m_records;
|
||||
|
||||
// Get the record for the retrieved key
|
||||
ckey_record_key = MakeSerializeData(DBKeys::CRYPTED_KEY, first_key.GetPubKey());
|
||||
ckey_record_value = records.at(ckey_record_key);
|
||||
}
|
||||
|
||||
{
|
||||
// First test case:
|
||||
// Erase all the crypted keys from db and unlock the wallet.
|
||||
// The wallet will only re-write the crypted keys to db if any checksum is missing at load time.
|
||||
// So, if any 'ckey' record re-appears on db, then the checksums were not properly calculated, and we are re-writing
|
||||
// the records every time that 'CWallet::Unlock' gets called, which is not good.
|
||||
|
||||
// Load the wallet and check that is encrypted
|
||||
std::shared_ptr<CWallet> wallet(new CWallet(m_node.chain.get(), "", CreateMockableWalletDatabase(records)));
|
||||
BOOST_CHECK_EQUAL(wallet->LoadWallet(), DBErrors::LOAD_OK);
|
||||
BOOST_CHECK(wallet->IsCrypted());
|
||||
BOOST_CHECK(HasAnyRecordOfType(wallet->GetDatabase(), DBKeys::CRYPTED_KEY));
|
||||
|
||||
// Now delete all records and check that the 'Unlock' function doesn't re-write them
|
||||
BOOST_CHECK(wallet->GetLegacyScriptPubKeyMan()->DeleteRecords());
|
||||
BOOST_CHECK(!HasAnyRecordOfType(wallet->GetDatabase(), DBKeys::CRYPTED_KEY));
|
||||
BOOST_CHECK(wallet->Unlock("encrypt"));
|
||||
BOOST_CHECK(!HasAnyRecordOfType(wallet->GetDatabase(), DBKeys::CRYPTED_KEY));
|
||||
}
|
||||
|
||||
{
|
||||
// Second test case:
|
||||
// Verify that loading up a 'ckey' with no checksum triggers a complete re-write of the crypted keys.
|
||||
|
||||
// Cut off the 32 byte checksum from a ckey record
|
||||
records[ckey_record_key].resize(ckey_record_value.size() - 32);
|
||||
|
||||
// Load the wallet and check that is encrypted
|
||||
std::shared_ptr<CWallet> wallet(new CWallet(m_node.chain.get(), "", CreateMockableWalletDatabase(records)));
|
||||
BOOST_CHECK_EQUAL(wallet->LoadWallet(), DBErrors::LOAD_OK);
|
||||
BOOST_CHECK(wallet->IsCrypted());
|
||||
BOOST_CHECK(HasAnyRecordOfType(wallet->GetDatabase(), DBKeys::CRYPTED_KEY));
|
||||
|
||||
// Now delete all ckey records and check that the 'Unlock' function re-writes them
|
||||
// (this is because the wallet, at load time, found a ckey record with no checksum)
|
||||
BOOST_CHECK(wallet->GetLegacyScriptPubKeyMan()->DeleteRecords());
|
||||
BOOST_CHECK(!HasAnyRecordOfType(wallet->GetDatabase(), DBKeys::CRYPTED_KEY));
|
||||
BOOST_CHECK(wallet->Unlock("encrypt"));
|
||||
BOOST_CHECK(HasAnyRecordOfType(wallet->GetDatabase(), DBKeys::CRYPTED_KEY));
|
||||
}
|
||||
|
||||
{
|
||||
// Third test case:
|
||||
// Verify that loading up a 'ckey' with an invalid checksum throws an error.
|
||||
|
||||
// Cut off the 32 byte checksum from a ckey record
|
||||
records[ckey_record_key].resize(ckey_record_value.size() - 32);
|
||||
// Fill in the checksum space with 0s
|
||||
records[ckey_record_key].resize(ckey_record_value.size());
|
||||
|
||||
std::shared_ptr<CWallet> wallet(new CWallet(m_node.chain.get(), "", CreateMockableWalletDatabase(records)));
|
||||
BOOST_CHECK_EQUAL(wallet->LoadWallet(), DBErrors::CORRUPT);
|
||||
}
|
||||
|
||||
{
|
||||
// Fourth test case:
|
||||
// Verify that loading up a 'ckey' with an invalid pubkey throws an error
|
||||
CPubKey invalid_key;
|
||||
BOOST_CHECK(!invalid_key.IsValid());
|
||||
SerializeData key = MakeSerializeData(DBKeys::CRYPTED_KEY, invalid_key);
|
||||
records[key] = ckey_record_value;
|
||||
|
||||
std::shared_ptr<CWallet> wallet(new CWallet(m_node.chain.get(), "", CreateMockableWalletDatabase(records)));
|
||||
BOOST_CHECK_EQUAL(wallet->LoadWallet(), DBErrors::CORRUPT);
|
||||
}
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_SUITE_END()
|
||||
} // namespace wallet
|
||||
|
|
|
@ -385,11 +385,11 @@ std::shared_ptr<CWallet> CreateWallet(WalletContext& context, const std::string&
|
|||
uint64_t wallet_creation_flags = options.create_flags;
|
||||
const SecureString& passphrase = options.create_passphrase;
|
||||
|
||||
ArgsManager& args = *Assert(context.args);
|
||||
|
||||
if (wallet_creation_flags & WALLET_FLAG_DESCRIPTORS) options.require_format = DatabaseFormat::SQLITE;
|
||||
else if (args.GetBoolArg("-swapbdbendian", false)) {
|
||||
options.require_format = DatabaseFormat::BERKELEY_SWAP;
|
||||
else {
|
||||
error = Untranslated("Legacy wallets can no longer be created");
|
||||
status = DatabaseStatus::FAILED_CREATE;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Indicate that the wallet is actually supposed to be blank and not just blank to make it encrypted
|
||||
|
@ -3052,6 +3052,9 @@ std::shared_ptr<CWallet> CWallet::Create(WalletContext& context, const std::stri
|
|||
error = strprintf(_("Unexpected legacy entry in descriptor wallet found. Loading wallet %s\n\n"
|
||||
"The wallet might have been tampered with or created with malicious intent.\n"), walletFile);
|
||||
return nullptr;
|
||||
} else if (nLoadWalletRet == DBErrors::LEGACY_WALLET) {
|
||||
error = strprintf(_("Error loading %s: Wallet is a legacy wallet. Please migrate to a descriptor wallet using the migration tool (migratewallet RPC)."), walletFile);
|
||||
return nullptr;
|
||||
} else {
|
||||
error = strprintf(_("Error loading %s"), walletFile);
|
||||
return nullptr;
|
||||
|
@ -3069,10 +3072,8 @@ std::shared_ptr<CWallet> CWallet::Create(WalletContext& context, const std::stri
|
|||
|
||||
walletInstance->InitWalletFlags(wallet_creation_flags);
|
||||
|
||||
// Only create LegacyScriptPubKeyMan when not descriptor wallet
|
||||
if (!walletInstance->IsWalletFlagSet(WALLET_FLAG_DESCRIPTORS)) {
|
||||
walletInstance->SetupLegacyScriptPubKeyMan();
|
||||
}
|
||||
// Only descriptor wallets can be created
|
||||
assert(walletInstance->IsWalletFlagSet(WALLET_FLAG_DESCRIPTORS));
|
||||
|
||||
if ((wallet_creation_flags & WALLET_FLAG_EXTERNAL_SIGNER) || !(wallet_creation_flags & (WALLET_FLAG_DISABLE_PRIVATE_KEYS | WALLET_FLAG_BLANK_WALLET))) {
|
||||
LOCK(walletInstance->cs_wallet);
|
||||
|
|
|
@ -485,6 +485,11 @@ static DBErrors LoadWalletFlags(CWallet* pwallet, DatabaseBatch& batch) EXCLUSIV
|
|||
pwallet->WalletLogPrintf("Error reading wallet database: Unknown non-tolerable wallet flags found\n");
|
||||
return DBErrors::TOO_NEW;
|
||||
}
|
||||
// All wallets must be descriptor wallets unless opened with a bdb_ro db
|
||||
// bdb_ro is only used for legacy to descriptor migration.
|
||||
if (pwallet->GetDatabase().Format() != "bdb_ro" && !pwallet->IsWalletFlagSet(WALLET_FLAG_DESCRIPTORS)) {
|
||||
return DBErrors::LEGACY_WALLET;
|
||||
}
|
||||
}
|
||||
return DBErrors::LOAD_OK;
|
||||
}
|
||||
|
@ -1432,7 +1437,7 @@ std::unique_ptr<WalletDatabase> MakeDatabase(const fs::path& path, const Databas
|
|||
std::optional<DatabaseFormat> format;
|
||||
if (exists) {
|
||||
if (IsBDBFile(BDBDataFile(path))) {
|
||||
format = DatabaseFormat::BERKELEY;
|
||||
format = DatabaseFormat::BERKELEY_RO;
|
||||
}
|
||||
if (IsSQLiteFile(SQLiteDataFile(path))) {
|
||||
if (format) {
|
||||
|
@ -1460,9 +1465,11 @@ std::unique_ptr<WalletDatabase> MakeDatabase(const fs::path& path, const Databas
|
|||
return nullptr;
|
||||
}
|
||||
|
||||
// If BERKELEY was the format, then change the format from BERKELEY to BERKELEY_RO
|
||||
if (format && options.require_format && format == DatabaseFormat::BERKELEY && options.require_format == DatabaseFormat::BERKELEY_RO) {
|
||||
format = DatabaseFormat::BERKELEY_RO;
|
||||
// BERKELEY_RO can only be opened if require_format was set, which only occurs in migration.
|
||||
if (format && format == DatabaseFormat::BERKELEY_RO && (!options.require_format || options.require_format != DatabaseFormat::BERKELEY_RO)) {
|
||||
error = Untranslated(strprintf("Failed to open database path '%s'. The wallet appears to be a Legacy wallet, please use the wallet migration tool (migratewallet RPC).", fs::PathToString(path)));
|
||||
status = DatabaseStatus::FAILED_BAD_FORMAT;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// A db already exists so format is set, but options also specifies the format, so make sure they agree
|
||||
|
@ -1475,12 +1482,8 @@ std::unique_ptr<WalletDatabase> MakeDatabase(const fs::path& path, const Databas
|
|||
// Format is not set when a db doesn't already exist, so use the format specified by the options if it is set.
|
||||
if (!format && options.require_format) format = options.require_format;
|
||||
|
||||
// If the format is not specified or detected, choose the default format based on what is available. We prefer BDB over SQLite for now.
|
||||
if (!format) {
|
||||
format = DatabaseFormat::SQLITE;
|
||||
#ifdef USE_BDB
|
||||
format = DatabaseFormat::BERKELEY;
|
||||
#endif
|
||||
}
|
||||
|
||||
if (format == DatabaseFormat::SQLITE) {
|
||||
|
@ -1491,15 +1494,8 @@ std::unique_ptr<WalletDatabase> MakeDatabase(const fs::path& path, const Databas
|
|||
return MakeBerkeleyRODatabase(path, options, status, error);
|
||||
}
|
||||
|
||||
#ifdef USE_BDB
|
||||
if constexpr (true) {
|
||||
return MakeBerkeleyDatabase(path, options, status, error);
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
error = Untranslated(strprintf("Failed to open database path '%s'. Build does not support Berkeley DB database format.", fs::PathToString(path)));
|
||||
error = Untranslated(STR_INTERNAL_BUG("Could not determine wallet format"));
|
||||
status = DatabaseStatus::FAILED_BAD_FORMAT;
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
} // namespace wallet
|
||||
|
|
|
@ -55,7 +55,8 @@ enum class DBErrors : int
|
|||
UNKNOWN_DESCRIPTOR = 6,
|
||||
LOAD_FAIL = 7,
|
||||
UNEXPECTED_LEGACY_ENTRY = 8,
|
||||
CORRUPT = 9,
|
||||
LEGACY_WALLET = 9,
|
||||
CORRUPT = 10,
|
||||
};
|
||||
|
||||
namespace DBKeys {
|
||||
|
|
|
@ -10,7 +10,6 @@
|
|||
#include <util/fs.h>
|
||||
#include <util/translation.h>
|
||||
#include <wallet/dump.h>
|
||||
#include <wallet/salvage.h>
|
||||
#include <wallet/wallet.h>
|
||||
#include <wallet/walletutil.h>
|
||||
|
||||
|
@ -112,22 +111,30 @@ static void WalletShowInfo(CWallet* wallet_instance)
|
|||
|
||||
bool ExecuteWalletToolFunc(const ArgsManager& args, const std::string& command)
|
||||
{
|
||||
if (args.IsArgSet("-format") && command != "createfromdump") {
|
||||
tfm::format(std::cerr, "The -format option can only be used with the \"createfromdump\" command.\n");
|
||||
return false;
|
||||
}
|
||||
if (args.IsArgSet("-dumpfile") && command != "dump" && command != "createfromdump") {
|
||||
tfm::format(std::cerr, "The -dumpfile option can only be used with the \"dump\" and \"createfromdump\" commands.\n");
|
||||
return false;
|
||||
}
|
||||
if (args.IsArgSet("-descriptors") && command != "create") {
|
||||
if (args.IsArgSet("-descriptors")) {
|
||||
if (command != "create") {
|
||||
tfm::format(std::cerr, "The -descriptors option can only be used with the 'create' command.\n");
|
||||
return false;
|
||||
}
|
||||
if (args.IsArgSet("-legacy") && command != "create") {
|
||||
if (!args.GetBoolArg("-descriptors", true)) {
|
||||
tfm::format(std::cerr, "The -descriptors option must be set to \"true\"\n");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (args.IsArgSet("-legacy")) {
|
||||
if (command != "create") {
|
||||
tfm::format(std::cerr, "The -legacy option can only be used with the 'create' command.\n");
|
||||
return false;
|
||||
}
|
||||
if (args.GetBoolArg("-legacy", true)) {
|
||||
tfm::format(std::cerr, "The -legacy option must be set to \"false\"\n");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (command == "create" && !args.IsArgSet("-wallet")) {
|
||||
tfm::format(std::cerr, "Wallet name must be provided when creating a new wallet.\n");
|
||||
return false;
|
||||
|
@ -139,22 +146,8 @@ bool ExecuteWalletToolFunc(const ArgsManager& args, const std::string& command)
|
|||
DatabaseOptions options;
|
||||
ReadDatabaseArgs(args, options);
|
||||
options.require_create = true;
|
||||
// If -legacy is set, use it. Otherwise default to false.
|
||||
bool make_legacy = args.GetBoolArg("-legacy", false);
|
||||
// If neither -legacy nor -descriptors is set, default to true. If -descriptors is set, use its value.
|
||||
bool make_descriptors = (!args.IsArgSet("-descriptors") && !args.IsArgSet("-legacy")) || (args.IsArgSet("-descriptors") && args.GetBoolArg("-descriptors", true));
|
||||
if (make_legacy && make_descriptors) {
|
||||
tfm::format(std::cerr, "Only one of -legacy or -descriptors can be set to true, not both\n");
|
||||
return false;
|
||||
}
|
||||
if (!make_legacy && !make_descriptors) {
|
||||
tfm::format(std::cerr, "One of -legacy or -descriptors must be set to true (or omitted)\n");
|
||||
return false;
|
||||
}
|
||||
if (make_descriptors) {
|
||||
options.create_flags |= WALLET_FLAG_DESCRIPTORS;
|
||||
options.require_format = DatabaseFormat::SQLITE;
|
||||
}
|
||||
|
||||
const std::shared_ptr<CWallet> wallet_instance = MakeWallet(name, path, options);
|
||||
if (wallet_instance) {
|
||||
|
@ -169,24 +162,6 @@ bool ExecuteWalletToolFunc(const ArgsManager& args, const std::string& command)
|
|||
if (!wallet_instance) return false;
|
||||
WalletShowInfo(wallet_instance.get());
|
||||
wallet_instance->Close();
|
||||
} else if (command == "salvage") {
|
||||
#ifdef USE_BDB
|
||||
bilingual_str error;
|
||||
std::vector<bilingual_str> warnings;
|
||||
bool ret = RecoverDatabaseFile(args, path, error, warnings);
|
||||
if (!ret) {
|
||||
for (const auto& warning : warnings) {
|
||||
tfm::format(std::cerr, "%s\n", warning.original);
|
||||
}
|
||||
if (!error.empty()) {
|
||||
tfm::format(std::cerr, "%s\n", error.original);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
#else
|
||||
tfm::format(std::cerr, "Salvage command is not available as BDB support is not compiled");
|
||||
return false;
|
||||
#endif
|
||||
} else if (command == "dump") {
|
||||
DatabaseOptions options;
|
||||
ReadDatabaseArgs(args, options);
|
||||
|
|
|
@ -79,9 +79,6 @@ class ExampleTest(BitcoinTestFramework):
|
|||
# Override the set_test_params(), skip_test_if_missing_module(), add_options(), setup_chain(), setup_network()
|
||||
# and setup_nodes() methods to customize the test setup as required.
|
||||
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
"""Override test parameters for your individual test.
|
||||
|
||||
|
|
|
@ -45,9 +45,6 @@ SEQUENCE_LOCKTIME_MASK = 0x0000ffff
|
|||
NOT_FINAL_ERROR = "non-BIP68-final"
|
||||
|
||||
class BIP68Test(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.extra_args = [
|
||||
|
@ -58,6 +55,7 @@ class BIP68Test(BitcoinTestFramework):
|
|||
'-testactivationheight=csv@432',
|
||||
],
|
||||
]
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
|
||||
|
|
|
@ -18,9 +18,6 @@ from test_framework import util
|
|||
|
||||
|
||||
class ConfArgsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
|
@ -30,6 +27,7 @@ class ConfArgsTest(BitcoinTestFramework):
|
|||
self.supports_cli = False
|
||||
self.wallet_names = []
|
||||
self.disable_autoconnect = False
|
||||
self.uses_wallet = None
|
||||
|
||||
# Overridden to avoid attempt to sync not yet started nodes.
|
||||
def setup_network(self):
|
||||
|
|
|
@ -13,12 +13,10 @@ from test_framework.test_node import (
|
|||
)
|
||||
|
||||
class FilelockTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
self.uses_wallet = None
|
||||
|
||||
def setup_network(self):
|
||||
self.add_nodes(self.num_nodes, extra_args=None)
|
||||
|
@ -46,20 +44,12 @@ class FilelockTest(BitcoinTestFramework):
|
|||
assert pid_file.exists()
|
||||
|
||||
if self.is_wallet_compiled():
|
||||
def check_wallet_filelock(descriptors):
|
||||
wallet_name = ''.join([random.choice(string.ascii_lowercase) for _ in range(6)])
|
||||
self.nodes[0].createwallet(wallet_name=wallet_name, descriptors=descriptors)
|
||||
self.nodes[0].createwallet(wallet_name=wallet_name)
|
||||
wallet_dir = self.nodes[0].wallets_path
|
||||
self.log.info("Check that we can't start a second bitcoind instance using the same wallet")
|
||||
if descriptors:
|
||||
expected_msg = f"Error: SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another instance of {self.config['environment']['CLIENT_NAME']}?"
|
||||
else:
|
||||
expected_msg = "Error: Error initializing wallet database environment"
|
||||
self.nodes[1].assert_start_raises_init_error(extra_args=[f'-walletdir={wallet_dir}', f'-wallet={wallet_name}', '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)
|
||||
|
||||
if self.is_bdb_compiled():
|
||||
check_wallet_filelock(False)
|
||||
check_wallet_filelock(True)
|
||||
|
||||
if __name__ == '__main__':
|
||||
FilelockTest(__file__).main()
|
||||
|
|
|
@ -24,12 +24,10 @@ class InitTest(BitcoinTestFramework):
|
|||
subsequent starts.
|
||||
"""
|
||||
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = False
|
||||
self.num_nodes = 1
|
||||
self.uses_wallet = None
|
||||
|
||||
def init_stress_test(self):
|
||||
"""
|
||||
|
|
|
@ -25,12 +25,10 @@ def notify_outputname(walletname, txid):
|
|||
|
||||
|
||||
class NotificationsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.setup_clean_chain = True
|
||||
self.uses_wallet = None
|
||||
|
||||
def setup_network(self):
|
||||
self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHARS_DISALLOWED)
|
||||
|
@ -58,7 +56,6 @@ class NotificationsTest(BitcoinTestFramework):
|
|||
def run_test(self):
|
||||
if self.is_wallet_compiled():
|
||||
# Setup the descriptors to be imported to the wallet
|
||||
seed = "cTdGmKFWpbvpKQ7ejrdzqYT2hhjyb3GPHnLAK7wdi5Em67YLwSm9"
|
||||
xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v"
|
||||
desc_imports = [{
|
||||
"desc": descsum_create(f"wpkh({xpriv}/0/*)"),
|
||||
|
@ -75,11 +72,8 @@ class NotificationsTest(BitcoinTestFramework):
|
|||
# Make the wallets and import the descriptors
|
||||
# Ensures that node 0 and node 1 share the same wallet for the conflicting transaction tests below.
|
||||
for i, name in enumerate(self.wallet_names):
|
||||
self.nodes[i].createwallet(wallet_name=name, descriptors=self.options.descriptors, blank=True, load_on_startup=True)
|
||||
if self.options.descriptors:
|
||||
self.nodes[i].createwallet(wallet_name=name, blank=True, load_on_startup=True)
|
||||
self.nodes[i].importdescriptors(desc_imports)
|
||||
else:
|
||||
self.nodes[i].sethdseed(True, seed)
|
||||
|
||||
self.log.info("test -blocknotify")
|
||||
block_count = 10
|
||||
|
|
|
@ -66,13 +66,11 @@ def calc_usage(blockdir):
|
|||
return sum(os.path.getsize(blockdir + f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.)
|
||||
|
||||
class PruneTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 6
|
||||
self.supports_cli = False
|
||||
self.uses_wallet = None
|
||||
|
||||
# Create nodes 0 and 1 to mine.
|
||||
# Create node 2 to test pruning.
|
||||
|
|
|
@ -20,9 +20,6 @@ from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
|
|||
|
||||
MAX_REPLACEMENT_LIMIT = 100
|
||||
class ReplaceByFeeTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.extra_args = [
|
||||
|
@ -37,6 +34,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
|||
],
|
||||
]
|
||||
self.supports_cli = False
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
self.wallet = MiniWallet(self.nodes[0])
|
||||
|
@ -567,7 +565,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
|
|||
assert_equal(json0["vin"][0]["sequence"], 4294967293)
|
||||
assert_equal(json1["vin"][0]["sequence"], 4294967295)
|
||||
|
||||
if self.is_specified_wallet_compiled():
|
||||
if self.is_wallet_compiled():
|
||||
self.init_wallet(node=0)
|
||||
rawtx2 = self.nodes[0].createrawtransaction([], outs)
|
||||
frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True})
|
||||
|
|
|
@ -7,9 +7,6 @@
|
|||
from decimal import Decimal
|
||||
|
||||
from test_framework.address import (
|
||||
key_to_p2pkh,
|
||||
program_to_witness,
|
||||
script_to_p2sh,
|
||||
script_to_p2sh_p2wsh,
|
||||
script_to_p2wsh,
|
||||
)
|
||||
|
@ -28,14 +25,11 @@ from test_framework.messages import (
|
|||
)
|
||||
from test_framework.script import (
|
||||
CScript,
|
||||
OP_0,
|
||||
OP_1,
|
||||
OP_DROP,
|
||||
OP_TRUE,
|
||||
)
|
||||
from test_framework.script_util import (
|
||||
key_to_p2pk_script,
|
||||
key_to_p2pkh_script,
|
||||
key_to_p2wpkh_script,
|
||||
keys_to_multisig_script,
|
||||
script_to_p2sh_script,
|
||||
|
@ -47,7 +41,6 @@ from test_framework.util import (
|
|||
assert_greater_than_or_equal,
|
||||
assert_is_hex_string,
|
||||
assert_raises_rpc_error,
|
||||
try_rpc,
|
||||
)
|
||||
from test_framework.wallet_util import (
|
||||
get_generate_key,
|
||||
|
@ -78,9 +71,6 @@ txs_mined = {} # txindex from txid to blockhash
|
|||
|
||||
|
||||
class SegWitTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 3
|
||||
|
@ -159,18 +149,12 @@ class SegWitTest(BitcoinTestFramework):
|
|||
assert_equal(self.nodes[i].deriveaddresses(sh_wpkh_desc)[0], key.p2sh_p2wpkh_addr)
|
||||
assert_equal(self.nodes[i].deriveaddresses(wpkh_desc)[0], key.p2wpkh_addr)
|
||||
|
||||
if self.options.descriptors:
|
||||
res = self.nodes[i].importdescriptors([
|
||||
{"desc": p2sh_ms_desc, "timestamp": "now"},
|
||||
{"desc": bip173_ms_desc, "timestamp": "now"},
|
||||
{"desc": sh_wpkh_desc, "timestamp": "now"},
|
||||
{"desc": wpkh_desc, "timestamp": "now"},
|
||||
])
|
||||
else:
|
||||
# The nature of the legacy wallet is that this import results in also adding all of the necessary scripts
|
||||
res = self.nodes[i].importmulti([
|
||||
{"desc": p2sh_ms_desc, "timestamp": "now"},
|
||||
])
|
||||
assert all([r["success"] for r in res])
|
||||
|
||||
p2sh_ids.append([])
|
||||
|
@ -315,286 +299,6 @@ class SegWitTest(BitcoinTestFramework):
|
|||
# Mine a block to clear the gbt cache again.
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
if not self.options.descriptors:
|
||||
self.log.info("Verify behaviour of importaddress and listunspent")
|
||||
|
||||
# Some public keys to be used later
|
||||
pubkeys = [
|
||||
"0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
|
||||
"02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
|
||||
"04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
|
||||
"02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
|
||||
"036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
|
||||
"0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
|
||||
"0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
|
||||
]
|
||||
|
||||
# Import a compressed key and an uncompressed key, generate some multisig addresses
|
||||
self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
|
||||
uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
|
||||
self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
|
||||
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
|
||||
assert not self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed']
|
||||
assert self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed']
|
||||
|
||||
self.nodes[0].importpubkey(pubkeys[0])
|
||||
compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
|
||||
self.nodes[0].importpubkey(pubkeys[1])
|
||||
compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
|
||||
self.nodes[0].importpubkey(pubkeys[2])
|
||||
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]
|
||||
|
||||
spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress
|
||||
spendable_after_importaddress = [] # These outputs should be seen after importaddress
|
||||
solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable
|
||||
unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress
|
||||
solvable_anytime = [] # These outputs should be solvable after importpubkey
|
||||
unseen_anytime = [] # These outputs should never be seen
|
||||
|
||||
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
|
||||
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
|
||||
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
|
||||
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address'])
|
||||
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
|
||||
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address'])
|
||||
|
||||
# Test multisig_without_privkey
|
||||
# We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
|
||||
# Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.
|
||||
|
||||
multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address']
|
||||
script = keys_to_multisig_script([pubkeys[3], pubkeys[4]])
|
||||
solvable_after_importaddress.append(script_to_p2sh_script(script))
|
||||
|
||||
for i in compressed_spendable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
# p2sh multisig with compressed keys should always be spendable
|
||||
spendable_anytime.extend([p2sh])
|
||||
# bare multisig can be watched and signed, but is not treated as ours
|
||||
solvable_after_importaddress.extend([bare])
|
||||
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
|
||||
spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# normal P2PKH and P2PK with compressed keys should always be spendable
|
||||
spendable_anytime.extend([p2pkh, p2pk])
|
||||
# P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress
|
||||
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
|
||||
# P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable
|
||||
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
|
||||
|
||||
for i in uncompressed_spendable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
# p2sh multisig with uncompressed keys should always be spendable
|
||||
spendable_anytime.extend([p2sh])
|
||||
# bare multisig can be watched and signed, but is not treated as ours
|
||||
solvable_after_importaddress.extend([bare])
|
||||
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
|
||||
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# normal P2PKH and P2PK with uncompressed keys should always be spendable
|
||||
spendable_anytime.extend([p2pkh, p2pk])
|
||||
# P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
|
||||
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
|
||||
# Witness output types with uncompressed keys are never seen
|
||||
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
|
||||
|
||||
for i in compressed_solvable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
# Multisig without private is not seen after addmultisigaddress, but seen after importaddress
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen
|
||||
solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh])
|
||||
# P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress
|
||||
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
|
||||
|
||||
for i in uncompressed_solvable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
# Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress
|
||||
solvable_after_importaddress.extend([bare, p2sh])
|
||||
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
|
||||
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# normal P2PKH and P2PK with uncompressed keys should always be seen
|
||||
solvable_anytime.extend([p2pkh, p2pk])
|
||||
# P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
|
||||
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
|
||||
# Witness output types with uncompressed keys are never seen
|
||||
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
|
||||
|
||||
op1 = CScript([OP_1])
|
||||
op0 = CScript([OP_0])
|
||||
# 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
|
||||
unsolvable_address_key = bytes.fromhex("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
|
||||
unsolvablep2pkh = key_to_p2pkh_script(unsolvable_address_key)
|
||||
unsolvablep2wshp2pkh = script_to_p2wsh_script(unsolvablep2pkh)
|
||||
p2shop0 = script_to_p2sh_script(op0)
|
||||
p2wshop1 = script_to_p2wsh_script(op1)
|
||||
unsolvable_after_importaddress.append(unsolvablep2pkh)
|
||||
unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
|
||||
unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script
|
||||
unsolvable_after_importaddress.append(p2wshop1)
|
||||
unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided
|
||||
unsolvable_after_importaddress.append(p2shop0)
|
||||
|
||||
spendable_txid = []
|
||||
solvable_txid = []
|
||||
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
|
||||
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
|
||||
self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)
|
||||
|
||||
importlist = []
|
||||
for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
bare = bytes.fromhex(v['hex'])
|
||||
importlist.append(bare.hex())
|
||||
importlist.append(script_to_p2wsh_script(bare).hex())
|
||||
else:
|
||||
pubkey = bytes.fromhex(v['pubkey'])
|
||||
p2pk = key_to_p2pk_script(pubkey)
|
||||
p2pkh = key_to_p2pkh_script(pubkey)
|
||||
importlist.append(p2pk.hex())
|
||||
importlist.append(p2pkh.hex())
|
||||
importlist.append(key_to_p2wpkh_script(pubkey).hex())
|
||||
importlist.append(script_to_p2wsh_script(p2pk).hex())
|
||||
importlist.append(script_to_p2wsh_script(p2pkh).hex())
|
||||
|
||||
importlist.append(unsolvablep2pkh.hex())
|
||||
importlist.append(unsolvablep2wshp2pkh.hex())
|
||||
importlist.append(op1.hex())
|
||||
importlist.append(p2wshop1.hex())
|
||||
|
||||
for i in importlist:
|
||||
# import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
|
||||
# exceptions and continue.
|
||||
try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True)
|
||||
|
||||
self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
|
||||
self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey
|
||||
|
||||
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
|
||||
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
|
||||
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
|
||||
self.mine_and_test_listunspent(unseen_anytime, 0)
|
||||
|
||||
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
|
||||
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
|
||||
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
|
||||
self.mine_and_test_listunspent(unseen_anytime, 0)
|
||||
|
||||
# Repeat some tests. This time we don't add witness scripts with importaddress
|
||||
# Import a compressed key and an uncompressed key, generate some multisig addresses
|
||||
self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
|
||||
uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
|
||||
self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
|
||||
compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]
|
||||
|
||||
self.nodes[0].importpubkey(pubkeys[5])
|
||||
compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
|
||||
self.nodes[0].importpubkey(pubkeys[6])
|
||||
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]
|
||||
|
||||
unseen_anytime = [] # These outputs should never be seen
|
||||
solvable_anytime = [] # These outputs should be solvable after importpubkey
|
||||
unseen_anytime = [] # These outputs should never be seen
|
||||
|
||||
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
|
||||
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
|
||||
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
|
||||
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address'])
|
||||
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
|
||||
|
||||
premature_witaddress = []
|
||||
|
||||
for i in compressed_spendable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
premature_witaddress.append(script_to_p2sh(p2wsh))
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# P2WPKH, P2SH_P2WPKH are always spendable
|
||||
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
|
||||
|
||||
for i in uncompressed_spendable_address + uncompressed_solvable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
|
||||
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
|
||||
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
|
||||
|
||||
for i in compressed_solvable_address:
|
||||
v = self.nodes[0].getaddressinfo(i)
|
||||
if v['isscript']:
|
||||
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
|
||||
premature_witaddress.append(script_to_p2sh(p2wsh))
|
||||
else:
|
||||
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
|
||||
# P2SH_P2PK, P2SH_P2PKH with compressed keys are always solvable
|
||||
solvable_anytime.extend([p2wpkh, p2sh_p2wpkh])
|
||||
|
||||
self.mine_and_test_listunspent(spendable_anytime, 2)
|
||||
self.mine_and_test_listunspent(solvable_anytime, 1)
|
||||
self.mine_and_test_listunspent(unseen_anytime, 0)
|
||||
|
||||
# Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
|
||||
v1_addr = program_to_witness(1, [3, 5])
|
||||
v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])], {v1_addr: 1})
|
||||
v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
|
||||
assert_equal(v1_decoded['vout'][0]['scriptPubKey']['address'], v1_addr)
|
||||
assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")
|
||||
|
||||
# Check that spendable outputs are really spendable
|
||||
self.create_and_mine_tx_from_txids(spendable_txid)
|
||||
|
||||
# import all the private keys so solvable addresses become spendable
|
||||
self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
|
||||
self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
|
||||
self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
|
||||
self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
|
||||
self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
|
||||
self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
|
||||
self.create_and_mine_tx_from_txids(solvable_txid)
|
||||
|
||||
# Test that importing native P2WPKH/P2WSH scripts works
|
||||
for use_p2wsh in [False, True]:
|
||||
if use_p2wsh:
|
||||
scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
|
||||
transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
|
||||
else:
|
||||
scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
|
||||
transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"
|
||||
|
||||
self.nodes[1].importaddress(scriptPubKey, "", False)
|
||||
rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
|
||||
rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"]
|
||||
txid = self.nodes[1].sendrawtransaction(rawtxfund)
|
||||
|
||||
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
|
||||
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
|
||||
|
||||
# Assert it is properly saved
|
||||
self.restart_node(1)
|
||||
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
|
||||
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
|
||||
|
||||
def mine_and_test_listunspent(self, script_list, ismine):
|
||||
utxo = find_spendable_utxo(self.nodes[0], 50)
|
||||
tx = CTransaction()
|
||||
|
|
|
@ -13,13 +13,11 @@ from test_framework.util import assert_equal
|
|||
|
||||
|
||||
class SettingsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
self.wallet_names = []
|
||||
self.uses_wallet = None
|
||||
|
||||
def test_wallet_settings(self, settings_path):
|
||||
if not self.is_wallet_compiled():
|
||||
|
|
|
@ -1311,7 +1311,6 @@ UTXOData = namedtuple('UTXOData', 'outpoint,output,spender')
|
|||
|
||||
class TaprootTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
parser.add_argument("--dumptests", dest="dump_tests", default=False, action="store_true",
|
||||
help="Dump generated test cases to directory set by TEST_DUMP_DIR environment variable")
|
||||
|
||||
|
|
|
@ -73,12 +73,10 @@ def cli_get_info_string_to_dict(cli_get_info_string):
|
|||
|
||||
|
||||
class TestBitcoinCli(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
self.uses_wallet = None
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_cli()
|
||||
|
@ -180,7 +178,7 @@ class TestBitcoinCli(BitcoinTestFramework):
|
|||
assert_raises_process_error(1, "Invalid value for -color option. Valid values: always, auto, never.", self.nodes[0].cli('-getinfo', '-color=foo').send_cli)
|
||||
|
||||
self.log.info("Test -getinfo returns expected network and blockchain info")
|
||||
if self.is_specified_wallet_compiled():
|
||||
if self.is_wallet_compiled():
|
||||
self.import_deterministic_coinbase_privkeys()
|
||||
self.nodes[0].encryptwallet(password)
|
||||
cli_get_info_string = self.nodes[0].cli('-getinfo').send_cli()
|
||||
|
@ -206,7 +204,7 @@ class TestBitcoinCli(BitcoinTestFramework):
|
|||
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
|
||||
assert_equal(cli_get_info["Proxies"], "127.0.0.1:9050 (ipv4, ipv6, onion, cjdns), 127.0.0.1:7656 (i2p)")
|
||||
|
||||
if self.is_specified_wallet_compiled():
|
||||
if self.is_wallet_compiled():
|
||||
self.log.info("Test -getinfo and bitcoin-cli getwalletinfo return expected wallet info")
|
||||
# Explicitly set the output type in order to have consistent tx vsize / fees
|
||||
# for both legacy and descriptor wallets (disables the change address type detection algorithm)
|
||||
|
|
|
@ -107,9 +107,6 @@ int trace_aps_create_tx(struct pt_regs *ctx) {
|
|||
|
||||
|
||||
class CoinSelectionTracepointTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.setup_clean_chain = True
|
||||
|
|
|
@ -50,12 +50,10 @@ from test_framework.wallet import MiniWallet, COIN
|
|||
|
||||
|
||||
class MempoolPersistTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
self.extra_args = [[], ["-persistmempool=0"], []]
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
self.mini_wallet = MiniWallet(self.nodes[2])
|
||||
|
|
|
@ -16,11 +16,9 @@ from test_framework.wallet import MiniWallet
|
|||
MAX_INITIAL_BROADCAST_DELAY = 15 * 60 # 15 minutes in seconds
|
||||
|
||||
class MempoolUnbroadcastTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
self.wallet = MiniWallet(self.nodes[0])
|
||||
|
|
|
@ -52,8 +52,6 @@ class MiningMainnetTest(BitcoinTestFramework):
|
|||
help='Block data file (default: %(default)s)',
|
||||
)
|
||||
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def mine(self, height, prev_hash, blocks, node, fees=0):
|
||||
self.log.debug(f"height={height}")
|
||||
block = CBlock()
|
||||
|
|
|
@ -9,7 +9,7 @@ import json
|
|||
import os
|
||||
|
||||
from test_framework.address import address_to_scriptpubkey
|
||||
from test_framework.descriptors import descsum_create, drop_origins
|
||||
from test_framework.descriptors import descsum_create
|
||||
from test_framework.key import ECPubKey
|
||||
from test_framework.messages import COIN
|
||||
from test_framework.script_util import keys_to_multisig_script
|
||||
|
@ -25,14 +25,10 @@ from test_framework.wallet import (
|
|||
)
|
||||
|
||||
class RpcCreateMultiSigTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 3
|
||||
self.supports_cli = False
|
||||
self.enable_wallet_if_possible()
|
||||
|
||||
def create_keys(self, num_keys):
|
||||
self.pub = []
|
||||
|
@ -42,29 +38,20 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
self.pub.append(pubkey.hex())
|
||||
self.priv.append(privkey)
|
||||
|
||||
def create_wallet(self, node, wallet_name):
|
||||
node.createwallet(wallet_name=wallet_name, disable_private_keys=True)
|
||||
return node.get_wallet_rpc(wallet_name)
|
||||
|
||||
def run_test(self):
|
||||
node0, node1, _node2 = self.nodes
|
||||
self.wallet = MiniWallet(test_node=node0)
|
||||
|
||||
if self.is_wallet_compiled():
|
||||
self.check_addmultisigaddress_errors()
|
||||
|
||||
self.log.info('Generating blocks ...')
|
||||
self.generate(self.wallet, 149)
|
||||
|
||||
wallet_multi = self.create_wallet(node1, 'wmulti') if self._requires_wallet else None
|
||||
self.create_keys(21) # max number of allowed keys + 1
|
||||
m_of_n = [(2, 3), (3, 3), (2, 5), (3, 5), (10, 15), (15, 15)]
|
||||
for (sigs, keys) in m_of_n:
|
||||
for output_type in ["bech32", "p2sh-segwit", "legacy"]:
|
||||
self.do_multisig(keys, sigs, output_type, wallet_multi)
|
||||
self.do_multisig(keys, sigs, output_type)
|
||||
|
||||
self.test_multisig_script_limit(wallet_multi)
|
||||
self.test_mixing_uncompressed_and_compressed_keys(node0, wallet_multi)
|
||||
self.test_mixing_uncompressed_and_compressed_keys(node0)
|
||||
self.test_sortedmulti_descriptors_bip67()
|
||||
|
||||
# Check that bech32m is currently not allowed
|
||||
|
@ -80,22 +67,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
res = self.nodes[0].createmultisig(nrequired=nkeys, keys=keys, address_type='bech32')
|
||||
assert_equal(res['redeemScript'], expected_ms_script.hex())
|
||||
|
||||
def check_addmultisigaddress_errors(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info('Check that addmultisigaddress fails when the private keys are missing')
|
||||
addresses = [self.nodes[1].getnewaddress(address_type='legacy') for _ in range(2)]
|
||||
assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
|
||||
for a in addresses:
|
||||
# Importing all addresses should not change the result
|
||||
self.nodes[0].importaddress(a)
|
||||
assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
|
||||
|
||||
# Bech32m address type is disallowed for legacy wallets
|
||||
pubs = [self.nodes[1].getaddressinfo(addr)["pubkey"] for addr in addresses]
|
||||
assert_raises_rpc_error(-5, "Bech32m multisig addresses cannot be created with legacy wallets", self.nodes[0].addmultisigaddress, 2, pubs, "", "bech32m")
|
||||
|
||||
def test_multisig_script_limit(self, wallet_multi):
|
||||
def test_multisig_script_limit(self):
|
||||
node1 = self.nodes[1]
|
||||
pubkeys = self.pub[0:20]
|
||||
|
||||
|
@ -103,25 +75,14 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
assert_raises_rpc_error(-8, "redeemScript exceeds size limit: 684 > 520", node1.createmultisig, 16, pubkeys, 'legacy')
|
||||
|
||||
self.log.info('Test valid 16-20 multisig p2sh-legacy and bech32 (no wallet)')
|
||||
self.do_multisig(nkeys=20, nsigs=16, output_type="p2sh-segwit", wallet_multi=None)
|
||||
self.do_multisig(nkeys=20, nsigs=16, output_type="bech32", wallet_multi=None)
|
||||
self.do_multisig(nkeys=20, nsigs=16, output_type="p2sh-segwit")
|
||||
self.do_multisig(nkeys=20, nsigs=16, output_type="bech32")
|
||||
|
||||
self.log.info('Test invalid 16-21 multisig p2sh-legacy and bech32 (no wallet)')
|
||||
assert_raises_rpc_error(-8, "Number of keys involved in the multisignature address creation > 20", node1.createmultisig, 16, self.pub, 'p2sh-segwit')
|
||||
assert_raises_rpc_error(-8, "Number of keys involved in the multisignature address creation > 20", node1.createmultisig, 16, self.pub, 'bech32')
|
||||
|
||||
# Check legacy wallet related command
|
||||
self.log.info('Test legacy redeem script max size limit (with wallet)')
|
||||
if wallet_multi is not None and not self.options.descriptors:
|
||||
assert_raises_rpc_error(-8, "redeemScript exceeds size limit: 684 > 520", wallet_multi.addmultisigaddress, 16, pubkeys, '', 'legacy')
|
||||
|
||||
self.log.info('Test legacy wallet unsupported operation. 16-20 multisig p2sh-legacy and bech32 generation')
|
||||
# Due an internal limitation on legacy wallets, the redeem script limit also applies to p2sh-segwit and bech32 (even when the scripts are valid)
|
||||
# We take this as a "good thing" to tell users to upgrade to descriptors.
|
||||
assert_raises_rpc_error(-4, "Unsupported multisig script size for legacy wallet. Upgrade to descriptors to overcome this limitation for p2sh-segwit or bech32 scripts", wallet_multi.addmultisigaddress, 16, pubkeys, '', 'p2sh-segwit')
|
||||
assert_raises_rpc_error(-4, "Unsupported multisig script size for legacy wallet. Upgrade to descriptors to overcome this limitation for p2sh-segwit or bech32 scripts", wallet_multi.addmultisigaddress, 16, pubkeys, '', 'bech32')
|
||||
|
||||
def do_multisig(self, nkeys, nsigs, output_type, wallet_multi):
|
||||
def do_multisig(self, nkeys, nsigs, output_type):
|
||||
node0, _node1, node2 = self.nodes
|
||||
pub_keys = self.pub[0: nkeys]
|
||||
priv_keys = self.priv[0: nkeys]
|
||||
|
@ -144,16 +105,6 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
if output_type == 'bech32':
|
||||
assert madd[0:4] == "bcrt" # actually a bech32 address
|
||||
|
||||
if wallet_multi is not None:
|
||||
# compare against addmultisigaddress
|
||||
msigw = wallet_multi.addmultisigaddress(nsigs, pub_keys, None, output_type)
|
||||
maddw = msigw["address"]
|
||||
mredeemw = msigw["redeemScript"]
|
||||
assert_equal(desc, drop_origins(msigw['descriptor']))
|
||||
# addmultisigiaddress and createmultisig work the same
|
||||
assert maddw == madd
|
||||
assert mredeemw == mredeem
|
||||
|
||||
spk = address_to_scriptpubkey(madd)
|
||||
value = decimal.Decimal("0.00004000")
|
||||
tx = self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=spk, amount=int(value * COIN))
|
||||
|
@ -162,9 +113,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
self.generate(node0, 1)
|
||||
|
||||
outval = value - decimal.Decimal("0.00002000") # deduce fee (must be higher than the min relay fee)
|
||||
# send coins to node2 when wallet is enabled
|
||||
node2_balance = node2.getbalances()['mine']['trusted'] if self.is_wallet_compiled() else 0
|
||||
out_addr = node2.getnewaddress() if self.is_wallet_compiled() else getnewdestination('bech32')[2]
|
||||
out_addr = getnewdestination('bech32')[2]
|
||||
rawtx = node2.createrawtransaction([{"txid": tx["txid"], "vout": tx["sent_vout"]}], [{out_addr: outval}])
|
||||
|
||||
prevtx_err = dict(prevtxs[0])
|
||||
|
@ -207,14 +156,10 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
|
||||
assert_raises_rpc_error(-25, "Input not found or already spent", node2.combinerawtransaction, [rawtx2['hex'], rawtx3['hex']])
|
||||
|
||||
# When the wallet is enabled, assert node2 sees the incoming amount
|
||||
if self.is_wallet_compiled():
|
||||
assert_equal(node2.getbalances()['mine']['trusted'], node2_balance + outval)
|
||||
|
||||
txinfo = node0.getrawtransaction(tx, True, blk)
|
||||
self.log.info("n/m=%d/%d %s size=%d vsize=%d weight=%d" % (nsigs, nkeys, output_type, txinfo["size"], txinfo["vsize"], txinfo["weight"]))
|
||||
|
||||
def test_mixing_uncompressed_and_compressed_keys(self, node, wallet_multi):
|
||||
def test_mixing_uncompressed_and_compressed_keys(self, node):
|
||||
self.log.info('Mixed compressed and uncompressed multisigs are not allowed')
|
||||
pk0, pk1, pk2 = [getnewdestination('bech32')[0].hex() for _ in range(3)]
|
||||
|
||||
|
@ -229,12 +174,6 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
# Results should be the same as this legacy one
|
||||
legacy_addr = node.createmultisig(2, keys, 'legacy')['address']
|
||||
|
||||
if wallet_multi is not None:
|
||||
# 'addmultisigaddress' should return the same address
|
||||
result = wallet_multi.addmultisigaddress(2, keys, '', 'legacy')
|
||||
assert_equal(legacy_addr, result['address'])
|
||||
assert 'warnings' not in result
|
||||
|
||||
# Generate addresses with the segwit types. These should all make legacy addresses
|
||||
err_msg = ["Unable to make chosen address type, please ensure no uncompressed public keys are present."]
|
||||
|
||||
|
@ -243,11 +182,6 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
|
|||
assert_equal(legacy_addr, result['address'])
|
||||
assert_equal(result['warnings'], err_msg)
|
||||
|
||||
if wallet_multi is not None:
|
||||
result = wallet_multi.addmultisigaddress(nrequired=2, keys=keys, address_type=addr_type)
|
||||
assert_equal(legacy_addr, result['address'])
|
||||
assert_equal(result['warnings'], err_msg)
|
||||
|
||||
def test_sortedmulti_descriptors_bip67(self):
|
||||
self.log.info('Testing sortedmulti descriptors with BIP 67 test vectors')
|
||||
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_bip67.json'), encoding='utf-8') as f:
|
||||
|
|
|
@ -7,13 +7,11 @@ from test_framework.test_framework import BitcoinTestFramework
|
|||
from test_framework.util import assert_raises_rpc_error
|
||||
|
||||
class DeprecatedRpcTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.setup_clean_chain = True
|
||||
self.extra_args = [[]]
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
# This test should be used to verify the errors of the currently
|
||||
|
|
|
@ -43,12 +43,10 @@ def process_mapping(fname):
|
|||
|
||||
|
||||
class HelpRpcTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.supports_cli = False
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
self.test_client_conversion_table()
|
||||
|
|
|
@ -40,12 +40,10 @@ INVALID_ADDRESS = 'asfah14i8fajz0123f'
|
|||
INVALID_ADDRESS_2 = '1q049ldschfnwystcqnsvyfpj23mpsg3jcedq9xv'
|
||||
|
||||
class InvalidAddressErrorMessageTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
self.uses_wallet = None
|
||||
|
||||
def check_valid(self, addr):
|
||||
info = self.nodes[0].validateaddress(addr)
|
||||
|
|
|
@ -56,9 +56,6 @@ import os
|
|||
|
||||
|
||||
class PSBTTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
self.extra_args = [
|
||||
|
@ -318,10 +315,6 @@ class PSBTTest(BitcoinTestFramework):
|
|||
p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
|
||||
p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
|
||||
p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
|
||||
if not self.options.descriptors:
|
||||
wmulti.importaddress(p2sh)
|
||||
wmulti.importaddress(p2wsh)
|
||||
wmulti.importaddress(p2sh_p2wsh)
|
||||
p2wpkh = self.nodes[1].getnewaddress("", "bech32")
|
||||
p2pkh = self.nodes[1].getnewaddress("", "legacy")
|
||||
p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit")
|
||||
|
@ -655,7 +648,6 @@ class PSBTTest(BitcoinTestFramework):
|
|||
for i, signer in enumerate(signers):
|
||||
self.nodes[2].unloadwallet("wallet{}".format(i))
|
||||
|
||||
if self.options.descriptors:
|
||||
self.test_utxo_conversion()
|
||||
self.test_psbt_incomplete_after_invalid_modification()
|
||||
|
||||
|
@ -782,10 +774,7 @@ class PSBTTest(BitcoinTestFramework):
|
|||
|
||||
# Make a weird but signable script. sh(wsh(pkh())) descriptor accomplishes this
|
||||
desc = descsum_create("sh(wsh(pkh({})))".format(privkey))
|
||||
if self.options.descriptors:
|
||||
res = self.nodes[0].importdescriptors([{"desc": desc, "timestamp": "now"}])
|
||||
else:
|
||||
res = self.nodes[0].importmulti([{"desc": desc, "timestamp": "now"}])
|
||||
assert res[0]["success"]
|
||||
addr = self.nodes[0].deriveaddresses(desc)[0]
|
||||
addr_info = self.nodes[0].getaddressinfo(addr)
|
||||
|
@ -867,10 +856,7 @@ class PSBTTest(BitcoinTestFramework):
|
|||
assert_equal(psbt2["fee"], psbt3["fee"])
|
||||
|
||||
# Import the external utxo descriptor so that we can sign for it from the test wallet
|
||||
if self.options.descriptors:
|
||||
res = wallet.importdescriptors([{"desc": desc, "timestamp": "now"}])
|
||||
else:
|
||||
res = wallet.importmulti([{"desc": desc, "timestamp": "now"}])
|
||||
assert res[0]["success"]
|
||||
# The provided weight should override the calculated weight for a wallet input
|
||||
psbt3 = wallet.walletcreatefundedpsbt(
|
||||
|
@ -887,10 +873,7 @@ class PSBTTest(BitcoinTestFramework):
|
|||
privkey, pubkey = generate_keypair(wif=True)
|
||||
|
||||
desc = descsum_create("wsh(pkh({}))".format(pubkey.hex()))
|
||||
if self.options.descriptors:
|
||||
res = watchonly.importdescriptors([{"desc": desc, "timestamp": "now"}])
|
||||
else:
|
||||
res = watchonly.importmulti([{"desc": desc, "timestamp": "now"}])
|
||||
assert res[0]["success"]
|
||||
addr = self.nodes[0].deriveaddresses(desc)[0]
|
||||
self.nodes[0].sendtoaddress(addr, 10)
|
||||
|
@ -902,7 +885,6 @@ class PSBTTest(BitcoinTestFramework):
|
|||
self.nodes[0].sendrawtransaction(signed_tx["hex"])
|
||||
|
||||
# Same test but for taproot
|
||||
if self.options.descriptors:
|
||||
privkey, pubkey = generate_keypair(wif=True)
|
||||
|
||||
desc = descsum_create("tr({},pk({}))".format(H_POINT, pubkey.hex()))
|
||||
|
|
|
@ -64,9 +64,6 @@ class multidict(dict):
|
|||
|
||||
|
||||
class RawTransactionsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
self.extra_args = [
|
||||
|
@ -77,6 +74,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
# whitelist peers to speed up tx relay / mempool sync
|
||||
self.noban_tx_relay = True
|
||||
self.supports_cli = False
|
||||
self.uses_wallet = None
|
||||
|
||||
def setup_network(self):
|
||||
super().setup_network()
|
||||
|
@ -91,12 +89,8 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
self.sendrawtransaction_testmempoolaccept_tests()
|
||||
self.decoderawtransaction_tests()
|
||||
self.transaction_version_number_tests()
|
||||
if self.is_specified_wallet_compiled() and not self.options.descriptors:
|
||||
self.import_deterministic_coinbase_privkeys()
|
||||
self.raw_multisig_transaction_legacy_tests()
|
||||
self.getrawtransaction_verbosity_tests()
|
||||
|
||||
|
||||
def getrawtransaction_tests(self):
|
||||
tx = self.wallet.send_self_transfer(from_node=self.nodes[0])
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
@ -493,125 +487,5 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
|
||||
assert_equal(decrawtx['version'], 0xffffffff)
|
||||
|
||||
def raw_multisig_transaction_legacy_tests(self):
|
||||
self.log.info("Test raw multisig transactions (legacy)")
|
||||
# The traditional multisig workflow does not work with descriptor wallets so these are legacy only.
|
||||
# The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here.
|
||||
|
||||
# 2of2 test
|
||||
addr1 = self.nodes[2].getnewaddress()
|
||||
addr2 = self.nodes[2].getnewaddress()
|
||||
|
||||
addr1Obj = self.nodes[2].getaddressinfo(addr1)
|
||||
addr2Obj = self.nodes[2].getaddressinfo(addr2)
|
||||
|
||||
# Tests for createmultisig and addmultisigaddress
|
||||
assert_raises_rpc_error(-5, 'Pubkey "01020304" must have a length of either 33 or 65 bytes', self.nodes[0].createmultisig, 1, ["01020304"])
|
||||
# createmultisig can only take public keys
|
||||
self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
|
||||
# addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here
|
||||
assert_raises_rpc_error(-5, f'Pubkey "{addr1}" must be a hex string', self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1])
|
||||
|
||||
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']
|
||||
|
||||
# use balance deltas instead of absolute values
|
||||
bal = self.nodes[2].getbalance()
|
||||
|
||||
# send 1.2 BTC to msig adr
|
||||
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
|
||||
self.sync_all()
|
||||
self.generate(self.nodes[0], 1)
|
||||
# node2 has both keys of the 2of2 ms addr, tx should affect the balance
|
||||
assert_equal(self.nodes[2].getbalance(), bal + Decimal('1.20000000'))
|
||||
|
||||
|
||||
# 2of3 test from different nodes
|
||||
bal = self.nodes[2].getbalance()
|
||||
addr1 = self.nodes[1].getnewaddress()
|
||||
addr2 = self.nodes[2].getnewaddress()
|
||||
addr3 = self.nodes[2].getnewaddress()
|
||||
|
||||
addr1Obj = self.nodes[1].getaddressinfo(addr1)
|
||||
addr2Obj = self.nodes[2].getaddressinfo(addr2)
|
||||
addr3Obj = self.nodes[2].getaddressinfo(addr3)
|
||||
|
||||
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
|
||||
|
||||
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
|
||||
decTx = self.nodes[0].gettransaction(txId)
|
||||
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
|
||||
self.sync_all()
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
# THIS IS AN INCOMPLETE FEATURE
|
||||
# NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
|
||||
assert_equal(self.nodes[2].getbalance(), bal) # for now, assume the funds of a 2of3 multisig tx are not marked as spendable
|
||||
|
||||
txDetails = self.nodes[0].gettransaction(txId, True)
|
||||
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
|
||||
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000'))
|
||||
|
||||
bal = self.nodes[0].getbalance()
|
||||
inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "amount": vout['value']}]
|
||||
outputs = {self.nodes[0].getnewaddress(): 2.19}
|
||||
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
|
||||
rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
|
||||
assert_equal(rawTxPartialSigned['complete'], False) # node1 only has one key, can't comp. sign the tx
|
||||
|
||||
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
|
||||
assert_equal(rawTxSigned['complete'], True) # node2 can sign the tx compl., own two of three keys
|
||||
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
|
||||
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
|
||||
self.sync_all()
|
||||
self.generate(self.nodes[0], 1)
|
||||
assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') + Decimal('2.19000000')) # block reward + tx
|
||||
|
||||
# 2of2 test for combining transactions
|
||||
bal = self.nodes[2].getbalance()
|
||||
addr1 = self.nodes[1].getnewaddress()
|
||||
addr2 = self.nodes[2].getnewaddress()
|
||||
|
||||
addr1Obj = self.nodes[1].getaddressinfo(addr1)
|
||||
addr2Obj = self.nodes[2].getaddressinfo(addr2)
|
||||
|
||||
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
|
||||
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
|
||||
mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
|
||||
|
||||
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
|
||||
decTx = self.nodes[0].gettransaction(txId)
|
||||
rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
|
||||
self.sync_all()
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
|
||||
|
||||
txDetails = self.nodes[0].gettransaction(txId, True)
|
||||
rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
|
||||
vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000'))
|
||||
|
||||
bal = self.nodes[0].getbalance()
|
||||
inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "redeemScript": mSigObjValid['hex'], "amount": vout['value']}]
|
||||
outputs = {self.nodes[0].getnewaddress(): 2.19}
|
||||
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
|
||||
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
|
||||
self.log.debug(rawTxPartialSigned1)
|
||||
assert_equal(rawTxPartialSigned1['complete'], False) # node1 only has one key, can't comp. sign the tx
|
||||
|
||||
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
|
||||
self.log.debug(rawTxPartialSigned2)
|
||||
assert_equal(rawTxPartialSigned2['complete'], False) # node2 only has one key, can't comp. sign the tx
|
||||
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].combinerawtransaction, [rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex'] + "00"])
|
||||
assert_raises_rpc_error(-22, "Missing transactions", self.nodes[0].combinerawtransaction, [])
|
||||
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
|
||||
self.log.debug(rawTxComb)
|
||||
self.nodes[2].sendrawtransaction(rawTxComb)
|
||||
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
|
||||
self.sync_all()
|
||||
self.generate(self.nodes[0], 1)
|
||||
assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') + Decimal('2.19000000')) # block reward + tx
|
||||
assert_raises_rpc_error(-25, "Input not found or already spent", self.nodes[0].combinerawtransaction, [rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
RawTransactionsTest(__file__).main()
|
||||
|
|
|
@ -151,7 +151,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
self.supports_cli = True
|
||||
self.bind_to_localhost_only = True
|
||||
self.parse_args(test_file)
|
||||
self.default_wallet_name = "default_wallet" if self.options.descriptors else ""
|
||||
self.default_wallet_name = "default_wallet"
|
||||
self.wallet_data_filename = "wallet.dat"
|
||||
# Optional list of wallet names that can be set in set_test_params to
|
||||
# create and import keys to. If unset, default is len(nodes) *
|
||||
|
@ -160,8 +160,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
# are not imported.
|
||||
self.wallet_names = None
|
||||
# By default the wallet is not required. Set to true by skip_if_no_wallet().
|
||||
# When False, we ignore wallet_names regardless of what it is.
|
||||
self._requires_wallet = False
|
||||
# Can also be set to None to indicate that the wallet will be used if available.
|
||||
# When False or None, we ignore wallet_names regardless of what it is.
|
||||
self.uses_wallet = False
|
||||
# Disable ThreadOpenConnections by default, so that adding entries to
|
||||
# addrman will not result in automatic connections to them.
|
||||
self.disable_autoconnect = True
|
||||
|
@ -271,20 +272,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
if self.options.v1transport:
|
||||
self.options.v2transport=False
|
||||
|
||||
if "descriptors" not in self.options:
|
||||
# Wallet is not required by the test at all and the value of self.options.descriptors won't matter.
|
||||
# It still needs to exist and be None in order for tests to work however.
|
||||
# So set it to None to force -disablewallet, because the wallet is not needed.
|
||||
self.options.descriptors = None
|
||||
elif self.options.descriptors is None:
|
||||
if self.is_wallet_compiled():
|
||||
self.options.descriptors = True
|
||||
else:
|
||||
# Tests requiring a wallet will be skipped and the value of self.options.descriptors won't matter
|
||||
# It still needs to exist and be None in order for tests to work however.
|
||||
# So set it to None, which will also set -disablewallet.
|
||||
self.options.descriptors = None
|
||||
|
||||
PortSeed.n = self.options.port_seed
|
||||
|
||||
def get_binary_paths(self):
|
||||
|
@ -472,7 +459,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
"""Override this method to customize test node setup"""
|
||||
self.add_nodes(self.num_nodes, self.extra_args)
|
||||
self.start_nodes()
|
||||
if self._requires_wallet:
|
||||
if self.uses_wallet:
|
||||
self.import_deterministic_coinbase_privkeys()
|
||||
if not self.setup_clean_chain:
|
||||
for n in self.nodes:
|
||||
|
@ -497,7 +484,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
if wallet_name is not False:
|
||||
n = self.nodes[node]
|
||||
if wallet_name is not None:
|
||||
n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
|
||||
n.createwallet(wallet_name=wallet_name, load_on_startup=True)
|
||||
n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase', rescan=True)
|
||||
|
||||
# Only enables wallet support when the module is available
|
||||
|
@ -510,21 +497,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
|
||||
# Public helper methods. These can be accessed by the subclass test scripts.
|
||||
|
||||
def add_wallet_options(self, parser, *, descriptors=True, legacy=True):
|
||||
kwargs = {}
|
||||
if descriptors + legacy == 1:
|
||||
# If only one type can be chosen, set it as default
|
||||
kwargs["default"] = descriptors
|
||||
group = parser.add_mutually_exclusive_group(
|
||||
# If only one type is allowed, require it to be set in test_runner.py
|
||||
required=os.getenv("REQUIRE_WALLET_TYPE_SET") == "1" and "default" in kwargs)
|
||||
if descriptors:
|
||||
group.add_argument("--descriptors", action='store_const', const=True, **kwargs,
|
||||
help="Run test using a descriptor wallet", dest='descriptors')
|
||||
if legacy:
|
||||
group.add_argument("--legacy-wallet", action='store_const', const=False, **kwargs,
|
||||
help="Run test using legacy wallets", dest='descriptors')
|
||||
|
||||
def add_nodes(self, num_nodes: int, extra_args=None, *, rpchost=None, versions=None):
|
||||
"""Instantiate TestNode objects.
|
||||
|
||||
|
@ -598,8 +570,8 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
use_cli=self.options.usecli,
|
||||
start_perf=self.options.perf,
|
||||
use_valgrind=self.options.valgrind,
|
||||
descriptors=self.options.descriptors,
|
||||
v2transport=self.options.v2transport,
|
||||
uses_wallet=self.uses_wallet,
|
||||
)
|
||||
self.nodes.append(test_node_i)
|
||||
if not test_node_i.version_is_at_least(170000):
|
||||
|
@ -903,7 +875,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
binaries=self.get_binaries(),
|
||||
coverage_dir=None,
|
||||
cwd=self.options.tmpdir,
|
||||
descriptors=self.options.descriptors,
|
||||
uses_wallet=self.uses_wallet,
|
||||
))
|
||||
self.start_node(CACHE_NODE_ID)
|
||||
cache_node = self.nodes[CACHE_NODE_ID]
|
||||
|
@ -1006,11 +978,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
|
||||
def skip_if_no_wallet(self):
|
||||
"""Skip the running test if wallet has not been compiled."""
|
||||
self._requires_wallet = True
|
||||
self.uses_wallet = True
|
||||
if not self.is_wallet_compiled():
|
||||
raise SkipTest("wallet has not been compiled.")
|
||||
if not self.options.descriptors:
|
||||
self.skip_if_no_bdb()
|
||||
|
||||
def skip_if_no_bdb(self):
|
||||
"""Skip the running test if BDB has not been compiled."""
|
||||
|
@ -1067,14 +1037,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
|
|||
"""Checks whether the wallet module was compiled."""
|
||||
return self.config["components"].getboolean("ENABLE_WALLET")
|
||||
|
||||
def is_specified_wallet_compiled(self):
|
||||
"""Checks whether wallet support for the specified type
|
||||
(legacy or descriptor wallet) was compiled."""
|
||||
if self.options.descriptors:
|
||||
return self.is_wallet_compiled()
|
||||
else:
|
||||
return self.is_bdb_compiled()
|
||||
|
||||
def is_wallet_tool_compiled(self):
|
||||
"""Checks whether bitcoin-wallet was compiled."""
|
||||
return self.config["components"].getboolean("ENABLE_WALLET_TOOL")
|
||||
|
|
|
@ -77,7 +77,7 @@ class TestNode():
|
|||
To make things easier for the test writer, any unrecognised messages will
|
||||
be dispatched to the RPC connection."""
|
||||
|
||||
def __init__(self, i, datadir_path, *, chain, rpchost, timewait, timeout_factor, binaries, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None, descriptors=False, v2transport=False):
|
||||
def __init__(self, i, datadir_path, *, chain, rpchost, timewait, timeout_factor, binaries, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None, v2transport=False, uses_wallet=False):
|
||||
"""
|
||||
Kwargs:
|
||||
start_perf (bool): If True, begin profiling the node with `perf` as soon as
|
||||
|
@ -96,7 +96,6 @@ class TestNode():
|
|||
self.binaries = binaries
|
||||
self.coverage_dir = coverage_dir
|
||||
self.cwd = cwd
|
||||
self.descriptors = descriptors
|
||||
self.has_explicit_bind = False
|
||||
if extra_conf is not None:
|
||||
append_config(self.datadir_path, extra_conf)
|
||||
|
@ -119,7 +118,7 @@ class TestNode():
|
|||
"-debugexclude=rand",
|
||||
"-uacomment=testnode%d" % i, # required for subversion uniqueness across peers
|
||||
]
|
||||
if self.descriptors is None:
|
||||
if uses_wallet is not None and not uses_wallet:
|
||||
self.args.append("-disablewallet")
|
||||
|
||||
# Use valgrind, expect for previous release binaries
|
||||
|
@ -210,10 +209,10 @@ class TestNode():
|
|||
def __getattr__(self, name):
|
||||
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
|
||||
if self.use_cli:
|
||||
return getattr(RPCOverloadWrapper(self.cli, True, self.descriptors), name)
|
||||
return getattr(RPCOverloadWrapper(self.cli, True), name)
|
||||
else:
|
||||
assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
|
||||
return getattr(RPCOverloadWrapper(self.rpc, descriptors=self.descriptors), name)
|
||||
return getattr(RPCOverloadWrapper(self.rpc), name)
|
||||
|
||||
def start(self, extra_args=None, *, cwd=None, stdout=None, stderr=None, env=None, **kwargs):
|
||||
"""Start the node."""
|
||||
|
@ -375,11 +374,11 @@ class TestNode():
|
|||
|
||||
def get_wallet_rpc(self, wallet_name):
|
||||
if self.use_cli:
|
||||
return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True, self.descriptors)
|
||||
return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True)
|
||||
else:
|
||||
assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
|
||||
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
|
||||
return RPCOverloadWrapper(self.rpc / wallet_path, descriptors=self.descriptors)
|
||||
return RPCOverloadWrapper(self.rpc / wallet_path)
|
||||
|
||||
def version_is_at_least(self, ver):
|
||||
return self.version is None or self.version >= ver
|
||||
|
@ -926,10 +925,9 @@ class TestNodeCLI():
|
|||
return cli_stdout.rstrip("\n")
|
||||
|
||||
class RPCOverloadWrapper():
|
||||
def __init__(self, rpc, cli=False, descriptors=False):
|
||||
def __init__(self, rpc, cli=False):
|
||||
self.rpc = rpc
|
||||
self.is_cli = cli
|
||||
self.descriptors = descriptors
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.rpc, name)
|
||||
|
@ -937,11 +935,6 @@ class RPCOverloadWrapper():
|
|||
def createwallet_passthrough(self, *args, **kwargs):
|
||||
return self.__getattr__("createwallet")(*args, **kwargs)
|
||||
|
||||
def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase='', avoid_reuse=None, descriptors=None, load_on_startup=None, external_signer=None):
|
||||
if descriptors is None:
|
||||
descriptors = self.descriptors
|
||||
return self.__getattr__('createwallet')(wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors, load_on_startup, external_signer)
|
||||
|
||||
def importprivkey(self, privkey, label=None, rescan=None):
|
||||
wallet_info = self.getwalletinfo()
|
||||
if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
|
||||
|
|
|
@ -18,11 +18,8 @@ class TestShell:
|
|||
start a single TestShell at a time."""
|
||||
|
||||
class __TestShell(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
pass
|
||||
self.uses_wallet = None
|
||||
|
||||
def run_test(self):
|
||||
pass
|
||||
|
|
|
@ -86,7 +86,6 @@ EXTENDED_SCRIPTS = [
|
|||
'feature_pruning.py',
|
||||
'feature_dbcrash.py',
|
||||
'feature_index_prune.py',
|
||||
'wallet_pruning.py --legacy-wallet',
|
||||
]
|
||||
|
||||
BASE_SCRIPTS = [
|
||||
|
@ -97,8 +96,7 @@ BASE_SCRIPTS = [
|
|||
'feature_taproot.py',
|
||||
'feature_block.py',
|
||||
'mempool_ephemeral_dust.py',
|
||||
'wallet_conflicts.py --legacy-wallet',
|
||||
'wallet_conflicts.py --descriptors',
|
||||
'wallet_conflicts.py',
|
||||
'p2p_opportunistic_1p1c.py',
|
||||
'p2p_node_network_limited.py --v1transport',
|
||||
'p2p_node_network_limited.py --v2transport',
|
||||
|
@ -108,39 +106,28 @@ BASE_SCRIPTS = [
|
|||
'feature_maxuploadtarget.py',
|
||||
'feature_assumeutxo.py',
|
||||
'mempool_updatefromblock.py',
|
||||
'mempool_persist.py --descriptors',
|
||||
'mempool_persist.py',
|
||||
# vv Tests less than 60s vv
|
||||
'rpc_psbt.py --legacy-wallet',
|
||||
'rpc_psbt.py --descriptors',
|
||||
'wallet_fundrawtransaction.py --legacy-wallet',
|
||||
'wallet_fundrawtransaction.py --descriptors',
|
||||
'wallet_bumpfee.py --legacy-wallet',
|
||||
'wallet_bumpfee.py --descriptors',
|
||||
'wallet_import_rescan.py --legacy-wallet',
|
||||
'wallet_backup.py --legacy-wallet',
|
||||
'wallet_backup.py --descriptors',
|
||||
'feature_segwit.py --legacy-wallet',
|
||||
'feature_segwit.py --descriptors --v1transport',
|
||||
'feature_segwit.py --descriptors --v2transport',
|
||||
'rpc_psbt.py',
|
||||
'wallet_fundrawtransaction.py',
|
||||
'wallet_bumpfee.py',
|
||||
'wallet_backup.py',
|
||||
'feature_segwit.py --v2transport',
|
||||
'feature_segwit.py --v1transport',
|
||||
'p2p_tx_download.py',
|
||||
'wallet_avoidreuse.py --legacy-wallet',
|
||||
'wallet_avoidreuse.py --descriptors',
|
||||
'wallet_avoidreuse.py',
|
||||
'feature_abortnode.py',
|
||||
'wallet_address_types.py --legacy-wallet',
|
||||
'wallet_address_types.py --descriptors',
|
||||
'wallet_address_types.py',
|
||||
'p2p_orphan_handling.py',
|
||||
'wallet_basic.py --legacy-wallet',
|
||||
'wallet_basic.py --descriptors',
|
||||
'wallet_basic.py',
|
||||
'feature_maxtipage.py',
|
||||
'wallet_multiwallet.py --legacy-wallet',
|
||||
'wallet_multiwallet.py --descriptors',
|
||||
'wallet_multiwallet.py',
|
||||
'wallet_multiwallet.py --usecli',
|
||||
'p2p_dns_seeds.py',
|
||||
'wallet_groups.py --legacy-wallet',
|
||||
'wallet_groups.py --descriptors',
|
||||
'wallet_groups.py',
|
||||
'p2p_blockfilters.py',
|
||||
'feature_assumevalid.py',
|
||||
'wallet_taproot.py --descriptors',
|
||||
'wallet_taproot.py',
|
||||
'feature_bip68_sequence.py',
|
||||
'rpc_packages.py',
|
||||
'rpc_bind.py --ipv4',
|
||||
|
@ -151,55 +138,41 @@ BASE_SCRIPTS = [
|
|||
'feature_csv_activation.py',
|
||||
'p2p_sendheaders.py',
|
||||
'feature_config_args.py',
|
||||
'wallet_listtransactions.py --legacy-wallet',
|
||||
'wallet_listtransactions.py --descriptors',
|
||||
'wallet_miniscript.py --descriptors',
|
||||
'wallet_listtransactions.py',
|
||||
'wallet_miniscript.py',
|
||||
# vv Tests less than 30s vv
|
||||
'p2p_invalid_messages.py',
|
||||
'rpc_createmultisig.py',
|
||||
'p2p_timeouts.py --v1transport',
|
||||
'p2p_timeouts.py --v2transport',
|
||||
'wallet_dump.py --legacy-wallet',
|
||||
'rpc_signer.py',
|
||||
'wallet_signer.py --descriptors',
|
||||
'wallet_importmulti.py --legacy-wallet',
|
||||
'wallet_signer.py',
|
||||
'mempool_limit.py',
|
||||
'rpc_txoutproof.py',
|
||||
'rpc_orphans.py',
|
||||
'wallet_listreceivedby.py --legacy-wallet',
|
||||
'wallet_listreceivedby.py --descriptors',
|
||||
'wallet_abandonconflict.py --legacy-wallet',
|
||||
'wallet_abandonconflict.py --descriptors',
|
||||
'wallet_listreceivedby.py',
|
||||
'wallet_abandonconflict.py',
|
||||
'feature_reindex.py',
|
||||
'feature_reindex_readonly.py',
|
||||
'wallet_labels.py --legacy-wallet',
|
||||
'wallet_labels.py --descriptors',
|
||||
'wallet_labels.py',
|
||||
'p2p_compactblocks.py',
|
||||
'p2p_compactblocks_blocksonly.py',
|
||||
'wallet_hd.py --legacy-wallet',
|
||||
'wallet_hd.py --descriptors',
|
||||
'wallet_blank.py --legacy-wallet',
|
||||
'wallet_blank.py --descriptors',
|
||||
'wallet_keypool_topup.py --legacy-wallet',
|
||||
'wallet_keypool_topup.py --descriptors',
|
||||
'wallet_fast_rescan.py --descriptors',
|
||||
'wallet_gethdkeys.py --descriptors',
|
||||
'wallet_createwalletdescriptor.py --descriptors',
|
||||
'wallet_hd.py',
|
||||
'wallet_blank.py',
|
||||
'wallet_keypool_topup.py',
|
||||
'wallet_fast_rescan.py',
|
||||
'wallet_gethdkeys.py',
|
||||
'wallet_createwalletdescriptor.py',
|
||||
'interface_zmq.py',
|
||||
'rpc_invalid_address_message.py',
|
||||
'rpc_validateaddress.py',
|
||||
'interface_bitcoin_cli.py --legacy-wallet',
|
||||
'interface_bitcoin_cli.py --descriptors',
|
||||
'interface_bitcoin_cli.py',
|
||||
'feature_bind_extra.py',
|
||||
'mempool_resurrect.py',
|
||||
'wallet_txn_doublespend.py --mineblock',
|
||||
'tool_bitcoin_chainstate.py',
|
||||
'tool_wallet.py --legacy-wallet',
|
||||
'tool_wallet.py --legacy-wallet --bdbro',
|
||||
'tool_wallet.py --legacy-wallet --bdbro --swap-bdb-endian',
|
||||
'tool_wallet.py --descriptors',
|
||||
'tool_signet_miner.py --legacy-wallet',
|
||||
'tool_signet_miner.py --descriptors',
|
||||
'tool_wallet.py',
|
||||
'tool_signet_miner.py',
|
||||
'wallet_txn_clone.py',
|
||||
'wallet_txn_clone.py --segwit',
|
||||
'rpc_getchaintips.py',
|
||||
|
@ -207,17 +180,13 @@ BASE_SCRIPTS = [
|
|||
'p2p_1p1c_network.py',
|
||||
'interface_rest.py',
|
||||
'mempool_spend_coinbase.py',
|
||||
'wallet_avoid_mixing_output_types.py --descriptors',
|
||||
'wallet_avoid_mixing_output_types.py',
|
||||
'mempool_reorg.py',
|
||||
'p2p_block_sync.py --v1transport',
|
||||
'p2p_block_sync.py --v2transport',
|
||||
'wallet_createwallet.py --legacy-wallet',
|
||||
'wallet_createwallet.py --usecli',
|
||||
'wallet_createwallet.py --descriptors',
|
||||
'wallet_watchonly.py --legacy-wallet',
|
||||
'wallet_watchonly.py --usecli --legacy-wallet',
|
||||
'wallet_reindex.py --legacy-wallet',
|
||||
'wallet_reindex.py --descriptors',
|
||||
'wallet_createwallet.py',
|
||||
'wallet_reindex.py',
|
||||
'wallet_reorgsrestore.py',
|
||||
'interface_http.py',
|
||||
'interface_rpc.py',
|
||||
|
@ -229,12 +198,10 @@ BASE_SCRIPTS = [
|
|||
'rpc_users.py',
|
||||
'rpc_whitelist.py',
|
||||
'feature_proxy.py',
|
||||
'wallet_signrawtransactionwithwallet.py --legacy-wallet',
|
||||
'wallet_signrawtransactionwithwallet.py --descriptors',
|
||||
'wallet_signrawtransactionwithwallet.py',
|
||||
'rpc_signrawtransactionwithkey.py',
|
||||
'rpc_rawtransaction.py --legacy-wallet',
|
||||
'wallet_transactiontime_rescan.py --descriptors',
|
||||
'wallet_transactiontime_rescan.py --legacy-wallet',
|
||||
'rpc_rawtransaction.py',
|
||||
'wallet_transactiontime_rescan.py',
|
||||
'p2p_addrv2_relay.py',
|
||||
'p2p_compactblocks_hb.py --v1transport',
|
||||
'p2p_compactblocks_hb.py --v2transport',
|
||||
|
@ -246,17 +213,15 @@ BASE_SCRIPTS = [
|
|||
'rpc_blockchain.py --v2transport',
|
||||
'rpc_deprecated.py',
|
||||
'wallet_disable.py',
|
||||
'wallet_change_address.py --legacy-wallet',
|
||||
'wallet_change_address.py --descriptors',
|
||||
'wallet_change_address.py',
|
||||
'p2p_addr_relay.py',
|
||||
'p2p_getaddr_caching.py',
|
||||
'p2p_getdata.py',
|
||||
'p2p_addrfetch.py',
|
||||
'rpc_net.py --v1transport',
|
||||
'rpc_net.py --v2transport',
|
||||
'wallet_keypool.py --legacy-wallet',
|
||||
'wallet_keypool.py --descriptors',
|
||||
'wallet_descriptor.py --descriptors',
|
||||
'wallet_keypool.py',
|
||||
'wallet_descriptor.py',
|
||||
'p2p_nobloomfilter_messages.py',
|
||||
TEST_FRAMEWORK_UNIT_TESTS,
|
||||
'p2p_filter.py',
|
||||
|
@ -274,12 +239,10 @@ BASE_SCRIPTS = [
|
|||
'p2p_v2_misbehaving.py',
|
||||
'example_test.py',
|
||||
'mempool_truc.py',
|
||||
'wallet_txn_doublespend.py --legacy-wallet',
|
||||
'wallet_multisig_descriptor_psbt.py --descriptors',
|
||||
'wallet_miniscript_decaying_multisig_descriptor_psbt.py --descriptors',
|
||||
'wallet_txn_doublespend.py --descriptors',
|
||||
'wallet_backwards_compatibility.py --legacy-wallet',
|
||||
'wallet_backwards_compatibility.py --descriptors',
|
||||
'wallet_multisig_descriptor_psbt.py',
|
||||
'wallet_miniscript_decaying_multisig_descriptor_psbt.py',
|
||||
'wallet_txn_doublespend.py',
|
||||
'wallet_backwards_compatibility.py',
|
||||
'wallet_txn_clone.py --mineblock',
|
||||
'feature_notifications.py',
|
||||
'rpc_getblockfilter.py',
|
||||
|
@ -295,8 +258,7 @@ BASE_SCRIPTS = [
|
|||
'feature_versionbits_warning.py',
|
||||
'feature_blocksxor.py',
|
||||
'rpc_preciousblock.py',
|
||||
'wallet_importprunedfunds.py --legacy-wallet',
|
||||
'wallet_importprunedfunds.py --descriptors',
|
||||
'wallet_importprunedfunds.py',
|
||||
'p2p_leak_tx.py --v1transport',
|
||||
'p2p_leak_tx.py --v2transport',
|
||||
'p2p_eviction.py',
|
||||
|
@ -308,64 +270,49 @@ BASE_SCRIPTS = [
|
|||
'wallet_signmessagewithaddress.py',
|
||||
'rpc_signmessagewithprivkey.py',
|
||||
'rpc_generate.py',
|
||||
'wallet_balance.py --legacy-wallet',
|
||||
'wallet_balance.py --descriptors',
|
||||
'wallet_balance.py',
|
||||
'p2p_initial_headers_sync.py',
|
||||
'feature_nulldummy.py',
|
||||
'mempool_accept.py',
|
||||
'mempool_expiry.py',
|
||||
'wallet_import_with_label.py --legacy-wallet',
|
||||
'wallet_importdescriptors.py --descriptors',
|
||||
'wallet_upgradewallet.py --legacy-wallet',
|
||||
'wallet_importdescriptors.py',
|
||||
'wallet_crosschain.py',
|
||||
'mining_basic.py',
|
||||
'mining_mainnet.py',
|
||||
'feature_signet.py',
|
||||
'p2p_mutated_blocks.py',
|
||||
'wallet_implicitsegwit.py --legacy-wallet',
|
||||
'rpc_named_arguments.py',
|
||||
'feature_startupnotify.py',
|
||||
'wallet_simulaterawtx.py --legacy-wallet',
|
||||
'wallet_simulaterawtx.py --descriptors',
|
||||
'wallet_listsinceblock.py --legacy-wallet',
|
||||
'wallet_listsinceblock.py --descriptors',
|
||||
'wallet_listdescriptors.py --descriptors',
|
||||
'wallet_simulaterawtx.py',
|
||||
'wallet_listsinceblock.py',
|
||||
'wallet_listdescriptors.py',
|
||||
'p2p_leak.py',
|
||||
'wallet_encryption.py --legacy-wallet',
|
||||
'wallet_encryption.py --descriptors',
|
||||
'wallet_encryption.py',
|
||||
'feature_dersig.py',
|
||||
'feature_cltv.py',
|
||||
'rpc_uptime.py',
|
||||
'feature_discover.py',
|
||||
'wallet_resendwallettransactions.py --legacy-wallet',
|
||||
'wallet_resendwallettransactions.py --descriptors',
|
||||
'wallet_fallbackfee.py --legacy-wallet',
|
||||
'wallet_fallbackfee.py --descriptors',
|
||||
'wallet_resendwallettransactions.py',
|
||||
'wallet_fallbackfee.py',
|
||||
'rpc_dumptxoutset.py',
|
||||
'feature_minchainwork.py',
|
||||
'rpc_estimatefee.py',
|
||||
'rpc_getblockstats.py',
|
||||
'feature_port.py',
|
||||
'feature_bind_port_externalip.py',
|
||||
'wallet_create_tx.py --legacy-wallet',
|
||||
'wallet_send.py --legacy-wallet',
|
||||
'wallet_send.py --descriptors',
|
||||
'wallet_sendall.py --legacy-wallet',
|
||||
'wallet_sendall.py --descriptors',
|
||||
'wallet_sendmany.py --descriptors',
|
||||
'wallet_sendmany.py --legacy-wallet',
|
||||
'wallet_create_tx.py --descriptors',
|
||||
'wallet_inactive_hdchains.py --legacy-wallet',
|
||||
'wallet_create_tx.py',
|
||||
'wallet_send.py',
|
||||
'wallet_sendall.py',
|
||||
'wallet_sendmany.py',
|
||||
'wallet_spend_unconfirmed.py',
|
||||
'wallet_rescan_unconfirmed.py --descriptors',
|
||||
'wallet_rescan_unconfirmed.py',
|
||||
'p2p_fingerprint.py',
|
||||
'feature_uacomment.py',
|
||||
'feature_init.py',
|
||||
'wallet_coinbase_category.py --legacy-wallet',
|
||||
'wallet_coinbase_category.py --descriptors',
|
||||
'wallet_coinbase_category.py',
|
||||
'feature_filelock.py',
|
||||
'feature_loadblock.py',
|
||||
'wallet_assumeutxo.py --descriptors',
|
||||
'wallet_assumeutxo.py',
|
||||
'p2p_add_connections.py',
|
||||
'feature_bind_port_discover.py',
|
||||
'p2p_unrequested_blocks.py',
|
||||
|
@ -545,7 +492,7 @@ def main():
|
|||
|
||||
exclude_tests = [test.strip() for test in args.exclude.split(",")]
|
||||
for exclude_test in exclude_tests:
|
||||
# A space in the name indicates it has arguments such as "wallet_basic.py --descriptors"
|
||||
# A space in the name indicates it has arguments such as "rpc_bind.py --ipv4"
|
||||
if ' ' in exclude_test:
|
||||
remove_tests([test for test in test_list if test.replace('.py', '') == exclude_test.replace('.py', '')])
|
||||
else:
|
||||
|
|
|
@ -22,9 +22,6 @@ CHALLENGE_PRIVATE_KEY = (42).to_bytes(32, 'big')
|
|||
|
||||
|
||||
class SignetMinerTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.chain = "signet"
|
||||
self.setup_clean_chain = True
|
||||
|
|
|
@ -5,28 +5,22 @@
|
|||
"""Test bitcoin-wallet."""
|
||||
|
||||
import os
|
||||
import random
|
||||
import stat
|
||||
import string
|
||||
import subprocess
|
||||
import textwrap
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from test_framework.bdb import dump_bdb_kv
|
||||
from test_framework.messages import ser_string
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
assert_greater_than,
|
||||
sha256sum_file,
|
||||
)
|
||||
from test_framework.wallet import getnewdestination
|
||||
|
||||
|
||||
class ToolWalletTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
parser.add_argument("--bdbro", action="store_true", help="Use the BerkeleyRO internal parser when dumping a Berkeley DB wallet file")
|
||||
parser.add_argument("--swap-bdb-endian", action="store_true",help="When making Legacy BDB wallets, always make then byte swapped internally")
|
||||
|
||||
|
@ -43,8 +37,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
|
||||
def bitcoin_wallet_process(self, *args):
|
||||
default_args = ['-datadir={}'.format(self.nodes[0].datadir_path), '-chain=%s' % self.chain]
|
||||
if not self.options.descriptors and 'create' in args:
|
||||
default_args.append('-legacy')
|
||||
if "dump" in args and self.options.bdbro:
|
||||
default_args.append("-withinternalbdb")
|
||||
|
||||
|
@ -83,7 +75,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
|
||||
def get_expected_info_output(self, name="", transactions=0, keypool=2, address=0, imported_privs=0):
|
||||
wallet_name = self.default_wallet_name if name == "" else name
|
||||
if self.options.descriptors:
|
||||
output_types = 4 # p2pkh, p2sh, segwit, bech32m
|
||||
return textwrap.dedent('''\
|
||||
Wallet info
|
||||
|
@ -97,20 +88,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
Transactions: %d
|
||||
Address Book: %d
|
||||
''' % (wallet_name, keypool * output_types, transactions, imported_privs * 3 + address))
|
||||
else:
|
||||
output_types = 3 # p2pkh, p2sh, segwit. Legacy wallets do not support bech32m.
|
||||
return textwrap.dedent('''\
|
||||
Wallet info
|
||||
===========
|
||||
Name: %s
|
||||
Format: bdb
|
||||
Descriptors: no
|
||||
Encrypted: no
|
||||
HD (hd seed available): yes
|
||||
Keypool Size: %d
|
||||
Transactions: %d
|
||||
Address Book: %d
|
||||
''' % (wallet_name, keypool, transactions, (address + imported_privs) * output_types))
|
||||
|
||||
def read_dump(self, filename):
|
||||
dump = OrderedDict()
|
||||
|
@ -168,21 +145,15 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
for k, v in e.items():
|
||||
assert_equal(v, r[k])
|
||||
|
||||
def do_tool_createfromdump(self, wallet_name, dumpfile, file_format=None):
|
||||
def do_tool_createfromdump(self, wallet_name, dumpfile):
|
||||
dumppath = self.nodes[0].datadir_path / dumpfile
|
||||
rt_dumppath = self.nodes[0].datadir_path / "rt-{}.dump".format(wallet_name)
|
||||
|
||||
dump_data = self.read_dump(dumppath)
|
||||
|
||||
args = ["-wallet={}".format(wallet_name),
|
||||
"-dumpfile={}".format(dumppath)]
|
||||
if file_format is not None:
|
||||
args.append("-format={}".format(file_format))
|
||||
args.append("createfromdump")
|
||||
|
||||
load_output = ""
|
||||
if file_format is not None and file_format != dump_data["format"]:
|
||||
load_output += "Warning: Dumpfile wallet format \"{}\" does not match command line specified format \"{}\".\n".format(dump_data["format"], file_format)
|
||||
self.assert_tool_output(load_output, *args)
|
||||
assert (self.nodes[0].wallets_path / wallet_name).is_dir()
|
||||
|
||||
|
@ -204,9 +175,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo')
|
||||
self.assert_raises_tool_error('No method provided. Run `bitcoin-wallet -help` for valid methods.')
|
||||
self.assert_raises_tool_error('Wallet name must be provided when creating a new wallet.', 'create')
|
||||
locked_dir = self.nodes[0].wallets_path
|
||||
error = 'Error initializing wallet database environment "{}"!'.format(locked_dir)
|
||||
if self.options.descriptors:
|
||||
error = f"SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another instance of {self.config['environment']['CLIENT_NAME']}?"
|
||||
self.assert_raises_tool_error(
|
||||
error,
|
||||
|
@ -307,11 +275,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
self.log.debug('Wallet file timestamp after calling getwalletinfo: {}'.format(timestamp_after))
|
||||
|
||||
assert_equal(0, out['txcount'])
|
||||
if not self.options.descriptors:
|
||||
assert_equal(1000, out['keypoolsize'])
|
||||
assert_equal(1000, out['keypoolsize_hd_internal'])
|
||||
assert_equal(True, 'hdseedid' in out)
|
||||
else:
|
||||
assert_equal(4000, out['keypoolsize'])
|
||||
assert_equal(4000, out['keypoolsize_hd_internal'])
|
||||
|
||||
|
@ -320,15 +283,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
assert_equal(shasum_after, shasum_before)
|
||||
self.log.debug('Wallet file shasum unchanged\n')
|
||||
|
||||
def test_salvage(self):
|
||||
# TODO: Check salvage actually salvages and doesn't break things. https://github.com/bitcoin/bitcoin/issues/7463
|
||||
self.log.info('Check salvage')
|
||||
self.start_node(0)
|
||||
self.nodes[0].createwallet("salvage")
|
||||
self.stop_node(0)
|
||||
|
||||
self.assert_tool_output('', '-wallet=salvage', 'salvage')
|
||||
|
||||
def test_dump_createfromdump(self):
|
||||
self.start_node(0)
|
||||
self.nodes[0].createwallet("todump")
|
||||
|
@ -356,7 +310,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
self.log.info('Checking createfromdump arguments')
|
||||
self.assert_raises_tool_error('No dump file provided. To use createfromdump, -dumpfile=<filename> must be provided.', '-wallet=todump', 'createfromdump')
|
||||
non_exist_dump = self.nodes[0].datadir_path / "wallet.nodump"
|
||||
self.assert_raises_tool_error('Unknown wallet file format "notaformat" provided. Please provide one of "bdb" or "sqlite".', '-wallet=todump', '-format=notaformat', '-dumpfile={}'.format(wallet_dump), 'createfromdump')
|
||||
self.assert_raises_tool_error('Dump file {} does not exist.'.format(non_exist_dump), '-wallet=todump', '-dumpfile={}'.format(non_exist_dump), 'createfromdump')
|
||||
wallet_path = self.nodes[0].wallets_path / "todump2"
|
||||
self.assert_raises_tool_error('Failed to create database path \'{}\'. Database already exists.'.format(wallet_path), '-wallet=todump2', '-dumpfile={}'.format(wallet_dump), 'createfromdump')
|
||||
|
@ -364,9 +317,6 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
|
||||
self.log.info('Checking createfromdump')
|
||||
self.do_tool_createfromdump("load", "wallet.dump")
|
||||
if self.is_bdb_compiled():
|
||||
self.do_tool_createfromdump("load-bdb", "wallet.dump", "bdb")
|
||||
self.do_tool_createfromdump("load-sqlite", "wallet.dump", "sqlite")
|
||||
|
||||
self.log.info('Checking createfromdump handling of magic and versions')
|
||||
bad_ver_wallet_dump = self.nodes[0].datadir_path / "wallet-bad_ver1.dump"
|
||||
|
@ -452,39 +402,20 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
self.stop_node(0)
|
||||
|
||||
# Wallet tool should successfully give info for this wallet
|
||||
expected_output = textwrap.dedent(f'''\
|
||||
expected_output = textwrap.dedent('''\
|
||||
Wallet info
|
||||
===========
|
||||
Name: conflicts
|
||||
Format: {"sqlite" if self.options.descriptors else "bdb"}
|
||||
Descriptors: {"yes" if self.options.descriptors else "no"}
|
||||
Format: sqlite
|
||||
Descriptors: yes
|
||||
Encrypted: no
|
||||
HD (hd seed available): yes
|
||||
Keypool Size: {"8" if self.options.descriptors else "1"}
|
||||
Keypool Size: 8
|
||||
Transactions: 4
|
||||
Address Book: 4
|
||||
''')
|
||||
self.assert_tool_output(expected_output, "-wallet=conflicts", "info")
|
||||
|
||||
def test_dump_endianness(self):
|
||||
self.log.info("Testing dumps of the same contents with different BDB endianness")
|
||||
|
||||
self.start_node(0)
|
||||
self.nodes[0].createwallet("endian")
|
||||
self.stop_node(0)
|
||||
|
||||
wallet_dump = self.nodes[0].datadir_path / "endian.dump"
|
||||
self.assert_tool_output("The dumpfile may contain private keys. To ensure the safety of your Bitcoin, do not share the dumpfile.\n", "-wallet=endian", f"-dumpfile={wallet_dump}", "dump")
|
||||
expected_dump = self.read_dump(wallet_dump)
|
||||
|
||||
self.do_tool_createfromdump("native_endian", "endian.dump", "bdb")
|
||||
native_dump = self.read_dump(self.nodes[0].datadir_path / "rt-native_endian.dump")
|
||||
self.assert_dump(expected_dump, native_dump)
|
||||
|
||||
self.do_tool_createfromdump("other_endian", "endian.dump", "bdb_swap")
|
||||
other_dump = self.read_dump(self.nodes[0].datadir_path / "rt-other_endian.dump")
|
||||
self.assert_dump(expected_dump, other_dump)
|
||||
|
||||
def test_dump_very_large_records(self):
|
||||
self.log.info("Test that wallets with large records are successfully dumped")
|
||||
|
||||
|
@ -521,68 +452,13 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
else:
|
||||
assert False, "Big transaction was not found in wallet dump"
|
||||
|
||||
def test_dump_unclean_lsns(self):
|
||||
if not self.options.bdbro:
|
||||
return
|
||||
self.log.info("Test that a legacy wallet that has not been compacted is not dumped by bdbro")
|
||||
def test_no_create_legacy(self):
|
||||
self.log.info("Test that legacy wallets cannot be created")
|
||||
|
||||
self.start_node(0, extra_args=["-flushwallet=0"])
|
||||
self.nodes[0].createwallet("unclean_lsn")
|
||||
wallet = self.nodes[0].get_wallet_rpc("unclean_lsn")
|
||||
# First unload and load normally to make sure everything is written
|
||||
wallet.unloadwallet()
|
||||
self.nodes[0].loadwallet("unclean_lsn")
|
||||
# Next cause a bunch of writes by filling the keypool
|
||||
wallet.keypoolrefill(wallet.getwalletinfo()["keypoolsize"] + 100)
|
||||
# Lastly kill bitcoind so that the LSNs don't get reset
|
||||
self.nodes[0].kill_process()
|
||||
|
||||
wallet_dump = self.nodes[0].datadir_path / "unclean_lsn.dump"
|
||||
self.assert_raises_tool_error("LSNs are not reset, this database is not completely flushed. Please reopen then close the database with a version that has BDB support", "-wallet=unclean_lsn", f"-dumpfile={wallet_dump}", "dump")
|
||||
|
||||
# File can be dumped after reload it normally
|
||||
self.start_node(0)
|
||||
self.nodes[0].loadwallet("unclean_lsn")
|
||||
self.stop_node(0)
|
||||
self.assert_tool_output("The dumpfile may contain private keys. To ensure the safety of your Bitcoin, do not share the dumpfile.\n", "-wallet=unclean_lsn", f"-dumpfile={wallet_dump}", "dump")
|
||||
|
||||
def test_compare_legacy_dump_with_framework_bdb_parser(self):
|
||||
self.log.info("Verify that legacy wallet database dump matches the one from the test framework's BDB parser")
|
||||
wallet_name = "bdb_ro_test"
|
||||
self.start_node(0)
|
||||
# add some really large labels (above twice the largest valid page size) to create BDB overflow pages
|
||||
self.nodes[0].createwallet(wallet_name)
|
||||
wallet_rpc = self.nodes[0].get_wallet_rpc(wallet_name)
|
||||
generated_labels = {}
|
||||
for i in range(10):
|
||||
address = getnewdestination()[2]
|
||||
large_label = ''.join([random.choice(string.ascii_letters) for _ in range(150000)])
|
||||
wallet_rpc.setlabel(address, large_label)
|
||||
generated_labels[address] = large_label
|
||||
# fill the keypool to create BDB internal pages
|
||||
wallet_rpc.keypoolrefill(1000)
|
||||
self.stop_node(0)
|
||||
|
||||
wallet_dumpfile = self.nodes[0].datadir_path / "bdb_ro_test.dump"
|
||||
self.assert_tool_output("The dumpfile may contain private keys. To ensure the safety of your Bitcoin, do not share the dumpfile.\n", "-wallet={}".format(wallet_name), "-dumpfile={}".format(wallet_dumpfile), "dump")
|
||||
|
||||
expected_dump = self.read_dump(wallet_dumpfile)
|
||||
# remove extra entries from wallet tool dump that are not actual key/value pairs from the database
|
||||
del expected_dump['BITCOIN_CORE_WALLET_DUMP']
|
||||
del expected_dump['format']
|
||||
del expected_dump['checksum']
|
||||
bdb_ro_parser_dump_raw = dump_bdb_kv(self.nodes[0].wallets_path / wallet_name / "wallet.dat")
|
||||
bdb_ro_parser_dump = OrderedDict()
|
||||
assert any([len(bytes.fromhex(value)) >= 150000 for value in expected_dump.values()])
|
||||
for key, value in sorted(bdb_ro_parser_dump_raw.items()):
|
||||
bdb_ro_parser_dump[key.hex()] = value.hex()
|
||||
assert_equal(bdb_ro_parser_dump, expected_dump)
|
||||
|
||||
# check that all labels were created with the correct address
|
||||
for address, label in generated_labels.items():
|
||||
key_bytes = b'\x04name' + ser_string(address.encode())
|
||||
assert key_bytes in bdb_ro_parser_dump_raw
|
||||
assert_equal(bdb_ro_parser_dump_raw[key_bytes], ser_string(label.encode()))
|
||||
self.assert_raises_tool_error("The -legacy option must be set to \"false\"", "-wallet=legacy", "-legacy", "create")
|
||||
assert not (self.nodes[0].wallets_path / "legacy").exists()
|
||||
self.assert_raises_tool_error("The -descriptors option must be set to \"true\"", "-wallet=legacy", "-descriptors=false", "create")
|
||||
assert not (self.nodes[0].wallets_path / "legacy").exists()
|
||||
|
||||
def run_test(self):
|
||||
self.wallet_path = self.nodes[0].wallets_path / self.default_wallet_name / self.wallet_data_filename
|
||||
|
@ -592,16 +468,10 @@ class ToolWalletTest(BitcoinTestFramework):
|
|||
self.test_tool_wallet_info_after_transaction()
|
||||
self.test_tool_wallet_create_on_existing_wallet()
|
||||
self.test_getwalletinfo_on_different_wallet()
|
||||
if not self.options.descriptors:
|
||||
# Salvage is a legacy wallet only thing
|
||||
self.test_salvage()
|
||||
self.test_dump_endianness()
|
||||
self.test_dump_unclean_lsns()
|
||||
self.test_dump_createfromdump()
|
||||
self.test_chainless_conflicts()
|
||||
self.test_dump_very_large_records()
|
||||
if not self.options.descriptors and self.is_bdb_compiled() and not self.options.swap_bdb_endian:
|
||||
self.test_compare_legacy_dump_with_framework_bdb_parser()
|
||||
self.test_no_create_legacy()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -21,9 +21,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class AbandonConflictTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.extra_args = [["-minrelaytxfee=0.00001"], []]
|
||||
|
|
|
@ -66,9 +66,6 @@ from test_framework.util import (
|
|||
)
|
||||
|
||||
class AddressTypeTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 6
|
||||
self.extra_args = [
|
||||
|
@ -230,28 +227,10 @@ class AddressTypeTest(BitcoinTestFramework):
|
|||
# no coinbases are maturing for the nodes-under-test during the test
|
||||
self.generate(self.nodes[5], COINBASE_MATURITY + 1)
|
||||
|
||||
uncompressed_1 = "0496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858ee"
|
||||
uncompressed_2 = "047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77"
|
||||
compressed_1 = "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"
|
||||
compressed_2 = "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"
|
||||
|
||||
if not self.options.descriptors:
|
||||
# Tests for addmultisigaddress's address type behavior is only for legacy wallets.
|
||||
# Descriptor wallets do not have addmultsigaddress so these tests are not needed for those.
|
||||
# addmultisigaddress with at least 1 uncompressed key should return a legacy address.
|
||||
for node in range(4):
|
||||
self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, uncompressed_2])['address'], True, 'legacy')
|
||||
self.test_address(node, self.nodes[node].addmultisigaddress(2, [compressed_1, uncompressed_2])['address'], True, 'legacy')
|
||||
self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, compressed_2])['address'], True, 'legacy')
|
||||
# addmultisigaddress with all compressed keys should return the appropriate address type (even when the keys are not ours).
|
||||
self.test_address(0, self.nodes[0].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'legacy')
|
||||
self.test_address(1, self.nodes[1].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
|
||||
self.test_address(2, self.nodes[2].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit')
|
||||
self.test_address(3, self.nodes[3].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'bech32')
|
||||
|
||||
do_multisigs = [False]
|
||||
if not self.options.descriptors:
|
||||
do_multisigs.append(True)
|
||||
|
||||
for explicit_type, multisig, from_node in itertools.product([False, True], do_multisigs, range(4)):
|
||||
address_type = None
|
||||
|
@ -369,7 +348,6 @@ class AddressTypeTest(BitcoinTestFramework):
|
|||
assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getnewaddress, None, '')
|
||||
assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getrawchangeaddress, '')
|
||||
assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].getrawchangeaddress, 'bech23')
|
||||
if self.options.descriptors:
|
||||
assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].createwalletdescriptor, "bech23")
|
||||
|
||||
self.log.info("Nodes with changetype=p2sh-segwit never use a P2WPKH change output")
|
||||
|
@ -379,14 +357,9 @@ class AddressTypeTest(BitcoinTestFramework):
|
|||
self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit')
|
||||
self.test_address(4, self.nodes[4].getrawchangeaddress('bech32'), multisig=False, typ='bech32')
|
||||
|
||||
if self.options.descriptors:
|
||||
self.log.info("Descriptor wallets have bech32m addresses")
|
||||
self.test_address(4, self.nodes[4].getnewaddress("", "bech32m"), multisig=False, typ="bech32m")
|
||||
self.test_address(4, self.nodes[4].getrawchangeaddress("bech32m"), multisig=False, typ="bech32m")
|
||||
else:
|
||||
self.log.info("Legacy wallets cannot make bech32m addresses")
|
||||
assert_raises_rpc_error(-8, "Legacy wallets cannot provide bech32m addresses", self.nodes[0].getnewaddress, "", "bech32m")
|
||||
assert_raises_rpc_error(-8, "Legacy wallets cannot provide bech32m addresses", self.nodes[0].getrawchangeaddress, "bech32m")
|
||||
|
||||
if __name__ == '__main__':
|
||||
AddressTypeTest(__file__).main()
|
||||
|
|
|
@ -31,9 +31,6 @@ class AssumeutxoTest(BitcoinTestFramework):
|
|||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
"""Use the pregenerated, deterministic chain up to height 199."""
|
||||
self.num_nodes = 3
|
||||
|
|
|
@ -106,9 +106,6 @@ def generate_payment_values(n, m):
|
|||
|
||||
|
||||
class AddressInputTypeGrouping(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
|
|
|
@ -4,7 +4,6 @@
|
|||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test the avoid_reuse and setwalletflag features."""
|
||||
|
||||
from test_framework.address import address_to_scriptpubkey
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_not_equal,
|
||||
|
@ -65,9 +64,6 @@ def assert_balances(node, mine, margin=0.001):
|
|||
assert_approx(got[k], v, margin)
|
||||
|
||||
class AvoidReuseTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
# whitelist peers to speed up tx relay / mempool sync
|
||||
|
@ -255,44 +251,6 @@ class AvoidReuseTest(BitcoinTestFramework):
|
|||
# getbalances should show no used, 5 btc trusted
|
||||
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})
|
||||
|
||||
if not self.options.descriptors:
|
||||
# For the second send, we transmute it to a related single-key address
|
||||
# to make sure it's also detected as reuse
|
||||
fund_spk = address_to_scriptpubkey(fundaddr).hex()
|
||||
fund_decoded = self.nodes[0].decodescript(fund_spk)
|
||||
if second_addr_type == "p2sh-segwit":
|
||||
new_fundaddr = fund_decoded["segwit"]["p2sh-segwit"]
|
||||
elif second_addr_type == "bech32":
|
||||
new_fundaddr = fund_decoded["segwit"]["address"]
|
||||
else:
|
||||
new_fundaddr = fundaddr
|
||||
assert_equal(second_addr_type, "legacy")
|
||||
|
||||
self.nodes[0].sendtoaddress(new_fundaddr, 10)
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
# listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
|
||||
assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
|
||||
# getbalances should show 10 used, 5 btc trusted
|
||||
assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
|
||||
|
||||
# node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
|
||||
assert_approx(self.nodes[1].getbalance(), 5, 0.001)
|
||||
assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)
|
||||
|
||||
assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)
|
||||
|
||||
self.nodes[1].sendtoaddress(retaddr, 4)
|
||||
|
||||
# listunspent should show 2 total outputs (1, 10 btc), one unused (1), one reused (10)
|
||||
assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
|
||||
# getbalances should show 10 used, 1 btc trusted
|
||||
assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})
|
||||
|
||||
# node 1 should now have about 1 btc left (no dirty) and 11 (including dirty)
|
||||
assert_approx(self.nodes[1].getbalance(), 1, 0.001)
|
||||
assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)
|
||||
|
||||
def test_getbalances_used(self):
|
||||
'''
|
||||
getbalances and listunspent should pick up on reused addresses
|
||||
|
|
|
@ -33,7 +33,6 @@ and confirm again balances are correct.
|
|||
from decimal import Decimal
|
||||
import os
|
||||
from random import randint
|
||||
import shutil
|
||||
|
||||
from test_framework.blocktools import COINBASE_MATURITY
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
|
@ -44,9 +43,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class WalletBackupTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 4
|
||||
self.setup_clean_chain = True
|
||||
|
@ -181,10 +177,6 @@ class WalletBackupTest(BitcoinTestFramework):
|
|||
for node_num in range(3):
|
||||
self.nodes[node_num].backupwallet(self.nodes[node_num].datadir_path / 'wallet.bak')
|
||||
|
||||
if not self.options.descriptors:
|
||||
for node_num in range(3):
|
||||
self.nodes[node_num].dumpwallet(self.nodes[node_num].datadir_path / 'wallet.dump')
|
||||
|
||||
self.log.info("More transactions")
|
||||
for _ in range(5):
|
||||
self.do_one_round()
|
||||
|
@ -228,29 +220,6 @@ class WalletBackupTest(BitcoinTestFramework):
|
|||
|
||||
self.restore_wallet_existent_name()
|
||||
|
||||
if not self.options.descriptors:
|
||||
self.log.info("Restoring using dumped wallet")
|
||||
self.stop_three()
|
||||
self.erase_three()
|
||||
|
||||
#start node2 with no chain
|
||||
shutil.rmtree(self.nodes[2].blocks_path)
|
||||
shutil.rmtree(self.nodes[2].chain_path / 'chainstate')
|
||||
|
||||
self.start_three(["-nowallet"])
|
||||
# Create new wallets for the three nodes.
|
||||
# We will use this empty wallets to test the 'importwallet()' RPC command below.
|
||||
for node_num in range(3):
|
||||
self.nodes[node_num].createwallet(wallet_name=self.default_wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
|
||||
assert_equal(self.nodes[node_num].getbalance(), 0)
|
||||
self.nodes[node_num].importwallet(self.nodes[node_num].datadir_path / 'wallet.dump')
|
||||
|
||||
self.sync_blocks()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), balance0)
|
||||
assert_equal(self.nodes[1].getbalance(), balance1)
|
||||
assert_equal(self.nodes[2].getbalance(), balance2)
|
||||
|
||||
# Backup to source wallet file must fail
|
||||
sourcePaths = [
|
||||
os.path.join(self.nodes[0].wallets_path, self.default_wallet_name, self.wallet_data_filename),
|
||||
|
|
|
@ -28,9 +28,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class BackwardsCompatibilityTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 10
|
||||
|
@ -88,44 +85,6 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
node_major, _, _ = self.split_version(node)
|
||||
return node_major >= major
|
||||
|
||||
def test_v19_addmultisigaddress(self):
|
||||
if not self.is_bdb_compiled():
|
||||
return
|
||||
# Specific test for addmultisigaddress using v19
|
||||
# See #18075
|
||||
self.log.info("Testing 0.19 addmultisigaddress case (#18075)")
|
||||
node_master = self.nodes[1]
|
||||
node_v19 = self.nodes[self.num_nodes - 3]
|
||||
node_v19.rpc.createwallet(wallet_name="w1_v19")
|
||||
wallet = node_v19.get_wallet_rpc("w1_v19")
|
||||
info = wallet.getwalletinfo()
|
||||
assert info['private_keys_enabled']
|
||||
assert info['keypoolsize'] > 0
|
||||
# Use addmultisigaddress (see #18075)
|
||||
address_18075 = wallet.rpc.addmultisigaddress(1, ["0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52", "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"], "", "legacy")["address"]
|
||||
assert wallet.getaddressinfo(address_18075)["solvable"]
|
||||
node_v19.unloadwallet("w1_v19")
|
||||
|
||||
# Copy the 0.19 wallet to the last Bitcoin Core version and open it:
|
||||
shutil.copytree(
|
||||
os.path.join(node_v19.wallets_path, "w1_v19"),
|
||||
os.path.join(node_master.wallets_path, "w1_v19")
|
||||
)
|
||||
node_master.loadwallet("w1_v19")
|
||||
wallet = node_master.get_wallet_rpc("w1_v19")
|
||||
assert wallet.getaddressinfo(address_18075)["solvable"]
|
||||
|
||||
# Now copy that same wallet back to 0.19 to make sure no automatic upgrade breaks it
|
||||
node_master.unloadwallet("w1_v19")
|
||||
shutil.rmtree(os.path.join(node_v19.wallets_path, "w1_v19"))
|
||||
shutil.copytree(
|
||||
os.path.join(node_master.wallets_path, "w1_v19"),
|
||||
os.path.join(node_v19.wallets_path, "w1_v19")
|
||||
)
|
||||
node_v19.loadwallet("w1_v19")
|
||||
wallet = node_v19.get_wallet_rpc("w1_v19")
|
||||
assert wallet.getaddressinfo(address_18075)["solvable"]
|
||||
|
||||
def run_test(self):
|
||||
node_miner = self.nodes[0]
|
||||
node_master = self.nodes[1]
|
||||
|
@ -201,15 +160,13 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
source = node_master_wallets_dir / wallet
|
||||
shutil.copytree(source, dest)
|
||||
|
||||
self.test_v19_addmultisigaddress()
|
||||
|
||||
self.log.info("Test that a wallet made on master can be opened on:")
|
||||
# In descriptors wallet mode, run this test on the nodes that support descriptor wallets
|
||||
# In legacy wallets mode, run this test on the nodes that support legacy wallets
|
||||
for node in descriptors_nodes if self.options.descriptors else legacy_nodes:
|
||||
# This test only works on the nodes that support descriptor wallets
|
||||
# since we can no longer create legacy wallets.
|
||||
for node in descriptors_nodes:
|
||||
self.log.info(f"- {node.version}")
|
||||
for wallet_name in ["w1", "w2", "w3"]:
|
||||
if self.major_version_less_than(node, 22) and wallet_name == "w1" and self.options.descriptors:
|
||||
if self.major_version_less_than(node, 22) and wallet_name == "w1":
|
||||
# Descriptor wallets created after 0.21 have taproot descriptors which 0.21 does not support, tested below
|
||||
continue
|
||||
# Also try to reopen on master after opening on old
|
||||
|
@ -249,7 +206,6 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
)
|
||||
|
||||
# Check that descriptor wallets don't work on legacy only nodes
|
||||
if self.options.descriptors:
|
||||
self.log.info("Test descriptor wallet incompatibility on:")
|
||||
for node in legacy_only_nodes:
|
||||
self.log.info(f"- {node.version}")
|
||||
|
@ -258,19 +214,15 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
for wallet_name in ["w1", "w2", "w3"]:
|
||||
assert_raises_rpc_error(-4, "Wallet file verification failed: wallet.dat corrupt, salvage failed", node.loadwallet, wallet_name)
|
||||
|
||||
# When descriptors are enabled, w1 cannot be opened by 0.21 since it contains a taproot descriptor
|
||||
if self.options.descriptors:
|
||||
# w1 cannot be opened by 0.21 since it contains a taproot descriptor
|
||||
self.log.info("Test that 0.21 cannot open wallet containing tr() descriptors")
|
||||
assert_raises_rpc_error(-1, "map::at", node_v21.loadwallet, "w1")
|
||||
|
||||
self.log.info("Test that a wallet can upgrade to and downgrade from master, from:")
|
||||
for node in descriptors_nodes if self.options.descriptors else legacy_nodes:
|
||||
for node in descriptors_nodes:
|
||||
self.log.info(f"- {node.version}")
|
||||
wallet_name = f"up_{node.version}"
|
||||
if self.major_version_at_least(node, 21):
|
||||
node.rpc.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors)
|
||||
else:
|
||||
node.rpc.createwallet(wallet_name=wallet_name)
|
||||
node.rpc.createwallet(wallet_name=wallet_name, descriptors=True)
|
||||
wallet_prev = node.get_wallet_rpc(wallet_name)
|
||||
address = wallet_prev.getnewaddress('', "bech32")
|
||||
addr_info = wallet_prev.getaddressinfo(address)
|
||||
|
@ -288,11 +240,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
# Restore the wallet to master
|
||||
load_res = node_master.restorewallet(wallet_name, backup_path)
|
||||
|
||||
# Make sure this wallet opens with only the migration warning. See https://github.com/bitcoin/bitcoin/pull/19054
|
||||
if not self.options.descriptors:
|
||||
# Legacy wallets will have only a deprecation warning
|
||||
assert_equal(load_res["warnings"], ["Wallet loaded successfully. The legacy wallet type is being deprecated and support for creating and opening legacy wallets will be removed in the future. Legacy wallets can be migrated to a descriptor wallet with migratewallet."])
|
||||
else:
|
||||
# There should be no warnings
|
||||
assert "warnings" not in load_res
|
||||
|
||||
wallet = node_master.get_wallet_rpc(wallet_name)
|
||||
|
@ -308,7 +256,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
# Check that taproot descriptors can be added to 0.21 wallets
|
||||
# This must be done after the backup is created so that 0.21 can still load
|
||||
# the backup
|
||||
if self.options.descriptors and self.major_version_equals(node, 21):
|
||||
if self.major_version_equals(node, 21):
|
||||
assert_raises_rpc_error(-12, "No bech32m addresses available", wallet.getnewaddress, address_type="bech32m")
|
||||
xpubs = wallet.gethdkeys(active_only=True)
|
||||
assert_equal(len(xpubs), 1)
|
||||
|
@ -337,5 +285,28 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
|
|||
info = wallet_res.getaddressinfo(address)
|
||||
assert_equal(info, addr_info)
|
||||
|
||||
self.log.info("Test that a wallet from a legacy only node must be migrated, from:")
|
||||
for node in legacy_nodes:
|
||||
self.log.info(f"- {node.version}")
|
||||
wallet_name = f"legacy_up_{node.version}"
|
||||
if self.major_version_at_least(node, 21):
|
||||
node.rpc.createwallet(wallet_name=wallet_name, descriptors=False)
|
||||
else:
|
||||
node.rpc.createwallet(wallet_name=wallet_name)
|
||||
wallet_prev = node.get_wallet_rpc(wallet_name)
|
||||
address = wallet_prev.getnewaddress('', "bech32")
|
||||
addr_info = wallet_prev.getaddressinfo(address)
|
||||
|
||||
# Make a backup of the wallet file
|
||||
backup_path = os.path.join(self.options.tmpdir, f"{wallet_name}.dat")
|
||||
wallet_prev.backupwallet(backup_path)
|
||||
|
||||
# Remove the wallet from old node
|
||||
wallet_prev.unloadwallet()
|
||||
|
||||
# Restore the wallet to master
|
||||
# Legacy wallets are no longer supported. Trying to load these should result in an error
|
||||
assert_raises_rpc_error(-18, "The wallet appears to be a Legacy wallet, please use the wallet migration tool (migratewallet RPC)", node_master.restorewallet, wallet_name, backup_path)
|
||||
|
||||
if __name__ == '__main__':
|
||||
BackwardsCompatibilityTest(__file__).main()
|
||||
|
|
|
@ -46,9 +46,6 @@ def create_transactions(node, address, amt, fees):
|
|||
return txs
|
||||
|
||||
class WalletTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.setup_clean_chain = True
|
||||
|
@ -65,17 +62,6 @@ class WalletTest(BitcoinTestFramework):
|
|||
self.skip_if_no_wallet()
|
||||
|
||||
def run_test(self):
|
||||
if not self.options.descriptors:
|
||||
# Tests legacy watchonly behavior which is not present (and does not need to be tested) in descriptor wallets
|
||||
self.nodes[0].importaddress(ADDRESS_WATCHONLY)
|
||||
# Check that nodes don't own any UTXOs
|
||||
assert_equal(len(self.nodes[0].listunspent()), 0)
|
||||
assert_equal(len(self.nodes[1].listunspent()), 0)
|
||||
|
||||
self.log.info("Check that only node 0 is watching an address")
|
||||
assert 'watchonly' in self.nodes[0].getbalances()
|
||||
assert 'watchonly' not in self.nodes[1].getbalances()
|
||||
|
||||
self.log.info("Mining blocks ...")
|
||||
self.generate(self.nodes[0], 1)
|
||||
self.generate(self.nodes[1], 1)
|
||||
|
@ -87,30 +73,13 @@ class WalletTest(BitcoinTestFramework):
|
|||
self.generatetoaddress(self.nodes[1], COINBASE_MATURITY + 1, ADDRESS_WATCHONLY)
|
||||
|
||||
# Verify listunspent returns all immature coinbases if 'include_immature_coinbase' is set
|
||||
# For now, only the legacy wallet will see the coinbases going to the imported 'ADDRESS_WATCHONLY'
|
||||
assert_equal(len(self.nodes[0].listunspent(query_options={'include_immature_coinbase': False})), 1 if self.options.descriptors else 2)
|
||||
assert_equal(len(self.nodes[0].listunspent(query_options={'include_immature_coinbase': True})), 1 if self.options.descriptors else COINBASE_MATURITY + 2)
|
||||
|
||||
if not self.options.descriptors:
|
||||
# Tests legacy watchonly behavior which is not present (and does not need to be tested) in descriptor wallets
|
||||
assert_equal(self.nodes[0].getbalances()['mine']['trusted'], 50)
|
||||
assert_equal(self.nodes[0].getwalletinfo()['balance'], 50)
|
||||
assert_equal(self.nodes[1].getbalances()['mine']['trusted'], 50)
|
||||
|
||||
assert_equal(self.nodes[0].getbalances()['watchonly']['immature'], 5000)
|
||||
assert 'watchonly' not in self.nodes[1].getbalances()
|
||||
|
||||
assert_equal(self.nodes[0].getbalance(), 50)
|
||||
assert_equal(self.nodes[1].getbalance(), 50)
|
||||
assert_equal(len(self.nodes[0].listunspent(query_options={'include_immature_coinbase': False})), 1)
|
||||
assert_equal(len(self.nodes[0].listunspent(query_options={'include_immature_coinbase': True})), 1)
|
||||
|
||||
self.log.info("Test getbalance with different arguments")
|
||||
assert_equal(self.nodes[0].getbalance("*"), 50)
|
||||
assert_equal(self.nodes[0].getbalance("*", 1), 50)
|
||||
assert_equal(self.nodes[0].getbalance(minconf=1), 50)
|
||||
if not self.options.descriptors:
|
||||
assert_equal(self.nodes[0].getbalance(minconf=0, include_watchonly=True), 100)
|
||||
assert_equal(self.nodes[0].getbalance("*", 1, True), 100)
|
||||
else:
|
||||
assert_equal(self.nodes[0].getbalance(minconf=0, include_watchonly=True), 50)
|
||||
assert_equal(self.nodes[0].getbalance("*", 1, True), 50)
|
||||
assert_equal(self.nodes[1].getbalance(minconf=0, include_watchonly=True), 50)
|
||||
|
@ -180,7 +149,6 @@ class WalletTest(BitcoinTestFramework):
|
|||
expected_balances_1 = {'mine': {'immature': Decimal('0E-8'),
|
||||
'trusted': Decimal('0E-8'), # node 1's send had an unsafe input
|
||||
'untrusted_pending': Decimal('30.0') - fee_node_1}} # Doesn't include output of node 0's send since it was spent
|
||||
if self.options.descriptors:
|
||||
del expected_balances_0["watchonly"]
|
||||
balances_0 = self.nodes[0].getbalances()
|
||||
balances_1 = self.nodes[1].getbalances()
|
||||
|
@ -292,27 +260,6 @@ class WalletTest(BitcoinTestFramework):
|
|||
self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY)
|
||||
assert_equal(self.nodes[0].getbalance(minconf=0), total_amount + 1) # The reorg recovered our fee of 1 coin
|
||||
|
||||
if not self.options.descriptors:
|
||||
self.log.info('Check if mempool is taken into account after import*')
|
||||
address = self.nodes[0].getnewaddress()
|
||||
privkey = self.nodes[0].dumpprivkey(address)
|
||||
self.nodes[0].sendtoaddress(address, 0.1)
|
||||
self.nodes[0].unloadwallet('')
|
||||
# check importaddress on fresh wallet
|
||||
self.nodes[0].createwallet('w1', False, True)
|
||||
self.nodes[0].importaddress(address)
|
||||
assert_equal(self.nodes[0].getbalances()['mine']['untrusted_pending'], 0)
|
||||
assert_equal(self.nodes[0].getbalances()['watchonly']['untrusted_pending'], Decimal('0.1'))
|
||||
self.nodes[0].importprivkey(privkey)
|
||||
assert_equal(self.nodes[0].getbalances()['mine']['untrusted_pending'], Decimal('0.1'))
|
||||
assert_equal(self.nodes[0].getbalances()['watchonly']['untrusted_pending'], 0)
|
||||
self.nodes[0].unloadwallet('w1')
|
||||
# check importprivkey on fresh wallet
|
||||
self.nodes[0].createwallet('w2', False, True)
|
||||
self.nodes[0].importprivkey(privkey)
|
||||
assert_equal(self.nodes[0].getbalances()['mine']['untrusted_pending'], Decimal('0.1'))
|
||||
|
||||
|
||||
# Tests the lastprocessedblock JSON object in getbalances, getwalletinfo
|
||||
# and gettransaction by checking for valid hex strings and by comparing
|
||||
# the hashes & heights between generated blocks.
|
||||
|
|
|
@ -27,9 +27,6 @@ OUT_OF_RANGE = "Amount out of range"
|
|||
|
||||
|
||||
class WalletTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 4
|
||||
# whitelist peers to speed up tx relay / mempool sync
|
||||
|
@ -439,125 +436,6 @@ class WalletTest(BitcoinTestFramework):
|
|||
# This will raise an exception since generate does not accept a string
|
||||
assert_raises_rpc_error(-3, "not of expected type number", self.generate, self.nodes[0], "2")
|
||||
|
||||
if not self.options.descriptors:
|
||||
|
||||
# This will raise an exception for the invalid private key format
|
||||
assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid")
|
||||
|
||||
# This will raise an exception for importing an address with the PS2H flag
|
||||
temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit")
|
||||
assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True)
|
||||
|
||||
# This will raise an exception for attempting to dump the private key of an address you do not own
|
||||
assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address)
|
||||
|
||||
# This will raise an exception for attempting to get the private key of an invalid Bitcoin address
|
||||
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].dumpprivkey, "invalid")
|
||||
|
||||
# This will raise an exception for attempting to set a label for an invalid Bitcoin address
|
||||
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].setlabel, "invalid address", "label")
|
||||
|
||||
# This will raise an exception for importing an invalid address
|
||||
assert_raises_rpc_error(-5, "Invalid Bitcoin address or script", self.nodes[0].importaddress, "invalid")
|
||||
|
||||
# This will raise an exception for attempting to import a pubkey that isn't in hex
|
||||
assert_raises_rpc_error(-5, 'Pubkey "not hex" must be a hex string', self.nodes[0].importpubkey, "not hex")
|
||||
|
||||
# This will raise exceptions for importing a pubkeys with invalid length / invalid coordinates
|
||||
too_short_pubkey = "5361746f736869204e616b616d6f746f"
|
||||
assert_raises_rpc_error(-5, f'Pubkey "{too_short_pubkey}" must have a length of either 33 or 65 bytes', self.nodes[0].importpubkey, too_short_pubkey)
|
||||
not_on_curve_pubkey = bytes([4] + [0]*64).hex() # pubkey with coordinates (0,0) is not on curve
|
||||
assert_raises_rpc_error(-5, f'Pubkey "{not_on_curve_pubkey}" must be cryptographically valid', self.nodes[0].importpubkey, not_on_curve_pubkey)
|
||||
|
||||
# Bech32m addresses cannot be imported into a legacy wallet
|
||||
assert_raises_rpc_error(-5, "Bech32m addresses cannot be imported into legacy wallets", self.nodes[0].importaddress, "bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqc8gma6")
|
||||
|
||||
# Import address and private key to check correct behavior of spendable unspents
|
||||
# 1. Send some coins to generate new UTXO
|
||||
address_to_import = self.nodes[2].getnewaddress()
|
||||
utxo = self.create_outpoints(self.nodes[0], outputs=[{address_to_import: 1}])[0]
|
||||
self.sync_mempools(self.nodes[0:3])
|
||||
self.nodes[2].lockunspent(False, [utxo])
|
||||
self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3]))
|
||||
|
||||
self.log.info("Test sendtoaddress with fee_rate param (explicit fee rate in sat/vB)")
|
||||
prebalance = self.nodes[2].getbalance()
|
||||
assert prebalance > 2
|
||||
address = self.nodes[1].getnewaddress()
|
||||
amount = 3
|
||||
fee_rate_sat_vb = 2
|
||||
fee_rate_btc_kvb = fee_rate_sat_vb * 1e3 / 1e8
|
||||
# Test passing fee_rate as an integer
|
||||
txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=fee_rate_sat_vb)
|
||||
tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])
|
||||
self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3]))
|
||||
postbalance = self.nodes[2].getbalance()
|
||||
fee = prebalance - postbalance - Decimal(amount)
|
||||
assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb))
|
||||
|
||||
prebalance = self.nodes[2].getbalance()
|
||||
amount = Decimal("0.001")
|
||||
fee_rate_sat_vb = 1.23
|
||||
fee_rate_btc_kvb = fee_rate_sat_vb * 1e3 / 1e8
|
||||
# Test passing fee_rate as a string
|
||||
txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=str(fee_rate_sat_vb))
|
||||
tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])
|
||||
self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3]))
|
||||
postbalance = self.nodes[2].getbalance()
|
||||
fee = prebalance - postbalance - amount
|
||||
assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb))
|
||||
|
||||
# Test setting explicit fee rate just below the minimum.
|
||||
self.log.info("Test sendtoaddress raises 'fee rate too low' if fee_rate of 0.99999999 is passed")
|
||||
assert_raises_rpc_error(-6, "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)",
|
||||
self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=0.999)
|
||||
|
||||
self.log.info("Test sendtoaddress raises if an invalid fee_rate is passed")
|
||||
# Test fee_rate with zero values.
|
||||
msg = "Fee rate (0.000 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)"
|
||||
for zero_value in [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]:
|
||||
assert_raises_rpc_error(-6, msg, self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=zero_value)
|
||||
msg = "Invalid amount"
|
||||
# Test fee_rate values that don't pass fixed-point parsing checks.
|
||||
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
|
||||
assert_raises_rpc_error(-3, msg, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=invalid_value)
|
||||
# Test fee_rate values that cannot be represented in sat/vB.
|
||||
for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
|
||||
assert_raises_rpc_error(-3, msg, self.nodes[2].sendtoaddress, address=address, amount=10, fee_rate=invalid_value)
|
||||
# Test fee_rate out of range (negative number).
|
||||
assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=-1)
|
||||
# Test type error.
|
||||
for invalid_value in [True, {"foo": "bar"}]:
|
||||
assert_raises_rpc_error(-3, NOT_A_NUMBER_OR_STRING, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=invalid_value)
|
||||
|
||||
self.log.info("Test sendtoaddress raises if an invalid conf_target or estimate_mode is passed")
|
||||
for target, mode in product([-1, 0, 1009], ["economical", "conservative"]):
|
||||
assert_raises_rpc_error(-8, "Invalid conf_target, must be between 1 and 1008", # max value of 1008 per src/policy/fees.h
|
||||
self.nodes[2].sendtoaddress, address=address, amount=1, conf_target=target, estimate_mode=mode)
|
||||
for target, mode in product([-1, 0], ["btc/kb", "sat/b"]):
|
||||
assert_raises_rpc_error(-8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"',
|
||||
self.nodes[2].sendtoaddress, address=address, amount=1, conf_target=target, estimate_mode=mode)
|
||||
|
||||
# 2. Import address from node2 to node1
|
||||
self.nodes[1].importaddress(address_to_import)
|
||||
|
||||
# 3. Validate that the imported address is watch-only on node1
|
||||
assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
|
||||
|
||||
# 4. Check that the unspents after import are not spendable
|
||||
assert_array_result(self.nodes[1].listunspent(),
|
||||
{"address": address_to_import},
|
||||
{"spendable": False})
|
||||
|
||||
# 5. Import private key of the previously imported address on node1
|
||||
priv_key = self.nodes[2].dumpprivkey(address_to_import)
|
||||
self.nodes[1].importprivkey(priv_key)
|
||||
|
||||
# 6. Check that the unspents are now spendable on node1
|
||||
assert_array_result(self.nodes[1].listunspent(),
|
||||
{"address": address_to_import},
|
||||
{"spendable": True})
|
||||
|
||||
# Mine a block from node0 to an address from node1
|
||||
coinbase_addr = self.nodes[1].getnewaddress()
|
||||
block_hash = self.generatetoaddress(self.nodes[0], 1, coinbase_addr, sync_fun=lambda: self.sync_all(self.nodes[0:3]))[0]
|
||||
|
@ -716,7 +594,6 @@ class WalletTest(BitcoinTestFramework):
|
|||
txid_feeReason_four = self.nodes[2].sendmany(dummy='', amounts={address: 5}, verbose=False)
|
||||
assert_equal(self.nodes[2].gettransaction(txid_feeReason_four)['txid'], txid_feeReason_four)
|
||||
|
||||
if self.options.descriptors:
|
||||
self.log.info("Testing 'listunspent' outputs the parent descriptor(s) of coins")
|
||||
# Create two multisig descriptors, and send a UTxO each.
|
||||
multi_a = descsum_create("wsh(multi(1,tpubD6NzVbkrYhZ4YBNjUo96Jxd1u4XKWgnoc7LsA1jz3Yc2NiDbhtfBhaBtemB73n9V5vtJHwU6FVXwggTbeoJWQ1rzdz8ysDuQkpnaHyvnvzR/*,tpubD6NzVbkrYhZ4YHdDGMAYGaWxMSC1B6tPRTHuU5t3BcfcS3nrF523iFm5waFd1pP3ZvJt4Jr8XmCmsTBNx5suhcSgtzpGjGMASR3tau1hJz4/*))")
|
||||
|
@ -791,8 +668,6 @@ class WalletTest(BitcoinTestFramework):
|
|||
self.test_chain_listunspent()
|
||||
|
||||
def test_chain_listunspent(self):
|
||||
if not self.options.descriptors:
|
||||
return
|
||||
self.wallet = MiniWallet(self.nodes[0])
|
||||
self.nodes[0].get_wallet_rpc(self.default_wallet_name).sendtoaddress(self.wallet.get_address(), "5")
|
||||
self.generate(self.wallet, 1, sync_fun=self.no_op)
|
||||
|
|
|
@ -6,13 +6,11 @@
|
|||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.address import (
|
||||
ADDRESS_BCRT1_UNSPENDABLE,
|
||||
ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR,
|
||||
)
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
)
|
||||
from test_framework.wallet_util import generate_keypair
|
||||
|
||||
|
||||
class WalletBlankTest(BitcoinTestFramework):
|
||||
|
@ -22,69 +20,7 @@ class WalletBlankTest(BitcoinTestFramework):
|
|||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
|
||||
def add_options(self, options):
|
||||
self.add_wallet_options(options)
|
||||
|
||||
def test_importaddress(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that importaddress unsets the blank flag")
|
||||
self.nodes[0].createwallet(wallet_name="iaddr", disable_private_keys=True, blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("iaddr")
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], False)
|
||||
assert_equal(info["blank"], True)
|
||||
wallet.importaddress(ADDRESS_BCRT1_UNSPENDABLE)
|
||||
assert_equal(wallet.getwalletinfo()["blank"], False)
|
||||
|
||||
def test_importpubkey(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that importpubkey unsets the blank flag")
|
||||
for i, comp in enumerate([True, False]):
|
||||
self.nodes[0].createwallet(wallet_name=f"ipub{i}", disable_private_keys=True, blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc(f"ipub{i}")
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], False)
|
||||
assert_equal(info["blank"], True)
|
||||
|
||||
_, pubkey = generate_keypair(compressed=comp)
|
||||
wallet.importpubkey(pubkey.hex())
|
||||
assert_equal(wallet.getwalletinfo()["blank"], False)
|
||||
|
||||
def test_importprivkey(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that importprivkey unsets the blank flag")
|
||||
for i, comp in enumerate([True, False]):
|
||||
self.nodes[0].createwallet(wallet_name=f"ipriv{i}", blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc(f"ipriv{i}")
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], False)
|
||||
assert_equal(info["blank"], True)
|
||||
|
||||
wif, _ = generate_keypair(compressed=comp, wif=True)
|
||||
wallet.importprivkey(wif)
|
||||
assert_equal(wallet.getwalletinfo()["blank"], False)
|
||||
|
||||
def test_importmulti(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that importmulti unsets the blank flag")
|
||||
self.nodes[0].createwallet(wallet_name="imulti", disable_private_keys=True, blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("imulti")
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], False)
|
||||
assert_equal(info["blank"], True)
|
||||
wallet.importmulti([{
|
||||
"desc": ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR,
|
||||
"timestamp": "now",
|
||||
}])
|
||||
assert_equal(wallet.getwalletinfo()["blank"], False)
|
||||
|
||||
def test_importdescriptors(self):
|
||||
if not self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that importdescriptors preserves the blank flag")
|
||||
self.nodes[0].createwallet(wallet_name="idesc", disable_private_keys=True, blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("idesc")
|
||||
|
@ -97,44 +33,7 @@ class WalletBlankTest(BitcoinTestFramework):
|
|||
}])
|
||||
assert_equal(wallet.getwalletinfo()["blank"], True)
|
||||
|
||||
def test_importwallet(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that importwallet unsets the blank flag")
|
||||
def_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
|
||||
|
||||
self.nodes[0].createwallet(wallet_name="iwallet", blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("iwallet")
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], False)
|
||||
assert_equal(info["blank"], True)
|
||||
|
||||
wallet_dump_path = self.nodes[0].datadir_path / "wallet.dump"
|
||||
def_wallet.dumpwallet(wallet_dump_path)
|
||||
|
||||
wallet.importwallet(wallet_dump_path)
|
||||
assert_equal(wallet.getwalletinfo()["blank"], False)
|
||||
|
||||
def test_encrypt_legacy(self):
|
||||
if self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that encrypting a blank legacy wallet preserves the blank flag and does not generate a seed")
|
||||
self.nodes[0].createwallet(wallet_name="encblanklegacy", blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("encblanklegacy")
|
||||
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], False)
|
||||
assert_equal(info["blank"], True)
|
||||
assert "hdseedid" not in info
|
||||
|
||||
wallet.encryptwallet("pass")
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["blank"], True)
|
||||
assert "hdseedid" not in info
|
||||
|
||||
def test_encrypt_descriptors(self):
|
||||
if not self.options.descriptors:
|
||||
return
|
||||
self.log.info("Test that encrypting a blank descriptor wallet preserves the blank flag and descriptors remain the same")
|
||||
self.nodes[0].createwallet(wallet_name="encblankdesc", blank=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("encblankdesc")
|
||||
|
@ -149,13 +48,7 @@ class WalletBlankTest(BitcoinTestFramework):
|
|||
assert_equal(descs, wallet.listdescriptors())
|
||||
|
||||
def run_test(self):
|
||||
self.test_importaddress()
|
||||
self.test_importpubkey()
|
||||
self.test_importprivkey()
|
||||
self.test_importmulti()
|
||||
self.test_importdescriptors()
|
||||
self.test_importwallet()
|
||||
self.test_encrypt_legacy()
|
||||
self.test_encrypt_descriptors()
|
||||
|
||||
|
||||
|
|
|
@ -50,9 +50,6 @@ def get_change_address(tx, node):
|
|||
return [address for address in txout_addresses if node.getaddressinfo(address)["ischange"]]
|
||||
|
||||
class BumpFeeTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.setup_clean_chain = True
|
||||
|
@ -597,10 +594,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
|
|||
"internal": True,
|
||||
"keypool": False
|
||||
}]
|
||||
if self.options.descriptors:
|
||||
result = signer.importdescriptors(reqs)
|
||||
else:
|
||||
result = signer.importmulti(reqs)
|
||||
assert_equal(result, [{'success': True}, {'success': True}])
|
||||
|
||||
# Create another wallet with just the public keys, which creates PSBTs
|
||||
|
@ -625,10 +619,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
|
|||
"watchonly": True,
|
||||
"active": True,
|
||||
}]
|
||||
if self.options.descriptors:
|
||||
result = watcher.importdescriptors(reqs)
|
||||
else:
|
||||
result = watcher.importmulti(reqs)
|
||||
assert_equal(result, [{'success': True}, {'success': True}])
|
||||
|
||||
funding_address1 = watcher.getnewaddress(address_type='bech32')
|
||||
|
|
|
@ -14,9 +14,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class WalletChangeAddressTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 3
|
||||
|
|
|
@ -13,9 +13,6 @@ from test_framework.util import (
|
|||
)
|
||||
|
||||
class CoinbaseCategoryTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.setup_clean_chain = True
|
||||
|
|
|
@ -15,9 +15,6 @@ from test_framework.util import (
|
|||
)
|
||||
|
||||
class TxConflicts(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
|
||||
|
|
|
@ -17,9 +17,6 @@ from test_framework.blocktools import (
|
|||
|
||||
|
||||
class CreateTxWalletTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
|
|
|
@ -5,7 +5,6 @@
|
|||
"""Test createwallet arguments.
|
||||
"""
|
||||
|
||||
from test_framework.address import key_to_p2wpkh
|
||||
from test_framework.descriptors import descsum_create
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
|
@ -20,9 +19,6 @@ LEGACY_WALLET_MSG = "Wallet created successfully. The legacy wallet type is bein
|
|||
|
||||
|
||||
class CreateWalletTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
|
@ -52,10 +48,7 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
self.log.info('Test that private keys cannot be imported')
|
||||
privkey, pubkey = generate_keypair(wif=True)
|
||||
assert_raises_rpc_error(-4, 'Cannot import private keys to a wallet with private keys disabled', w1.importprivkey, privkey)
|
||||
if self.options.descriptors:
|
||||
result = w1.importdescriptors([{'desc': descsum_create('wpkh(' + privkey + ')'), 'timestamp': 'now'}])
|
||||
else:
|
||||
result = w1.importmulti([{'scriptPubKey': {'address': key_to_p2wpkh(pubkey)}, 'timestamp': 'now', 'keys': [privkey]}])
|
||||
assert not result[0]['success']
|
||||
assert 'warnings' not in result[0]
|
||||
assert_equal(result[0]['error']['code'], -4)
|
||||
|
@ -80,7 +73,6 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
assert_equal(w3.getwalletinfo()['keypoolsize'], 0)
|
||||
assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w3.getnewaddress)
|
||||
# Set the seed
|
||||
if self.options.descriptors:
|
||||
w3.importdescriptors([{
|
||||
'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'),
|
||||
'timestamp': 'now',
|
||||
|
@ -92,8 +84,6 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
'active': True,
|
||||
'internal': True
|
||||
}])
|
||||
else:
|
||||
w3.sethdseed()
|
||||
assert_equal(w3.getwalletinfo()['keypoolsize'], 1)
|
||||
w3.getnewaddress()
|
||||
w3.getrawchangeaddress()
|
||||
|
@ -110,7 +100,6 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getrawchangeaddress)
|
||||
with WalletUnlock(w4, "pass"):
|
||||
# Now set a seed and it should work. Wallet should also be encrypted
|
||||
if self.options.descriptors:
|
||||
w4.importdescriptors([{
|
||||
'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'),
|
||||
'timestamp': 'now',
|
||||
|
@ -122,8 +111,6 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
'active': True,
|
||||
'internal': True
|
||||
}])
|
||||
else:
|
||||
w4.sethdseed()
|
||||
w4.getnewaddress()
|
||||
w4.getrawchangeaddress()
|
||||
|
||||
|
@ -156,12 +143,12 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
w6.keypoolrefill(1)
|
||||
# There should only be 1 key for legacy, 3 for descriptors
|
||||
walletinfo = w6.getwalletinfo()
|
||||
keys = 4 if self.options.descriptors else 1
|
||||
keys = 4
|
||||
assert_equal(walletinfo['keypoolsize'], keys)
|
||||
assert_equal(walletinfo['keypoolsize_hd_internal'], keys)
|
||||
# Allow empty passphrase, but there should be a warning
|
||||
resp = self.nodes[0].createwallet(wallet_name='w7', disable_private_keys=False, blank=False, passphrase='')
|
||||
assert_equal(resp["warnings"], [EMPTY_PASSPHRASE_MSG] if self.options.descriptors else [EMPTY_PASSPHRASE_MSG, LEGACY_WALLET_MSG])
|
||||
assert_equal(resp["warnings"], [EMPTY_PASSPHRASE_MSG])
|
||||
|
||||
w7 = node.get_wallet_rpc('w7')
|
||||
assert_raises_rpc_error(-15, 'Error: running with an unencrypted wallet, but walletpassphrase was called.', w7.walletpassphrase, '', 60)
|
||||
|
@ -175,18 +162,8 @@ class CreateWalletTest(BitcoinTestFramework):
|
|||
self.log.info('Using a passphrase with private keys disabled returns error')
|
||||
assert_raises_rpc_error(-4, 'Passphrase provided but private keys are disabled. A passphrase is only used to encrypt private keys, so cannot be used for wallets with private keys disabled.', self.nodes[0].createwallet, wallet_name='w9', disable_private_keys=True, passphrase='thisisapassphrase')
|
||||
|
||||
if self.is_bdb_compiled():
|
||||
self.log.info("Test legacy wallet deprecation")
|
||||
result = self.nodes[0].createwallet(wallet_name="legacy_w0", descriptors=False, passphrase=None)
|
||||
assert_equal(result, {
|
||||
"name": "legacy_w0",
|
||||
"warnings": [LEGACY_WALLET_MSG],
|
||||
})
|
||||
result = self.nodes[0].createwallet(wallet_name="legacy_w1", descriptors=False, passphrase="")
|
||||
assert_equal(result, {
|
||||
"name": "legacy_w1",
|
||||
"warnings": [EMPTY_PASSPHRASE_MSG, LEGACY_WALLET_MSG],
|
||||
})
|
||||
self.log.info("Test that legacy wallets cannot be created")
|
||||
assert_raises_rpc_error(-4, 'descriptors argument must be set to "true"; it is no longer possible to create a legacy wallet.', self.nodes[0].createwallet, wallet_name="legacy", descriptors=False)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -14,9 +14,6 @@ from test_framework.wallet_util import WalletUnlock
|
|||
|
||||
|
||||
class WalletCreateDescriptorTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=True, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
|
|
|
@ -7,9 +7,6 @@ from test_framework.test_framework import BitcoinTestFramework
|
|||
from test_framework.util import assert_raises_rpc_error
|
||||
|
||||
class WalletCrossChain(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.setup_clean_chain = True
|
||||
|
@ -43,23 +40,10 @@ class WalletCrossChain(BitcoinTestFramework):
|
|||
|
||||
self.log.info("Loading/restoring wallets into nodes with a different genesis block")
|
||||
|
||||
if self.options.descriptors:
|
||||
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[0].loadwallet, node1_wallet)
|
||||
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[1].loadwallet, node0_wallet)
|
||||
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[0].restorewallet, 'w', node1_wallet_backup)
|
||||
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[1].restorewallet, 'w', node0_wallet_backup)
|
||||
else:
|
||||
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[0].loadwallet, node1_wallet)
|
||||
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[1].loadwallet, node0_wallet)
|
||||
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[0].restorewallet, 'w', node1_wallet_backup)
|
||||
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[1].restorewallet, 'w', node0_wallet_backup)
|
||||
|
||||
if not self.options.descriptors:
|
||||
self.log.info("Override cross-chain wallet load protection")
|
||||
self.stop_nodes()
|
||||
self.start_nodes([['-walletcrosschain', '-prune=550']] * self.num_nodes)
|
||||
self.nodes[0].loadwallet(node1_wallet)
|
||||
self.nodes[1].loadwallet(node0_wallet)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -23,9 +23,6 @@ from test_framework.wallet_util import WalletUnlock
|
|||
|
||||
|
||||
class WalletDescriptorTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
|
@ -72,15 +69,6 @@ class WalletDescriptorTest(BitcoinTestFramework):
|
|||
assert_equal(cache_records, 1000)
|
||||
|
||||
def run_test(self):
|
||||
if self.is_bdb_compiled():
|
||||
# Make a legacy wallet and check it is BDB
|
||||
self.nodes[0].createwallet(wallet_name="legacy1", descriptors=False)
|
||||
wallet_info = self.nodes[0].getwalletinfo()
|
||||
assert_equal(wallet_info['format'], 'bdb')
|
||||
self.nodes[0].unloadwallet("legacy1")
|
||||
else:
|
||||
self.log.warning("Skipping BDB test")
|
||||
|
||||
# Make a descriptor wallet
|
||||
self.log.info("Making a descriptor wallet")
|
||||
self.nodes[0].createwallet(wallet_name="desc1", descriptors=True)
|
||||
|
|
|
@ -1,227 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2016-2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test the dumpwallet RPC."""
|
||||
import datetime
|
||||
import time
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_not_equal,
|
||||
assert_equal,
|
||||
assert_raises_rpc_error,
|
||||
)
|
||||
from test_framework.wallet_util import WalletUnlock
|
||||
|
||||
|
||||
def read_dump(file_name, addrs, script_addrs, hd_master_addr_old):
|
||||
"""
|
||||
Read the given dump, count the addrs that match, count change and reserve.
|
||||
Also check that the old hd_master is inactive
|
||||
"""
|
||||
with open(file_name, encoding='utf8') as inputfile:
|
||||
found_comments = []
|
||||
found_legacy_addr = 0
|
||||
found_p2sh_segwit_addr = 0
|
||||
found_bech32_addr = 0
|
||||
found_script_addr = 0
|
||||
found_addr_chg = 0
|
||||
found_addr_rsv = 0
|
||||
hd_master_addr_ret = None
|
||||
for line in inputfile:
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
if line[0] == '#':
|
||||
found_comments.append(line)
|
||||
else:
|
||||
# split out some data
|
||||
key_date_label, comment = line.split("#")
|
||||
key_date_label = key_date_label.split(" ")
|
||||
# key = key_date_label[0]
|
||||
date = key_date_label[1]
|
||||
keytype = key_date_label[2]
|
||||
|
||||
imported_key = date == '1970-01-01T00:00:01Z'
|
||||
if imported_key:
|
||||
# Imported keys have multiple addresses, no label (keypath) and timestamp
|
||||
# Skip them
|
||||
continue
|
||||
|
||||
addr_keypath = comment.split(" addr=")[1]
|
||||
addr = addr_keypath.split(" ")[0]
|
||||
keypath = None
|
||||
if keytype == "inactivehdseed=1":
|
||||
# ensure the old master is still available
|
||||
assert hd_master_addr_old == addr
|
||||
elif keytype == "hdseed=1":
|
||||
# ensure we have generated a new hd master key
|
||||
assert_not_equal(hd_master_addr_old, addr)
|
||||
hd_master_addr_ret = addr
|
||||
elif keytype == "script=1":
|
||||
# scripts don't have keypaths
|
||||
keypath = None
|
||||
else:
|
||||
keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
|
||||
|
||||
# count key types
|
||||
for addrObj in addrs:
|
||||
if addrObj['address'] == addr.split(",")[0] and addrObj['hdkeypath'] == keypath and keytype == "label=":
|
||||
if addr.startswith('m') or addr.startswith('n'):
|
||||
# P2PKH address
|
||||
found_legacy_addr += 1
|
||||
elif addr.startswith('2'):
|
||||
# P2SH-segwit address
|
||||
found_p2sh_segwit_addr += 1
|
||||
elif addr.startswith('bcrt1'):
|
||||
found_bech32_addr += 1
|
||||
break
|
||||
elif keytype == "change=1":
|
||||
found_addr_chg += 1
|
||||
break
|
||||
elif keytype == "reserve=1":
|
||||
found_addr_rsv += 1
|
||||
break
|
||||
|
||||
# count scripts
|
||||
for script_addr in script_addrs:
|
||||
if script_addr == addr.rstrip() and keytype == "script=1":
|
||||
found_script_addr += 1
|
||||
break
|
||||
|
||||
return found_comments, found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
|
||||
|
||||
|
||||
class WalletDumpTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.extra_args = [["-keypool=90", "-addresstype=legacy"]]
|
||||
self.rpc_timeout = 120
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
|
||||
def setup_network(self):
|
||||
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
|
||||
self.start_nodes()
|
||||
|
||||
def run_test(self):
|
||||
self.nodes[0].createwallet("dump")
|
||||
|
||||
wallet_unenc_dump = self.nodes[0].datadir_path / "wallet.unencrypted.dump"
|
||||
wallet_enc_dump = self.nodes[0].datadir_path / "wallet.encrypted.dump"
|
||||
|
||||
# generate 30 addresses to compare against the dump
|
||||
# - 10 legacy P2PKH
|
||||
# - 10 P2SH-segwit
|
||||
# - 10 bech32
|
||||
test_addr_count = 10
|
||||
addrs = []
|
||||
for address_type in ['legacy', 'p2sh-segwit', 'bech32']:
|
||||
for _ in range(test_addr_count):
|
||||
addr = self.nodes[0].getnewaddress(address_type=address_type)
|
||||
vaddr = self.nodes[0].getaddressinfo(addr) # required to get hd keypath
|
||||
addrs.append(vaddr)
|
||||
|
||||
# Test scripts dump by adding a 1-of-1 multisig address
|
||||
multisig_addr = self.nodes[0].addmultisigaddress(1, [addrs[1]["address"]])["address"]
|
||||
|
||||
# Refill the keypool. getnewaddress() refills the keypool *before* taking a key from
|
||||
# the keypool, so the final call to getnewaddress leaves the keypool with one key below
|
||||
# its capacity
|
||||
self.nodes[0].keypoolrefill()
|
||||
|
||||
self.log.info('Mine a block one second before the wallet is dumped')
|
||||
dump_time = int(time.time())
|
||||
self.nodes[0].setmocktime(dump_time - 1)
|
||||
self.generate(self.nodes[0], 1)
|
||||
self.nodes[0].setmocktime(dump_time)
|
||||
dump_time_str = '# * Created on {}Z'.format(
|
||||
datetime.datetime.fromtimestamp(
|
||||
dump_time,
|
||||
tz=datetime.timezone.utc,
|
||||
).replace(tzinfo=None).isoformat())
|
||||
dump_best_block_1 = '# * Best block at time of backup was {} ({}),'.format(
|
||||
self.nodes[0].getblockcount(),
|
||||
self.nodes[0].getbestblockhash(),
|
||||
)
|
||||
dump_best_block_2 = '# mined on {}Z'.format(
|
||||
datetime.datetime.fromtimestamp(
|
||||
dump_time - 1,
|
||||
tz=datetime.timezone.utc,
|
||||
).replace(tzinfo=None).isoformat())
|
||||
|
||||
self.log.info('Dump unencrypted wallet')
|
||||
result = self.nodes[0].dumpwallet(wallet_unenc_dump)
|
||||
assert_equal(result['filename'], str(wallet_unenc_dump))
|
||||
|
||||
found_comments, found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
|
||||
read_dump(wallet_unenc_dump, addrs, [multisig_addr], None)
|
||||
assert '# End of dump' in found_comments # Check that file is not corrupt
|
||||
assert_equal(dump_time_str, next(c for c in found_comments if c.startswith('# * Created on')))
|
||||
assert_equal(dump_best_block_1, next(c for c in found_comments if c.startswith('# * Best block')))
|
||||
assert_equal(dump_best_block_2, next(c for c in found_comments if c.startswith('# mined on')))
|
||||
assert_equal(found_legacy_addr, test_addr_count) # all keys must be in the dump
|
||||
assert_equal(found_p2sh_segwit_addr, test_addr_count) # all keys must be in the dump
|
||||
assert_equal(found_bech32_addr, test_addr_count) # all keys must be in the dump
|
||||
assert_equal(found_script_addr, 1) # all scripts must be in the dump
|
||||
assert_equal(found_addr_chg, 0) # 0 blocks where mined
|
||||
assert_equal(found_addr_rsv, 90 * 2) # 90 keys plus 100% internal keys
|
||||
|
||||
# encrypt wallet, restart, unlock and dump
|
||||
self.nodes[0].encryptwallet('test')
|
||||
with WalletUnlock(self.nodes[0], "test"):
|
||||
# Should be a no-op:
|
||||
self.nodes[0].keypoolrefill()
|
||||
self.nodes[0].dumpwallet(wallet_enc_dump)
|
||||
|
||||
found_comments, found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, _ = \
|
||||
read_dump(wallet_enc_dump, addrs, [multisig_addr], hd_master_addr_unenc)
|
||||
assert '# End of dump' in found_comments # Check that file is not corrupt
|
||||
assert_equal(dump_time_str, next(c for c in found_comments if c.startswith('# * Created on')))
|
||||
assert_equal(dump_best_block_1, next(c for c in found_comments if c.startswith('# * Best block')))
|
||||
assert_equal(dump_best_block_2, next(c for c in found_comments if c.startswith('# mined on')))
|
||||
assert_equal(found_legacy_addr, test_addr_count) # all keys must be in the dump
|
||||
assert_equal(found_p2sh_segwit_addr, test_addr_count) # all keys must be in the dump
|
||||
assert_equal(found_bech32_addr, test_addr_count) # all keys must be in the dump
|
||||
assert_equal(found_script_addr, 1)
|
||||
assert_equal(found_addr_chg, 90 * 2) # old reserve keys are marked as change now
|
||||
assert_equal(found_addr_rsv, 90 * 2)
|
||||
|
||||
# Overwriting should fail
|
||||
assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump))
|
||||
|
||||
# Restart node with new wallet, and test importwallet
|
||||
self.restart_node(0)
|
||||
self.nodes[0].createwallet("w2")
|
||||
|
||||
# Make sure the address is not IsMine before import
|
||||
result = self.nodes[0].getaddressinfo(multisig_addr)
|
||||
assert not result['ismine']
|
||||
|
||||
self.nodes[0].importwallet(wallet_unenc_dump)
|
||||
|
||||
# Now check IsMine is true
|
||||
result = self.nodes[0].getaddressinfo(multisig_addr)
|
||||
assert result['ismine']
|
||||
|
||||
self.log.info('Check that wallet is flushed')
|
||||
with self.nodes[0].assert_debug_log(['Flushing wallet.dat'], timeout=20):
|
||||
self.nodes[0].getnewaddress()
|
||||
|
||||
# Make sure that dumpwallet doesn't have a lock order issue when there is an unconfirmed tx and it is reloaded
|
||||
# See https://github.com/bitcoin/bitcoin/issues/22489
|
||||
self.nodes[0].createwallet("w3")
|
||||
w3 = self.nodes[0].get_wallet_rpc("w3")
|
||||
w3.importprivkey(privkey=self.nodes[0].get_deterministic_priv_key().key, label="coinbase_import")
|
||||
w3.sendtoaddress(w3.getnewaddress(), 10)
|
||||
w3.unloadwallet()
|
||||
self.nodes[0].loadwallet("w3")
|
||||
w3.dumpwallet(self.nodes[0].datadir_path / "w3.dump")
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletDumpTest(__file__).main()
|
|
@ -17,9 +17,6 @@ from test_framework.wallet_util import WalletUnlock
|
|||
|
||||
|
||||
class WalletEncryptionTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
|
|
|
@ -9,9 +9,6 @@ from test_framework.test_framework import BitcoinTestFramework
|
|||
from test_framework.util import assert_raises_rpc_error
|
||||
|
||||
class WalletRBFTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.setup_clean_chain = True
|
||||
|
|
|
@ -19,9 +19,6 @@ NUM_BLOCKS = 6 # number of blocks to mine
|
|||
|
||||
|
||||
class WalletFastRescanTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.extra_args = [[f'-keypool={KEYPOOL_SIZE}', '-blockfilterindex=1']]
|
||||
|
|
|
@ -40,9 +40,6 @@ def get_unspent(listunspent, amount):
|
|||
raise AssertionError('Could not find unspent with amount={}'.format(amount))
|
||||
|
||||
class RawTransactionsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 4
|
||||
self.extra_args = [[
|
||||
|
@ -575,8 +572,6 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
addr2Obj['pubkey'],
|
||||
]
|
||||
)['address']
|
||||
if not self.options.descriptors:
|
||||
wmulti.importaddress(mSigObj)
|
||||
|
||||
# Send 1.2 BTC to msig addr.
|
||||
self.nodes[0].sendtoaddress(mSigObj, 1.2)
|
||||
|
@ -600,7 +595,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
self.log.info("Test fundrawtxn with locked wallet and hardened derivation")
|
||||
|
||||
df_wallet = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
|
||||
self.nodes[1].createwallet(wallet_name="locked_wallet", descriptors=self.options.descriptors)
|
||||
self.nodes[1].createwallet(wallet_name="locked_wallet")
|
||||
wallet = self.nodes[1].get_wallet_rpc("locked_wallet")
|
||||
# This test is not meant to exercise fee estimation. Making sure all txs are sent at a consistent fee rate.
|
||||
wallet.settxfee(self.min_relay_tx_fee)
|
||||
|
@ -612,7 +607,6 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
# Encrypt wallet and import descriptors
|
||||
wallet.encryptwallet("test")
|
||||
|
||||
if self.options.descriptors:
|
||||
with WalletUnlock(wallet, "test"):
|
||||
wallet.importdescriptors([{
|
||||
'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPdYeeZbPSKd2KYLmeVKtcFA7kqCxDvDR13MQ6us8HopUR2wLcS2ZKPhLyKsqpDL2FtL73LMHcgoCL7DXsciA8eX8nbjCR2eG/0h/*h)'),
|
||||
|
@ -761,10 +755,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
"range": [0, 100],
|
||||
"watchonly": True,
|
||||
}]
|
||||
if self.options.descriptors:
|
||||
wwatch.importdescriptors(desc_import)
|
||||
else:
|
||||
wwatch.importmulti(desc_import)
|
||||
|
||||
# Backward compatibility test (2nd params is includeWatching)
|
||||
result = wwatch.fundrawtransaction(rawtx, True)
|
||||
|
@ -1041,10 +1032,7 @@ class RawTransactionsTest(BitcoinTestFramework):
|
|||
|
||||
# Make a weird but signable script. sh(pkh()) descriptor accomplishes this
|
||||
desc = descsum_create("sh(pkh({}))".format(privkey))
|
||||
if self.options.descriptors:
|
||||
res = self.nodes[0].importdescriptors([{"desc": desc, "timestamp": "now"}])
|
||||
else:
|
||||
res = self.nodes[0].importmulti([{"desc": desc, "timestamp": "now"}])
|
||||
assert res[0]["success"]
|
||||
addr = self.nodes[0].deriveaddresses(desc)[0]
|
||||
addr_info = self.nodes[0].getaddressinfo(addr)
|
||||
|
|
|
@ -15,9 +15,6 @@ from test_framework.wallet_util import WalletUnlock
|
|||
|
||||
|
||||
class WalletGetHDKeyTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=True, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 1
|
||||
|
|
|
@ -16,9 +16,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class WalletGroupTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 5
|
||||
|
|
|
@ -9,16 +9,11 @@ import shutil
|
|||
from test_framework.blocktools import COINBASE_MATURITY
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_not_equal,
|
||||
assert_equal,
|
||||
assert_raises_rpc_error,
|
||||
)
|
||||
|
||||
|
||||
class WalletHDTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
|
@ -39,10 +34,7 @@ class WalletHDTest(BitcoinTestFramework):
|
|||
# create an internal key
|
||||
change_addr = self.nodes[1].getrawchangeaddress()
|
||||
change_addrV = self.nodes[1].getaddressinfo(change_addr)
|
||||
if self.options.descriptors:
|
||||
assert_equal(change_addrV["hdkeypath"], "m/84h/1h/0h/1/0")
|
||||
else:
|
||||
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
|
||||
|
||||
# Import a non-HD private key in the HD wallet
|
||||
non_hd_add = 'bcrt1qmevj8zfx0wdvp05cqwkmr6mxkfx60yezwjksmt'
|
||||
|
@ -61,10 +53,7 @@ class WalletHDTest(BitcoinTestFramework):
|
|||
for i in range(1, NUM_HD_ADDS + 1):
|
||||
hd_add = self.nodes[1].getnewaddress()
|
||||
hd_info = self.nodes[1].getaddressinfo(hd_add)
|
||||
if self.options.descriptors:
|
||||
assert_equal(hd_info["hdkeypath"], "m/84h/1h/0h/0/" + str(i))
|
||||
else:
|
||||
assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i) + "'")
|
||||
assert_equal(hd_info["hdmasterfingerprint"], hd_fingerprint)
|
||||
self.nodes[0].sendtoaddress(hd_add, 1)
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
@ -74,10 +63,7 @@ class WalletHDTest(BitcoinTestFramework):
|
|||
# create an internal key (again)
|
||||
change_addr = self.nodes[1].getrawchangeaddress()
|
||||
change_addrV = self.nodes[1].getaddressinfo(change_addr)
|
||||
if self.options.descriptors:
|
||||
assert_equal(change_addrV["hdkeypath"], "m/84h/1h/0h/1/1")
|
||||
else:
|
||||
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
|
||||
|
||||
self.sync_all()
|
||||
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
|
||||
|
@ -99,10 +85,7 @@ class WalletHDTest(BitcoinTestFramework):
|
|||
for i in range(1, NUM_HD_ADDS + 1):
|
||||
hd_add_2 = self.nodes[1].getnewaddress()
|
||||
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
|
||||
if self.options.descriptors:
|
||||
assert_equal(hd_info_2["hdkeypath"], "m/84h/1h/0h/0/" + str(i))
|
||||
else:
|
||||
assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'")
|
||||
assert_equal(hd_info_2["hdmasterfingerprint"], hd_fingerprint)
|
||||
assert_equal(hd_add, hd_add_2)
|
||||
self.connect_nodes(0, 1)
|
||||
|
@ -141,143 +124,7 @@ class WalletHDTest(BitcoinTestFramework):
|
|||
if out['value'] != 1:
|
||||
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['address'])['hdkeypath']
|
||||
|
||||
if self.options.descriptors:
|
||||
assert_equal(keypath[0:14], "m/84h/1h/0h/1/")
|
||||
else:
|
||||
assert_equal(keypath[0:7], "m/0'/1'")
|
||||
|
||||
if not self.options.descriptors:
|
||||
# Generate a new HD seed on node 1 and make sure it is set
|
||||
orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
|
||||
self.nodes[1].sethdseed()
|
||||
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
|
||||
assert_not_equal(orig_masterkeyid, new_masterkeyid)
|
||||
addr = self.nodes[1].getnewaddress()
|
||||
# Make sure the new address is the first from the keypool
|
||||
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'')
|
||||
self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
|
||||
|
||||
# Set a new HD seed on node 1 without flushing the keypool
|
||||
new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
|
||||
orig_masterkeyid = new_masterkeyid
|
||||
self.nodes[1].sethdseed(False, new_seed)
|
||||
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
|
||||
assert_not_equal(orig_masterkeyid, new_masterkeyid)
|
||||
addr = self.nodes[1].getnewaddress()
|
||||
assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
|
||||
# Make sure the new address continues previous keypool
|
||||
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'')
|
||||
|
||||
# Check that the next address is from the new seed
|
||||
self.nodes[1].keypoolrefill(1)
|
||||
next_addr = self.nodes[1].getnewaddress()
|
||||
assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
|
||||
# Make sure the new address is not from previous keypool
|
||||
assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'')
|
||||
assert_not_equal(next_addr, addr)
|
||||
|
||||
# Sethdseed parameter validity
|
||||
assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
|
||||
assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
|
||||
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type bool", self.nodes[1].sethdseed, "Not_bool")
|
||||
assert_raises_rpc_error(-3, "JSON value of type bool is not of expected type string", self.nodes[1].sethdseed, False, True)
|
||||
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
|
||||
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
|
||||
|
||||
self.log.info('Test sethdseed restoring with keys outside of the initial keypool')
|
||||
self.generate(self.nodes[0], 10)
|
||||
# Restart node 1 with keypool of 3 and a different wallet
|
||||
self.nodes[1].createwallet(wallet_name='origin', blank=True)
|
||||
self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin'])
|
||||
self.connect_nodes(0, 1)
|
||||
|
||||
# sethdseed restoring and seeing txs to addresses out of the keypool
|
||||
origin_rpc = self.nodes[1].get_wallet_rpc('origin')
|
||||
seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
|
||||
origin_rpc.sethdseed(True, seed)
|
||||
|
||||
self.nodes[1].createwallet(wallet_name='restore', blank=True)
|
||||
restore_rpc = self.nodes[1].get_wallet_rpc('restore')
|
||||
restore_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
|
||||
restore_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
|
||||
|
||||
self.nodes[1].createwallet(wallet_name='restore2', blank=True)
|
||||
restore2_rpc = self.nodes[1].get_wallet_rpc('restore2')
|
||||
restore2_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
|
||||
restore2_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
|
||||
|
||||
# Check persistence of inactive seed by reloading restore. restore2 is still loaded to test the case where the wallet is not reloaded
|
||||
restore_rpc.unloadwallet()
|
||||
self.nodes[1].loadwallet('restore')
|
||||
restore_rpc = self.nodes[1].get_wallet_rpc('restore')
|
||||
|
||||
# Empty origin keypool and get an address that is beyond the initial keypool
|
||||
origin_rpc.getnewaddress()
|
||||
origin_rpc.getnewaddress()
|
||||
last_addr = origin_rpc.getnewaddress() # Last address of initial keypool
|
||||
addr = origin_rpc.getnewaddress() # First address beyond initial keypool
|
||||
|
||||
# Check that the restored seed has last_addr but does not have addr
|
||||
info = restore_rpc.getaddressinfo(last_addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
info = restore_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], False)
|
||||
info = restore2_rpc.getaddressinfo(last_addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
info = restore2_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], False)
|
||||
# Check that the origin seed has addr
|
||||
info = origin_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
|
||||
# Send a transaction to addr, which is out of the initial keypool.
|
||||
# The wallet that has set a new seed (restore_rpc) should not detect this transaction.
|
||||
txid = self.nodes[0].sendtoaddress(addr, 1)
|
||||
origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
|
||||
self.generate(self.nodes[0], 1)
|
||||
origin_rpc.gettransaction(txid)
|
||||
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, txid)
|
||||
out_of_kp_txid = txid
|
||||
|
||||
# Send a transaction to last_addr, which is in the initial keypool.
|
||||
# The wallet that has set a new seed (restore_rpc) should detect this transaction and generate 3 new keys from the initial seed.
|
||||
# The previous transaction (out_of_kp_txid) should still not be detected as a rescan is required.
|
||||
txid = self.nodes[0].sendtoaddress(last_addr, 1)
|
||||
origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
|
||||
self.generate(self.nodes[0], 1)
|
||||
origin_rpc.gettransaction(txid)
|
||||
restore_rpc.gettransaction(txid)
|
||||
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, out_of_kp_txid)
|
||||
restore2_rpc.gettransaction(txid)
|
||||
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore2_rpc.gettransaction, out_of_kp_txid)
|
||||
|
||||
# After rescanning, restore_rpc should now see out_of_kp_txid and generate an additional key.
|
||||
# addr should now be part of restore_rpc and be ismine
|
||||
restore_rpc.rescanblockchain()
|
||||
restore_rpc.gettransaction(out_of_kp_txid)
|
||||
info = restore_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
restore2_rpc.rescanblockchain()
|
||||
restore2_rpc.gettransaction(out_of_kp_txid)
|
||||
info = restore2_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
|
||||
# Check again that 3 keys were derived.
|
||||
# Empty keypool and get an address that is beyond the initial keypool
|
||||
origin_rpc.getnewaddress()
|
||||
origin_rpc.getnewaddress()
|
||||
last_addr = origin_rpc.getnewaddress()
|
||||
addr = origin_rpc.getnewaddress()
|
||||
|
||||
# Check that the restored seed has last_addr but does not have addr
|
||||
info = restore_rpc.getaddressinfo(last_addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
info = restore_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], False)
|
||||
info = restore2_rpc.getaddressinfo(last_addr)
|
||||
assert_equal(info['ismine'], True)
|
||||
info = restore2_rpc.getaddressinfo(addr)
|
||||
assert_equal(info['ismine'], False)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -1,68 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2019-2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test the wallet implicit segwit feature."""
|
||||
|
||||
import test_framework.address as address
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
|
||||
# TODO: Might be nice to test p2pk here too
# The three address encodings a wallet key can be expressed as; the test
# exercises every (receive type, send type) combination of these.
address_types = ('legacy', 'bech32', 'p2sh-segwit')
|
||||
|
||||
def key_to_address(key, address_type):
    """Encode a public key as an address of the given type.

    :param key: hex-encoded public key
    :param address_type: one of 'legacy', 'p2sh-segwit', 'bech32'
    :return: the address string
    :raises ValueError: for an unknown address type (the original silently
        fell through and returned None, which only surfaces later as a
        confusing RPC error)
    """
    if address_type == 'legacy':
        return address.key_to_p2pkh(key)
    elif address_type == 'p2sh-segwit':
        return address.key_to_p2sh_p2wpkh(key)
    elif address_type == 'bech32':
        return address.key_to_p2wpkh(key)
    raise ValueError("unknown address type '{}'".format(address_type))
|
||||
|
||||
def send_a_to_b(receive_node, send_node):
    """For each address type, create a fresh key on receive_node and have
    send_node pay 1 BTC to every address-type variant of that key.

    :return: dict mapping address type -> the pubkey generated for it
    """
    keys = {}
    for recv_type in address_types:
        fresh_address = receive_node.getnewaddress(address_type=recv_type)
        recv_pubkey = receive_node.getaddressinfo(fresh_address)['pubkey']
        keys[recv_type] = recv_pubkey
        # Pay to every possible encoding of this same key.
        for send_type in address_types:
            destination = key_to_address(recv_pubkey, send_type)
            send_node.sendtoaddress(address=destination, amount=1)
    return keys
|
||||
|
||||
def check_implicit_transactions(implicit_keys, implicit_node):
    """Assert that implicit_node lists a 'receive' transaction for every
    address-type variant of every key in implicit_keys.

    :param implicit_keys: dict mapping address type -> pubkey (as returned
        by send_a_to_b)
    :param implicit_node: node whose listtransactions output is checked
    """
    # The implicit segwit node allows conversion all possible ways
    txs = implicit_node.listtransactions(None, 99999)
    # Build the lookup set once; the original rebuilt a tuple over all txs
    # for every single assertion (O(types^2 * txs)).
    received = {(tx['category'], tx['address']) for tx in txs}
    for a in address_types:
        pubkey = implicit_keys[a]
        for b in address_types:
            b_address = key_to_address(pubkey, b)
            assert ('receive', b_address) in received
|
||||
|
||||
class ImplicitSegwitTest(BitcoinTestFramework):
    """Check that payments to every address-type variant of a wallet key are
    detected by the wallet, both immediately and after a node restart."""

    def add_options(self, parser):
        # descriptors=False: presumably restricts this test to legacy
        # wallets, which is the implicit-segwit behavior under test —
        # TODO confirm against add_wallet_options.
        self.add_wallet_options(parser, descriptors=False)

    def set_test_params(self):
        self.num_nodes = 2
        self.supports_cli = False

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        self.log.info("Manipulating addresses and sending transactions to all variations")
        # Node 1 pays node 0's keys via every address-type encoding.
        implicit_keys = send_a_to_b(self.nodes[0], self.nodes[1])

        self.sync_all()

        self.log.info("Checking that transactions show up correctly without a restart")
        check_implicit_transactions(implicit_keys, self.nodes[0])

        self.log.info("Checking that transactions still show up correctly after a restart")
        self.restart_node(0)
        self.restart_node(1)

        check_implicit_transactions(implicit_keys, self.nodes[0])
|
||||
|
||||
if __name__ == '__main__':
|
||||
ImplicitSegwitTest(__file__).main()
|
|
@ -1,341 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2014-2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test wallet import RPCs.
|
||||
|
||||
Test rescan behavior of importaddress, importpubkey, importprivkey, and
|
||||
importmulti RPCs with different types of keys and rescan options.
|
||||
|
||||
In the first part of the test, node 0 creates an address for each type of
|
||||
import RPC call and sends BTC to it. Then other nodes import the addresses,
|
||||
and the test makes listtransactions and getbalance calls to confirm that the
|
||||
importing node either did or did not execute rescans picking up the send
|
||||
transactions.
|
||||
|
||||
In the second part of the test, node 0 sends more BTC to each address, and the
|
||||
test makes more listtransactions and getbalance calls to confirm that the
|
||||
importing nodes pick up the new transactions regardless of whether rescans
|
||||
happened previously.
|
||||
"""
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.address import (
|
||||
AddressType,
|
||||
ADDRESS_BCRT1_UNSPENDABLE,
|
||||
)
|
||||
from test_framework.messages import COIN
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
set_node_times,
|
||||
)
|
||||
|
||||
import collections
|
||||
from decimal import Decimal
|
||||
import enum
|
||||
import itertools
|
||||
import random
|
||||
|
||||
# How the import is invoked: a single-key RPC (importaddress / importpubkey /
# importprivkey), or importmulti keyed by address or by scriptPubKey.
Call = enum.Enum("Call", "single multiaddress multiscript")
# What is imported: a bare address, a public key, or a private key.
Data = enum.Enum("Data", "address pub priv")
# Rescan mode: none, full rescan, or a key timestamp set past the rescan
# window so no historical transactions are picked up.
Rescan = enum.Enum("Rescan", "no yes late_timestamp")
|
||||
|
||||
|
||||
class Variant(collections.namedtuple("Variant", "call data address_type rescan prune")):
    """Helper for importing one key and verifying scanned transactions.

    The namedtuple fields describe the import variant (call style, data
    kind, address type, rescan mode, pruning).  The test attaches further
    mutable attributes before use: label, address, key, node,
    amount_received, expected_txs, timestamps and txids.
    """
    def do_import(self, timestamp):
        """Call one key import RPC."""
        rescan = self.rescan == Rescan.yes

        # Sanity-check the address info obtained from the source wallet.
        assert_equal(self.address["solvable"], True)
        assert_equal(self.address["isscript"], self.address_type == AddressType.p2sh_segwit)
        assert_equal(self.address["iswitness"], self.address_type == AddressType.bech32)
        if self.address["isscript"]:
            assert_equal(self.address["embedded"]["isscript"], False)
            assert_equal(self.address["embedded"]["iswitness"], True)

        if self.call == Call.single:
            if self.data == Data.address:
                response = self.node.importaddress(address=self.address["address"], label=self.label, rescan=rescan)
            elif self.data == Data.pub:
                response = self.node.importpubkey(pubkey=self.address["pubkey"], label=self.label, rescan=rescan)
            elif self.data == Data.priv:
                response = self.node.importprivkey(privkey=self.key, label=self.label, rescan=rescan)
            # The single-key import RPCs return null on success.
            assert_equal(response, None)

        elif self.call in (Call.multiaddress, Call.multiscript):
            request = {
                "scriptPubKey": {
                    "address": self.address["address"]
                } if self.call == Call.multiaddress else self.address["scriptPubKey"],
                # late_timestamp pushes the key timestamp past the rescan
                # window so a rescan finds no historical transactions.
                "timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0),
                "pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
                "keys": [self.key] if self.data == Data.priv else [],
                "label": self.label,
                "watchonly": self.data != Data.priv
            }
            if self.address_type == AddressType.p2sh_segwit and self.data != Data.address:
                # We need solving data when providing a pubkey or privkey as data
                request.update({"redeemscript": self.address['embedded']['scriptPubKey']})
            response = self.node.importmulti(
                requests=[request],
                rescan=self.rescan in (Rescan.yes, Rescan.late_timestamp),
            )
            assert_equal(response, [{"success": True}])

    def check(self, txid=None, amount=None, confirmation_height=None):
        """Verify that listtransactions/listreceivedbyaddress return expected values."""

        txs = self.node.listtransactions(label=self.label, count=10000, include_watchonly=True)
        current_height = self.node.getblockcount()
        assert_equal(len(txs), self.expected_txs)

        addresses = self.node.listreceivedbyaddress(minconf=0, include_watchonly=True, address_filter=self.address['address'])

        if self.expected_txs:
            assert_equal(len(addresses[0]["txids"]), self.expected_txs)

        if txid is not None:
            # Exactly one wallet transaction must match the given txid.
            tx, = [tx for tx in txs if tx["txid"] == txid]
            # (The original asserted tx["label"] twice; once is enough.)
            assert_equal(tx["label"], self.label)
            assert_equal(tx["address"], self.address["address"])
            assert_equal(tx["amount"], amount)
            assert_equal(tx["category"], "receive")
            assert_equal(tx["txid"], txid)

            # If no confirmation height is given, the tx is still in the
            # mempool.
            confirmations = (1 + current_height - confirmation_height) if confirmation_height else 0
            assert_equal(tx["confirmations"], confirmations)
            if confirmations:
                assert "trusted" not in tx

            address, = [ad for ad in addresses if txid in ad["txids"]]
            assert_equal(address["address"], self.address["address"])
            assert_equal(address["amount"], self.amount_received)
            assert_equal(address["confirmations"], confirmations)
            # Verify the transaction is correctly marked watchonly depending on
            # whether the transaction pays to an imported public key or
            # imported private key. The test setup ensures that transaction
            # inputs will not be from watchonly keys (important because
            # involvesWatchonly will be true if either the transaction output
            # or inputs are watchonly).
            if self.data != Data.priv:
                assert_equal(address["involvesWatchonly"], True)
            else:
                assert_equal("involvesWatchonly" not in address, True)
|
||||
|
||||
|
||||
# List of Variants for each way a key or address could be imported.
|
||||
IMPORT_VARIANTS = [Variant(*variants) for variants in itertools.product(Call, Data, AddressType, Rescan, (False, True))]
|
||||
|
||||
# List of nodes to import keys to. Half the nodes will have pruning disabled,
|
||||
# half will have it enabled. Different nodes will be used for imports that are
|
||||
# expected to cause rescans, and imports that are not expected to cause
|
||||
# rescans, in order to prevent rescans during later imports picking up
|
||||
# transactions associated with earlier imports. This makes it easier to keep
|
||||
# track of expected balances and transactions.
|
||||
ImportNode = collections.namedtuple("ImportNode", "prune rescan")
|
||||
IMPORT_NODES = [ImportNode(*fields) for fields in itertools.product((False, True), repeat=2)]
|
||||
|
||||
# Rescans start at the earliest block up to 2 hours before the key timestamp.
|
||||
TIMESTAMP_WINDOW = 2 * 60 * 60
|
||||
|
||||
AMOUNT_DUST = 0.00000546
|
||||
|
||||
|
||||
def get_rand_amount(min_amount=AMOUNT_DUST):
    """Return a random BTC amount in [min_amount, 1] as a Decimal with at
    most 8 decimal places (satoshi precision).

    The original implementation noted that round() could push the result
    below min_amount; here the amount is bumped back up one satoshi at a
    time, so callers are guaranteed at least min_amount (e.g. above the
    dust threshold / fee floor).
    """
    assert min_amount <= 1
    r = random.uniform(min_amount, 1)
    amount = Decimal(str(round(r, 8)))
    # round() may have rounded r down below min_amount; restore the floor.
    while amount < Decimal(str(min_amount)):
        amount += Decimal("0.00000001")
    return amount
|
||||
|
||||
|
||||
class ImportRescanTest(BitcoinTestFramework):
    """Exercise every import variant (IMPORT_VARIANTS) against dedicated
    importing nodes (IMPORT_NODES) and verify rescan behavior for confirmed
    transactions, later transactions, and mempool (unconfirmed) transactions
    after a mocked reorg."""

    def add_options(self, parser):
        # descriptors=False: presumably legacy-wallet-only, since the import
        # RPCs under test are legacy wallet RPCs — TODO confirm.
        self.add_wallet_options(parser, descriptors=False)

    def set_test_params(self):
        # Nodes 0/1 create and fund keys; nodes 2+ are the importing nodes,
        # one per (prune, rescan) combination.
        self.num_nodes = 2 + len(IMPORT_NODES)
        self.supports_cli = False
        self.rpc_timeout = 120
        # whitelist peers to speed up tx relay / mempool sync
        self.noban_tx_relay = True

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def setup_network(self):
        """Start importing nodes with pruning configured per IMPORT_NODES,
        after a first unpruned start to import coinbase keys."""
        self.extra_args = [[] for _ in range(self.num_nodes)]
        for i, import_node in enumerate(IMPORT_NODES, 2):
            if import_node.prune:
                self.extra_args[i] += ["-prune=1"]

        self.add_nodes(self.num_nodes, extra_args=self.extra_args)

        # Import keys with pruning disabled
        self.start_nodes(extra_args=[[]] * self.num_nodes)
        self.import_deterministic_coinbase_privkeys()
        self.stop_nodes()

        self.start_nodes()
        for i in range(1, self.num_nodes):
            self.connect_nodes(i, 0)

    def run_test(self):

        # Create one transaction on node 0 with a unique amount for
        # each possible type of wallet import RPC.
        # Variants funded since the last mined block share that block's
        # confirmation height/timestamp, assigned when the next block lands.
        last_variants = []
        for i, variant in enumerate(IMPORT_VARIANTS):
            if i % 10 == 0:
                # Mine a block every 10 variants to keep blocks small.
                blockhash = self.generate(self.nodes[0], 1)[0]
                conf_height = self.nodes[0].getblockcount()
                timestamp = self.nodes[0].getblockheader(blockhash)["time"]
                for var in last_variants:
                    var.confirmation_height = conf_height
                    var.timestamp = timestamp
                last_variants.clear()
            variant.label = "label {} {}".format(i, variant)
            variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(
                label=variant.label,
                address_type=variant.address_type.value,
            ))
            variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
            variant.initial_amount = get_rand_amount()
            variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
            last_variants.append(variant)

        # Confirm the remaining unconfirmed variants.
        blockhash = self.generate(self.nodes[0], 1)[0]
        conf_height = self.nodes[0].getblockcount()
        timestamp = self.nodes[0].getblockheader(blockhash)["time"]
        for var in last_variants:
            var.confirmation_height = conf_height
            var.timestamp = timestamp
        last_variants.clear()

        # Generate a block further in the future (past the rescan window).
        assert_equal(self.nodes[0].getrawmempool(), [])
        set_node_times(
            self.nodes,
            self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] + TIMESTAMP_WINDOW + 1,
        )
        self.generate(self.nodes[0], 1)

        # For each variation of wallet key import, invoke the import RPC and
        # check the results from getbalance and listtransactions.
        for variant in IMPORT_VARIANTS:
            self.log.info('Run import for variant {}'.format(variant))
            expect_rescan = variant.rescan == Rescan.yes
            variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
            variant.do_import(variant.timestamp)
            if expect_rescan:
                variant.amount_received = variant.initial_amount
                variant.expected_txs = 1
                variant.check(variant.initial_txid, variant.initial_amount, variant.confirmation_height)
            else:
                variant.amount_received = 0
                variant.expected_txs = 0
                variant.check()

        # Create new transactions sending to each address.
        for i, variant in enumerate(IMPORT_VARIANTS):
            if i % 10 == 0:
                blockhash = self.generate(self.nodes[0], 1)[0]
                # +1: these sends confirm in the NEXT block to be mined.
                conf_height = self.nodes[0].getblockcount() + 1
            variant.sent_amount = get_rand_amount()
            variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount)
            variant.confirmation_height = conf_height
        self.generate(self.nodes[0], 1)

        assert_equal(self.nodes[0].getrawmempool(), [])
        self.sync_all()

        # Check the latest results from getbalance and listtransactions.
        # New transactions must show up regardless of earlier rescan mode.
        for variant in IMPORT_VARIANTS:
            self.log.info('Run check for variant {}'.format(variant))
            variant.amount_received += variant.sent_amount
            variant.expected_txs += 1
            variant.check(variant.sent_txid, variant.sent_amount, variant.confirmation_height)

        self.log.info('Test that the mempool is rescanned as well if the rescan parameter is set to true')

        # The late timestamp and pruned variants are not necessary when testing mempool rescan
        mempool_variants = [variant for variant in IMPORT_VARIANTS if variant.rescan != Rescan.late_timestamp and not variant.prune]
        # No further blocks are mined so the timestamp will stay the same
        timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"]

        # Create one transaction on node 0 with a unique amount for
        # each possible type of wallet import RPC.
        for i, variant in enumerate(mempool_variants):
            variant.label = "mempool label {} {}".format(i, variant)
            variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(
                label=variant.label,
                address_type=variant.address_type.value,
            ))
            variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
            # Ensure output is large enough to pay for fees: conservatively assuming txsize of
            # 500 vbytes and feerate of 20 sats/vbytes
            variant.initial_amount = get_rand_amount(min_amount=((500 * 20 / COIN) + AMOUNT_DUST))
            variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
            # confirmation_height 0 == unconfirmed/mempool.
            variant.confirmation_height = 0
            variant.timestamp = timestamp

        # Mine a block so these parents are confirmed
        assert_equal(len(self.nodes[0].getrawmempool()), len(mempool_variants))
        self.sync_mempools()
        block_to_disconnect = self.generate(self.nodes[0], 1)[0]
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        # For each variant, create an unconfirmed child transaction from initial_txid, sending all
        # the funds to an unspendable address. Importantly, no change output is created so the
        # transaction can't be recognized using its outputs. The wallet rescan needs to know the
        # inputs of the transaction to detect it, so the parent must be processed before the child.
        # An equivalent test for descriptors exists in wallet_rescan_unconfirmed.py.
        unspent_txid_map = {txin["txid"] : txin for txin in self.nodes[1].listunspent()}
        for variant in mempool_variants:
            # Send full amount, subtracting fee from outputs, to ensure no change is created.
            child = self.nodes[1].send(
                add_to_wallet=False,
                inputs=[unspent_txid_map[variant.initial_txid]],
                outputs=[{ADDRESS_BCRT1_UNSPENDABLE : variant.initial_amount}],
                subtract_fee_from_outputs=[0]
            )
            variant.child_txid = child["txid"]
            variant.amount_received = 0
            self.nodes[0].sendrawtransaction(child["hex"])

        # Mempools should contain the child transactions for each variant.
        assert_equal(len(self.nodes[0].getrawmempool()), len(mempool_variants))
        self.sync_mempools()

        # Mock a reorg so the parent transactions are added back to the mempool
        for node in self.nodes:
            node.invalidateblock(block_to_disconnect)
            # Mempools should now contain the parent and child for each variant.
            assert_equal(len(node.getrawmempool()), 2 * len(mempool_variants))

        # For each variation of wallet key import, invoke the import RPC and
        # check the results from getbalance and listtransactions.
        for variant in mempool_variants:
            self.log.info('Run import for mempool variant {}'.format(variant))
            expect_rescan = variant.rescan == Rescan.yes
            variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
            variant.do_import(variant.timestamp)
            if expect_rescan:
                # Ensure both transactions were rescanned. This would raise a JSONRPCError if the
                # transactions were not identified as belonging to the wallet.
                assert_equal(variant.node.gettransaction(variant.initial_txid)['confirmations'], 0)
                assert_equal(variant.node.gettransaction(variant.child_txid)['confirmations'], 0)
                variant.amount_received = variant.initial_amount
                variant.expected_txs = 1
                variant.check(variant.initial_txid, variant.initial_amount, 0)
            else:
                variant.amount_received = 0
                variant.expected_txs = 0
                variant.check()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
ImportRescanTest(__file__).main()
|
|
@ -1,128 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2018-2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test the behavior of RPC importprivkey on set and unset labels of
|
||||
addresses.
|
||||
|
||||
It tests different cases in which an address is imported with importaddress
|
||||
with or without a label and then its private key is imported with importprivkey
|
||||
with and without a label.
|
||||
"""
|
||||
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.wallet_util import test_address
|
||||
|
||||
|
||||
class ImportWithLabel(BitcoinTestFramework):
    """Check how labels interact across importaddress/importprivkey for the
    same destination: labels set by one import are kept or updated by the
    other, and new destinations of an imported key get an empty label."""

    def add_options(self, parser):
        # descriptors=False: presumably legacy-wallet-only, since
        # importaddress/importprivkey are legacy wallet RPCs — TODO confirm.
        self.add_wallet_options(parser, descriptors=False)

    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        """Main test logic"""

        self.log.info(
            "Test importaddress with label and importprivkey without label."
        )
        self.log.info("Import a watch-only address with a label.")
        address = self.nodes[0].getnewaddress()
        label = "Test Label"
        self.nodes[1].importaddress(address, label)
        test_address(self.nodes[1],
                     address,
                     iswatchonly=True,
                     ismine=False,
                     labels=[label])

        self.log.info(
            "Import the watch-only address's private key without a "
            "label and the address should keep its label."
        )
        priv_key = self.nodes[0].dumpprivkey(address)
        self.nodes[1].importprivkey(priv_key)
        test_address(self.nodes[1], address, labels=[label])

        self.log.info(
            "Test importaddress without label and importprivkey with label."
        )
        self.log.info("Import a watch-only address without a label.")
        address2 = self.nodes[0].getnewaddress()
        self.nodes[1].importaddress(address2)
        # An import without a label gets the default empty label.
        test_address(self.nodes[1],
                     address2,
                     iswatchonly=True,
                     ismine=False,
                     labels=[""])

        self.log.info(
            "Import the watch-only address's private key with a "
            "label and the address should have its label updated."
        )
        priv_key2 = self.nodes[0].dumpprivkey(address2)
        label2 = "Test Label 2"
        self.nodes[1].importprivkey(priv_key2, label2)

        test_address(self.nodes[1], address2, labels=[label2])

        self.log.info("Test importaddress with label and importprivkey with label.")
        self.log.info("Import a watch-only address with a label.")
        address3 = self.nodes[0].getnewaddress()
        label3_addr = "Test Label 3 for importaddress"
        self.nodes[1].importaddress(address3, label3_addr)
        test_address(self.nodes[1],
                     address3,
                     iswatchonly=True,
                     ismine=False,
                     labels=[label3_addr])

        self.log.info(
            "Import the watch-only address's private key with a "
            "label and the address should have its label updated."
        )
        priv_key3 = self.nodes[0].dumpprivkey(address3)
        label3_priv = "Test Label 3 for importprivkey"
        self.nodes[1].importprivkey(priv_key3, label3_priv)

        test_address(self.nodes[1], address3, labels=[label3_priv])

        self.log.info(
            "Test importprivkey won't label new dests with the same "
            "label as others labeled dests for the same key."
        )
        self.log.info("Import a watch-only p2sh-segwit address with a label.")
        address4 = self.nodes[0].getnewaddress("", "p2sh-segwit")
        label4_addr = "Test Label 4 for importaddress"
        self.nodes[1].importaddress(address4, label4_addr)
        test_address(self.nodes[1],
                     address4,
                     iswatchonly=True,
                     ismine=False,
                     labels=[label4_addr],
                     embedded=None)

        self.log.info(
            "Import the watch-only address's private key without a "
            "label and new destinations for the key should have an "
            "empty label while the 'old' destination should keep "
            "its label."
        )
        priv_key4 = self.nodes[0].dumpprivkey(address4)
        self.nodes[1].importprivkey(priv_key4)
        # The embedded segwit destination is a new dest for the key.
        embedded_addr = self.nodes[1].getaddressinfo(address4)['embedded']['address']

        test_address(self.nodes[1], embedded_addr, labels=[""])

        test_address(self.nodes[1], address4, labels=[label4_addr])

        self.stop_nodes()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
ImportWithLabel(__file__).main()
|
|
@ -32,9 +32,6 @@ from test_framework.wallet_util import (
|
|||
)
|
||||
|
||||
class ImportDescriptorsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
# whitelist peers to speed up tx relay / mempool sync
|
||||
|
|
|
@ -1,938 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2014-2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test the importmulti RPC.
|
||||
|
||||
Test importmulti by generating keys on node0, importing the scriptPubKeys and
|
||||
addresses on node1 and then testing the address info for the different address
|
||||
variants.
|
||||
|
||||
- `get_key()` and `get_multisig()` are called to generate keys on node0 and
|
||||
return the privkeys, pubkeys and all variants of scriptPubKey and address.
|
||||
- `test_importmulti()` is called to send an importmulti call to node1, test
|
||||
success, and (if unsuccessful) test the error code and error message returned.
|
||||
- `test_address()` is called to call getaddressinfo for an address on node1
|
||||
and test the values returned."""
|
||||
|
||||
from test_framework.blocktools import COINBASE_MATURITY
|
||||
from test_framework.script import (
|
||||
CScript,
|
||||
OP_NOP,
|
||||
)
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.descriptors import descsum_create
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
assert_greater_than,
|
||||
assert_raises_rpc_error,
|
||||
)
|
||||
from test_framework.wallet_util import (
|
||||
get_key,
|
||||
get_multisig,
|
||||
test_address,
|
||||
)
|
||||
|
||||
|
||||
class ImportMultiTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
self.extra_args = [["-addresstype=legacy"], ["-addresstype=legacy"]]
|
||||
self.setup_clean_chain = True
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
|
||||
def setup_network(self):
|
||||
self.setup_nodes()
|
||||
|
||||
def test_importmulti(self, req, success, error_code=None, error_message=None, warnings=None):
|
||||
"""Run importmulti and assert success"""
|
||||
if warnings is None:
|
||||
warnings = []
|
||||
result = self.nodes[1].importmulti([req])
|
||||
observed_warnings = []
|
||||
if 'warnings' in result[0]:
|
||||
observed_warnings = result[0]['warnings']
|
||||
assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
|
||||
assert_equal(result[0]['success'], success)
|
||||
if error_code is not None:
|
||||
assert_equal(result[0]['error']['code'], error_code)
|
||||
assert_equal(result[0]['error']['message'], error_message)
|
||||
|
||||
def run_test(self):
|
||||
self.log.info("Mining blocks...")
|
||||
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
|
||||
self.generate(self.nodes[1], 1, sync_fun=self.no_op)
|
||||
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
|
||||
|
||||
node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
|
||||
|
||||
# Check only one address
|
||||
assert_equal(node0_address1['ismine'], True)
|
||||
|
||||
# Node 1 sync test
|
||||
assert_equal(self.nodes[1].getblockcount(), 1)
|
||||
|
||||
# Address Test - before import
|
||||
address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
|
||||
assert_equal(address_info['iswatchonly'], False)
|
||||
assert_equal(address_info['ismine'], False)
|
||||
|
||||
# RPC importmulti -----------------------------------------------
|
||||
|
||||
# Bitcoin Address (implicit non-internal)
|
||||
self.log.info("Should import an address")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
timestamp=timestamp,
|
||||
ischange=False)
|
||||
watchonly_address = key.p2pkh_addr
|
||||
watchonly_timestamp = timestamp
|
||||
|
||||
self.log.info("Should not import an invalid address")
|
||||
self.test_importmulti({"scriptPubKey": {"address": "not valid address"},
|
||||
"timestamp": "now"},
|
||||
success=False,
|
||||
error_code=-5,
|
||||
error_message='Invalid address \"not valid address\"')
|
||||
|
||||
# ScriptPubKey + internal
|
||||
self.log.info("Should import a scriptPubKey with internal flag")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": "now",
|
||||
"internal": True},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
timestamp=timestamp,
|
||||
ischange=True)
|
||||
|
||||
# ScriptPubKey + internal + label
|
||||
self.log.info("Should not allow a label to be specified when internal is true")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": "now",
|
||||
"internal": True,
|
||||
"label": "Unsuccessful labelling for internal addresses"},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Internal addresses should not have a label')
|
||||
|
||||
# Nonstandard scriptPubKey + !internal
|
||||
self.log.info("Should not import a nonstandard scriptPubKey without internal flag")
|
||||
nonstandardScriptPubKey = key.p2pkh_script + CScript([OP_NOP]).hex()
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
|
||||
"timestamp": "now"},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=False,
|
||||
timestamp=None)
|
||||
|
||||
# Address + Public key + !Internal(explicit)
|
||||
self.log.info("Should import an address with public key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now",
|
||||
"pubkeys": [key.pubkey],
|
||||
"internal": False},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
timestamp=timestamp)
|
||||
|
||||
# ScriptPubKey + Public key + internal
|
||||
self.log.info("Should import a scriptPubKey with internal and with public key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": "now",
|
||||
"pubkeys": [key.pubkey],
|
||||
"internal": True},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
timestamp=timestamp)
|
||||
|
||||
# Nonstandard scriptPubKey + Public key + !internal
|
||||
self.log.info("Should not import a nonstandard scriptPubKey without internal and with public key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
|
||||
"timestamp": "now",
|
||||
"pubkeys": [key.pubkey]},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=False,
|
||||
timestamp=None)
|
||||
|
||||
# Address + Private key + !watchonly
|
||||
self.log.info("Should import an address with private key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now",
|
||||
"keys": [key.privkey]},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=True,
|
||||
timestamp=timestamp)
|
||||
|
||||
self.log.info("Should not import an address with private key if is already imported")
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now",
|
||||
"keys": [key.privkey]},
|
||||
success=False,
|
||||
error_code=-4,
|
||||
error_message='The wallet already contains the private key for this address or script ("' + key.p2pkh_script + '")')
|
||||
|
||||
# Address + Private key + watchonly
|
||||
self.log.info("Should import an address with private key and with watchonly")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now",
|
||||
"keys": [key.privkey],
|
||||
"watchonly": True},
|
||||
success=True,
|
||||
warnings=["All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=True,
|
||||
timestamp=timestamp)
|
||||
|
||||
# ScriptPubKey + Private key + internal
|
||||
self.log.info("Should import a scriptPubKey with internal and with private key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": "now",
|
||||
"keys": [key.privkey],
|
||||
"internal": True},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=True,
|
||||
timestamp=timestamp)
|
||||
|
||||
# Nonstandard scriptPubKey + Private key + !internal
|
||||
self.log.info("Should not import a nonstandard scriptPubKey without internal and with private key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
|
||||
"timestamp": "now",
|
||||
"keys": [key.privkey]},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=False,
|
||||
timestamp=None)
|
||||
|
||||
# P2SH address
|
||||
multisig = get_multisig(self.nodes[0])
|
||||
self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op)
|
||||
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
|
||||
self.generate(self.nodes[1], 1, sync_fun=self.no_op)
|
||||
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
|
||||
|
||||
self.log.info("Should import a p2sh")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_addr,
|
||||
isscript=True,
|
||||
iswatchonly=True,
|
||||
timestamp=timestamp)
|
||||
p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
|
||||
assert_equal(p2shunspent['spendable'], False)
|
||||
assert_equal(p2shunspent['solvable'], False)
|
||||
|
||||
# P2SH + Redeem script
|
||||
multisig = get_multisig(self.nodes[0])
|
||||
self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op)
|
||||
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
|
||||
self.generate(self.nodes[1], 1, sync_fun=self.no_op)
|
||||
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
|
||||
|
||||
self.log.info("Should import a p2sh with respective redeem script")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
|
||||
"timestamp": "now",
|
||||
"redeemscript": multisig.redeem_script},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_addr, timestamp=timestamp, iswatchonly=True, ismine=False, solvable=True)
|
||||
|
||||
p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
|
||||
assert_equal(p2shunspent['spendable'], False)
|
||||
assert_equal(p2shunspent['solvable'], True)
|
||||
|
||||
# P2SH + Redeem script + Private Keys + !Watchonly
|
||||
multisig = get_multisig(self.nodes[0])
|
||||
self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op)
|
||||
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
|
||||
self.generate(self.nodes[1], 1, sync_fun=self.no_op)
|
||||
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
|
||||
|
||||
self.log.info("Should import a p2sh with respective redeem script and private keys")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
|
||||
"timestamp": "now",
|
||||
"redeemscript": multisig.redeem_script,
|
||||
"keys": multisig.privkeys[0:2]},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_addr,
|
||||
timestamp=timestamp,
|
||||
ismine=False,
|
||||
iswatchonly=True,
|
||||
solvable=True)
|
||||
|
||||
p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
|
||||
assert_equal(p2shunspent['spendable'], False)
|
||||
assert_equal(p2shunspent['solvable'], True)
|
||||
|
||||
# P2SH + Redeem script + Private Keys + Watchonly
|
||||
multisig = get_multisig(self.nodes[0])
|
||||
self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op)
|
||||
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
|
||||
self.generate(self.nodes[1], 1, sync_fun=self.no_op)
|
||||
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
|
||||
|
||||
self.log.info("Should import a p2sh with respective redeem script and private keys")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
|
||||
"timestamp": "now",
|
||||
"redeemscript": multisig.redeem_script,
|
||||
"keys": multisig.privkeys[0:2],
|
||||
"watchonly": True},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
solvable=True,
|
||||
timestamp=timestamp)
|
||||
|
||||
# Address + Public key + !Internal + Wrong pubkey
|
||||
self.log.info("Should not import an address with the wrong public key as non-solvable")
|
||||
key = get_key(self.nodes[0])
|
||||
wrong_key = get_key(self.nodes[0]).pubkey
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now",
|
||||
"pubkeys": [wrong_key]},
|
||||
success=True,
|
||||
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
solvable=False,
|
||||
timestamp=timestamp)
|
||||
|
||||
# ScriptPubKey + Public key + internal + Wrong pubkey
|
||||
self.log.info("Should import a scriptPubKey with internal and with a wrong public key as non-solvable")
|
||||
key = get_key(self.nodes[0])
|
||||
wrong_key = get_key(self.nodes[0]).pubkey
|
||||
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": "now",
|
||||
"pubkeys": [wrong_key],
|
||||
"internal": True},
|
||||
success=True,
|
||||
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
solvable=False,
|
||||
timestamp=timestamp)
|
||||
|
||||
# Address + Private key + !watchonly + Wrong private key
|
||||
self.log.info("Should import an address with a wrong private key as non-solvable")
|
||||
key = get_key(self.nodes[0])
|
||||
wrong_privkey = get_key(self.nodes[0]).privkey
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now",
|
||||
"keys": [wrong_privkey]},
|
||||
success=True,
|
||||
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
solvable=False,
|
||||
timestamp=timestamp)
|
||||
|
||||
# ScriptPubKey + Private key + internal + Wrong private key
|
||||
self.log.info("Should import a scriptPubKey with internal and with a wrong private key as non-solvable")
|
||||
key = get_key(self.nodes[0])
|
||||
wrong_privkey = get_key(self.nodes[0]).privkey
|
||||
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": "now",
|
||||
"keys": [wrong_privkey],
|
||||
"internal": True},
|
||||
success=True,
|
||||
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
solvable=False,
|
||||
timestamp=timestamp)
|
||||
|
||||
# Importing existing watch only address with new timestamp should replace saved timestamp.
|
||||
assert_greater_than(timestamp, watchonly_timestamp)
|
||||
self.log.info("Should replace previously saved watch only timestamp.")
|
||||
self.test_importmulti({"scriptPubKey": {"address": watchonly_address},
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
watchonly_address,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
timestamp=timestamp)
|
||||
watchonly_timestamp = timestamp
|
||||
|
||||
# restart nodes to check for proper serialization/deserialization of watch only address
|
||||
self.stop_nodes()
|
||||
self.start_nodes()
|
||||
test_address(self.nodes[1],
|
||||
watchonly_address,
|
||||
iswatchonly=True,
|
||||
ismine=False,
|
||||
timestamp=watchonly_timestamp)
|
||||
|
||||
# Bad or missing timestamps
|
||||
self.log.info("Should throw on invalid or missing timestamp values")
|
||||
assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
|
||||
self.nodes[1].importmulti, [{"scriptPubKey": key.p2pkh_script}])
|
||||
assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
|
||||
self.nodes[1].importmulti, [{
|
||||
"scriptPubKey": key.p2pkh_script,
|
||||
"timestamp": ""
|
||||
}])
|
||||
|
||||
# Import P2WPKH address as watch only
|
||||
self.log.info("Should import a P2WPKH address as watch only")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2wpkh_addr,
|
||||
iswatchonly=True,
|
||||
solvable=False)
|
||||
|
||||
# Import P2WPKH address with public key but no private key
|
||||
self.log.info("Should import a P2WPKH address and public key as solvable but not spendable")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
|
||||
"timestamp": "now",
|
||||
"pubkeys": [key.pubkey]},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2wpkh_addr,
|
||||
ismine=False,
|
||||
solvable=True)
|
||||
|
||||
# Import P2WPKH address with key and check it is spendable
|
||||
self.log.info("Should import a P2WPKH address with key")
|
||||
key = get_key(self.nodes[0])
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
|
||||
"timestamp": "now",
|
||||
"keys": [key.privkey]},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2wpkh_addr,
|
||||
iswatchonly=False,
|
||||
ismine=True)
|
||||
|
||||
# P2WSH multisig address without scripts or keys
|
||||
multisig = get_multisig(self.nodes[0])
|
||||
self.log.info("Should import a p2wsh multisig as watch only without respective redeem script and private keys")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2wsh_addr},
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_addr,
|
||||
solvable=False)
|
||||
|
||||
# Same P2WSH multisig address as above, but now with witnessscript + private keys
|
||||
self.log.info("Should import a p2wsh with respective witness script and private keys")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2wsh_addr},
|
||||
"timestamp": "now",
|
||||
"witnessscript": multisig.redeem_script,
|
||||
"keys": multisig.privkeys},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_addr,
|
||||
solvable=True,
|
||||
ismine=True,
|
||||
sigsrequired=2)
|
||||
|
||||
# P2SH-P2WPKH address with no redeemscript or public or private key
|
||||
key = get_key(self.nodes[0])
|
||||
self.log.info("Should import a p2sh-p2wpkh without redeem script or keys")
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2sh_p2wpkh_addr,
|
||||
solvable=False,
|
||||
ismine=False)
|
||||
|
||||
# P2SH-P2WPKH address + redeemscript + public key with no private key
|
||||
self.log.info("Should import a p2sh-p2wpkh with respective redeem script and pubkey as solvable")
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
|
||||
"timestamp": "now",
|
||||
"redeemscript": key.p2sh_p2wpkh_redeem_script,
|
||||
"pubkeys": [key.pubkey]},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2sh_p2wpkh_addr,
|
||||
solvable=True,
|
||||
ismine=False)
|
||||
|
||||
# P2SH-P2WPKH address + redeemscript + private key
|
||||
key = get_key(self.nodes[0])
|
||||
self.log.info("Should import a p2sh-p2wpkh with respective redeem script and private keys")
|
||||
self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
|
||||
"timestamp": "now",
|
||||
"redeemscript": key.p2sh_p2wpkh_redeem_script,
|
||||
"keys": [key.privkey]},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2sh_p2wpkh_addr,
|
||||
solvable=True,
|
||||
ismine=True)
|
||||
|
||||
# P2SH-P2WSH multisig + redeemscript with no private key
|
||||
multisig = get_multisig(self.nodes[0])
|
||||
self.log.info("Should import a p2sh-p2wsh with respective redeem script but no private key")
|
||||
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_p2wsh_addr},
|
||||
"timestamp": "now",
|
||||
"redeemscript": multisig.p2wsh_script,
|
||||
"witnessscript": multisig.redeem_script},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
multisig.p2sh_p2wsh_addr,
|
||||
solvable=True,
|
||||
ismine=False)
|
||||
|
||||
# Test importing of a P2SH-P2WPKH address via descriptor + private key
|
||||
key = get_key(self.nodes[0])
|
||||
self.log.info("Should not import a p2sh-p2wpkh address from descriptor without checksum and private key")
|
||||
self.test_importmulti({"desc": "sh(wpkh(" + key.pubkey + "))",
|
||||
"timestamp": "now",
|
||||
"label": "Unsuccessful P2SH-P2WPKH descriptor import",
|
||||
"keys": [key.privkey]},
|
||||
success=False,
|
||||
error_code=-5,
|
||||
error_message="Missing checksum")
|
||||
|
||||
# Test importing of a P2SH-P2WPKH address via descriptor + private key
|
||||
key = get_key(self.nodes[0])
|
||||
p2sh_p2wpkh_label = "Successful P2SH-P2WPKH descriptor import"
|
||||
self.log.info("Should import a p2sh-p2wpkh address from descriptor and private key")
|
||||
self.test_importmulti({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
|
||||
"timestamp": "now",
|
||||
"label": p2sh_p2wpkh_label,
|
||||
"keys": [key.privkey]},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
key.p2sh_p2wpkh_addr,
|
||||
solvable=True,
|
||||
ismine=True,
|
||||
labels=[p2sh_p2wpkh_label])
|
||||
|
||||
# Test ranged descriptor fails if range is not specified
|
||||
xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
|
||||
addresses = ["2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf", "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"] # hdkeypath=m/0'/0'/0' and 1'
|
||||
addresses += ["bcrt1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7scl8gn", "bcrt1qfqeppuvj0ww98r6qghmdkj70tv8qpchehegrg8"] # wpkh subscripts corresponding to the above addresses
|
||||
desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
|
||||
self.log.info("Ranged descriptor import should fail without a specified range")
|
||||
self.test_importmulti({"desc": descsum_create(desc),
|
||||
"timestamp": "now"},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Descriptor is ranged, please specify the range')
|
||||
|
||||
# Test importing of a ranged descriptor with xpriv
|
||||
self.log.info("Should import the ranged descriptor with specified range as solvable")
|
||||
self.test_importmulti({"desc": descsum_create(desc),
|
||||
"timestamp": "now",
|
||||
"range": 1},
|
||||
success=True)
|
||||
for address in addresses:
|
||||
test_address(self.nodes[1],
|
||||
address,
|
||||
solvable=True,
|
||||
ismine=True)
|
||||
|
||||
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
|
||||
success=False, error_code=-8, error_message='End of range is too high')
|
||||
|
||||
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
|
||||
success=False, error_code=-8, error_message='Range should be greater or equal than 0')
|
||||
|
||||
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
|
||||
success=False, error_code=-8, error_message='End of range is too high')
|
||||
|
||||
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
|
||||
success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
|
||||
|
||||
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
|
||||
success=False, error_code=-8, error_message='Range is too large')
|
||||
|
||||
# Test importing a descriptor containing a WIF private key
|
||||
wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
|
||||
address = "2MuhcG52uHPknxDgmGPsV18jSHFBnnRgjPg"
|
||||
desc = "sh(wpkh(" + wif_priv + "))"
|
||||
self.log.info("Should import a descriptor with a WIF private key as spendable")
|
||||
self.test_importmulti({"desc": descsum_create(desc),
|
||||
"timestamp": "now"},
|
||||
success=True)
|
||||
test_address(self.nodes[1],
|
||||
address,
|
||||
solvable=True,
|
||||
ismine=True)
|
||||
|
||||
# dump the private key to ensure it matches what was imported
|
||||
privkey = self.nodes[1].dumpprivkey(address)
|
||||
assert_equal(privkey, wif_priv)
|
||||
|
||||
# Test importing of a P2PKH address via descriptor
|
||||
key = get_key(self.nodes[0])
|
||||
p2pkh_label = "P2PKH descriptor import"
|
||||
self.log.info("Should import a p2pkh address from descriptor")
|
||||
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
|
||||
"timestamp": "now",
|
||||
"label": p2pkh_label},
|
||||
True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
test_address(self.nodes[1],
|
||||
key.p2pkh_addr,
|
||||
solvable=True,
|
||||
ismine=False,
|
||||
labels=[p2pkh_label])
|
||||
|
||||
# Test import fails if both desc and scriptPubKey are provided
|
||||
key = get_key(self.nodes[0])
|
||||
self.log.info("Import should fail if both scriptPubKey and desc are provided")
|
||||
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
|
||||
"scriptPubKey": {"address": key.p2pkh_addr},
|
||||
"timestamp": "now"},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Both a descriptor and a scriptPubKey should not be provided.')
|
||||
|
||||
# Test import fails if neither desc nor scriptPubKey are present
|
||||
key = get_key(self.nodes[0])
|
||||
self.log.info("Import should fail if neither a descriptor nor a scriptPubKey are provided")
|
||||
self.test_importmulti({"timestamp": "now"},
|
||||
success=False,
|
||||
error_code=-8,
|
||||
error_message='Either a descriptor or scriptPubKey must be provided.')
|
||||
|
||||
# Test importing of a multisig via descriptor
|
||||
key1 = get_key(self.nodes[0])
|
||||
key2 = get_key(self.nodes[0])
|
||||
self.log.info("Should import a 1-of-2 bare multisig from descriptor")
|
||||
self.test_importmulti({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
|
||||
"timestamp": "now"},
|
||||
success=True,
|
||||
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
|
||||
self.log.info("Should not treat individual keys from the imported bare multisig as watchonly")
|
||||
test_address(self.nodes[1],
|
||||
key1.p2pkh_addr,
|
||||
ismine=False,
|
||||
iswatchonly=False)
|
||||
|
||||
# Import pubkeys with key origin info
|
||||
self.log.info("Addresses should have hd keypath and master key id after import with key origin")
|
||||
pub_addr = self.nodes[1].getnewaddress()
|
||||
pub_addr = self.nodes[1].getnewaddress(address_type="bech32")
|
||||
info = self.nodes[1].getaddressinfo(pub_addr)
|
||||
pub = info['pubkey']
|
||||
pub_keypath = info['hdkeypath']
|
||||
pub_fpr = info['hdmasterfingerprint']
|
||||
result = self.nodes[0].importmulti(
|
||||
[{
|
||||
'desc' : descsum_create("wpkh([" + pub_fpr + pub_keypath[1:] +"]" + pub + ")"),
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
|
||||
assert_equal(pub_import_info['hdmasterfingerprint'], pub_fpr)
|
||||
assert_equal(pub_import_info['pubkey'], pub)
|
||||
assert_equal(pub_import_info['hdkeypath'], pub_keypath)
|
||||
|
||||
# Import privkeys with key origin info
|
||||
priv_addr = self.nodes[1].getnewaddress(address_type="bech32")
|
||||
info = self.nodes[1].getaddressinfo(priv_addr)
|
||||
priv = self.nodes[1].dumpprivkey(priv_addr)
|
||||
priv_keypath = info['hdkeypath']
|
||||
priv_fpr = info['hdmasterfingerprint']
|
||||
result = self.nodes[0].importmulti(
|
||||
[{
|
||||
'desc' : descsum_create("wpkh([" + priv_fpr + priv_keypath[1:] + "]" + priv + ")"),
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
priv_import_info = self.nodes[0].getaddressinfo(priv_addr)
|
||||
assert_equal(priv_import_info['hdmasterfingerprint'], priv_fpr)
|
||||
assert_equal(priv_import_info['hdkeypath'], priv_keypath)
|
||||
|
||||
# Make sure the key origin info are still there after a restart
|
||||
self.stop_nodes()
|
||||
self.start_nodes()
|
||||
import_info = self.nodes[0].getaddressinfo(pub_addr)
|
||||
assert_equal(import_info['hdmasterfingerprint'], pub_fpr)
|
||||
assert_equal(import_info['hdkeypath'], pub_keypath)
|
||||
import_info = self.nodes[0].getaddressinfo(priv_addr)
|
||||
assert_equal(import_info['hdmasterfingerprint'], priv_fpr)
|
||||
assert_equal(import_info['hdkeypath'], priv_keypath)
|
||||
|
||||
# Check legacy import does not import key origin info
|
||||
self.log.info("Legacy imports don't have key origin info")
|
||||
pub_addr = self.nodes[1].getnewaddress()
|
||||
info = self.nodes[1].getaddressinfo(pub_addr)
|
||||
pub = info['pubkey']
|
||||
result = self.nodes[0].importmulti(
|
||||
[{
|
||||
'scriptPubKey': {'address': pub_addr},
|
||||
'pubkeys': [pub],
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
|
||||
assert_equal(pub_import_info['pubkey'], pub)
|
||||
assert 'hdmasterfingerprint' not in pub_import_info
|
||||
assert 'hdkeypath' not in pub_import_info
|
||||
|
||||
# Bech32m addresses and descriptors cannot be imported
|
||||
self.log.info("Bech32m addresses and descriptors cannot be imported")
|
||||
self.test_importmulti(
|
||||
{
|
||||
"scriptPubKey": {"address": "bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqc8gma6"},
|
||||
"timestamp": "now",
|
||||
},
|
||||
success=False,
|
||||
error_code=-5,
|
||||
error_message="Bech32m addresses cannot be imported into legacy wallets",
|
||||
)
|
||||
self.test_importmulti(
|
||||
{
|
||||
"desc": descsum_create("tr({})".format(pub)),
|
||||
"timestamp": "now",
|
||||
},
|
||||
success=False,
|
||||
error_code=-5,
|
||||
error_message="Bech32m descriptors cannot be imported into legacy wallets",
|
||||
)
|
||||
|
||||
# Import some public keys to the keypool of a no privkey wallet
|
||||
self.log.info("Adding pubkey to keypool of disableprivkey wallet")
|
||||
self.nodes[1].createwallet(wallet_name="noprivkeys", disable_private_keys=True)
|
||||
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
|
||||
|
||||
addr1 = self.nodes[0].getnewaddress(address_type="bech32")
|
||||
addr2 = self.nodes[0].getnewaddress(address_type="bech32")
|
||||
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
|
||||
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
|
||||
result = wrpc.importmulti(
|
||||
[{
|
||||
'desc': descsum_create('wpkh(' + pub1 + ')'),
|
||||
'keypool': True,
|
||||
"timestamp": "now",
|
||||
},
|
||||
{
|
||||
'desc': descsum_create('wpkh(' + pub2 + ')'),
|
||||
'keypool': True,
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
assert result[1]['success']
|
||||
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 2)
|
||||
newaddr1 = wrpc.getnewaddress(address_type="bech32")
|
||||
assert_equal(addr1, newaddr1)
|
||||
newaddr2 = wrpc.getnewaddress(address_type="bech32")
|
||||
assert_equal(addr2, newaddr2)
|
||||
|
||||
# Import some public keys to the internal keypool of a no privkey wallet
|
||||
self.log.info("Adding pubkey to internal keypool of disableprivkey wallet")
|
||||
addr1 = self.nodes[0].getnewaddress(address_type="bech32")
|
||||
addr2 = self.nodes[0].getnewaddress(address_type="bech32")
|
||||
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
|
||||
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
|
||||
result = wrpc.importmulti(
|
||||
[{
|
||||
'desc': descsum_create('wpkh(' + pub1 + ')'),
|
||||
'keypool': True,
|
||||
'internal': True,
|
||||
"timestamp": "now",
|
||||
},
|
||||
{
|
||||
'desc': descsum_create('wpkh(' + pub2 + ')'),
|
||||
'keypool': True,
|
||||
'internal': True,
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
assert result[1]['success']
|
||||
assert_equal(wrpc.getwalletinfo()["keypoolsize_hd_internal"], 2)
|
||||
newaddr1 = wrpc.getrawchangeaddress(address_type="bech32")
|
||||
assert_equal(addr1, newaddr1)
|
||||
newaddr2 = wrpc.getrawchangeaddress(address_type="bech32")
|
||||
assert_equal(addr2, newaddr2)
|
||||
|
||||
# Import a multisig and make sure the keys don't go into the keypool
|
||||
self.log.info('Imported scripts with pubkeys should not have their pubkeys go into the keypool')
|
||||
addr1 = self.nodes[0].getnewaddress(address_type="bech32")
|
||||
addr2 = self.nodes[0].getnewaddress(address_type="bech32")
|
||||
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
|
||||
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
|
||||
result = wrpc.importmulti(
|
||||
[{
|
||||
'desc': descsum_create('wsh(multi(2,' + pub1 + ',' + pub2 + '))'),
|
||||
'keypool': True,
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
|
||||
|
||||
# Cannot import those pubkeys to keypool of wallet with privkeys
|
||||
self.log.info("Pubkeys cannot be added to the keypool of a wallet with private keys")
|
||||
wrpc = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
|
||||
assert wrpc.getwalletinfo()['private_keys_enabled']
|
||||
result = wrpc.importmulti(
|
||||
[{
|
||||
'desc': descsum_create('wpkh(' + pub1 + ')'),
|
||||
'keypool': True,
|
||||
"timestamp": "now",
|
||||
}]
|
||||
)
|
||||
assert_equal(result[0]['error']['code'], -8)
|
||||
assert_equal(result[0]['error']['message'], "Keys can only be imported to the keypool when private keys are disabled")
|
||||
|
||||
# Make sure ranged imports import keys in order
|
||||
self.log.info('Key ranges should be imported in order')
|
||||
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
|
||||
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
|
||||
assert_equal(wrpc.getwalletinfo()["private_keys_enabled"], False)
|
||||
xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
|
||||
addresses = [
|
||||
'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv', # m/0'/0'/0
|
||||
'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8', # m/0'/0'/1
|
||||
'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu', # m/0'/0'/2
|
||||
'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640', # m/0'/0'/3
|
||||
'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0', # m/0'/0'/4
|
||||
]
|
||||
result = wrpc.importmulti(
|
||||
[{
|
||||
'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
|
||||
'keypool': True,
|
||||
'timestamp': 'now',
|
||||
'range' : [0, 4],
|
||||
}]
|
||||
)
|
||||
for i in range(0, 5):
|
||||
addr = wrpc.getnewaddress('', 'bech32')
|
||||
assert_equal(addr, addresses[i])
|
||||
|
||||
# Create wallet with passphrase
|
||||
self.log.info('Test watchonly imports on a wallet with a passphrase, without unlocking')
|
||||
self.nodes[1].createwallet(wallet_name='w1', blank=True, passphrase='pass')
|
||||
wrpc = self.nodes[1].get_wallet_rpc('w1')
|
||||
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first.",
|
||||
wrpc.importmulti, [{
|
||||
'desc': descsum_create('wpkh(' + pub1 + ')'),
|
||||
"timestamp": "now",
|
||||
}])
|
||||
|
||||
result = wrpc.importmulti(
|
||||
[{
|
||||
'desc': descsum_create('wpkh(' + pub1 + ')'),
|
||||
"timestamp": "now",
|
||||
"watchonly": True,
|
||||
}]
|
||||
)
|
||||
assert result[0]['success']
|
||||
|
||||
self.log.info("Multipath descriptors")
|
||||
self.nodes[1].createwallet(wallet_name="multipath", blank=True, disable_private_keys=True)
|
||||
w_multipath = self.nodes[1].get_wallet_rpc("multipath")
|
||||
self.nodes[1].createwallet(wallet_name="multipath_split", blank=True, disable_private_keys=True)
|
||||
w_multisplit = self.nodes[1].get_wallet_rpc("multipath_split")
|
||||
|
||||
res = w_multipath.importmulti([{"desc": descsum_create(f"wpkh({xpub}/<10;20>/0/*)"),
|
||||
"keypool": True,
|
||||
"range": 10,
|
||||
"timestamp": "now",
|
||||
"internal": True}])
|
||||
assert_equal(res[0]["success"], False)
|
||||
assert_equal(res[0]["error"]["code"], -5)
|
||||
assert_equal(res[0]["error"]["message"], "Cannot have multipath descriptor while also specifying 'internal'")
|
||||
|
||||
res = w_multipath.importmulti([{"desc": descsum_create(f"wpkh({xpub}/<10;20>/0/*)"),
|
||||
"keypool": True,
|
||||
"range": 10,
|
||||
"timestamp": "now"}])
|
||||
assert_equal(res[0]["success"], True)
|
||||
|
||||
res = w_multisplit.importmulti([{"desc": descsum_create(f"wpkh({xpub}/10/0/*)"),
|
||||
"keypool": True,
|
||||
"range": 10,
|
||||
"timestamp": "now"}])
|
||||
assert_equal(res[0]["success"], True)
|
||||
res = w_multisplit.importmulti([{"desc": descsum_create(f"wpkh({xpub}/20/0/*)"),
|
||||
"keypool": True,
|
||||
"range": 10,
|
||||
"internal": True,
|
||||
"timestamp": timestamp}])
|
||||
assert_equal(res[0]["success"], True)
|
||||
|
||||
for _ in range(0, 9):
|
||||
assert_equal(w_multipath.getnewaddress(address_type="bech32"), w_multisplit.getnewaddress(address_type="bech32"))
|
||||
assert_equal(w_multipath.getrawchangeaddress(address_type="bech32"), w_multisplit.getrawchangeaddress(address_type="bech32"))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
ImportMultiTest(__file__).main()
|
|
@ -20,9 +20,6 @@ from test_framework.wallet_util import generate_keypair
|
|||
|
||||
|
||||
class ImportPrunedFundsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
|
@ -109,12 +106,8 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
|
|||
assert_equal(address_info['iswatchonly'], False)
|
||||
assert_equal(address_info['ismine'], False)
|
||||
address_info = wwatch.getaddressinfo(address2)
|
||||
if self.options.descriptors:
|
||||
assert_equal(address_info['iswatchonly'], False)
|
||||
assert_equal(address_info['ismine'], True)
|
||||
else:
|
||||
assert_equal(address_info['iswatchonly'], True)
|
||||
assert_equal(address_info['ismine'], False)
|
||||
address_info = w1.getaddressinfo(address3)
|
||||
assert_equal(address_info['iswatchonly'], False)
|
||||
assert_equal(address_info['ismine'], True)
|
||||
|
|
|
@ -1,149 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2021-2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""
|
||||
Test Inactive HD Chains.
|
||||
"""
|
||||
import shutil
|
||||
|
||||
from test_framework.authproxy import JSONRPCException
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.wallet_util import (
|
||||
get_generate_key,
|
||||
)
|
||||
|
||||
|
||||
class InactiveHDChainsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
self.extra_args = [["-keypool=10"], ["-nowallet", "-keypool=10"]]
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
self.skip_if_no_bdb()
|
||||
self.skip_if_no_previous_releases()
|
||||
|
||||
def setup_nodes(self):
|
||||
self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
|
||||
None,
|
||||
170200, # 0.17.2 Does not have the key metadata upgrade
|
||||
])
|
||||
|
||||
self.start_nodes()
|
||||
self.init_wallet(node=0)
|
||||
|
||||
def prepare_wallets(self, wallet_basename, encrypt=False):
|
||||
self.nodes[0].createwallet(wallet_name=f"{wallet_basename}_base", descriptors=False, blank=True)
|
||||
self.nodes[0].createwallet(wallet_name=f"{wallet_basename}_test", descriptors=False, blank=True)
|
||||
base_wallet = self.nodes[0].get_wallet_rpc(f"{wallet_basename}_base")
|
||||
test_wallet = self.nodes[0].get_wallet_rpc(f"{wallet_basename}_test")
|
||||
|
||||
# Setup both wallets with the same HD seed
|
||||
seed = get_generate_key()
|
||||
base_wallet.sethdseed(True, seed.privkey)
|
||||
test_wallet.sethdseed(True, seed.privkey)
|
||||
|
||||
if encrypt:
|
||||
# Encrypting will generate a new HD seed and flush the keypool
|
||||
test_wallet.encryptwallet("pass")
|
||||
else:
|
||||
# Generate a new HD seed on the test wallet
|
||||
test_wallet.sethdseed()
|
||||
|
||||
return base_wallet, test_wallet
|
||||
|
||||
def do_inactive_test(self, base_wallet, test_wallet, encrypt=False):
|
||||
default = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
|
||||
|
||||
# The first address should be known by both wallets.
|
||||
addr1 = base_wallet.getnewaddress()
|
||||
assert test_wallet.getaddressinfo(addr1)["ismine"]
|
||||
# The address at index 9 is the first address that the test wallet will not know initially
|
||||
for _ in range(0, 9):
|
||||
base_wallet.getnewaddress()
|
||||
addr2 = base_wallet.getnewaddress()
|
||||
assert not test_wallet.getaddressinfo(addr2)["ismine"]
|
||||
|
||||
# Send to first address on the old seed
|
||||
txid = default.sendtoaddress(addr1, 10)
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
# Wait for the test wallet to see the transaction
|
||||
def is_tx_available(txid):
|
||||
try:
|
||||
test_wallet.gettransaction(txid)
|
||||
return True
|
||||
except JSONRPCException:
|
||||
return False
|
||||
self.nodes[0].wait_until(lambda: is_tx_available(txid), timeout=10, check_interval=0.1)
|
||||
|
||||
if encrypt:
|
||||
# The test wallet will not be able to generate the topped up keypool
|
||||
# until it is unlocked. So it still should not know about the second address
|
||||
assert not test_wallet.getaddressinfo(addr2)["ismine"]
|
||||
test_wallet.walletpassphrase("pass", 1)
|
||||
|
||||
# The test wallet should now know about the second address as it
|
||||
# should have generated it in the inactive chain's keypool
|
||||
assert test_wallet.getaddressinfo(addr2)["ismine"]
|
||||
|
||||
# Send to second address on the old seed
|
||||
txid = default.sendtoaddress(addr2, 10)
|
||||
self.generate(self.nodes[0], 1)
|
||||
test_wallet.gettransaction(txid)
|
||||
|
||||
def test_basic(self):
|
||||
self.log.info("Test basic case for inactive HD chains")
|
||||
self.do_inactive_test(*self.prepare_wallets("basic"))
|
||||
|
||||
def test_encrypted_wallet(self):
|
||||
self.log.info("Test inactive HD chains when wallet is encrypted")
|
||||
self.do_inactive_test(*self.prepare_wallets("enc", encrypt=True), encrypt=True)
|
||||
|
||||
def test_without_upgraded_keymeta(self):
|
||||
# Test that it is possible to top up inactive hd chains even if there is no key origin
|
||||
# in CKeyMetadata. This tests for the segfault reported in
|
||||
# https://github.com/bitcoin/bitcoin/issues/21605
|
||||
self.log.info("Test that topping up inactive HD chains does not need upgraded key origin")
|
||||
|
||||
self.nodes[0].createwallet(wallet_name="keymeta_base", descriptors=False, blank=True)
|
||||
# Createwallet is overridden in the test framework so that the descriptor option can be filled
|
||||
# depending on the test's cli args. However we don't want to do that when using old nodes that
|
||||
# do not support descriptors. So we use the createwallet_passthrough function.
|
||||
self.nodes[1].createwallet_passthrough(wallet_name="keymeta_test")
|
||||
base_wallet = self.nodes[0].get_wallet_rpc("keymeta_base")
|
||||
test_wallet = self.nodes[1].get_wallet_rpc("keymeta_test")
|
||||
|
||||
# Setup both wallets with the same HD seed
|
||||
seed = get_generate_key()
|
||||
base_wallet.sethdseed(True, seed.privkey)
|
||||
test_wallet.sethdseed(True, seed.privkey)
|
||||
|
||||
# Encrypting will generate a new HD seed and flush the keypool
|
||||
test_wallet.encryptwallet("pass")
|
||||
|
||||
# Copy test wallet to node 0
|
||||
test_wallet.unloadwallet()
|
||||
test_wallet_dir = self.nodes[1].wallets_path / "keymeta_test"
|
||||
new_test_wallet_dir = self.nodes[0].wallets_path / "keymeta_test"
|
||||
shutil.copytree(test_wallet_dir, new_test_wallet_dir)
|
||||
self.nodes[0].loadwallet("keymeta_test")
|
||||
test_wallet = self.nodes[0].get_wallet_rpc("keymeta_test")
|
||||
|
||||
self.do_inactive_test(base_wallet, test_wallet, encrypt=True)
|
||||
|
||||
def run_test(self):
|
||||
self.generate(self.nodes[0], 101)
|
||||
|
||||
self.test_basic()
|
||||
self.test_encrypted_wallet()
|
||||
self.test_without_upgraded_keymeta()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
InactiveHDChainsTest(__file__).main()
|
|
@ -16,9 +16,6 @@ from test_framework.util import (
|
|||
from test_framework.wallet_util import WalletUnlock
|
||||
|
||||
class KeyPoolTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
|
@ -29,13 +26,9 @@ class KeyPoolTest(BitcoinTestFramework):
|
|||
nodes = self.nodes
|
||||
addr_before_encrypting = nodes[0].getnewaddress()
|
||||
addr_before_encrypting_data = nodes[0].getaddressinfo(addr_before_encrypting)
|
||||
wallet_info_old = nodes[0].getwalletinfo()
|
||||
if not self.options.descriptors:
|
||||
assert addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']
|
||||
|
||||
# Encrypt wallet and wait to terminate
|
||||
nodes[0].encryptwallet('test')
|
||||
if self.options.descriptors:
|
||||
# Import hardened derivation only descriptors
|
||||
nodes[0].walletpassphrase('test', 10)
|
||||
nodes[0].importdescriptors([
|
||||
|
@ -83,22 +76,15 @@ class KeyPoolTest(BitcoinTestFramework):
|
|||
# Keep creating keys
|
||||
addr = nodes[0].getnewaddress()
|
||||
addr_data = nodes[0].getaddressinfo(addr)
|
||||
wallet_info = nodes[0].getwalletinfo()
|
||||
assert_not_equal(addr_before_encrypting_data['hdmasterfingerprint'], addr_data['hdmasterfingerprint'])
|
||||
if not self.options.descriptors:
|
||||
assert addr_data['hdseedid'] == wallet_info['hdseedid']
|
||||
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
|
||||
|
||||
# put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
|
||||
with WalletUnlock(nodes[0], 'test'):
|
||||
nodes[0].keypoolrefill(6)
|
||||
wi = nodes[0].getwalletinfo()
|
||||
if self.options.descriptors:
|
||||
assert_equal(wi['keypoolsize_hd_internal'], 24)
|
||||
assert_equal(wi['keypoolsize'], 24)
|
||||
else:
|
||||
assert_equal(wi['keypoolsize_hd_internal'], 6)
|
||||
assert_equal(wi['keypoolsize'], 6)
|
||||
|
||||
# drain the internal keys
|
||||
nodes[0].getrawchangeaddress()
|
||||
|
@ -152,26 +138,8 @@ class KeyPoolTest(BitcoinTestFramework):
|
|||
with WalletUnlock(nodes[0], 'test'):
|
||||
nodes[0].keypoolrefill(100)
|
||||
wi = nodes[0].getwalletinfo()
|
||||
if self.options.descriptors:
|
||||
assert_equal(wi['keypoolsize_hd_internal'], 400)
|
||||
assert_equal(wi['keypoolsize'], 400)
|
||||
else:
|
||||
assert_equal(wi['keypoolsize_hd_internal'], 100)
|
||||
assert_equal(wi['keypoolsize'], 100)
|
||||
|
||||
if not self.options.descriptors:
|
||||
# Check that newkeypool entirely flushes the keypool
|
||||
start_keypath = nodes[0].getaddressinfo(nodes[0].getnewaddress())['hdkeypath']
|
||||
start_change_keypath = nodes[0].getaddressinfo(nodes[0].getrawchangeaddress())['hdkeypath']
|
||||
# flush keypool and get new addresses
|
||||
nodes[0].newkeypool()
|
||||
end_keypath = nodes[0].getaddressinfo(nodes[0].getnewaddress())['hdkeypath']
|
||||
end_change_keypath = nodes[0].getaddressinfo(nodes[0].getrawchangeaddress())['hdkeypath']
|
||||
# The new keypath index should be 100 more than the old one
|
||||
new_index = int(start_keypath.rsplit('/', 1)[1][:-1]) + 100
|
||||
new_change_index = int(start_change_keypath.rsplit('/', 1)[1][:-1]) + 100
|
||||
assert_equal(end_keypath, "m/0'/0'/" + str(new_index) + "'")
|
||||
assert_equal(end_change_keypath, "m/0'/1'/" + str(new_change_index) + "'")
|
||||
|
||||
# create a blank wallet
|
||||
nodes[0].createwallet(wallet_name='w2', blank=True, disable_private_keys=True)
|
||||
|
@ -183,10 +151,7 @@ class KeyPoolTest(BitcoinTestFramework):
|
|||
# import private key and fund it
|
||||
address = addr.pop()
|
||||
desc = w1.getaddressinfo(address)['desc']
|
||||
if self.options.descriptors:
|
||||
res = w2.importdescriptors([{'desc': desc, 'timestamp': 'now'}])
|
||||
else:
|
||||
res = w2.importmulti([{'desc': desc, 'timestamp': 'now'}])
|
||||
assert_equal(res[0]['success'], True)
|
||||
|
||||
with WalletUnlock(w1, 'test'):
|
||||
|
@ -222,9 +187,5 @@ class KeyPoolTest(BitcoinTestFramework):
|
|||
res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], subtractFeeFromOutputs=[0], feeRate=0.00010, changeAddress=addr.pop())
|
||||
assert_equal("psbt" in res, True)
|
||||
|
||||
if not self.options.descriptors:
|
||||
msg = "Error: Private keys are disabled for this wallet"
|
||||
assert_raises_rpc_error(-4, msg, w2.keypoolrefill, 100)
|
||||
|
||||
if __name__ == '__main__':
|
||||
KeyPoolTest(__file__).main()
|
||||
|
|
|
@ -20,9 +20,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class KeypoolRestoreTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 5
|
||||
|
@ -45,9 +42,7 @@ class KeypoolRestoreTest(BitcoinTestFramework):
|
|||
for i in [1, 2, 3, 4]:
|
||||
self.connect_nodes(0, i)
|
||||
|
||||
output_types = ["legacy", "p2sh-segwit", "bech32"]
|
||||
if self.options.descriptors:
|
||||
output_types.append("bech32m")
|
||||
output_types = ["legacy", "p2sh-segwit", "bech32", "bech32m"]
|
||||
for i, output_type in enumerate(output_types):
|
||||
self.log.info("Generate keys for wallet with address type: {}".format(output_type))
|
||||
idx = i+1
|
||||
|
@ -84,7 +79,6 @@ class KeypoolRestoreTest(BitcoinTestFramework):
|
|||
assert_equal(self.nodes[idx].getbalance(), 15)
|
||||
assert_equal(self.nodes[idx].listtransactions()[0]['category'], "receive")
|
||||
# Check that we have marked all keys up to the used keypool key as used
|
||||
if self.options.descriptors:
|
||||
if output_type == 'legacy':
|
||||
assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/44h/1h/0h/0/110")
|
||||
elif output_type == 'p2sh-segwit':
|
||||
|
@ -93,8 +87,6 @@ class KeypoolRestoreTest(BitcoinTestFramework):
|
|||
assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/84h/1h/0h/0/110")
|
||||
elif output_type == 'bech32m':
|
||||
assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/86h/1h/0h/0/110")
|
||||
else:
|
||||
assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/0'/0'/110'")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -18,9 +18,6 @@ from test_framework.wallet_util import test_address
|
|||
|
||||
|
||||
class WalletLabelsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
|
@ -41,23 +38,11 @@ class WalletLabelsTest(BitcoinTestFramework):
|
|||
[node.getreceivedbylabel],
|
||||
[node.listsinceblock, node.getblockhash(0), 1, False, True, False],
|
||||
]
|
||||
if self.options.descriptors:
|
||||
response = node.importdescriptors([{
|
||||
'desc': f'pkh({pubkey})',
|
||||
'label': '*',
|
||||
'timestamp': 'now',
|
||||
}])
|
||||
else:
|
||||
rpc_calls.extend([
|
||||
[node.importprivkey, node.dumpprivkey(address)],
|
||||
[node.importaddress, address],
|
||||
])
|
||||
|
||||
response = node.importmulti([{
|
||||
'scriptPubKey': {'address': address},
|
||||
'label': '*',
|
||||
'timestamp': 'now',
|
||||
}])
|
||||
|
||||
assert_equal(response[0]['success'], False)
|
||||
assert_equal(response[0]['error']['code'], -11)
|
||||
|
@ -166,18 +151,6 @@ class WalletLabelsTest(BitcoinTestFramework):
|
|||
label.verify(node)
|
||||
assert_raises_rpc_error(-11, "No addresses with label", node.getaddressesbylabel, "")
|
||||
|
||||
# Check that addmultisigaddress can assign labels.
|
||||
if not self.options.descriptors:
|
||||
for label in labels:
|
||||
addresses = []
|
||||
for _ in range(10):
|
||||
addresses.append(node.getnewaddress())
|
||||
multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
|
||||
label.add_address(multisig_address)
|
||||
label.purpose[multisig_address] = "send"
|
||||
label.verify(node)
|
||||
self.generate(node, COINBASE_MATURITY + 1)
|
||||
|
||||
# Check that setlabel can change the label of an address from a
|
||||
# different label.
|
||||
change_label(node, labels[0].addresses[0], labels[0], labels[1])
|
||||
|
@ -188,7 +161,6 @@ class WalletLabelsTest(BitcoinTestFramework):
|
|||
|
||||
self.invalid_label_name_test()
|
||||
|
||||
if self.options.descriptors:
|
||||
# This is a descriptor wallet test because of segwit v1+ addresses
|
||||
self.log.info('Check watchonly labels')
|
||||
node.createwallet(wallet_name='watch_only', disable_private_keys=True)
|
||||
|
@ -212,7 +184,7 @@ class WalletLabelsTest(BitcoinTestFramework):
|
|||
ad = BECH32_INVALID[l]
|
||||
assert_raises_rpc_error(
|
||||
-5,
|
||||
"Address is not valid" if self.options.descriptors else "Invalid Bitcoin address or script",
|
||||
"Address is not valid",
|
||||
lambda: wallet_watch_only.importaddress(label=l, rescan=False, address=ad),
|
||||
)
|
||||
|
||||
|
|
|
@ -19,9 +19,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class ListDescriptorsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
|
@ -36,11 +33,6 @@ class ListDescriptorsTest(BitcoinTestFramework):
|
|||
node = self.nodes[0]
|
||||
assert_raises_rpc_error(-18, 'No wallet is loaded.', node.listdescriptors)
|
||||
|
||||
if self.is_bdb_compiled():
|
||||
self.log.info('Test that the command is not available for legacy wallets.')
|
||||
node.createwallet(wallet_name='w1', descriptors=False)
|
||||
assert_raises_rpc_error(-4, 'listdescriptors is not available for non-descriptor wallets', node.listdescriptors)
|
||||
|
||||
self.log.info('Test the command for empty descriptors wallet.')
|
||||
node.createwallet(wallet_name='w2', blank=True, descriptors=True)
|
||||
assert_equal(0, len(node.get_wallet_rpc('w2').listdescriptors()['descriptors']))
|
||||
|
|
|
@ -16,9 +16,6 @@ from test_framework.wallet_util import test_address
|
|||
|
||||
|
||||
class ReceivedByTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
# whitelist peers to speed up tx relay / mempool sync
|
||||
|
|
|
@ -19,9 +19,6 @@ from test_framework.wallet_util import generate_keypair
|
|||
from decimal import Decimal
|
||||
|
||||
class ListSinceBlockTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 4
|
||||
self.setup_clean_chain = True
|
||||
|
@ -45,7 +42,6 @@ class ListSinceBlockTest(BitcoinTestFramework):
|
|||
self.test_double_send()
|
||||
self.double_spends_filtered()
|
||||
self.test_targetconfirmations()
|
||||
if self.options.descriptors:
|
||||
self.test_desc()
|
||||
self.test_send_to_self()
|
||||
self.test_op_return()
|
||||
|
|
|
@ -22,9 +22,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class ListTransactionsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
# whitelist peers to speed up tx relay / mempool sync
|
||||
|
@ -95,20 +92,6 @@ class ListTransactionsTest(BitcoinTestFramework):
|
|||
{"category": "receive", "amount": Decimal("0.44")},
|
||||
{"txid": txid})
|
||||
|
||||
if not self.options.descriptors:
|
||||
# include_watchonly is a legacy wallet feature, so don't test it for descriptor wallets
|
||||
self.log.info("Test 'include_watchonly' feature (legacy wallet)")
|
||||
pubkey = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
|
||||
multisig = self.nodes[1].createmultisig(1, [pubkey])
|
||||
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
|
||||
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
|
||||
self.generate(self.nodes[1], 1)
|
||||
assert_equal(len(self.nodes[0].listtransactions(label="watchonly", include_watchonly=True)), 1)
|
||||
assert len(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=False)) == 0
|
||||
assert_array_result(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=True),
|
||||
{"category": "receive", "amount": Decimal("0.1")},
|
||||
{"txid": txid, "label": "watchonly"})
|
||||
|
||||
self.run_rbf_opt_in_test()
|
||||
self.run_externally_generated_address_test()
|
||||
self.run_coinjoin_test()
|
||||
|
|
|
@ -35,9 +35,6 @@ from test_framework.wallet_util import (
|
|||
|
||||
|
||||
class WalletMigrationTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
|
@ -909,22 +906,6 @@ class WalletMigrationTest(BitcoinTestFramework):
|
|||
_, _, magic = struct.unpack("QII", data)
|
||||
assert_equal(magic, BTREE_MAGIC)
|
||||
|
||||
####################################################
|
||||
# Perform the same test with a loaded legacy wallet.
|
||||
# The wallet should remain loaded after the failure.
|
||||
#
|
||||
# This applies only when BDB is enabled, as the user
|
||||
# cannot interact with the legacy wallet database
|
||||
# without BDB support.
|
||||
if self.is_bdb_compiled() is not None:
|
||||
# Advance time to generate a different backup name
|
||||
self.master_node.setmocktime(self.master_node.getblockheader(self.master_node.getbestblockhash())['time'] + 100)
|
||||
assert "failed" not in self.master_node.listwallets()
|
||||
self.master_node.loadwallet("failed")
|
||||
assert_raises_rpc_error(-4, "Failed to create database", self.master_node.migratewallet, "failed")
|
||||
wallets = self.master_node.listwallets()
|
||||
assert "failed" in wallets and all(wallet not in wallets for wallet in ["failed_watchonly", "failed_solvables"])
|
||||
|
||||
def test_blank(self):
|
||||
self.log.info("Test that a blank wallet is migrated")
|
||||
wallet = self.create_legacy_wallet("blank", blank=True)
|
||||
|
|
|
@ -203,9 +203,6 @@ DESCS_PRIV = [
|
|||
|
||||
|
||||
class WalletMiniscriptTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.rpc_timeout = 180
|
||||
|
|
|
@ -18,9 +18,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class WalletMiniscriptDecayingMultisigDescriptorPSBTTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.setup_clean_chain = True
|
||||
|
|
|
@ -15,9 +15,6 @@ from test_framework.util import (
|
|||
|
||||
|
||||
class WalletMultisigDescriptorPSBTTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
self.setup_clean_chain = True
|
||||
|
|
|
@ -52,7 +52,6 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
self.skip_if_no_wallet()
|
||||
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
parser.add_argument(
|
||||
'--data_wallets_dir',
|
||||
default=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/wallets/'),
|
||||
|
@ -119,8 +118,6 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
in_wallet_dir.append('w7') # w7 is not loaded or created, but will be listed by listwalletdir because w7_symlink
|
||||
to_create.append(os.path.join(self.options.tmpdir, 'extern/w6')) # External, not in the wallet dir, so we need to avoid adding it to in_wallet_dir
|
||||
to_load = [self.default_wallet_name]
|
||||
if not self.options.descriptors:
|
||||
to_load.append('w8')
|
||||
wallet_names = to_create + to_load # Wallet names loaded in the wallet
|
||||
in_wallet_dir += to_load # The loaded wallets are also in the wallet dir
|
||||
self.start_node(0)
|
||||
|
@ -142,7 +139,7 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
assert_equal(set(node.listwallets()), set(wallet_names))
|
||||
|
||||
# should raise rpc error if wallet path can't be created
|
||||
err_code = -4 if self.options.descriptors else -1
|
||||
err_code = -4
|
||||
assert_raises_rpc_error(err_code, "filesystem error:" if platform.system() != 'Windows' else "create_directories:", self.nodes[0].createwallet, "w8/bad")
|
||||
|
||||
# check that all requested wallets were created
|
||||
|
@ -157,14 +154,6 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
self.start_node(0, ['-wallet=w1', '-wallet=w1'])
|
||||
self.stop_node(0, 'Warning: Ignoring duplicate -wallet w1.')
|
||||
|
||||
if not self.options.descriptors:
|
||||
# Only BDB doesn't open duplicate wallet files. SQLite does not have this limitation. While this may be desired in the future, it is not necessary
|
||||
# should not initialize if one wallet is a copy of another
|
||||
shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
|
||||
in_wallet_dir.append('w8_copy')
|
||||
exp_stderr = r"BerkeleyDatabase: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
|
||||
self.nodes[0].assert_start_raises_init_error(['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
|
||||
|
||||
# should not initialize if wallet file is a symlink
|
||||
os.symlink('w8', wallet_dir('w8_symlink'))
|
||||
self.nodes[0].assert_start_raises_init_error(['-wallet=w8_symlink'], r'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX)
|
||||
|
@ -203,10 +192,7 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
os.mkdir(competing_wallet_dir)
|
||||
self.restart_node(0, ['-nowallet', '-walletdir=' + competing_wallet_dir])
|
||||
self.nodes[0].createwallet(self.default_wallet_name)
|
||||
if self.options.descriptors:
|
||||
exp_stderr = f"Error: SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another instance of {self.config['environment']['CLIENT_NAME']}?"
|
||||
else:
|
||||
exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\S*\"!"
|
||||
self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
|
||||
|
||||
self.restart_node(0)
|
||||
|
@ -304,19 +290,6 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
|
||||
# Fail to load duplicate wallets
|
||||
assert_raises_rpc_error(-35, "Wallet \"w1\" is already loaded.", self.nodes[0].loadwallet, wallet_names[0])
|
||||
if not self.options.descriptors:
|
||||
# This tests the default wallet that BDB makes, so SQLite wallet doesn't need to test this
|
||||
# Fail to load duplicate wallets by different ways (directory and filepath)
|
||||
path = wallet_dir("wallet.dat")
|
||||
assert_raises_rpc_error(-35, "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded.".format(path), self.nodes[0].loadwallet, 'wallet.dat')
|
||||
|
||||
# Only BDB doesn't open duplicate wallet files. SQLite does not have this limitation. While this may be desired in the future, it is not necessary
|
||||
# Fail to load if one wallet is a copy of another
|
||||
assert_raises_rpc_error(-4, "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
|
||||
|
||||
# Fail to load if one wallet is a copy of another, test this twice to make sure that we don't re-introduce #14304
|
||||
assert_raises_rpc_error(-4, "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
|
||||
|
||||
# Fail to load if wallet file is a symlink
|
||||
assert_raises_rpc_error(-4, "Wallet file verification failed. Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
|
||||
|
||||
|
@ -413,10 +386,7 @@ class MultiWalletTest(BitcoinTestFramework):
|
|||
self.start_node(1)
|
||||
wallet = os.path.join(self.options.tmpdir, 'my_wallet')
|
||||
self.nodes[0].createwallet(wallet)
|
||||
if self.options.descriptors:
|
||||
assert_raises_rpc_error(-4, "Unable to obtain an exclusive lock", self.nodes[1].loadwallet, wallet)
|
||||
else:
|
||||
assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet)
|
||||
self.nodes[0].unloadwallet(wallet)
|
||||
self.nodes[1].loadwallet(wallet)
|
||||
|
||||
|
|
|
@ -8,9 +8,6 @@ from test_framework.test_framework import BitcoinTestFramework
|
|||
from test_framework.util import assert_equal
|
||||
|
||||
class OrphanedBlockRewardTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
|
|
|
@ -1,158 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2022 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
"""Test wallet import on pruned node."""
|
||||
|
||||
from test_framework.util import assert_equal, assert_raises_rpc_error
|
||||
from test_framework.blocktools import (
|
||||
COINBASE_MATURITY,
|
||||
create_block
|
||||
)
|
||||
from test_framework.blocktools import create_coinbase
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
|
||||
from test_framework.script import (
|
||||
CScript,
|
||||
OP_RETURN,
|
||||
OP_TRUE,
|
||||
)
|
||||
|
||||
class WalletPruningTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, descriptors=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.setup_clean_chain = True
|
||||
self.num_nodes = 2
|
||||
self.wallet_names = []
|
||||
self.extra_args = [
|
||||
[], # node dedicated to mining
|
||||
['-prune=550'], # node dedicated to testing pruning
|
||||
]
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
self.skip_if_no_bdb()
|
||||
|
||||
def mine_large_blocks(self, node, n):
|
||||
# Get the block parameters for the first block
|
||||
best_block = node.getblockheader(node.getbestblockhash())
|
||||
height = int(best_block["height"]) + 1
|
||||
self.nTime = max(self.nTime, int(best_block["time"])) + 1
|
||||
previousblockhash = int(best_block["hash"], 16)
|
||||
big_script = CScript([OP_RETURN] + [OP_TRUE] * 950000)
|
||||
# Set mocktime to accept all future blocks
|
||||
for i in self.nodes:
|
||||
if i.running:
|
||||
i.setmocktime(self.nTime + 600 * n)
|
||||
for _ in range(n):
|
||||
block = create_block(hashprev=previousblockhash, ntime=self.nTime, coinbase=create_coinbase(height, script_pubkey=big_script))
|
||||
block.solve()
|
||||
|
||||
# Submit to the node
|
||||
node.submitblock(block.serialize().hex())
|
||||
|
||||
previousblockhash = block.sha256
|
||||
height += 1
|
||||
|
||||
# Simulate 10 minutes of work time per block
|
||||
# Important for matching a timestamp with a block +- some window
|
||||
self.nTime += 600
|
||||
self.sync_all()
|
||||
|
||||
def test_wallet_import_pruned(self, wallet_name):
|
||||
self.log.info("Make sure we can import wallet when pruned and required blocks are still available")
|
||||
|
||||
wallet_file = wallet_name + ".dat"
|
||||
wallet_birthheight = self.get_birthheight(wallet_file)
|
||||
|
||||
# Verify that the block at wallet's birthheight is available at the pruned node
|
||||
self.nodes[1].getblock(self.nodes[1].getblockhash(wallet_birthheight))
|
||||
|
||||
# Import wallet into pruned node
|
||||
self.nodes[1].createwallet(wallet_name="wallet_pruned", descriptors=False, load_on_startup=True)
|
||||
self.nodes[1].importwallet(self.nodes[0].datadir_path / wallet_file)
|
||||
|
||||
# Make sure that prune node's wallet correctly accounts for balances
|
||||
assert_equal(self.nodes[1].getbalance(), self.nodes[0].getbalance())
|
||||
|
||||
self.log.info("- Done")
|
||||
|
||||
def test_wallet_import_pruned_with_missing_blocks(self, wallet_name):
|
||||
self.log.info("Make sure we cannot import wallet when pruned and required blocks are not available")
|
||||
|
||||
wallet_file = wallet_name + ".dat"
|
||||
wallet_birthheight = self.get_birthheight(wallet_file)
|
||||
|
||||
# Verify that the block at wallet's birthheight is not available at the pruned node
|
||||
assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[1].getblock, self.nodes[1].getblockhash(wallet_birthheight))
|
||||
|
||||
# Make sure wallet cannot be imported because of missing blocks
|
||||
# This will try to rescan blocks `TIMESTAMP_WINDOW` (2h) before the wallet birthheight.
|
||||
# There are 6 blocks an hour, so 11 blocks (excluding birthheight).
|
||||
assert_raises_rpc_error(-4, f"Pruned blocks from height {wallet_birthheight - 11} required to import keys. Use RPC call getblockchaininfo to determine your pruned height.", self.nodes[1].importwallet, self.nodes[0].datadir_path / wallet_file)
|
||||
self.log.info("- Done")
|
||||
|
||||
def get_birthheight(self, wallet_file):
|
||||
"""Gets birthheight of a wallet on node0"""
|
||||
with open(self.nodes[0].datadir_path / wallet_file, 'r', encoding="utf8") as f:
|
||||
for line in f:
|
||||
if line.startswith('# * Best block at time of backup'):
|
||||
wallet_birthheight = int(line.split(' ')[9])
|
||||
return wallet_birthheight
|
||||
|
||||
def has_block(self, block_index):
|
||||
"""Checks if the pruned node has the specific blk0000*.dat file"""
|
||||
return (self.nodes[1].blocks_path / f"blk{block_index:05}.dat").is_file()
|
||||
|
||||
def create_wallet(self, wallet_name, *, unload=False):
|
||||
"""Creates and dumps a wallet on the non-pruned node0 to be later import by the pruned node"""
|
||||
self.nodes[0].createwallet(wallet_name=wallet_name, descriptors=False, load_on_startup=True)
|
||||
self.nodes[0].dumpwallet(self.nodes[0].datadir_path / f"{wallet_name}.dat")
|
||||
if (unload):
|
||||
self.nodes[0].unloadwallet(wallet_name)
|
||||
|
||||
def run_test(self):
|
||||
self.nTime = 0
|
||||
self.log.info("Warning! This test requires ~1.3GB of disk space")
|
||||
|
||||
self.log.info("Generating a long chain of blocks...")
|
||||
|
||||
# A blk*.dat file is 128MB
|
||||
# Generate 250 light blocks
|
||||
self.generate(self.nodes[0], 250)
|
||||
# Generate 50MB worth of large blocks in the blk00000.dat file
|
||||
self.mine_large_blocks(self.nodes[0], 50)
|
||||
|
||||
# Create a wallet which birth's block is in the blk00000.dat file
|
||||
wallet_birthheight_1 = "wallet_birthheight_1"
|
||||
assert_equal(self.has_block(1), False)
|
||||
self.create_wallet(wallet_birthheight_1, unload=True)
|
||||
|
||||
# Generate enough large blocks to reach pruning disk limit
|
||||
# Not pruning yet because we are still below PruneAfterHeight
|
||||
self.mine_large_blocks(self.nodes[0], 600)
|
||||
self.log.info("- Long chain created")
|
||||
|
||||
# Create a wallet with birth height > wallet_birthheight_1
|
||||
wallet_birthheight_2 = "wallet_birthheight_2"
|
||||
self.create_wallet(wallet_birthheight_2)
|
||||
|
||||
# Fund wallet to later verify that importwallet correctly accounts for balances
|
||||
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, self.nodes[0].getnewaddress(), sync_fun=self.no_op)
|
||||
|
||||
# We've reached pruning storage & height limit but
|
||||
# pruning doesn't run until another chunk (blk*.dat file) is allocated.
|
||||
# That's why we are generating another 5 large blocks
|
||||
self.mine_large_blocks(self.nodes[0], 5)
|
||||
|
||||
# blk00000.dat file is now pruned from node1
|
||||
assert_equal(self.has_block(0), False)
|
||||
|
||||
self.test_wallet_import_pruned(wallet_birthheight_2)
|
||||
self.test_wallet_import_pruned_with_missing_blocks(wallet_birthheight_1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletPruningTest(__file__).main()
|
|
@ -15,9 +15,6 @@ from test_framework.util import (
|
|||
BLOCK_TIME = 60 * 10
|
||||
|
||||
class WalletReindexTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
self.setup_clean_chain = True
|
||||
|
@ -57,18 +54,13 @@ class WalletReindexTest(BitcoinTestFramework):
|
|||
|
||||
# Depending on the wallet type, the birth time changes.
|
||||
wallet_birthtime = wallet_watch_only.getwalletinfo()['birthtime']
|
||||
if self.options.descriptors:
|
||||
# As blocks were generated every 10 min, the chain MTP timestamp is node_time - 60 min.
|
||||
assert_equal(self.node_time - BLOCK_TIME * 6, wallet_birthtime)
|
||||
else:
|
||||
# No way of importing scripts/addresses with a custom time on a legacy wallet.
|
||||
# It's always set to the beginning of time.
|
||||
assert_equal(wallet_birthtime, 1)
|
||||
|
||||
# Rescan the wallet to detect the missing transaction
|
||||
wallet_watch_only.rescanblockchain()
|
||||
assert_equal(wallet_watch_only.gettransaction(tx_id)['confirmations'], 50)
|
||||
assert_equal(wallet_watch_only.getbalances()['mine' if self.options.descriptors else 'watchonly']['trusted'], 2)
|
||||
assert_equal(wallet_watch_only.getbalances()['mine']['trusted'], 2)
|
||||
|
||||
# Reindex and wait for it to finish
|
||||
with node.assert_debug_log(expected_msgs=["initload thread exit"]):
|
||||
|
@ -81,12 +73,8 @@ class WalletReindexTest(BitcoinTestFramework):
|
|||
assert_equal(tx_info['confirmations'], 50)
|
||||
|
||||
# Depending on the wallet type, the birth time changes.
|
||||
if self.options.descriptors:
|
||||
# For descriptors, verify the wallet updated the birth time to the transaction time
|
||||
assert_equal(tx_info['time'], wallet_watch_only.getwalletinfo()['birthtime'])
|
||||
else:
|
||||
# For legacy, as the birth time was set to the beginning of time, verify it did not change
|
||||
assert_equal(wallet_birthtime, 1)
|
||||
|
||||
wallet_watch_only.unloadwallet()
|
||||
|
||||
|
|
|
@ -25,9 +25,6 @@ from test_framework.util import (
|
|||
)
|
||||
|
||||
class ReorgsRestoreTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 3
|
||||
|
||||
|
|
|
@ -16,9 +16,6 @@ from test_framework.wallet_util import test_address
|
|||
|
||||
|
||||
class WalletRescanUnconfirmed(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser, legacy=False)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
|
|
|
@ -22,9 +22,6 @@ from test_framework.util import (
|
|||
)
|
||||
|
||||
class ResendWalletTransactionsTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
|
|
|
@ -25,9 +25,6 @@ from test_framework.wallet_util import (
|
|||
|
||||
|
||||
class WalletSendTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 2
|
||||
# whitelist peers to speed up tx relay / mempool sync
|
||||
|
@ -197,7 +194,6 @@ class WalletSendTest(BitcoinTestFramework):
|
|||
w2 = self.nodes[1].get_wallet_rpc("w2")
|
||||
xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v"
|
||||
xpub = "tpubD6NzVbkrYhZ4YkEfMbRJkQyZe7wTkbTNRECozCtJPtdLRn6cT1QKb8yHjwAPcAr26eHBFYs5iLiFFnCbwPRsncCKUKCfubHDMGKzMVcN1Jg"
|
||||
if self.options.descriptors:
|
||||
w2.importdescriptors([{
|
||||
"desc": descsum_create("wpkh(" + xpriv + "/0/0/*)"),
|
||||
"timestamp": "now",
|
||||
|
@ -210,13 +206,10 @@ class WalletSendTest(BitcoinTestFramework):
|
|||
"active": True,
|
||||
"internal": True
|
||||
}])
|
||||
else:
|
||||
w2.sethdseed(True)
|
||||
|
||||
# w3 is a watch-only wallet, based on w2
|
||||
self.nodes[1].createwallet(wallet_name="w3", disable_private_keys=True)
|
||||
w3 = self.nodes[1].get_wallet_rpc("w3")
|
||||
if self.options.descriptors:
|
||||
# Match the privkeys in w2 for descriptors
|
||||
res = w3.importdescriptors([{
|
||||
"desc": descsum_create("wpkh(" + xpub + "/0/0/*)"),
|
||||
|
@ -238,44 +231,9 @@ class WalletSendTest(BitcoinTestFramework):
|
|||
|
||||
for _ in range(3):
|
||||
a2_receive = w2.getnewaddress()
|
||||
if not self.options.descriptors:
|
||||
# Because legacy wallets use exclusively hardened derivation, we can't do a ranged import like we do for descriptors
|
||||
a2_change = w2.getrawchangeaddress() # doesn't actually use change derivation
|
||||
res = w3.importmulti([{
|
||||
"desc": w2.getaddressinfo(a2_receive)["desc"],
|
||||
"timestamp": "now",
|
||||
"keypool": True,
|
||||
"watchonly": True
|
||||
},{
|
||||
"desc": w2.getaddressinfo(a2_change)["desc"],
|
||||
"timestamp": "now",
|
||||
"keypool": True,
|
||||
"internal": True,
|
||||
"watchonly": True
|
||||
}])
|
||||
assert_equal(res, [{"success": True}, {"success": True}])
|
||||
|
||||
w0.sendtoaddress(a2_receive, 10) # fund w3
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
if not self.options.descriptors:
|
||||
# w4 has private keys enabled, but only contains watch-only keys (from w2)
|
||||
# This is legacy wallet behavior only as descriptor wallets don't allow watchonly and non-watchonly things in the same wallet.
|
||||
self.nodes[1].createwallet(wallet_name="w4", disable_private_keys=False)
|
||||
w4 = self.nodes[1].get_wallet_rpc("w4")
|
||||
for _ in range(3):
|
||||
a2_receive = w2.getnewaddress()
|
||||
res = w4.importmulti([{
|
||||
"desc": w2.getaddressinfo(a2_receive)["desc"],
|
||||
"timestamp": "now",
|
||||
"keypool": False,
|
||||
"watchonly": True
|
||||
}])
|
||||
assert_equal(res, [{"success": True}])
|
||||
|
||||
w0.sendtoaddress(a2_receive, 10) # fund w4
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
self.log.info("Send to address...")
|
||||
self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
|
||||
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True)
|
||||
|
@ -306,16 +264,6 @@ class WalletSendTest(BitcoinTestFramework):
|
|||
res = w2.walletprocesspsbt(res["psbt"])
|
||||
assert res["complete"]
|
||||
|
||||
if not self.options.descriptors:
|
||||
# Descriptor wallets do not allow mixed watch-only and non-watch-only things in the same wallet.
|
||||
# This is specifically testing that w4 ignores its own private keys and creates a psbt with send
|
||||
# which is not something that needs to be tested in descriptor wallets.
|
||||
self.log.info("Create PSBT from wallet w4 with watch-only keys, sign with w2...")
|
||||
self.test_send(from_wallet=w4, to_wallet=w1, amount=1, expect_error=(-4, "Insufficient funds"))
|
||||
res = self.test_send(from_wallet=w4, to_wallet=w1, amount=1, include_watching=True, add_to_wallet=False)
|
||||
res = w2.walletprocesspsbt(res["psbt"])
|
||||
assert res["complete"]
|
||||
|
||||
self.log.info("Create OP_RETURN...")
|
||||
self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
|
||||
self.test_send(from_wallet=w0, data="Hello World", expect_error=(-8, "Data must be hexadecimal string (not 'Hello World')"))
|
||||
|
@ -510,10 +458,7 @@ class WalletSendTest(BitcoinTestFramework):
|
|||
|
||||
# Make a weird but signable script. sh(wsh(pkh())) descriptor accomplishes this
|
||||
desc = descsum_create("sh(wsh(pkh({})))".format(privkey))
|
||||
if self.options.descriptors:
|
||||
res = ext_fund.importdescriptors([{"desc": desc, "timestamp": "now"}])
|
||||
else:
|
||||
res = ext_fund.importmulti([{"desc": desc, "timestamp": "now"}])
|
||||
assert res[0]["success"]
|
||||
addr = self.nodes[0].deriveaddresses(desc)[0]
|
||||
addr_info = ext_fund.getaddressinfo(addr)
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue