
diff --git a/src/rpc/misc.cpp b/src/rpc/misc.cpp
index 4e8b418fc..414ec10a8 100644
--- a/src/rpc/misc.cpp
+++ b/src/rpc/misc.cpp
@@ -1,743 +1,741 @@
// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2016 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <chainparams.h>
#include <config.h>
#include <httpserver.h>
#include <key_io.h>
#include <logging.h>
#include <node/context.h>
#include <outputtype.h>
#include <rpc/blockchain.h>
#include <rpc/server.h>
#include <rpc/util.h>
#include <scheduler.h>
#include <script/descriptor.h>
#include <util/check.h>
#include <util/strencodings.h>
#include <util/system.h>
#include <util/validation.h>
#include <univalue.h>
#include <cstdint>
#include <tuple>
#ifdef HAVE_MALLOC_INFO
#include <malloc.h>
#endif
static UniValue validateaddress(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"validateaddress",
"\nReturn information about the given bitcoin address.\n",
{
{"address", RPCArg::Type::STR, RPCArg::Optional::NO,
"The bitcoin address to validate"},
},
RPCResult{
"{\n"
" \"isvalid\" : true|false, (boolean) If the address is "
"valid or not. If not, this is the only property returned.\n"
" \"address\" : \"address\", (string) The bitcoin address "
"validated\n"
" \"scriptPubKey\" : \"hex\", (string) The hex-encoded "
"scriptPubKey generated by the address\n"
" \"isscript\" : true|false, (boolean) If the key is a "
"script\n"
"}\n"},
RPCExamples{HelpExampleCli("validateaddress",
"\"1PSSGeFHDnKNxiEyFrD1wcEaHr9hrQDDWc\"") +
HelpExampleRpc("validateaddress",
"\"1PSSGeFHDnKNxiEyFrD1wcEaHr9hrQDDWc\"")},
}
.Check(request);
CTxDestination dest =
DecodeDestination(request.params[0].get_str(), config.GetChainParams());
bool isValid = IsValidDestination(dest);
UniValue ret(UniValue::VOBJ);
ret.pushKV("isvalid", isValid);
if (isValid) {
if (ret["address"].isNull()) {
std::string currentAddress = EncodeDestination(dest, config);
ret.pushKV("address", currentAddress);
CScript scriptPubKey = GetScriptForDestination(dest);
ret.pushKV("scriptPubKey",
HexStr(scriptPubKey.begin(), scriptPubKey.end()));
UniValue detail = DescribeAddress(dest);
ret.pushKVs(detail);
}
}
return ret;
}
static UniValue createmultisig(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"createmultisig",
"\nCreates a multi-signature address with n signature of m keys "
"required.\n"
"It returns a json object with the address and redeemScript.\n",
{
{"nrequired", RPCArg::Type::NUM, RPCArg::Optional::NO,
"The number of required signatures out of the n keys."},
{"keys",
RPCArg::Type::ARR,
RPCArg::Optional::NO,
"A json array of hex-encoded public keys.",
{
{"key", RPCArg::Type::STR_HEX, RPCArg::Optional::OMITTED,
"The hex-encoded public key"},
}},
},
RPCResult{"{\n"
" \"address\":\"multisigaddress\", (string) The value of "
"the new multisig address.\n"
" \"redeemScript\":\"script\" (string) The string "
"value of the hex-encoded redemption script.\n"
"}\n"},
RPCExamples{
"\nCreate a multisig address from 2 public keys\n" +
HelpExampleCli("createmultisig",
"2 "
"\"["
"\\\"03789ed0bb717d88f7d321a368d905e7430207ebbd82bd3"
"42cf11ae157a7ace5fd\\\","
"\\\"03dbc6764b8884a92e871274b87583e6d5c2a58819473e1"
"7e107ef3f6aa5a61626\\\"]\"") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("createmultisig",
"2, "
"\"["
"\\\"03789ed0bb717d88f7d321a368d905e7430207ebbd82bd3"
"42cf11ae157a7ace5fd\\\","
"\\\"03dbc6764b8884a92e871274b87583e6d5c2a58819473e1"
"7e107ef3f6aa5a61626\\\"]\"")},
}
.Check(request);
int required = request.params[0].get_int();
// Get the public keys
const UniValue &keys = request.params[1].get_array();
std::vector<CPubKey> pubkeys;
for (size_t i = 0; i < keys.size(); ++i) {
if ((keys[i].get_str().length() ==
2 * CPubKey::COMPRESSED_PUBLIC_KEY_SIZE ||
keys[i].get_str().length() == 2 * CPubKey::PUBLIC_KEY_SIZE) &&
IsHex(keys[i].get_str())) {
pubkeys.push_back(HexToPubKey(keys[i].get_str()));
} else {
throw JSONRPCError(
RPC_INVALID_ADDRESS_OR_KEY,
strprintf("Invalid public key: %s\n", keys[i].get_str()));
}
}
// Get the output type
OutputType output_type = OutputType::LEGACY;
// Construct using pay-to-script-hash:
FillableSigningProvider keystore;
CScript inner;
const CTxDestination dest = AddAndGetMultisigDestination(
required, pubkeys, output_type, keystore, inner);
UniValue result(UniValue::VOBJ);
result.pushKV("address", EncodeDestination(dest, config));
result.pushKV("redeemScript", HexStr(inner.begin(), inner.end()));
return result;
}
UniValue getdescriptorinfo(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"getdescriptorinfo",
{"\nAnalyses a descriptor.\n"},
{
{"descriptor", RPCArg::Type::STR, RPCArg::Optional::NO,
"The descriptor."},
},
RPCResult{"{\n"
" \"descriptor\" : \"desc\", (string) The "
"descriptor in canonical form, without private keys\n"
" \"checksum\" : \"chksum\", (string) The checksum "
"for the input descriptor\n"
" \"isrange\" : true|false, (boolean) Whether the "
"descriptor is ranged\n"
" \"issolvable\" : true|false, (boolean) Whether the "
"descriptor is solvable\n"
" \"hasprivatekeys\" : true|false, (boolean) Whether the "
"input descriptor contained at least one private key\n"
"}\n"},
RPCExamples{"Analyse a descriptor\n" +
HelpExampleCli("getdescriptorinfo",
"\"pkh([d34db33f/84h/0h/"
"0h]"
"0279be667ef9dcbbac55a06295Ce870b07029Bfcdb2"
"dce28d959f2815b16f81798)\"")}}
.Check(request);
RPCTypeCheck(request.params, {UniValue::VSTR});
FlatSigningProvider provider;
std::string error;
auto desc = Parse(request.params[0].get_str(), provider, error);
if (!desc) {
- throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
- strprintf("Invalid descriptor, %s", error));
+ throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, error);
}
UniValue result(UniValue::VOBJ);
result.pushKV("descriptor", desc->ToString());
result.pushKV("checksum",
GetDescriptorChecksum(request.params[0].get_str()));
result.pushKV("isrange", desc->IsRange());
result.pushKV("issolvable", desc->IsSolvable());
result.pushKV("hasprivatekeys", provider.keys.size() > 0);
return result;
}
UniValue deriveaddresses(const Config &config, const JSONRPCRequest &request) {
RPCHelpMan{
"deriveaddresses",
{"\nDerives one or more addresses corresponding to an output "
"descriptor.\n"
"Examples of output descriptors are:\n"
" pkh(<pubkey>) P2PKH outputs for the given "
"pubkey\n"
" sh(multi(<n>,<pubkey>,<pubkey>,...)) P2SH-multisig outputs for "
"the given threshold and pubkeys\n"
" raw(<hex script>) Outputs whose scriptPubKey "
"equals the specified hex scripts\n"
"\nIn the above, <pubkey> either refers to a fixed public key in "
"hexadecimal notation, or to an xpub/xprv optionally followed by one\n"
"or more path elements separated by \"/\", where \"h\" represents a "
"hardened child key.\n"
"For more information on output descriptors, see the documentation in "
"the doc/descriptors.md file.\n"},
{
{"descriptor", RPCArg::Type::STR, RPCArg::Optional::NO,
"The descriptor."},
{"range", RPCArg::Type::RANGE, RPCArg::Optional::OMITTED_NAMED_ARG,
"If a ranged descriptor is used, this specifies the end or the "
"range (in [begin,end] notation) to derive."},
},
RPCResult{"[ address ] (array) the derived addresses\n"},
RPCExamples{"First three pkh receive addresses\n" +
HelpExampleCli(
"deriveaddresses",
"\"pkh([d34db33f/84h/0h/0h]"
"xpub6DJ2dNUysrn5Vt36jH2KLBT2i1auw1tTSSomg8P"
"hqNiUtx8QX2SvC9nrHu81fT41fvDUnhMjEzQgXnQjKE"
"u3oaqMSzhSrHMxyyoEAmUHQbY/0/*)#3vhfv5h5\" \"[0,2]\"")}}
.Check(request);
// Range argument is checked later
RPCTypeCheck(request.params, {UniValue::VSTR, UniValueType()});
const std::string desc_str = request.params[0].get_str();
int64_t range_begin = 0;
int64_t range_end = 0;
if (request.params.size() >= 2 && !request.params[1].isNull()) {
std::tie(range_begin, range_end) =
ParseDescriptorRange(request.params[1]);
}
FlatSigningProvider key_provider;
std::string error;
auto desc = Parse(desc_str, key_provider, error, /* require_checksum = */ true);
if (!desc) {
- throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
- strprintf("Invalid descriptor, %s", error));
+ throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, error);
}
if (!desc->IsRange() && request.params.size() > 1) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Range should not be specified for an un-ranged descriptor");
}
if (desc->IsRange() && request.params.size() == 1) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Range must be specified for a ranged descriptor");
}
UniValue addresses(UniValue::VARR);
for (int i = range_begin; i <= range_end; ++i) {
FlatSigningProvider provider;
std::vector<CScript> scripts;
if (!desc->Expand(i, key_provider, scripts, provider)) {
throw JSONRPCError(
RPC_INVALID_ADDRESS_OR_KEY,
strprintf("Cannot derive script without private keys"));
}
for (const CScript &script : scripts) {
CTxDestination dest;
if (!ExtractDestination(script, dest)) {
throw JSONRPCError(
RPC_INVALID_ADDRESS_OR_KEY,
strprintf(
"Descriptor does not have a corresponding address"));
}
addresses.push_back(EncodeDestination(dest, config));
}
}
// This should not be possible, but an assert seems overkill:
if (addresses.empty()) {
throw JSONRPCError(RPC_MISC_ERROR, "Unexpected empty result");
}
return addresses;
}
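// Illustration (not verbatim output): with the ranged descriptor from the help
// example above and range [0,2], the loop derives indices 0, 1 and 2 and the
// call returns a JSON array with one address per derived script; an un-ranged
// descriptor takes no range argument and is expanded once at index 0.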
static UniValue verifymessage(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"verifymessage",
"\nVerify a signed message\n",
{
{"address", RPCArg::Type::STR, RPCArg::Optional::NO,
"The bitcoin address to use for the signature."},
{"signature", RPCArg::Type::STR, RPCArg::Optional::NO,
"The signature provided by the signer in base 64 encoding (see "
"signmessage)."},
{"message", RPCArg::Type::STR, RPCArg::Optional::NO,
"The message that was signed."},
},
RPCResult{"true|false (boolean) If the signature is verified or "
"not.\n"},
RPCExamples{
"\nUnlock the wallet for 30 seconds\n" +
HelpExampleCli("walletpassphrase", "\"mypassphrase\" 30") +
"\nCreate the signature\n" +
HelpExampleCli(
"signmessage",
"\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XX\" \"my message\"") +
"\nVerify the signature\n" +
HelpExampleCli("verifymessage", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4"
"XX\" \"signature\" \"my "
"message\"") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("verifymessage", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4"
"XX\", \"signature\", \"my "
"message\"")},
}
.Check(request);
LOCK(cs_main);
std::string strAddress = request.params[0].get_str();
std::string strSign = request.params[1].get_str();
std::string strMessage = request.params[2].get_str();
CTxDestination destination =
DecodeDestination(strAddress, config.GetChainParams());
if (!IsValidDestination(destination)) {
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");
}
const PKHash *pkhash = boost::get<PKHash>(&destination);
if (!pkhash) {
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");
}
bool fInvalid = false;
std::vector<uint8_t> vchSig = DecodeBase64(strSign.c_str(), &fInvalid);
if (fInvalid) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Malformed base64 encoding");
}
CHashWriter ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
CPubKey pubkey;
if (!pubkey.RecoverCompact(ss.GetHash(), vchSig)) {
return false;
}
return (pubkey.GetID() == *pkhash);
}
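// Note: the hash being verified commits to strMessageMagic followed by the
// message itself, and the signature is the 65-byte compact recoverable ECDSA
// form produced by signmessagewithprivkey below, transported as base64.
// Verification succeeds iff the recovered public key hashes to the given
// P2PKH address.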
static UniValue signmessagewithprivkey(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"signmessagewithprivkey",
"\nSign a message with the private key of an address\n",
{
{"privkey", RPCArg::Type::STR, RPCArg::Optional::NO,
"The private key to sign the message with."},
{"message", RPCArg::Type::STR, RPCArg::Optional::NO,
"The message to create a signature of."},
},
RPCResult{"\"signature\" (string) The signature of the "
"message encoded in base 64\n"},
RPCExamples{"\nCreate the signature\n" +
HelpExampleCli("signmessagewithprivkey",
"\"privkey\" \"my message\"") +
"\nVerify the signature\n" +
HelpExampleCli("verifymessage",
"\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XX\" "
"\"signature\" \"my message\"") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("signmessagewithprivkey",
"\"privkey\", \"my message\"")},
}
.Check(request);
std::string strPrivkey = request.params[0].get_str();
std::string strMessage = request.params[1].get_str();
CKey key = DecodeSecret(strPrivkey);
if (!key.IsValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid private key");
}
CHashWriter ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
std::vector<uint8_t> vchSig;
if (!key.SignCompact(ss.GetHash(), vchSig)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Sign failed");
}
return EncodeBase64(vchSig.data(), vchSig.size());
}
static UniValue setmocktime(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"setmocktime",
"\nSet the local time to given timestamp (-regtest only)\n",
{
{"timestamp", RPCArg::Type::NUM, RPCArg::Optional::NO,
"Unix seconds-since-epoch timestamp\n"
" Pass 0 to go back to using the system time."},
},
RPCResults{},
RPCExamples{""},
}
.Check(request);
if (!config.GetChainParams().MineBlocksOnDemand()) {
throw std::runtime_error(
"setmocktime for regression testing (-regtest mode) only");
}
// For now, don't change mocktime if we're in the middle of validation, as
// this could have an effect on mempool time-based eviction, as well as
// IsInitialBlockDownload().
// TODO: figure out the right way to synchronize around mocktime, and
// ensure all call sites of GetTime() are accessing this safely.
LOCK(cs_main);
RPCTypeCheck(request.params, {UniValue::VNUM});
int64_t mockTime = request.params[0].get_int64();
if (mockTime < 0) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Timestamp must be 0 or greater");
}
SetMockTime(mockTime);
return NullUniValue;
}
static UniValue mockscheduler(const Config &config,
const JSONRPCRequest &request) {
RPCHelpMan{
"mockscheduler",
"\nBump the scheduler into the future (-regtest only)\n",
{
{"delta_time", RPCArg::Type::NUM, RPCArg::Optional::NO,
"Number of seconds to forward the scheduler into the future."},
},
RPCResults{},
RPCExamples{""},
}
.Check(request);
if (!Params().IsMockableChain()) {
throw std::runtime_error(
"mockscheduler is for regression testing (-regtest mode) only");
}
// check params are valid values
RPCTypeCheck(request.params, {UniValue::VNUM});
int64_t delta_seconds = request.params[0].get_int64();
if ((delta_seconds <= 0) || (delta_seconds > 3600)) {
throw std::runtime_error(
"delta_time must be between 1 and 3600 seconds (1 hr)");
}
// protect against null pointer dereference
CHECK_NONFATAL(g_rpc_node);
CHECK_NONFATAL(g_rpc_node->scheduler);
g_rpc_node->scheduler->MockForward(std::chrono::seconds(delta_seconds));
return NullUniValue;
}
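// Gather usage statistics from the locked (mlock'ed) memory pool that holds
// secure key material; consumed by getmemoryinfo's "stats" mode below.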
static UniValue RPCLockedMemoryInfo() {
LockedPool::Stats stats = LockedPoolManager::Instance().stats();
UniValue obj(UniValue::VOBJ);
obj.pushKV("used", uint64_t(stats.used));
obj.pushKV("free", uint64_t(stats.free));
obj.pushKV("total", uint64_t(stats.total));
obj.pushKV("locked", uint64_t(stats.locked));
obj.pushKV("chunks_used", uint64_t(stats.chunks_used));
obj.pushKV("chunks_free", uint64_t(stats.chunks_free));
return obj;
}
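// When built against a glibc that provides malloc_info() (2.10+), dump the
// low-level heap state as an XML string via an in-memory stream.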
#ifdef HAVE_MALLOC_INFO
static std::string RPCMallocInfo() {
char *ptr = nullptr;
size_t size = 0;
FILE *f = open_memstream(&ptr, &size);
if (f) {
malloc_info(0, f);
fclose(f);
if (ptr) {
std::string rv(ptr, size);
free(ptr);
return rv;
}
}
return "";
}
#endif
static UniValue getmemoryinfo(const Config &config,
const JSONRPCRequest &request) {
/* Please, avoid using the word "pool" here in the RPC interface or help,
* as users will undoubtedly confuse it with the other "memory pool"
*/
RPCHelpMan{
"getmemoryinfo",
"Returns an object containing information about memory usage.\n",
{
{"mode", RPCArg::Type::STR, /* default */ "\"stats\"",
"determines what kind of information is returned.\n"
" - \"stats\" returns general statistics about memory usage in "
"the daemon.\n"
" - \"mallocinfo\" returns an XML string describing low-level "
"heap state (only available if compiled with glibc 2.10+)."},
},
{
RPCResult{
"mode \"stats\"",
"{\n"
" \"locked\": { (json object) Information about "
"locked memory manager\n"
" \"used\": xxxxx, (numeric) Number of bytes used\n"
" \"free\": xxxxx, (numeric) Number of bytes "
"available in current arenas\n"
" \"total\": xxxxxxx, (numeric) Total number of bytes "
"managed\n"
" \"locked\": xxxxxx, (numeric) Amount of bytes that "
"succeeded locking. If this number is smaller than total, "
"locking pages failed at some point and key data could be "
"swapped to disk.\n"
" \"chunks_used\": xxxxx, (numeric) Number allocated "
"chunks\n"
" \"chunks_free\": xxxxx, (numeric) Number unused chunks\n"
" }\n"
"}\n"},
RPCResult{"mode \"mallocinfo\"", "\"<malloc version=\"1\">...\"\n"},
},
RPCExamples{HelpExampleCli("getmemoryinfo", "") +
HelpExampleRpc("getmemoryinfo", "")},
}
.Check(request);
std::string mode =
request.params[0].isNull() ? "stats" : request.params[0].get_str();
if (mode == "stats") {
UniValue obj(UniValue::VOBJ);
obj.pushKV("locked", RPCLockedMemoryInfo());
return obj;
} else if (mode == "mallocinfo") {
#ifdef HAVE_MALLOC_INFO
return RPCMallocInfo();
#else
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"mallocinfo is only available when compiled with glibc 2.10+");
#endif
} else {
throw JSONRPCError(RPC_INVALID_PARAMETER, "unknown mode " + mode);
}
}
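// Enable or disable every logging category named in the given JSON array;
// throws RPC_INVALID_PARAMETER if a name is not a known category.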
static void EnableOrDisableLogCategories(UniValue cats, bool enable) {
cats = cats.get_array();
for (size_t i = 0; i < cats.size(); ++i) {
std::string cat = cats[i].get_str();
bool success;
if (enable) {
success = LogInstance().EnableCategory(cat);
} else {
success = LogInstance().DisableCategory(cat);
}
if (!success) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"unknown logging category " + cat);
}
}
}
static UniValue logging(const Config &config, const JSONRPCRequest &request) {
RPCHelpMan{
"logging",
"Gets and sets the logging configuration.\n"
"When called without an argument, returns the list of categories with "
"status that are currently being debug logged or not.\n"
"When called with arguments, adds or removes categories from debug "
"logging and return the lists above.\n"
"The arguments are evaluated in order \"include\", \"exclude\".\n"
"If an item is both included and excluded, it will thus end up being "
"excluded.\n"
"The valid logging categories are: " +
ListLogCategories() +
"\n"
"In addition, the following are available as category names with "
"special meanings:\n"
" - \"all\", \"1\" : represent all logging categories.\n"
" - \"none\", \"0\" : even if other logging categories are "
"specified, ignore all of them.\n",
{
{"include",
RPCArg::Type::ARR,
RPCArg::Optional::OMITTED_NAMED_ARG,
"A json array of categories to add debug logging",
{
{"include_category", RPCArg::Type::STR,
RPCArg::Optional::OMITTED, "the valid logging category"},
}},
{"exclude",
RPCArg::Type::ARR,
RPCArg::Optional::OMITTED_NAMED_ARG,
"A json array of categories to remove debug logging",
{
{"exclude_category", RPCArg::Type::STR,
RPCArg::Optional::OMITTED, "the valid logging category"},
}},
},
RPCResult{"{ (json object where keys are the logging "
"categories, and values indicates its status\n"
" \"category\": 0|1, (numeric) if being debug logged "
"or not. 0:inactive, 1:active\n"
" ...\n"
"}\n"},
RPCExamples{
HelpExampleCli("logging", "\"[\\\"all\\\"]\" \"[\\\"http\\\"]\"") +
HelpExampleRpc("logging", "[\"all\"], \"[libevent]\"")},
}
.Check(request);
uint32_t original_log_categories = LogInstance().GetCategoryMask();
if (request.params[0].isArray()) {
EnableOrDisableLogCategories(request.params[0], true);
}
if (request.params[1].isArray()) {
EnableOrDisableLogCategories(request.params[1], false);
}
uint32_t updated_log_categories = LogInstance().GetCategoryMask();
uint32_t changed_log_categories =
original_log_categories ^ updated_log_categories;
/**
* Update libevent logging if BCLog::LIBEVENT has changed.
* If the library version doesn't allow it, UpdateHTTPServerLogging()
* returns false, in which case we should clear the BCLog::LIBEVENT flag.
* Throw an error if the user has explicitly asked to change only the
* libevent flag and it failed.
*/
if (changed_log_categories & BCLog::LIBEVENT) {
if (!UpdateHTTPServerLogging(
LogInstance().WillLogCategory(BCLog::LIBEVENT))) {
LogInstance().DisableCategory(BCLog::LIBEVENT);
if (changed_log_categories == BCLog::LIBEVENT) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"libevent logging cannot be updated when "
"using libevent before v2.1.1.");
}
}
}
UniValue result(UniValue::VOBJ);
std::vector<CLogCategoryActive> vLogCatActive = ListActiveLogCategories();
for (const auto &logCatActive : vLogCatActive) {
result.pushKV(logCatActive.category, logCatActive.active);
}
return result;
}
static UniValue echo(const Config &config, const JSONRPCRequest &request) {
if (request.fHelp) {
throw std::runtime_error(RPCHelpMan{
"echo|echojson ...",
"\nSimply echo back the input arguments. This command is for "
"testing.\n"
"\nThe difference between echo and echojson is that echojson has "
"argument conversion enabled in the client-side table in "
"bitcoin-cli and the GUI. There is no server-side difference.",
{},
RPCResults{},
RPCExamples{""},
}
.ToString());
}
CHECK_NONFATAL(request.params.size() != 100);
return request.params;
}
// clang-format off
static const CRPCCommand commands[] = {
// category name actor (function) argNames
// ------------------- ------------------------ ---------------------- ----------
{ "control", "getmemoryinfo", getmemoryinfo, {"mode"} },
{ "control", "logging", logging, {"include", "exclude"} },
{ "util", "validateaddress", validateaddress, {"address"} },
{ "util", "createmultisig", createmultisig, {"nrequired","keys"} },
{ "util", "deriveaddresses", deriveaddresses, {"descriptor", "begin", "end"} },
{ "util", "getdescriptorinfo", getdescriptorinfo, {"descriptor"} },
{ "util", "verifymessage", verifymessage, {"address","signature","message"} },
{ "util", "signmessagewithprivkey", signmessagewithprivkey, {"privkey","message"} },
/* Not shown in help */
{ "hidden", "setmocktime", setmocktime, {"timestamp"}},
{ "hidden", "mockscheduler", mockscheduler, {"delta_time"}},
{ "hidden", "echo", echo, {"arg0","arg1","arg2","arg3","arg4","arg5","arg6","arg7","arg8","arg9"}},
{ "hidden", "echojson", echo, {"arg0","arg1","arg2","arg3","arg4","arg5","arg6","arg7","arg8","arg9"}},
};
// clang-format on
void RegisterMiscRPCCommands(CRPCTable &t) {
for (unsigned int vcidx = 0; vcidx < ARRAYLEN(commands); vcidx++) {
t.appendCommand(commands[vcidx].name, &commands[vcidx]);
}
}
diff --git a/src/rpc/util.cpp b/src/rpc/util.cpp
index 90972b294..c864c2efe 100644
--- a/src/rpc/util.cpp
+++ b/src/rpc/util.cpp
@@ -1,779 +1,777 @@
// Copyright (c) 2017-2019 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <rpc/util.h>
#include <key_io.h>
#include <pubkey.h>
#include <script/descriptor.h>
#include <script/signingprovider.h>
#include <tinyformat.h>
#include <util/strencodings.h>
#include <util/string.h>
#include <tuple>
#include <boost/variant/static_visitor.hpp>
void RPCTypeCheck(const UniValue &params,
const std::list<UniValueType> &typesExpected,
bool fAllowNull) {
unsigned int i = 0;
for (const UniValueType &t : typesExpected) {
if (params.size() <= i) {
break;
}
const UniValue &v = params[i];
if (!(fAllowNull && v.isNull())) {
RPCTypeCheckArgument(v, t);
}
i++;
}
}
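// Usage sketch (see e.g. getdescriptorinfo above):
//   RPCTypeCheck(request.params, {UniValue::VSTR, UniValue::VNUM});
// enforces that the first two positional parameters, where present, are a
// string and a number respectively.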
void RPCTypeCheckArgument(const UniValue &value,
const UniValueType &typeExpected) {
if (!typeExpected.typeAny && value.type() != typeExpected.type) {
throw JSONRPCError(RPC_TYPE_ERROR,
strprintf("Expected type %s, got %s",
uvTypeName(typeExpected.type),
uvTypeName(value.type())));
}
}
void RPCTypeCheckObj(const UniValue &o,
const std::map<std::string, UniValueType> &typesExpected,
bool fAllowNull, bool fStrict) {
for (const auto &t : typesExpected) {
const UniValue &v = find_value(o, t.first);
if (!fAllowNull && v.isNull()) {
throw JSONRPCError(RPC_TYPE_ERROR,
strprintf("Missing %s", t.first));
}
if (!(t.second.typeAny || v.type() == t.second.type ||
(fAllowNull && v.isNull()))) {
std::string err = strprintf("Expected type %s for %s, got %s",
uvTypeName(t.second.type), t.first,
uvTypeName(v.type()));
throw JSONRPCError(RPC_TYPE_ERROR, err);
}
}
if (fStrict) {
for (const std::string &k : o.getKeys()) {
if (typesExpected.count(k) == 0) {
std::string err = strprintf("Unexpected key %s", k);
throw JSONRPCError(RPC_TYPE_ERROR, err);
}
}
}
}
Amount AmountFromValue(const UniValue &value) {
if (!value.isNum() && !value.isStr()) {
throw JSONRPCError(RPC_TYPE_ERROR, "Amount is not a number or string");
}
int64_t n;
if (!ParseFixedPoint(value.getValStr(), 8, &n)) {
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount");
}
Amount amt = n * SATOSHI;
if (!MoneyRange(amt)) {
throw JSONRPCError(RPC_TYPE_ERROR, "Amount out of range");
}
return amt;
}
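// Example: a value of "0.00000001" (string) or 0.00000001 (number) parses to
// 1 * SATOSHI; more than 8 decimal places, or an amount outside MoneyRange(),
// is rejected with RPC_TYPE_ERROR.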
uint256 ParseHashV(const UniValue &v, std::string strName) {
std::string strHex(v.get_str());
if (64 != strHex.length()) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
strprintf("%s must be of length %d (not %d, for '%s')", strName, 64,
strHex.length(), strHex));
}
// Note: IsHex("") is false
if (!IsHex(strHex)) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
strName + " must be hexadecimal string (not '" +
strHex + "')");
}
return uint256S(strHex);
}
uint256 ParseHashO(const UniValue &o, std::string strKey) {
return ParseHashV(find_value(o, strKey), strKey);
}
std::vector<uint8_t> ParseHexV(const UniValue &v, std::string strName) {
std::string strHex;
if (v.isStr()) {
strHex = v.get_str();
}
if (!IsHex(strHex)) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
strName + " must be hexadecimal string (not '" +
strHex + "')");
}
return ParseHex(strHex);
}
std::vector<uint8_t> ParseHexO(const UniValue &o, std::string strKey) {
return ParseHexV(find_value(o, strKey), strKey);
}
std::string HelpExampleCli(const std::string &methodname,
const std::string &args) {
return "> bitcoin-cli " + methodname + " " + args + "\n";
}
std::string HelpExampleRpc(const std::string &methodname,
const std::string &args) {
return "> curl --user myusername --data-binary '{\"jsonrpc\": \"1.0\", "
"\"id\":\"curltest\", "
"\"method\": \"" +
methodname + "\", \"params\": [" + args +
"] }' -H 'content-type: text/plain;' http://127.0.0.1:8332/\n";
}
// Converts a hex string to a public key if possible
CPubKey HexToPubKey(const std::string &hex_in) {
if (!IsHex(hex_in)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid public key: " + hex_in);
}
CPubKey vchPubKey(ParseHex(hex_in));
if (!vchPubKey.IsFullyValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid public key: " + hex_in);
}
return vchPubKey;
}
// Retrieves a public key for an address from the given FillableSigningProvider
CPubKey AddrToPubKey(const CChainParams &chainparams,
FillableSigningProvider *const keystore,
const std::string &addr_in) {
CTxDestination dest = DecodeDestination(addr_in, chainparams);
if (!IsValidDestination(dest)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid address: " + addr_in);
}
CKeyID key = GetKeyForDestination(*keystore, dest);
if (key.IsNull()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
strprintf("%s does not refer to a key", addr_in));
}
CPubKey vchPubKey;
if (!keystore->GetPubKey(key, vchPubKey)) {
throw JSONRPCError(
RPC_INVALID_ADDRESS_OR_KEY,
strprintf("no full public key for address %s", addr_in));
}
if (!vchPubKey.IsFullyValid()) {
throw JSONRPCError(RPC_INTERNAL_ERROR,
"Wallet contains an invalid public key");
}
return vchPubKey;
}
// Creates a multisig address from a given list of public keys, number of
// signatures required, and the address type
CTxDestination AddAndGetMultisigDestination(const int required,
const std::vector<CPubKey> &pubkeys,
OutputType type,
FillableSigningProvider &keystore,
CScript &script_out) {
// Gather public keys
if (required < 1) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"a multisignature address must require at least one key to redeem");
}
if ((int)pubkeys.size() < required) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
strprintf("not enough keys supplied (got %u keys, "
"but need at least %d to redeem)",
pubkeys.size(), required));
}
if (pubkeys.size() > 16) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Number of keys involved in the multisignature "
"address creation > 16\nReduce the number");
}
script_out = GetScriptForMultisig(required, pubkeys);
if (script_out.size() > MAX_SCRIPT_ELEMENT_SIZE) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
(strprintf("redeemScript exceeds size limit: %d > %d",
script_out.size(), MAX_SCRIPT_ELEMENT_SIZE)));
}
// Check if any keys are uncompressed. If so, the type is legacy
for (const CPubKey &pk : pubkeys) {
if (!pk.IsCompressed()) {
type = OutputType::LEGACY;
break;
}
}
// Make the address
CTxDestination dest =
AddAndGetDestinationForScript(keystore, script_out, type);
return dest;
}
class DescribeAddressVisitor : public boost::static_visitor<UniValue> {
public:
explicit DescribeAddressVisitor() {}
UniValue operator()(const CNoDestination &dest) const {
return UniValue(UniValue::VOBJ);
}
UniValue operator()(const PKHash &keyID) const {
UniValue obj(UniValue::VOBJ);
obj.pushKV("isscript", false);
return obj;
}
UniValue operator()(const ScriptHash &scriptID) const {
UniValue obj(UniValue::VOBJ);
obj.pushKV("isscript", true);
return obj;
}
};
UniValue DescribeAddress(const CTxDestination &dest) {
return boost::apply_visitor(DescribeAddressVisitor(), dest);
}
RPCErrorCode RPCErrorFromTransactionError(TransactionError terr) {
switch (terr) {
case TransactionError::MEMPOOL_REJECTED:
return RPC_TRANSACTION_REJECTED;
case TransactionError::ALREADY_IN_CHAIN:
return RPC_TRANSACTION_ALREADY_IN_CHAIN;
case TransactionError::P2P_DISABLED:
return RPC_CLIENT_P2P_DISABLED;
case TransactionError::INVALID_PSBT:
case TransactionError::PSBT_MISMATCH:
return RPC_INVALID_PARAMETER;
case TransactionError::SIGHASH_MISMATCH:
return RPC_DESERIALIZATION_ERROR;
default:
break;
}
return RPC_TRANSACTION_ERROR;
}
UniValue JSONRPCTransactionError(TransactionError terr,
const std::string &err_string) {
if (err_string.length() > 0) {
return JSONRPCError(RPCErrorFromTransactionError(terr), err_string);
} else {
return JSONRPCError(RPCErrorFromTransactionError(terr),
TransactionErrorString(terr));
}
}
/**
* A pair of strings that can be aligned (through padding) with other Sections
* later on
*/
struct Section {
Section(const std::string &left, const std::string &right)
: m_left{left}, m_right{right} {}
const std::string m_left;
const std::string m_right;
};
/**
* Keeps track of RPCArgs by transforming them into sections for the purpose
* of serializing everything to a single string
*/
struct Sections {
std::vector<Section> m_sections;
size_t m_max_pad{0};
void PushSection(const Section &s) {
m_max_pad = std::max(m_max_pad, s.m_left.size());
m_sections.push_back(s);
}
/**
* Serializing RPCArgs depends on the outer type. Only arrays and
* dictionaries can be nested in json. The top-level outer type is "named
* arguments", a mix between a dictionary and arrays.
*/
enum class OuterType {
ARR,
OBJ,
// Only set on first recursion
NAMED_ARG,
};
/**
* Recursive helper to translate an RPCArg into sections
*/
void Push(const RPCArg &arg, const size_t current_indent = 5,
const OuterType outer_type = OuterType::NAMED_ARG) {
const auto indent = std::string(current_indent, ' ');
const auto indent_next = std::string(current_indent + 2, ' ');
// Dictionary keys must have a name
const bool push_name{outer_type == OuterType::OBJ};
switch (arg.m_type) {
case RPCArg::Type::STR_HEX:
case RPCArg::Type::STR:
case RPCArg::Type::NUM:
case RPCArg::Type::AMOUNT:
case RPCArg::Type::RANGE:
case RPCArg::Type::BOOL: {
// Nothing more to do for non-recursive types on first recursion
if (outer_type == OuterType::NAMED_ARG) {
return;
}
auto left = indent;
if (arg.m_type_str.size() != 0 && push_name) {
left += "\"" + arg.m_name + "\": " + arg.m_type_str.at(0);
} else {
left += push_name ? arg.ToStringObj(/* oneline */ false)
: arg.ToString(/* oneline */ false);
}
left += ",";
PushSection({left, arg.ToDescriptionString()});
break;
}
case RPCArg::Type::OBJ:
case RPCArg::Type::OBJ_USER_KEYS: {
const auto right = outer_type == OuterType::NAMED_ARG
? ""
: arg.ToDescriptionString();
PushSection({indent +
(push_name ? "\"" + arg.m_name + "\": " : "") +
"{",
right});
for (const auto &arg_inner : arg.m_inner) {
Push(arg_inner, current_indent + 2, OuterType::OBJ);
}
if (arg.m_type != RPCArg::Type::OBJ) {
PushSection({indent_next + "...", ""});
}
PushSection(
{indent + "}" +
(outer_type != OuterType::NAMED_ARG ? "," : ""),
""});
break;
}
case RPCArg::Type::ARR: {
auto left = indent;
left += push_name ? "\"" + arg.m_name + "\": " : "";
left += "[";
const auto right = outer_type == OuterType::NAMED_ARG
? ""
: arg.ToDescriptionString();
PushSection({left, right});
for (const auto &arg_inner : arg.m_inner) {
Push(arg_inner, current_indent + 2, OuterType::ARR);
}
PushSection({indent_next + "...", ""});
PushSection(
{indent + "]" +
(outer_type != OuterType::NAMED_ARG ? "," : ""),
""});
break;
}
// no default case, so the compiler can warn about missing cases
}
}
/**
* Concatenate all sections with proper padding
*/
std::string ToString() const {
std::string ret;
const size_t pad = m_max_pad + 4;
for (const auto &s : m_sections) {
if (s.m_right.empty()) {
ret += s.m_left;
ret += "\n";
continue;
}
std::string left = s.m_left;
left.resize(pad, ' ');
ret += left;
// Properly pad after newlines
std::string right;
size_t begin = 0;
size_t new_line_pos = s.m_right.find_first_of('\n');
while (true) {
right += s.m_right.substr(begin, new_line_pos - begin);
if (new_line_pos == std::string::npos) {
// No new line
break;
}
right += "\n" + std::string(pad, ' ');
begin = s.m_right.find_first_not_of(' ', new_line_pos + 1);
if (begin == std::string::npos) {
break; // Empty line
}
new_line_pos = s.m_right.find_first_of('\n', begin + 1);
}
ret += right;
ret += "\n";
}
return ret;
}
};
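// Rough illustration (layout only, not verbatim help output): a top-level
// array argument such as createmultisig's "keys" ends up rendered roughly as
//   2. keys       (json array, required) A json array of hex-encoded public keys.
//        [
//          "key",    (string) The hex-encoded public key
//          ...
//        ]
// with every left column padded to m_max_pad + 4 characters.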
RPCHelpMan::RPCHelpMan(std::string name, std::string description,
std::vector<RPCArg> args, RPCResults results,
RPCExamples examples)
: m_name{std::move(name)}, m_description{std::move(description)},
m_args{std::move(args)}, m_results{std::move(results)},
m_examples{std::move(examples)} {
std::set<std::string> named_args;
for (const auto &arg : m_args) {
// Should have unique named arguments
CHECK_NONFATAL(named_args.insert(arg.m_name).second);
}
}
std::string RPCResults::ToDescriptionString() const {
std::string result;
for (const auto &r : m_results) {
if (r.m_cond.empty()) {
result += "\nResult:\n";
} else {
result += "\nResult (" + r.m_cond + "):\n";
}
result += r.m_result;
}
return result;
}
std::string RPCExamples::ToDescriptionString() const {
return m_examples.empty() ? m_examples : "\nExamples:\n" + m_examples;
}
bool RPCHelpMan::IsValidNumArgs(size_t num_args) const {
size_t num_required_args = 0;
for (size_t n = m_args.size(); n > 0; --n) {
if (!m_args.at(n - 1).IsOptional()) {
num_required_args = n;
break;
}
}
return num_required_args <= num_args && num_args <= m_args.size();
}
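// Example: deriveaddresses declares one required argument (descriptor) and
// one optional one (range), so num_args of 1 or 2 is accepted here and any
// other count is rejected by .Check(request).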
std::string RPCHelpMan::ToString() const {
std::string ret;
// Oneline summary
ret += m_name;
bool was_optional{false};
for (const auto &arg : m_args) {
const bool optional = arg.IsOptional();
ret += " ";
if (optional) {
if (!was_optional) {
ret += "( ";
}
was_optional = true;
} else {
if (was_optional) {
ret += ") ";
}
was_optional = false;
}
ret += arg.ToString(/* oneline */ true);
}
if (was_optional) {
ret += " )";
}
ret += "\n";
// Description
ret += m_description;
// Arguments
Sections sections;
for (size_t i{0}; i < m_args.size(); ++i) {
const auto &arg = m_args.at(i);
if (i == 0) {
ret += "\nArguments:\n";
}
// Push named argument name and description
sections.m_sections.emplace_back(std::to_string(i + 1) + ". " +
arg.m_name,
arg.ToDescriptionString());
sections.m_max_pad = std::max(sections.m_max_pad,
sections.m_sections.back().m_left.size());
// Recursively push nested args
sections.Push(arg);
}
ret += sections.ToString();
// Result
ret += m_results.ToDescriptionString();
// Examples
ret += m_examples.ToDescriptionString();
return ret;
}
bool RPCArg::IsOptional() const {
if (m_fallback.which() == 1) {
return true;
} else {
return RPCArg::Optional::NO != boost::get<RPCArg::Optional>(m_fallback);
}
}
std::string RPCArg::ToDescriptionString() const {
std::string ret;
ret += "(";
if (m_type_str.size() != 0) {
ret += m_type_str.at(1);
} else {
switch (m_type) {
case Type::STR_HEX:
case Type::STR: {
ret += "string";
break;
}
case Type::NUM: {
ret += "numeric";
break;
}
case Type::AMOUNT: {
ret += "numeric or string";
break;
}
case Type::RANGE: {
ret += "numeric or array";
break;
}
case Type::BOOL: {
ret += "boolean";
break;
}
case Type::OBJ:
case Type::OBJ_USER_KEYS: {
ret += "json object";
break;
}
case Type::ARR: {
ret += "json array";
break;
}
// no default case, so the compiler can warn about missing cases
}
}
if (m_fallback.which() == 1) {
ret += ", optional, default=" + boost::get<std::string>(m_fallback);
} else {
switch (boost::get<RPCArg::Optional>(m_fallback)) {
case RPCArg::Optional::OMITTED: {
// nothing to do. Element is treated as if not present and has
// no default value
break;
}
case RPCArg::Optional::OMITTED_NAMED_ARG: {
ret += ", optional"; // Default value is "null"
break;
}
case RPCArg::Optional::NO: {
ret += ", required";
break;
}
// no default case, so the compiler can warn about missing cases
}
}
ret += ")";
ret += m_description.empty() ? "" : " " + m_description;
return ret;
}
std::string RPCArg::ToStringObj(const bool oneline) const {
std::string res;
res += "\"";
res += m_name;
if (oneline) {
res += "\":";
} else {
res += "\": ";
}
switch (m_type) {
case Type::STR:
return res + "\"str\"";
case Type::STR_HEX:
return res + "\"hex\"";
case Type::NUM:
return res + "n";
case Type::RANGE:
return res + "n or [n,n]";
case Type::AMOUNT:
return res + "amount";
case Type::BOOL:
return res + "bool";
case Type::ARR:
res += "[";
for (const auto &i : m_inner) {
res += i.ToString(oneline) + ",";
}
return res + "...]";
case Type::OBJ:
case Type::OBJ_USER_KEYS:
// Currently unused, so avoid writing dead code
assert(false);
// no default case, so the compiler can warn about missing cases
}
assert(false);
}
std::string RPCArg::ToString(const bool oneline) const {
if (oneline && !m_oneline_description.empty()) {
return m_oneline_description;
}
switch (m_type) {
case Type::STR_HEX:
case Type::STR: {
return "\"" + m_name + "\"";
}
case Type::NUM:
case Type::RANGE:
case Type::AMOUNT:
case Type::BOOL: {
return m_name;
}
case Type::OBJ:
case Type::OBJ_USER_KEYS: {
const std::string res = Join(m_inner, ",", [&](const RPCArg &i) {
return i.ToStringObj(oneline);
});
if (m_type == Type::OBJ) {
return "{" + res + "}";
} else {
return "{" + res + ",...}";
}
}
case Type::ARR: {
std::string res;
for (const auto &i : m_inner) {
res += i.ToString(oneline) + ",";
}
return "[" + res + "...]";
}
// no default case, so the compiler can warn about missing cases
}
assert(false);
}
static std::pair<int64_t, int64_t> ParseRange(const UniValue &value) {
if (value.isNum()) {
return {0, value.get_int64()};
}
if (value.isArray() && value.size() == 2 && value[0].isNum() &&
value[1].isNum()) {
int64_t low = value[0].get_int64();
int64_t high = value[1].get_int64();
if (low > high) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Range specified as [begin,end] must not have begin after end");
}
return {low, high};
}
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Range must be specified as end or as [begin,end]");
}
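// Examples: a bare number 5 is interpreted as the range [0,5]; the array
// [2,7] as begin=2, end=7; [7,2] is rejected because begin is after end.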
std::pair<int64_t, int64_t> ParseDescriptorRange(const UniValue &value) {
int64_t low, high;
std::tie(low, high) = ParseRange(value);
if (low < 0) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Range should be greater or equal than 0");
}
if ((high >> 31) != 0) {
throw JSONRPCError(RPC_INVALID_PARAMETER, "End of range is too high");
}
if (high >= low + 1000000) {
throw JSONRPCError(RPC_INVALID_PARAMETER, "Range is too large");
}
return {low, high};
}
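// On top of ParseRange(), descriptor ranges must be non-negative, the end
// must fit into 31 bits (below the hardened-derivation bit), and at most
// 1,000,000 indices may be requested at once.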
std::vector<CScript>
EvalDescriptorStringOrObject(const UniValue &scanobject,
FlatSigningProvider &provider) {
std::string desc_str;
std::pair<int64_t, int64_t> range = {0, 1000};
if (scanobject.isStr()) {
desc_str = scanobject.get_str();
} else if (scanobject.isObject()) {
UniValue desc_uni = find_value(scanobject, "desc");
if (desc_uni.isNull()) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Descriptor needs to be provided in scan object");
}
desc_str = desc_uni.get_str();
UniValue range_uni = find_value(scanobject, "range");
if (!range_uni.isNull()) {
range = ParseDescriptorRange(range_uni);
}
} else {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Scan object needs to be either a string or an object");
}
std::string error;
auto desc = Parse(desc_str, provider, error);
if (!desc) {
- throw JSONRPCError(
- RPC_INVALID_ADDRESS_OR_KEY,
- strprintf("Invalid descriptor '%s', %s", desc_str, error));
+ throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, error);
}
if (!desc->IsRange()) {
range.first = 0;
range.second = 0;
}
std::vector<CScript> ret;
for (int i = range.first; i <= range.second; ++i) {
std::vector<CScript> scripts;
if (!desc->Expand(i, provider, scripts, provider)) {
throw JSONRPCError(
RPC_INVALID_ADDRESS_OR_KEY,
strprintf("Cannot derive script without private keys: '%s'",
desc_str));
}
std::move(scripts.begin(), scripts.end(), std::back_inserter(ret));
}
return ret;
}
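// Accepted scan-object shapes (illustrative): either a bare descriptor string
// such as "addr(...)#checksum", or an object like
//   {"desc": "pkh(xpub.../0/*)#checksum", "range": [0, 100]}
// where "range" defaults to [0, 1000] and is ignored for un-ranged
// descriptors.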
diff --git a/src/script/descriptor.cpp b/src/script/descriptor.cpp
index 0af370cc5..df71f7e84 100644
--- a/src/script/descriptor.cpp
+++ b/src/script/descriptor.cpp
@@ -1,1161 +1,1194 @@
// Copyright (c) 2018 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <script/descriptor.h>
#include <chainparams.h> // For Params()
#include <config.h>
#include <key_io.h>
#include <pubkey.h>
#include <script/standard.h>
#include <span.h>
#include <util/bip32.h>
#include <util/strencodings.h>
#include <util/system.h>
#include <memory>
#include <string>
namespace {
////////////////////////////////////////////////////////////////////////////
// Checksum //
////////////////////////////////////////////////////////////////////////////
// This section implements a checksum algorithm for descriptors with the
// following properties:
// * Mistakes in a descriptor string are measured in "symbol errors". The higher
// the number of symbol errors, the harder it is to detect:
// * An error substituting a character from 0123456789()[],'/*abcdefgh@:$%{}
// for
// another in that set always counts as 1 symbol error.
// * Note that hex encoded keys are covered by these characters. Xprvs and
// xpubs use other characters too, but already have their own checksum
// mechanism.
// * Function names like "multi()" use other characters, but mistakes in
// these would generally result in an unparseable descriptor.
// * A case error always counts as 1 symbol error.
// * Any other 1 character substitution error counts as 1 or 2 symbol errors.
// * Any 1 symbol error is always detected.
// * Any 2 or 3 symbol error in a descriptor of up to 49154 characters is always
// detected.
// * Any 4 symbol error in a descriptor of up to 507 characters is always
// detected.
// * Any 5 symbol error in a descriptor of up to 77 characters is always
// detected.
// * Is optimized to minimize the chance a 5 symbol error in a descriptor up to
// 387 characters is undetected
// * Random errors have a chance of 1 in 2**40 of being undetected.
//
// These properties are achieved by expanding every group of 3 (non checksum)
// characters into 4 GF(32) symbols, over which a cyclic code is defined.
/*
* Interprets c as 8 groups of 5 bits which are the coefficients of a degree 8
* polynomial over GF(32), multiplies that polynomial by x, computes its
* remainder modulo a generator, and adds the constant term val.
*
* This generator is G(x) = x^8 + {30}x^7 + {23}x^6 + {15}x^5 + {14}x^4 +
* {10}x^3 + {6}x^2 + {12}x + {9}. It is chosen to define a cyclic error
* detecting code which is selected by:
* - Starting from all BCH codes over GF(32) of degree 8 and below, which by
* construction guarantee detecting 3 errors in windows up to 19000 symbols.
* - Taking all those generators, and for degree 7 ones, extend them to degree 8
* by adding all degree-1 factors.
* - Selecting just the set of generators that guarantee detecting 4 errors in a
* window of length 512.
* - Selecting one of those with best worst-case behavior for 5 errors in
* windows of length up to 512.
*
* The generator and the constants to implement it can be verified using this
* Sage code:
*   B = GF(2)                                  # Binary field
*   BP.<b> = B[]                               # Polynomials over the binary field
*   F_mod = b**5 + b**3 + 1
*   F.<f> = GF(32, modulus=F_mod, repr='int')  # GF(32) definition
*   FP.<x> = F[]                               # Polynomials over GF(32)
*   E_mod = x**3 + x + F.fetch_int(8)
*   E.<e> = F.extension(E_mod)                 # Extension field definition
*   alpha = e**2743                            # Choice of an element in extension field
*   for p in divisors(E.order() - 1):          # Verify alpha has order 32767.
*       assert((alpha**p == 1) == (p % 32767 == 0))
*   G = lcm([(alpha**i).minpoly() for i in [1056,1057,1058]] + [x + 1])
*   print(G)                                   # Print out the generator
*   for i in [1,2,4,8,16]: # Print out {1,2,4,8,16}*(G mod x^8), packed in hex integers.
*       v = 0
*       for coef in reversed((F.fetch_int(i)*(G % x**8)).coefficients(sparse=True)):
*           v = v*32 + coef.integer_representation()
*       print("0x%x" % v)
*/
uint64_t PolyMod(uint64_t c, int val) {
uint8_t c0 = c >> 35;
c = ((c & 0x7ffffffff) << 5) ^ val;
if (c0 & 1) {
c ^= 0xf5dee51989;
}
if (c0 & 2) {
c ^= 0xa9fdca3312;
}
if (c0 & 4) {
c ^= 0x1bab10e32d;
}
if (c0 & 8) {
c ^= 0x3706b1677a;
}
if (c0 & 16) {
c ^= 0x644d626ffd;
}
return c;
}
std::string DescriptorChecksum(const Span<const char> &span) {
/** A character set designed such that:
* - The most common 'unprotected' descriptor characters (hex, keypaths)
* are in the first group of 32.
* - Case errors cause an offset that's a multiple of 32.
* - As many alphabetic characters are in the same group (while following
* the above restrictions).
*
* If p(x) gives the position of a character c in this character set, every
* group of 3 characters (a,b,c) is encoded as the 4 symbols (p(a) & 31,
* p(b) & 31, p(c) & 31, (p(a) / 32) + 3 * (p(b) / 32) + 9 * (p(c) / 32)).
* This means that changes that only affect the lower 5 bits of the
* position, or only the higher 2 bits, will just affect a single symbol.
*
* As a result, within-group-of-32 errors count as 1 symbol, as do
* cross-group errors that don't affect the position within the groups.
*/
static std::string INPUT_CHARSET = "0123456789()[],'/*abcdefgh@:$%{}"
"IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~"
"ijklmnopqrstuvwxyzABCDEFGH`#\"\\ ";
/** The character set for the checksum itself (same as bech32). */
static std::string CHECKSUM_CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l";
uint64_t c = 1;
int cls = 0;
int clscount = 0;
for (auto ch : span) {
auto pos = INPUT_CHARSET.find(ch);
if (pos == std::string::npos) {
return "";
}
// Emit a symbol for the position inside the group, for every character.
c = PolyMod(c, pos & 31);
// Accumulate the group numbers
cls = cls * 3 + (pos >> 5);
if (++clscount == 3) {
// Emit an extra symbol representing the group numbers, for every 3
// characters.
c = PolyMod(c, cls);
cls = 0;
clscount = 0;
}
}
if (clscount > 0) {
c = PolyMod(c, cls);
}
for (int j = 0; j < 8; ++j) {
// Shift further to determine the checksum.
c = PolyMod(c, 0);
}
// Prevent appending zeroes from not affecting the checksum.
c ^= 1;
std::string ret(8, ' ');
for (int j = 0; j < 8; ++j) {
ret[j] = CHECKSUM_CHARSET[(c >> (5 * (7 - j))) & 31];
}
return ret;
}
std::string AddChecksum(const std::string &str) {
return str + "#" + DescriptorChecksum(MakeSpan(str));
}
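// For example, the ranged pkh(xpub.../0/*) descriptor shown in the
// deriveaddresses help carries the suffix "#3vhfv5h5" computed by this scheme.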
////////////////////////////////////////////////////////////////////////////
// Internal representation //
////////////////////////////////////////////////////////////////////////////
typedef std::vector<uint32_t> KeyPath;
/** Interface for public key objects in descriptors. */
struct PubkeyProvider {
virtual ~PubkeyProvider() = default;
/** Derive a public key. If key==nullptr, only info is desired. */
virtual bool GetPubKey(int pos, const SigningProvider &arg, CPubKey *key,
KeyOriginInfo &info) const = 0;
/** Whether this represent multiple public keys at different positions. */
virtual bool IsRange() const = 0;
/** Get the size of the generated public key(s) in bytes (33 or 65). */
virtual size_t GetSize() const = 0;
/** Get the descriptor string form. */
virtual std::string ToString() const = 0;
/**
* Get the descriptor string form including private data (if available in
* arg).
*/
virtual bool ToPrivateString(const SigningProvider &arg,
std::string &out) const = 0;
};
class OriginPubkeyProvider final : public PubkeyProvider {
KeyOriginInfo m_origin;
std::unique_ptr<PubkeyProvider> m_provider;
std::string OriginString() const {
return HexStr(std::begin(m_origin.fingerprint),
std::end(m_origin.fingerprint)) +
FormatHDKeypath(m_origin.path);
}
public:
OriginPubkeyProvider(KeyOriginInfo info,
std::unique_ptr<PubkeyProvider> provider)
: m_origin(std::move(info)), m_provider(std::move(provider)) {}
bool GetPubKey(int pos, const SigningProvider &arg, CPubKey *key,
KeyOriginInfo &info) const override {
if (!m_provider->GetPubKey(pos, arg, key, info)) {
return false;
}
std::copy(std::begin(m_origin.fingerprint),
std::end(m_origin.fingerprint), info.fingerprint);
info.path.insert(info.path.begin(), m_origin.path.begin(),
m_origin.path.end());
return true;
}
bool IsRange() const override { return m_provider->IsRange(); }
size_t GetSize() const override { return m_provider->GetSize(); }
std::string ToString() const override {
return "[" + OriginString() + "]" + m_provider->ToString();
}
bool ToPrivateString(const SigningProvider &arg,
std::string &ret) const override {
std::string sub;
if (!m_provider->ToPrivateString(arg, sub)) {
return false;
}
ret = "[" + OriginString() + "]" + std::move(sub);
return true;
}
};
/** An object representing a parsed constant public key in a descriptor. */
class ConstPubkeyProvider final : public PubkeyProvider {
CPubKey m_pubkey;
public:
ConstPubkeyProvider(const CPubKey &pubkey) : m_pubkey(pubkey) {}
bool GetPubKey(int pos, const SigningProvider &arg, CPubKey *key,
KeyOriginInfo &info) const override {
if (key) {
*key = m_pubkey;
}
info.path.clear();
CKeyID keyid = m_pubkey.GetID();
std::copy(keyid.begin(), keyid.begin() + sizeof(info.fingerprint),
info.fingerprint);
return true;
}
bool IsRange() const override { return false; }
size_t GetSize() const override { return m_pubkey.size(); }
std::string ToString() const override {
return HexStr(m_pubkey.begin(), m_pubkey.end());
}
bool ToPrivateString(const SigningProvider &arg,
std::string &ret) const override {
CKey key;
if (!arg.GetKey(m_pubkey.GetID(), key)) {
return false;
}
ret = EncodeSecret(key);
return true;
}
};
enum class DeriveType {
NO,
UNHARDENED,
HARDENED,
};
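// DeriveType::NO means the key expression has no trailing wildcard,
// UNHARDENED corresponds to a trailing "/*", and HARDENED to a trailing
// hardened wildcard, serialized back with an apostrophe by ToString() and
// ToPrivateString().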
/** An object representing a parsed extended public key in a descriptor. */
class BIP32PubkeyProvider final : public PubkeyProvider {
CExtPubKey m_extkey;
KeyPath m_path;
DeriveType m_derive;
bool GetExtKey(const SigningProvider &arg, CExtKey &ret) const {
CKey key;
if (!arg.GetKey(m_extkey.pubkey.GetID(), key)) {
return false;
}
ret.nDepth = m_extkey.nDepth;
std::copy(m_extkey.vchFingerprint,
m_extkey.vchFingerprint + sizeof(ret.vchFingerprint),
ret.vchFingerprint);
ret.nChild = m_extkey.nChild;
ret.chaincode = m_extkey.chaincode;
ret.key = key;
return true;
}
bool IsHardened() const {
if (m_derive == DeriveType::HARDENED) {
return true;
}
for (auto entry : m_path) {
if (entry >> 31) {
return true;
}
}
return false;
}
public:
BIP32PubkeyProvider(const CExtPubKey &extkey, KeyPath path,
DeriveType derive)
: m_extkey(extkey), m_path(std::move(path)), m_derive(derive) {}
bool IsRange() const override { return m_derive != DeriveType::NO; }
size_t GetSize() const override { return 33; }
bool GetPubKey(int pos, const SigningProvider &arg, CPubKey *key,
KeyOriginInfo &info) const override {
if (key) {
if (IsHardened()) {
CExtKey extkey;
if (!GetExtKey(arg, extkey)) {
return false;
}
for (auto entry : m_path) {
extkey.Derive(extkey, entry);
}
if (m_derive == DeriveType::UNHARDENED) {
extkey.Derive(extkey, pos);
}
if (m_derive == DeriveType::HARDENED) {
extkey.Derive(extkey, pos | 0x80000000UL);
}
*key = extkey.Neuter().pubkey;
} else {
// TODO: optimize by caching
CExtPubKey extkey = m_extkey;
for (auto entry : m_path) {
extkey.Derive(extkey, entry);
}
if (m_derive == DeriveType::UNHARDENED) {
extkey.Derive(extkey, pos);
}
assert(m_derive != DeriveType::HARDENED);
*key = extkey.pubkey;
}
}
CKeyID keyid = m_extkey.pubkey.GetID();
std::copy(keyid.begin(), keyid.begin() + sizeof(info.fingerprint),
info.fingerprint);
info.path = m_path;
if (m_derive == DeriveType::UNHARDENED) {
info.path.push_back(uint32_t(pos));
}
if (m_derive == DeriveType::HARDENED) {
info.path.push_back(uint32_t(pos) | 0x80000000L);
}
return true;
}
std::string ToString() const override {
std::string ret = EncodeExtPubKey(m_extkey) + FormatHDKeypath(m_path);
if (IsRange()) {
ret += "/*";
if (m_derive == DeriveType::HARDENED) {
ret += '\'';
}
}
return ret;
}
bool ToPrivateString(const SigningProvider &arg,
std::string &out) const override {
CExtKey key;
if (!GetExtKey(arg, key)) {
return false;
}
out = EncodeExtKey(key) + FormatHDKeypath(m_path);
if (IsRange()) {
out += "/*";
if (m_derive == DeriveType::HARDENED) {
out += '\'';
}
}
return true;
}
};
/** Base class for all Descriptor implementations. */
class DescriptorImpl : public Descriptor {
//! Public key arguments for this descriptor (size 1 for PK, PKH; any size
//! of Multisig).
const std::vector<std::unique_ptr<PubkeyProvider>> m_pubkey_args;
//! The sub-descriptor argument (nullptr for everything but SH).
const std::unique_ptr<DescriptorImpl> m_script_arg;
//! The string name of the descriptor function.
const std::string m_name;
protected:
//! Return a serialization of anything except pubkey and script arguments,
//! to be prepended to those.
virtual std::string ToStringExtra() const { return ""; }
/**
* A helper function to construct the scripts for this descriptor.
*
* This function is invoked once for every CScript produced by evaluating
* m_script_arg, or just once in case m_script_arg is nullptr.
* @param pubkeys The evaluations of the m_pubkey_args field.
* @param script The evaluation of m_script_arg (or nullptr when
m_script_arg is nullptr).
* @param out A FlatSigningProvider to put scripts or public keys in that
are necessary to the solver.
* The script arguments to this function are automatically
added, as is the origin info of the provided pubkeys.
* @return A vector with scriptPubKeys for this descriptor.
*/
virtual std::vector<CScript>
MakeScripts(const std::vector<CPubKey> &pubkeys, const CScript *script,
FlatSigningProvider &out) const = 0;
public:
DescriptorImpl(std::vector<std::unique_ptr<PubkeyProvider>> pubkeys,
std::unique_ptr<DescriptorImpl> script,
const std::string &name)
: m_pubkey_args(std::move(pubkeys)), m_script_arg(std::move(script)),
m_name(name) {}
bool IsSolvable() const override {
if (m_script_arg) {
if (!m_script_arg->IsSolvable()) {
return false;
}
}
return true;
}
bool IsRange() const final {
for (const auto &pubkey : m_pubkey_args) {
if (pubkey->IsRange()) {
return true;
}
}
if (m_script_arg) {
if (m_script_arg->IsRange()) {
return true;
}
}
return false;
}
bool ToStringHelper(const SigningProvider *arg, std::string &out,
bool priv) const {
std::string extra = ToStringExtra();
size_t pos = extra.size() > 0 ? 1 : 0;
std::string ret = m_name + "(" + extra;
for (const auto &pubkey : m_pubkey_args) {
if (pos++) {
ret += ",";
}
std::string tmp;
if (priv) {
if (!pubkey->ToPrivateString(*arg, tmp)) {
return false;
}
} else {
tmp = pubkey->ToString();
}
ret += std::move(tmp);
}
if (m_script_arg) {
if (pos++) {
ret += ",";
}
std::string tmp;
if (!m_script_arg->ToStringHelper(arg, tmp, priv)) {
return false;
}
ret += std::move(tmp);
}
out = std::move(ret) + ")";
return true;
}
std::string ToString() const final {
std::string ret;
ToStringHelper(nullptr, ret, false);
return AddChecksum(ret);
}
bool ToPrivateString(const SigningProvider &arg,
std::string &out) const override final {
bool ret = ToStringHelper(&arg, out, true);
out = AddChecksum(out);
return ret;
}
bool ExpandHelper(int pos, const SigningProvider &arg,
Span<const uint8_t> *cache_read,
std::vector<CScript> &output_scripts,
FlatSigningProvider &out,
std::vector<uint8_t> *cache_write) const {
std::vector<std::pair<CPubKey, KeyOriginInfo>> entries;
entries.reserve(m_pubkey_args.size());
// Construct temporary data in `entries` and `subscripts`, to avoid
// producing output in case of failure.
for (const auto &p : m_pubkey_args) {
entries.emplace_back();
if (!p->GetPubKey(pos, arg,
cache_read ? nullptr : &entries.back().first,
entries.back().second)) {
return false;
}
if (cache_read) {
// Cached expanded public key exists, use it.
if (cache_read->size() == 0) {
return false;
}
bool compressed =
((*cache_read)[0] == 0x02 || (*cache_read)[0] == 0x03) &&
cache_read->size() >= 33;
bool uncompressed =
((*cache_read)[0] == 0x04) && cache_read->size() >= 65;
if (!(compressed || uncompressed)) {
return false;
}
CPubKey pubkey(cache_read->begin(),
cache_read->begin() + (compressed ? 33 : 65));
entries.back().first = pubkey;
*cache_read = cache_read->subspan(compressed ? 33 : 65);
}
if (cache_write) {
cache_write->insert(cache_write->end(),
entries.back().first.begin(),
entries.back().first.end());
}
}
std::vector<CScript> subscripts;
if (m_script_arg) {
FlatSigningProvider subprovider;
if (!m_script_arg->ExpandHelper(pos, arg, cache_read, subscripts,
subprovider, cache_write)) {
return false;
}
out = Merge(out, subprovider);
}
std::vector<CPubKey> pubkeys;
pubkeys.reserve(entries.size());
for (auto &entry : entries) {
pubkeys.push_back(entry.first);
out.origins.emplace(
entry.first.GetID(),
std::make_pair<CPubKey, KeyOriginInfo>(
CPubKey(entry.first), std::move(entry.second)));
}
if (m_script_arg) {
for (const auto &subscript : subscripts) {
out.scripts.emplace(CScriptID(subscript), subscript);
std::vector<CScript> addscripts =
MakeScripts(pubkeys, &subscript, out);
for (auto &addscript : addscripts) {
output_scripts.push_back(std::move(addscript));
}
}
} else {
output_scripts = MakeScripts(pubkeys, nullptr, out);
}
return true;
}
bool Expand(int pos, const SigningProvider &provider,
std::vector<CScript> &output_scripts, FlatSigningProvider &out,
std::vector<uint8_t> *cache = nullptr) const final {
return ExpandHelper(pos, provider, nullptr, output_scripts, out, cache);
}
bool ExpandFromCache(int pos, const std::vector<uint8_t> &cache,
std::vector<CScript> &output_scripts,
FlatSigningProvider &out) const final {
Span<const uint8_t> span = MakeSpan(cache);
return ExpandHelper(pos, DUMMY_SIGNING_PROVIDER, &span, output_scripts,
out, nullptr) &&
span.size() == 0;
}
};
/** Construct a vector with one element, which is moved into it. */
template <typename T> std::vector<T> Singleton(T elem) {
std::vector<T> ret;
ret.emplace_back(std::move(elem));
return ret;
}
/** A parsed addr(A) descriptor. */
class AddressDescriptor final : public DescriptorImpl {
const CTxDestination m_destination;
protected:
std::string ToStringExtra() const override {
return EncodeDestination(m_destination, GetConfig());
}
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &,
const CScript *,
FlatSigningProvider &) const override {
return Singleton(GetScriptForDestination(m_destination));
}
public:
AddressDescriptor(CTxDestination destination)
: DescriptorImpl({}, {}, "addr"),
m_destination(std::move(destination)) {}
bool IsSolvable() const final { return false; }
};
/** A parsed raw(H) descriptor. */
class RawDescriptor final : public DescriptorImpl {
const CScript m_script;
protected:
std::string ToStringExtra() const override {
return HexStr(m_script.begin(), m_script.end());
}
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &,
const CScript *,
FlatSigningProvider &) const override {
return Singleton(m_script);
}
public:
RawDescriptor(CScript script)
: DescriptorImpl({}, {}, "raw"), m_script(std::move(script)) {}
bool IsSolvable() const final { return false; }
};
/** A parsed pk(P) descriptor. */
class PKDescriptor final : public DescriptorImpl {
protected:
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &keys,
const CScript *,
FlatSigningProvider &) const override {
return Singleton(GetScriptForRawPubKey(keys[0]));
}
public:
PKDescriptor(std::unique_ptr<PubkeyProvider> prov)
: DescriptorImpl(Singleton(std::move(prov)), {}, "pk") {}
};
/** A parsed pkh(P) descriptor. */
class PKHDescriptor final : public DescriptorImpl {
protected:
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &keys,
const CScript *,
FlatSigningProvider &out) const override {
CKeyID id = keys[0].GetID();
out.pubkeys.emplace(id, keys[0]);
return Singleton(GetScriptForDestination(PKHash(id)));
}
public:
PKHDescriptor(std::unique_ptr<PubkeyProvider> prov)
: DescriptorImpl(Singleton(std::move(prov)), {}, "pkh") {}
};
/** A parsed combo(P) descriptor. */
class ComboDescriptor final : public DescriptorImpl {
protected:
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &keys,
const CScript *,
FlatSigningProvider &out) const override {
std::vector<CScript> ret;
CKeyID id = keys[0].GetID();
out.pubkeys.emplace(id, keys[0]);
// P2PK
ret.emplace_back(GetScriptForRawPubKey(keys[0]));
// P2PKH
ret.emplace_back(GetScriptForDestination(PKHash(id)));
return ret;
}
public:
ComboDescriptor(std::unique_ptr<PubkeyProvider> prov)
: DescriptorImpl(Singleton(std::move(prov)), {}, "combo") {}
};
/** A parsed multi(...) descriptor. */
class MultisigDescriptor final : public DescriptorImpl {
const int m_threshold;
protected:
std::string ToStringExtra() const override {
return strprintf("%i", m_threshold);
}
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &keys,
const CScript *,
FlatSigningProvider &) const override {
return Singleton(GetScriptForMultisig(m_threshold, keys));
}
public:
MultisigDescriptor(int threshold,
std::vector<std::unique_ptr<PubkeyProvider>> providers)
: DescriptorImpl(std::move(providers), {}, "multi"),
m_threshold(threshold) {}
};
/** A parsed sh(...) descriptor. */
class SHDescriptor final : public DescriptorImpl {
protected:
std::vector<CScript> MakeScripts(const std::vector<CPubKey> &,
const CScript *script,
FlatSigningProvider &) const override {
return Singleton(GetScriptForDestination(ScriptHash(*script)));
}
public:
SHDescriptor(std::unique_ptr<DescriptorImpl> desc)
: DescriptorImpl({}, std::move(desc), "sh") {}
};
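// Illustrative sketch, not part of the original diff: how the concrete
// descriptor classes above compose. A pkh() descriptor wraps a single
// PubkeyProvider; Expand() then produces the P2PKH scriptPubKey and records
// the pubkey in the FlatSigningProvider. The pubkey below is the well-known
// secp256k1 generator point, used here only as example data.
static std::vector<CScript> ExamplePkhExpansion() {
    std::vector<uint8_t> data = ParseHex(
        "0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798");
    PKHDescriptor desc(std::make_unique<ConstPubkeyProvider>(CPubKey(data)));
    std::vector<CScript> scripts;
    FlatSigningProvider out;
    // pos is ignored for a non-ranged descriptor, and a constant pubkey
    // needs no external signing data, so DUMMY_SIGNING_PROVIDER suffices.
    desc.Expand(/* pos= */ 0, DUMMY_SIGNING_PROVIDER, scripts, out);
    return scripts;
}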
////////////////////////////////////////////////////////////////////////////
// Parser //
////////////////////////////////////////////////////////////////////////////
enum class ParseScriptContext {
TOP,
P2SH,
};
/**
* Parse a constant. If successful, sp is updated to skip the constant and
* true is returned.
*/
bool Const(const std::string &str, Span<const char> &sp) {
if ((size_t)sp.size() >= str.size() &&
std::equal(str.begin(), str.end(), sp.begin())) {
sp = sp.subspan(str.size());
return true;
}
return false;
}
/**
* Parse a function call. If successful, sp is updated to be the function's
* argument(s).
*/
bool Func(const std::string &str, Span<const char> &sp) {
if ((size_t)sp.size() >= str.size() + 2 && sp[str.size()] == '(' &&
sp[sp.size() - 1] == ')' &&
std::equal(str.begin(), str.end(), sp.begin())) {
sp = sp.subspan(str.size() + 1, sp.size() - str.size() - 2);
return true;
}
return false;
}
/** Return the expression that sp begins with, and update sp to skip it. */
Span<const char> Expr(Span<const char> &sp) {
int level = 0;
auto it = sp.begin();
while (it != sp.end()) {
if (*it == '(') {
++level;
} else if (level && *it == ')') {
--level;
} else if (level == 0 && (*it == ')' || *it == ',')) {
break;
}
++it;
}
Span<const char> ret = sp.first(it - sp.begin());
sp = sp.subspan(it - sp.begin());
return ret;
}
/** Split a string on every instance of sep, returning a vector. */
std::vector<Span<const char>> Split(const Span<const char> &sp, char sep) {
std::vector<Span<const char>> ret;
auto it = sp.begin();
auto start = it;
while (it != sp.end()) {
if (*it == sep) {
ret.emplace_back(start, it);
start = it + 1;
}
++it;
}
ret.emplace_back(start, it);
return ret;
}
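// Illustrative sketch, not part of the original diff: how the helpers above
// decompose "multi(2,keyA,keyB)". Func() strips the function name and the
// outer parentheses, Expr() consumes the threshold up to the first top-level
// comma, and Const(",", ...) eats the separator before the next argument.
static void ExampleParserHelpers() {
    const std::string desc = "multi(2,keyA,keyB)";
    Span<const char> sp(desc.data(), desc.size());
    bool is_multi = Func("multi", sp);     // true; sp is now "2,keyA,keyB"
    Span<const char> threshold = Expr(sp); // "2"; sp is now ",keyA,keyB"
    bool have_comma = Const(",", sp);      // true; sp is now "keyA,keyB"
    (void)is_multi;
    (void)threshold;
    (void)have_comma;
}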
/**
* Parse a key path, being passed a split list of elements (the first element is
* ignored).
*/
NODISCARD bool ParseKeyPath(const std::vector<Span<const char>> &split,
KeyPath &out, std::string &error) {
for (size_t i = 1; i < split.size(); ++i) {
Span<const char> elem = split[i];
bool hardened = false;
if (elem.size() > 0 &&
(elem[elem.size() - 1] == '\'' || elem[elem.size() - 1] == 'h')) {
elem = elem.first(elem.size() - 1);
hardened = true;
}
uint32_t p;
- if (!ParseUInt32(std::string(elem.begin(), elem.end()), &p) ||
- p > 0x7FFFFFFFUL) {
+ if (!ParseUInt32(std::string(elem.begin(), elem.end()), &p)) {
+ error = strprintf("Key path value '%s' is not a valid uint32",
+ std::string(elem.begin(), elem.end()).c_str());
+ return false;
+ } else if (p > 0x7FFFFFFFUL) {
error = strprintf("Key path value %u is out of range", p);
return false;
}
out.push_back(p | (uint32_t(hardened) << 31));
}
return true;
}
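// Illustrative sketch, not part of the original diff: ParseKeyPath receives
// the '/'-split elements of a key expression and skips the first one (the
// key itself), so "key/0'/1" yields the path {0x80000000, 1}; a trailing
// ' or h sets the top bit to mark that element as hardened.
static bool ExampleKeyPathParsing(KeyPath &out, std::string &error) {
    const std::string key_expr = "key/0'/1";
    Span<const char> sp(key_expr.data(), key_expr.size());
    return ParseKeyPath(Split(sp, '/'), out, error);
}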
/** Parse a public key that excludes origin information. */
std::unique_ptr<PubkeyProvider> ParsePubkeyInner(const Span<const char> &sp,
FlatSigningProvider &out,
std::string &error) {
auto split = Split(sp, '/');
std::string str(split[0].begin(), split[0].end());
+ if (str.size() == 0) {
+ error = "No key provided";
+ return nullptr;
+ }
if (split.size() == 1) {
if (IsHex(str)) {
std::vector<uint8_t> data = ParseHex(str);
CPubKey pubkey(data);
if (pubkey.IsFullyValid()) {
return std::make_unique<ConstPubkeyProvider>(pubkey);
}
+ error = strprintf("Pubkey '%s' is invalid", str);
+ return nullptr;
}
CKey key = DecodeSecret(str);
if (key.IsValid()) {
CPubKey pubkey = key.GetPubKey();
out.keys.emplace(pubkey.GetID(), key);
return std::make_unique<ConstPubkeyProvider>(pubkey);
}
}
CExtKey extkey = DecodeExtKey(str);
CExtPubKey extpubkey = DecodeExtPubKey(str);
if (!extkey.key.IsValid() && !extpubkey.pubkey.IsValid()) {
error = strprintf("key '%s' is not valid", str);
return nullptr;
}
KeyPath path;
DeriveType type = DeriveType::NO;
if (split.back() == MakeSpan("*").first(1)) {
split.pop_back();
type = DeriveType::UNHARDENED;
} else if (split.back() == MakeSpan("*'").first(2) ||
split.back() == MakeSpan("*h").first(2)) {
split.pop_back();
type = DeriveType::HARDENED;
}
if (!ParseKeyPath(split, path, error)) {
return nullptr;
}
if (extkey.key.IsValid()) {
extpubkey = extkey.Neuter();
out.keys.emplace(extpubkey.pubkey.GetID(), extkey.key);
}
return std::make_unique<BIP32PubkeyProvider>(extpubkey, std::move(path),
type);
}
/** Parse a public key including origin information (if enabled). */
std::unique_ptr<PubkeyProvider> ParsePubkey(const Span<const char> &sp,
FlatSigningProvider &out,
std::string &error) {
auto origin_split = Split(sp, ']');
if (origin_split.size() > 2) {
error = "Multiple ']' characters found for a single pubkey";
return nullptr;
}
if (origin_split.size() == 1) {
return ParsePubkeyInner(origin_split[0], out, error);
}
if (origin_split[0].size() < 1 || origin_split[0][0] != '[') {
- error = strprintf(
- "Key origin expected but not found, got '%s' instead",
- std::string(origin_split[0].begin(), origin_split[0].end()));
+ error = strprintf("Key origin start '[ character expected but not "
+ "found, got '%c' instead",
+ origin_split[0][0]);
return nullptr;
}
auto slash_split = Split(origin_split[0].subspan(1), '/');
if (slash_split[0].size() != 8) {
error = strprintf("Fingerprint is not 4 bytes (%u characters instead "
"of 8 characters)",
slash_split[0].size());
return nullptr;
}
std::string fpr_hex =
std::string(slash_split[0].begin(), slash_split[0].end());
if (!IsHex(fpr_hex)) {
error = strprintf("Fingerprint '%s' is not hex", fpr_hex);
return nullptr;
}
auto fpr_bytes = ParseHex(fpr_hex);
KeyOriginInfo info;
static_assert(sizeof(info.fingerprint) == 4, "Fingerprint must be 4 bytes");
assert(fpr_bytes.size() == 4);
std::copy(fpr_bytes.begin(), fpr_bytes.end(), info.fingerprint);
if (!ParseKeyPath(slash_split, info.path, error)) {
return nullptr;
}
auto provider = ParsePubkeyInner(origin_split[1], out, error);
if (!provider) {
return nullptr;
}
return std::make_unique<OriginPubkeyProvider>(std::move(info),
std::move(provider));
}
/** Parse a script in a particular context. */
std::unique_ptr<DescriptorImpl> ParseScript(Span<const char> &sp,
ParseScriptContext ctx,
FlatSigningProvider &out,
std::string &error) {
auto expr = Expr(sp);
if (Func("pk", expr)) {
auto pubkey = ParsePubkey(expr, out, error);
if (!pubkey) {
return nullptr;
}
return std::make_unique<PKDescriptor>(std::move(pubkey));
}
if (Func("pkh", expr)) {
auto pubkey = ParsePubkey(expr, out, error);
if (!pubkey) {
return nullptr;
}
return std::make_unique<PKHDescriptor>(std::move(pubkey));
}
if (ctx == ParseScriptContext::TOP && Func("combo", expr)) {
auto pubkey = ParsePubkey(expr, out, error);
if (!pubkey) {
return nullptr;
}
return std::make_unique<ComboDescriptor>(std::move(pubkey));
+ } else if (ctx != ParseScriptContext::TOP && Func("combo", expr)) {
+ error = "Cannot have combo in non-top level";
+ return nullptr;
}
if (Func("multi", expr)) {
auto threshold = Expr(expr);
uint32_t thres;
std::vector<std::unique_ptr<PubkeyProvider>> providers;
if (!ParseUInt32(std::string(threshold.begin(), threshold.end()),
&thres)) {
- error = strprintf("multi threshold %u out of range", thres);
+ error = strprintf(
+ "Multi threshold '%s' is not valid",
+ std::string(threshold.begin(), threshold.end()).c_str());
return nullptr;
}
size_t script_size = 0;
while (expr.size()) {
if (!Const(",", expr)) {
- error = strprintf("multi: expected ',', got '%c'", expr[0]);
+ error = strprintf("Multi: expected ',', got '%c'", expr[0]);
return nullptr;
}
auto arg = Expr(expr);
auto pk = ParsePubkey(arg, out, error);
if (!pk) {
return nullptr;
}
script_size += pk->GetSize() + 1;
providers.emplace_back(std::move(pk));
}
- if (providers.size() < 1 || providers.size() > 16 || thres < 1 ||
- thres > providers.size()) {
+ if (providers.size() < 1 || providers.size() > 16) {
+ error = strprintf("Cannot have %u keys in multisig; must have "
+ "between 1 and 16 keys, inclusive",
+ providers.size());
+ return nullptr;
+ } else if (thres < 1) {
+ error = strprintf(
+ "Multisig threshold cannot be %d, must be at least 1", thres);
+ return nullptr;
+ } else if (thres > providers.size()) {
+ error =
+ strprintf("Multisig threshold cannot be larger than the number "
+ "of keys; threshold is %d but only %u keys specified",
+ thres, providers.size());
return nullptr;
}
if (ctx == ParseScriptContext::TOP) {
if (providers.size() > 3) {
- error = strprintf("Cannot %u pubkeys in bare multisig; only at "
- "most 3 pubkeys",
+ error = strprintf("Cannot have %u pubkeys in bare multisig; "
+ "only at most 3 pubkeys",
providers.size());
return nullptr;
}
}
if (ctx == ParseScriptContext::P2SH) {
if (script_size + 3 > 520) {
error = strprintf("P2SH script is too large, %d bytes is "
"larger than 520 bytes",
script_size + 3);
return nullptr;
}
}
return std::make_unique<MultisigDescriptor>(thres,
std::move(providers));
}
if (ctx == ParseScriptContext::TOP && Func("sh", expr)) {
auto desc = ParseScript(expr, ParseScriptContext::P2SH, out, error);
if (!desc || expr.size()) {
return nullptr;
}
return std::make_unique<SHDescriptor>(std::move(desc));
+ } else if (ctx != ParseScriptContext::TOP && Func("sh", expr)) {
+ error = "Cannot have sh in non-top level";
+ return nullptr;
}
if (ctx == ParseScriptContext::TOP && Func("addr", expr)) {
CTxDestination dest =
DecodeDestination(std::string(expr.begin(), expr.end()), Params());
if (!IsValidDestination(dest)) {
error = "Address is not valid";
return nullptr;
}
return std::make_unique<AddressDescriptor>(std::move(dest));
}
if (ctx == ParseScriptContext::TOP && Func("raw", expr)) {
std::string str(expr.begin(), expr.end());
if (!IsHex(str)) {
error = "Raw script is not hex";
return nullptr;
}
auto bytes = ParseHex(str);
return std::make_unique<RawDescriptor>(
CScript(bytes.begin(), bytes.end()));
}
+ if (ctx == ParseScriptContext::P2SH) {
+ error = "A function is needed within P2SH";
+ return nullptr;
+ }
error = strprintf("%s is not a valid descriptor function",
std::string(expr.begin(), expr.end()));
return nullptr;
}
static std::unique_ptr<PubkeyProvider>
InferPubkey(const CPubKey &pubkey, ParseScriptContext,
const SigningProvider &provider) {
auto key_provider = std::make_unique<ConstPubkeyProvider>(pubkey);
KeyOriginInfo info;
if (provider.GetKeyOrigin(pubkey.GetID(), info)) {
return std::make_unique<OriginPubkeyProvider>(std::move(info),
std::move(key_provider));
}
return key_provider;
}
std::unique_ptr<DescriptorImpl> InferScript(const CScript &script,
ParseScriptContext ctx,
const SigningProvider &provider) {
std::vector<std::vector<uint8_t>> data;
txnouttype txntype = Solver(script, data);
if (txntype == TX_PUBKEY) {
CPubKey pubkey(data[0].begin(), data[0].end());
if (pubkey.IsValid()) {
return std::make_unique<PKDescriptor>(
InferPubkey(pubkey, ctx, provider));
}
}
if (txntype == TX_PUBKEYHASH) {
uint160 hash(data[0]);
CKeyID keyid(hash);
CPubKey pubkey;
if (provider.GetPubKey(keyid, pubkey)) {
return std::make_unique<PKHDescriptor>(
InferPubkey(pubkey, ctx, provider));
}
}
if (txntype == TX_MULTISIG) {
std::vector<std::unique_ptr<PubkeyProvider>> providers;
for (size_t i = 1; i + 1 < data.size(); ++i) {
CPubKey pubkey(data[i].begin(), data[i].end());
providers.push_back(InferPubkey(pubkey, ctx, provider));
}
return std::make_unique<MultisigDescriptor>((int)data[0][0],
std::move(providers));
}
if (txntype == TX_SCRIPTHASH && ctx == ParseScriptContext::TOP) {
uint160 hash(data[0]);
CScriptID scriptid(hash);
CScript subscript;
if (provider.GetCScript(scriptid, subscript)) {
auto sub =
InferScript(subscript, ParseScriptContext::P2SH, provider);
if (sub) {
return std::make_unique<SHDescriptor>(std::move(sub));
}
}
}
CTxDestination dest;
if (ExtractDestination(script, dest)) {
if (GetScriptForDestination(dest) == script) {
return std::make_unique<AddressDescriptor>(std::move(dest));
}
}
return std::make_unique<RawDescriptor>(script);
}
} // namespace
/**
* Check a descriptor checksum, and update sp to be the checksum-less part.
*/
bool CheckChecksum(Span<const char> &sp, bool require_checksum,
std::string &error, std::string *out_checksum = nullptr) {
auto check_split = Split(sp, '#');
if (check_split.size() > 2) {
error = "Multiple '#' symbols";
return false;
}
if (check_split.size() == 1 && require_checksum) {
error = "Missing checksum";
return false;
}
if (check_split.size() == 2) {
if (check_split[1].size() != 8) {
error =
strprintf("Expected 8 character checksum, not %u characters",
check_split[1].size());
return false;
}
}
auto checksum = DescriptorChecksum(check_split[0]);
if (checksum.empty()) {
error = "Invalid characters in payload";
return false;
}
if (check_split.size() == 2) {
if (!std::equal(checksum.begin(), checksum.end(),
check_split[1].begin())) {
error = strprintf(
"Provided checksum '%s' does not match computed checksum '%s'",
std::string(check_split[1].begin(), check_split[1].end()),
checksum);
return false;
}
}
if (out_checksum) {
*out_checksum = std::move(checksum);
}
sp = check_split[0];
return true;
}
std::unique_ptr<Descriptor> Parse(const std::string &descriptor,
FlatSigningProvider &out, std::string &error,
bool require_checksum) {
Span<const char> sp(descriptor.data(), descriptor.size());
if (!CheckChecksum(sp, require_checksum, error)) {
return nullptr;
}
auto ret = ParseScript(sp, ParseScriptContext::TOP, out, error);
if (sp.size() == 0 && ret) {
return std::unique_ptr<Descriptor>(std::move(ret));
}
return nullptr;
}
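// Illustrative sketch, not part of the original diff: typical use of Parse().
// With require_checksum=false a bare descriptor is accepted; importmulti
// (see ProcessImportDescriptor in wallet/rpcdump.cpp) passes true instead, so
// callers there must append the "#xxxxxxxx" checksum. The pubkey below is the
// well-known secp256k1 generator point, used only as example data.
static std::unique_ptr<Descriptor> ExampleParse(FlatSigningProvider &keys,
                                                std::string &error) {
    return Parse("pkh(0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959"
                 "f2815b16f81798)",
                 keys, error, /* require_checksum= */ false);
}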
std::string GetDescriptorChecksum(const std::string &descriptor) {
std::string ret;
std::string error;
Span<const char> sp(descriptor.data(), descriptor.size());
if (!CheckChecksum(sp, false, error, &ret)) {
return "";
}
return ret;
}
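// Illustrative sketch, not part of the original diff: GetDescriptorChecksum
// returns the 8-character checksum for a descriptor given with or without an
// existing "#checksum" suffix, and "" when the payload contains invalid
// characters or a supplied checksum does not match. "raw(deadbeef)" is just
// placeholder example data.
static std::string ExampleChecksum() {
    return GetDescriptorChecksum("raw(deadbeef)");
}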
std::unique_ptr<Descriptor> InferDescriptor(const CScript &script,
const SigningProvider &provider) {
return InferScript(script, ParseScriptContext::TOP, provider);
}
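// Illustrative sketch, not part of the original diff: InferDescriptor always
// returns a descriptor; when the script type is not recognized or the
// provider lacks solving data it falls back to addr()/raw(), so the result
// can at least be used to watch the original scriptPubKey.
static std::string ExampleInfer(const CScript &script_pub_key,
                                const SigningProvider &provider) {
    return InferDescriptor(script_pub_key, provider)->ToString();
}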
diff --git a/src/wallet/rpcdump.cpp b/src/wallet/rpcdump.cpp
index 996c1dc7b..44118057a 100644
--- a/src/wallet/rpcdump.cpp
+++ b/src/wallet/rpcdump.cpp
@@ -1,1787 +1,1786 @@
// Copyright (c) 2009-2016 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <chain.h>
#include <config.h>
#include <core_io.h>
#include <interfaces/chain.h>
#include <key_io.h>
#include <merkleblock.h>
#include <rpc/server.h>
#include <rpc/util.h>
#include <script/descriptor.h>
#include <script/script.h>
#include <script/standard.h>
#include <sync.h>
#include <util/bip32.h>
#include <util/system.h>
#include <util/time.h>
#include <util/translation.h>
#include <wallet/rpcwallet.h>
#include <wallet/wallet.h>
#include <boost/algorithm/string.hpp>
#include <cstdint>
#include <tuple>
static std::string EncodeDumpString(const std::string &str) {
std::stringstream ret;
for (const uint8_t c : str) {
if (c <= 32 || c >= 128 || c == '%') {
ret << '%' << HexStr(&c, &c + 1);
} else {
ret << c;
}
}
return ret.str();
}
static std::string DecodeDumpString(const std::string &str) {
std::stringstream ret;
for (unsigned int pos = 0; pos < str.length(); pos++) {
uint8_t c = str[pos];
if (c == '%' && pos + 2 < str.length()) {
c = (((str[pos + 1] >> 6) * 9 + ((str[pos + 1] - '0') & 15)) << 4) |
((str[pos + 2] >> 6) * 9 + ((str[pos + 2] - '0') & 15));
pos += 2;
}
ret << c;
}
return ret.str();
}
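// Illustrative sketch, not part of the original diff: the dump format
// percent-encodes spaces, control bytes, non-ASCII bytes and '%' so that a
// label survives the space-separated dump lines; DecodeDumpString reverses
// the encoding on import.
static bool ExampleDumpStringRoundTrip() {
    const std::string label = "my label";                // contains a space
    const std::string encoded = EncodeDumpString(label); // "my%20label"
    return DecodeDumpString(encoded) == label;           // true
}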
static bool GetWalletAddressesForKey(const Config &config,
CWallet *const pwallet,
const CKeyID &keyid, std::string &strAddr,
std::string &strLabel)
EXCLUSIVE_LOCKS_REQUIRED(pwallet->cs_wallet) {
bool fLabelFound = false;
CKey key;
pwallet->GetKey(keyid, key);
for (const auto &dest : GetAllDestinationsForKey(key.GetPubKey())) {
if (pwallet->mapAddressBook.count(dest)) {
if (!strAddr.empty()) {
strAddr += ",";
}
strAddr += EncodeDestination(dest, config);
strLabel = EncodeDumpString(pwallet->mapAddressBook[dest].name);
fLabelFound = true;
}
}
if (!fLabelFound) {
strAddr = EncodeDestination(
GetDestinationForKey(key.GetPubKey(),
pwallet->m_default_address_type),
config);
}
return fLabelFound;
}
static const int64_t TIMESTAMP_MIN = 0;
static void RescanWallet(CWallet &wallet, const WalletRescanReserver &reserver,
int64_t time_begin = TIMESTAMP_MIN,
bool update = true) {
int64_t scanned_time = wallet.RescanFromTime(time_begin, reserver, update);
if (wallet.IsAbortingRescan()) {
throw JSONRPCError(RPC_MISC_ERROR, "Rescan aborted by user.");
} else if (scanned_time > time_begin) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Rescan was unable to fully rescan the blockchain. "
"Some transactions may be missing.");
}
}
UniValue importprivkey(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"importprivkey",
"\nAdds a private key (as returned by dumpprivkey) to your wallet. "
"Requires a new wallet backup.\n"
"Hint: use importmulti to import more than one private key.\n"
"\nNote: This call can take minutes to complete if rescan is true, "
"during that time, other rpc calls\n"
"may report that the imported key exists but related transactions are "
"still missing, leading to temporarily incorrect/bogus balances and "
"unspent outputs until rescan completes.\n",
{
{"privkey", RPCArg::Type::STR, RPCArg::Optional::NO,
"The private key (see dumpprivkey)"},
{"label", RPCArg::Type::STR, /* default */
"current label if address exists, otherwise \"\"",
"An optional label"},
{"rescan", RPCArg::Type::BOOL, /* default */ "true",
"Rescan the wallet for transactions"},
},
RPCResults{},
RPCExamples{
"\nDump a private key\n" +
HelpExampleCli("dumpprivkey", "\"myaddress\"") +
"\nImport the private key with rescan\n" +
HelpExampleCli("importprivkey", "\"mykey\"") +
"\nImport using a label and without rescan\n" +
HelpExampleCli("importprivkey", "\"mykey\" \"testing\" false") +
"\nImport using default blank label and without rescan\n" +
HelpExampleCli("importprivkey", "\"mykey\" \"\" false") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("importprivkey", "\"mykey\", \"testing\", false")},
}
.Check(request);
if (pwallet->IsWalletFlagSet(WALLET_FLAG_DISABLE_PRIVATE_KEYS)) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Cannot import private keys to a wallet with "
"private keys disabled");
}
WalletRescanReserver reserver(pwallet);
bool fRescan = true;
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
EnsureWalletIsUnlocked(pwallet);
std::string strSecret = request.params[0].get_str();
std::string strLabel = "";
if (!request.params[1].isNull()) {
strLabel = request.params[1].get_str();
}
// Whether to perform rescan after import
if (!request.params[2].isNull()) {
fRescan = request.params[2].get_bool();
}
if (fRescan && pwallet->chain().getPruneMode()) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Rescan is disabled in pruned mode");
}
if (fRescan && !reserver.reserve()) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Wallet is currently rescanning. Abort existing "
"rescan or wait.");
}
CKey key = DecodeSecret(strSecret);
if (!key.IsValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid private key encoding");
}
CPubKey pubkey = key.GetPubKey();
assert(key.VerifyPubKey(pubkey));
CKeyID vchAddress = pubkey.GetID();
{
pwallet->MarkDirty();
// We don't know which corresponding address will be used;
// label all new addresses, and label existing addresses if a
// label was passed.
for (const auto &dest : GetAllDestinationsForKey(pubkey)) {
if (!request.params[1].isNull() ||
pwallet->mapAddressBook.count(dest) == 0) {
pwallet->SetAddressBook(dest, strLabel, "receive");
}
}
// Use timestamp of 1 to scan the whole chain
if (!pwallet->ImportPrivKeys({{vchAddress, key}}, 1)) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Error adding key to wallet");
}
}
}
if (fRescan) {
RescanWallet(*pwallet, reserver);
}
return NullUniValue;
}
UniValue abortrescan(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"abortrescan",
"\nStops current wallet rescan triggered by an RPC call, e.g. by an "
"importprivkey call.\n",
{},
RPCResults{},
RPCExamples{"\nImport a private key\n" +
HelpExampleCli("importprivkey", "\"mykey\"") +
"\nAbort the running wallet rescan\n" +
HelpExampleCli("abortrescan", "") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("abortrescan", "")},
}
.Check(request);
if (!pwallet->IsScanning() || pwallet->IsAbortingRescan()) {
return false;
}
pwallet->AbortRescan();
return true;
}
UniValue importaddress(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"importaddress",
"\nAdds an address or script (in hex) that can be watched as if it "
"were in your wallet but cannot be used to spend. Requires a new "
"wallet backup.\n"
"\nNote: This call can take minutes to complete if rescan is true, "
"during that time, other rpc calls\n"
"may report that the imported address exists but related transactions "
"are still missing, leading to temporarily incorrect/bogus balances "
"and unspent outputs until rescan completes.\n"
"If you have the full public key, you should call importpubkey instead "
"of this.\n"
"\nNote: If you import a non-standard raw script in hex form, outputs "
"sending to it will be treated\n"
"as change, and not show up in many RPCs.\n",
{
{"address", RPCArg::Type::STR, RPCArg::Optional::NO,
"The Bitcoin address (or hex-encoded script)"},
{"label", RPCArg::Type::STR, /* default */ "\"\"",
"An optional label"},
{"rescan", RPCArg::Type::BOOL, /* default */ "true",
"Rescan the wallet for transactions"},
{"p2sh", RPCArg::Type::BOOL, /* default */ "false",
"Add the P2SH version of the script as well"},
},
RPCResults{},
RPCExamples{
"\nImport an address with rescan\n" +
HelpExampleCli("importaddress", "\"myaddress\"") +
"\nImport using a label without rescan\n" +
HelpExampleCli("importaddress", "\"myaddress\" \"testing\" false") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("importaddress",
"\"myaddress\", \"testing\", false")},
}
.Check(request);
std::string strLabel;
if (!request.params[1].isNull()) {
strLabel = request.params[1].get_str();
}
// Whether to perform rescan after import
bool fRescan = true;
if (!request.params[2].isNull()) {
fRescan = request.params[2].get_bool();
}
if (fRescan && pwallet->chain().getPruneMode()) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Rescan is disabled in pruned mode");
}
WalletRescanReserver reserver(pwallet);
if (fRescan && !reserver.reserve()) {
throw JSONRPCError(
RPC_WALLET_ERROR,
"Wallet is currently rescanning. Abort existing rescan or wait.");
}
// Whether to import a p2sh version, too
bool fP2SH = false;
if (!request.params[3].isNull()) {
fP2SH = request.params[3].get_bool();
}
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
CTxDestination dest = DecodeDestination(request.params[0].get_str(),
config.GetChainParams());
if (IsValidDestination(dest)) {
if (fP2SH) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Cannot use the p2sh flag with an address - "
"use a script instead");
}
pwallet->MarkDirty();
pwallet->ImportScriptPubKeys(
strLabel, {GetScriptForDestination(dest)},
false /* have_solving_data */, true /* apply_label */,
1 /* timestamp */);
} else if (IsHex(request.params[0].get_str())) {
std::vector<uint8_t> data(ParseHex(request.params[0].get_str()));
CScript redeem_script(data.begin(), data.end());
std::set<CScript> scripts = {redeem_script};
pwallet->ImportScripts(scripts, 0 /* timestamp */);
if (fP2SH) {
scripts.insert(GetScriptForDestination(
ScriptHash(CScriptID(redeem_script))));
}
pwallet->ImportScriptPubKeys(
strLabel, scripts, false /* have_solving_data */,
true /* apply_label */, 1 /* timestamp */);
} else {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid Bitcoin address or script");
}
}
if (fRescan) {
RescanWallet(*pwallet, reserver);
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
pwallet->ReacceptWalletTransactions(*locked_chain);
}
}
return NullUniValue;
}
UniValue importprunedfunds(const Config &config,
const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"importprunedfunds",
"\nImports funds without rescan. Corresponding address or script must "
"previously be included in wallet. Aimed towards pruned wallets. The "
"end-user is responsible to import additional transactions that "
"subsequently spend the imported outputs or rescan after the point in "
"the blockchain the transaction is included.\n",
{
{"rawtransaction", RPCArg::Type::STR_HEX, RPCArg::Optional::NO,
"A raw transaction in hex funding an already-existing address in "
"wallet"},
{"txoutproof", RPCArg::Type::STR_HEX, RPCArg::Optional::NO,
"The hex output from gettxoutproof that contains the transaction"},
},
RPCResults{},
RPCExamples{""},
}
.Check(request);
CMutableTransaction tx;
if (!DecodeHexTx(tx, request.params[0].get_str())) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
uint256 txid = tx.GetId();
CWalletTx wtx(pwallet, MakeTransactionRef(std::move(tx)));
CDataStream ssMB(ParseHexV(request.params[1], "proof"), SER_NETWORK,
PROTOCOL_VERSION);
CMerkleBlock merkleBlock;
ssMB >> merkleBlock;
// Search partial merkle tree in proof for our transaction and index in
// valid block
std::vector<uint256> vMatch;
std::vector<size_t> vIndex;
size_t txnIndex = 0;
if (merkleBlock.txn.ExtractMatches(vMatch, vIndex) ==
merkleBlock.header.hashMerkleRoot) {
auto locked_chain = pwallet->chain().lock();
if (locked_chain->getBlockHeight(merkleBlock.header.GetHash()) ==
nullopt) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Block not found in chain");
}
std::vector<uint256>::const_iterator it;
if ((it = std::find(vMatch.begin(), vMatch.end(), txid)) ==
vMatch.end()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Transaction given doesn't exist in proof");
}
txnIndex = vIndex[it - vMatch.begin()];
} else {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Something wrong with merkleblock");
}
wtx.nIndex = txnIndex;
wtx.hashBlock = merkleBlock.header.GetHash();
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
if (pwallet->IsMine(*wtx.tx)) {
pwallet->AddToWallet(wtx, false);
return NullUniValue;
}
throw JSONRPCError(
RPC_INVALID_ADDRESS_OR_KEY,
"No addresses in wallet correspond to included transaction");
}
UniValue removeprunedfunds(const Config &config,
const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"removeprunedfunds",
"\nDeletes the specified transaction from the wallet. Meant for use "
"with pruned wallets and as a companion to importprunedfunds. This "
"will affect wallet balances.\n",
{
{"txid", RPCArg::Type::STR_HEX, RPCArg::Optional::NO,
"The hex-encoded id of the transaction you are deleting"},
},
RPCResults{},
RPCExamples{HelpExampleCli("removeprunedfunds",
"\"a8d0c0184dde994a09ec054286f1ce581bebf4644"
"6a512166eae7628734ea0a5\"") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("removeprunedfunds",
"\"a8d0c0184dde994a09ec054286f1ce581bebf4644"
"6a512166eae7628734ea0a5\"")},
}
.Check(request);
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
TxId txid(ParseHashV(request.params[0], "txid"));
std::vector<TxId> txIds;
txIds.push_back(txid);
std::vector<TxId> txIdsOut;
if (pwallet->ZapSelectTx(txIds, txIdsOut) != DBErrors::LOAD_OK) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Could not properly delete the transaction.");
}
if (txIdsOut.empty()) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Transaction does not exist in wallet.");
}
return NullUniValue;
}
UniValue importpubkey(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"importpubkey",
"\nAdds a public key (in hex) that can be watched as if it were in "
"your wallet but cannot be used to spend. Requires a new wallet "
"backup.\n"
"\nNote: This call can take minutes to complete if rescan is true, "
"during that time, other rpc calls\n"
"may report that the imported pubkey exists but related transactions "
"are still missing, leading to temporarily incorrect/bogus balances "
"and unspent outputs until rescan completes.\n",
{
{"pubkey", RPCArg::Type::STR, RPCArg::Optional::NO,
"The hex-encoded public key"},
{"label", RPCArg::Type::STR, /* default */ "\"\"",
"An optional label"},
{"rescan", RPCArg::Type::BOOL, /* default */ "true",
"Rescan the wallet for transactions"},
},
RPCResults{},
RPCExamples{
"\nImport a public key with rescan\n" +
HelpExampleCli("importpubkey", "\"mypubkey\"") +
"\nImport using a label without rescan\n" +
HelpExampleCli("importpubkey", "\"mypubkey\" \"testing\" false") +
"\nAs a JSON-RPC call\n" +
HelpExampleRpc("importpubkey", "\"mypubkey\", \"testing\", false")},
}
.Check(request);
std::string strLabel;
if (!request.params[1].isNull()) {
strLabel = request.params[1].get_str();
}
// Whether to perform rescan after import
bool fRescan = true;
if (!request.params[2].isNull()) {
fRescan = request.params[2].get_bool();
}
if (fRescan && pwallet->chain().getPruneMode()) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Rescan is disabled in pruned mode");
}
WalletRescanReserver reserver(pwallet);
if (fRescan && !reserver.reserve()) {
throw JSONRPCError(
RPC_WALLET_ERROR,
"Wallet is currently rescanning. Abort existing rescan or wait.");
}
if (!IsHex(request.params[0].get_str())) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Pubkey must be a hex string");
}
std::vector<uint8_t> data(ParseHex(request.params[0].get_str()));
CPubKey pubKey(data.begin(), data.end());
if (!pubKey.IsFullyValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Pubkey is not a valid public key");
}
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
std::set<CScript> script_pub_keys;
for (const auto &dest : GetAllDestinationsForKey(pubKey)) {
script_pub_keys.insert(GetScriptForDestination(dest));
}
pwallet->MarkDirty();
pwallet->ImportScriptPubKeys(strLabel, script_pub_keys,
true /* have_solving_data */,
true /* apply_label */, 1 /* timestamp */);
pwallet->ImportPubKeys({pubKey.GetID()}, {{pubKey.GetID(), pubKey}},
{} /* key_origins */, false /* add_keypool */,
false /* internal */, 1 /* timestamp */);
}
if (fRescan) {
RescanWallet(*pwallet, reserver);
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
pwallet->ReacceptWalletTransactions(*locked_chain);
}
}
return NullUniValue;
}
UniValue importwallet(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"importwallet",
"\nImports keys from a wallet dump file (see dumpwallet). Requires a "
"new wallet backup to include imported keys.\n",
{
{"filename", RPCArg::Type::STR, RPCArg::Optional::NO,
"The wallet file"},
},
RPCResults{},
RPCExamples{"\nDump the wallet\n" +
HelpExampleCli("dumpwallet", "\"test\"") +
"\nImport the wallet\n" +
HelpExampleCli("importwallet", "\"test\"") +
"\nImport using the json rpc call\n" +
HelpExampleRpc("importwallet", "\"test\"")},
}
.Check(request);
if (pwallet->chain().getPruneMode()) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Importing wallets is disabled in pruned mode");
}
WalletRescanReserver reserver(pwallet);
if (!reserver.reserve()) {
throw JSONRPCError(
RPC_WALLET_ERROR,
"Wallet is currently rescanning. Abort existing rescan or wait.");
}
int64_t nTimeBegin = 0;
bool fGood = true;
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
EnsureWalletIsUnlocked(pwallet);
fsbridge::ifstream file;
file.open(request.params[0].get_str(), std::ios::in | std::ios::ate);
if (!file.is_open()) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Cannot open wallet dump file");
}
Optional<int> tip_height = locked_chain->getHeight();
nTimeBegin = tip_height ? locked_chain->getBlockTime(*tip_height) : 0;
int64_t nFilesize = std::max<int64_t>(1, file.tellg());
file.seekg(0, file.beg);
// Use uiInterface.ShowProgress instead of pwallet.ShowProgress because
// pwallet.ShowProgress has a cancel button tied to AbortRescan which we
// don't want for this progress bar showing the import progress.
// uiInterface.ShowProgress does not have a cancel button.
// show progress dialog in GUI
pwallet->chain().showProgress(
strprintf("%s " + _("Importing...").translated,
pwallet->GetDisplayName()),
0, false);
std::vector<std::tuple<CKey, int64_t, bool, std::string>> keys;
std::vector<std::pair<CScript, int64_t>> scripts;
while (file.good()) {
pwallet->chain().showProgress(
"",
std::max(1, std::min<int>(50, 100 * double(file.tellg()) /
double(nFilesize))),
false);
std::string line;
std::getline(file, line);
if (line.empty() || line[0] == '#') {
continue;
}
std::vector<std::string> vstr;
boost::split(vstr, line, boost::is_any_of(" "));
if (vstr.size() < 2) {
continue;
}
CKey key = DecodeSecret(vstr[0]);
if (key.IsValid()) {
int64_t nTime = ParseISO8601DateTime(vstr[1]);
std::string strLabel;
bool fLabel = true;
for (size_t nStr = 2; nStr < vstr.size(); nStr++) {
if (vstr[nStr].front() == '#') {
break;
}
if (vstr[nStr] == "change=1") {
fLabel = false;
}
if (vstr[nStr] == "reserve=1") {
fLabel = false;
}
if (vstr[nStr].substr(0, 6) == "label=") {
strLabel = DecodeDumpString(vstr[nStr].substr(6));
fLabel = true;
}
}
keys.push_back(std::make_tuple(key, nTime, fLabel, strLabel));
} else if (IsHex(vstr[0])) {
std::vector<uint8_t> vData(ParseHex(vstr[0]));
CScript script = CScript(vData.begin(), vData.end());
int64_t birth_time = ParseISO8601DateTime(vstr[1]);
scripts.push_back(
std::pair<CScript, int64_t>(script, birth_time));
}
}
file.close();
// We now know whether we are importing private keys, so we can error if
// private keys are disabled
if (keys.size() > 0 &&
pwallet->IsWalletFlagSet(WALLET_FLAG_DISABLE_PRIVATE_KEYS)) {
// hide progress dialog in GUI
pwallet->chain().showProgress("", 100, false);
throw JSONRPCError(
RPC_WALLET_ERROR,
"Importing wallets is disabled when private keys are disabled");
}
double total = double(keys.size() + scripts.size());
double progress = 0;
for (const auto &key_tuple : keys) {
pwallet->chain().showProgress(
"",
std::max(50, std::min<int>(75, 100 * progress / total) + 50),
false);
const CKey &key = std::get<0>(key_tuple);
int64_t time = std::get<1>(key_tuple);
bool has_label = std::get<2>(key_tuple);
std::string label = std::get<3>(key_tuple);
CPubKey pubkey = key.GetPubKey();
assert(key.VerifyPubKey(pubkey));
CKeyID keyid = pubkey.GetID();
pwallet->WalletLogPrintf("Importing %s...\n",
EncodeDestination(PKHash(keyid), config));
if (!pwallet->ImportPrivKeys({{keyid, key}}, time)) {
pwallet->WalletLogPrintf(
"Error importing key for %s\n",
EncodeDestination(PKHash(keyid), config));
fGood = false;
continue;
}
if (has_label) {
pwallet->SetAddressBook(PKHash(keyid), label, "receive");
}
nTimeBegin = std::min(nTimeBegin, time);
progress++;
}
for (const auto &script_pair : scripts) {
pwallet->chain().showProgress(
"",
std::max(50, std::min<int>(75, 100 * progress / total) + 50),
false);
const CScript &script = script_pair.first;
int64_t time = script_pair.second;
if (!pwallet->ImportScripts({script}, time)) {
pwallet->WalletLogPrintf("Error importing script %s\n",
HexStr(script));
fGood = false;
continue;
}
if (time > 0) {
nTimeBegin = std::min(nTimeBegin, time);
}
progress++;
}
// hide progress dialog in GUI
pwallet->chain().showProgress("", 100, false);
}
// hide progress dialog in GUI
pwallet->chain().showProgress("", 100, false);
RescanWallet(*pwallet, reserver, nTimeBegin, false /* update */);
pwallet->MarkDirty();
if (!fGood) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Error adding some keys/scripts to wallet");
}
return NullUniValue;
}
UniValue dumpprivkey(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"dumpprivkey",
"\nReveals the private key corresponding to 'address'.\n"
"Then the importprivkey can be used with this output\n",
{
{"address", RPCArg::Type::STR, RPCArg::Optional::NO,
"The bitcoin address for the private key"},
},
RPCResult{"\"key\" (string) The private key\n"},
RPCExamples{HelpExampleCli("dumpprivkey", "\"myaddress\"") +
HelpExampleCli("importprivkey", "\"mykey\"") +
HelpExampleRpc("dumpprivkey", "\"myaddress\"")},
}
.Check(request);
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
EnsureWalletIsUnlocked(pwallet);
std::string strAddress = request.params[0].get_str();
CTxDestination dest =
DecodeDestination(strAddress, config.GetChainParams());
if (!IsValidDestination(dest)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid Bitcoin address");
}
auto keyid = GetKeyForDestination(*pwallet, dest);
if (keyid.IsNull()) {
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to a key");
}
CKey vchSecret;
if (!pwallet->GetKey(keyid, vchSecret)) {
throw JSONRPCError(RPC_WALLET_ERROR, "Private key for address " +
strAddress + " is not known");
}
return EncodeSecret(vchSecret);
}
UniValue dumpwallet(const Config &config, const JSONRPCRequest &request) {
std::shared_ptr<CWallet> const wallet = GetWalletForJSONRPCRequest(request);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"dumpwallet",
"\nDumps all wallet keys in a human-readable format to a server-side "
"file. This does not allow overwriting existing files.\n"
"Imported scripts are included in the dumpsfile, but corresponding "
"addresses may not be added automatically by importwallet.\n"
"Note that if your wallet contains keys which are not derived from "
"your HD seed (e.g. imported keys), these are not covered by\n"
"only backing up the seed itself, and must be backed up too (e.g. "
"ensure you back up the whole dumpfile).\n",
{
{"filename", RPCArg::Type::STR, RPCArg::Optional::NO,
"The filename with path (either absolute or relative to "
"bitcoind)"},
},
RPCResult{"{ (json object)\n"
" \"filename\" : { (string) The filename with full "
"absolute path\n"
"}\n"},
RPCExamples{HelpExampleCli("dumpwallet", "\"test\"") +
HelpExampleRpc("dumpwallet", "\"test\"")},
}
.Check(request);
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
EnsureWalletIsUnlocked(pwallet);
fs::path filepath = request.params[0].get_str();
filepath = fs::absolute(filepath);
/**
* Prevent arbitrary files from being overwritten. There have been reports
* that users have overwritten wallet files this way:
* https://github.com/bitcoin/bitcoin/issues/9934
* It may also avoid other security issues.
*/
if (fs::exists(filepath)) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
filepath.string() + " already exists. If you are "
"sure this is what you want, "
"move it out of the way first");
}
fsbridge::ofstream file;
file.open(filepath);
if (!file.is_open()) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Cannot open wallet dump file");
}
std::map<CKeyID, int64_t> mapKeyBirth;
const std::map<CKeyID, int64_t> &mapKeyPool = pwallet->GetAllReserveKeys();
pwallet->GetKeyBirthTimes(*locked_chain, mapKeyBirth);
std::set<CScriptID> scripts = pwallet->GetCScripts();
// sort time/key pairs
std::vector<std::pair<int64_t, CKeyID>> vKeyBirth;
for (const auto &entry : mapKeyBirth) {
vKeyBirth.push_back(std::make_pair(entry.second, entry.first));
}
mapKeyBirth.clear();
std::sort(vKeyBirth.begin(), vKeyBirth.end());
// produce output
file << strprintf("# Wallet dump created by Bitcoin %s\n", CLIENT_BUILD);
file << strprintf("# * Created on %s\n", FormatISO8601DateTime(GetTime()));
const Optional<int> tip_height = locked_chain->getHeight();
file << strprintf("# * Best block at time of backup was %i (%s),\n",
tip_height.value_or(-1),
tip_height
? locked_chain->getBlockHash(*tip_height).ToString()
: "(missing block hash)");
file << strprintf("# mined on %s\n",
tip_height ? FormatISO8601DateTime(
locked_chain->getBlockTime(*tip_height))
: "(missing block time)");
file << "\n";
// add the base58check encoded extended master if the wallet uses HD
CKeyID seed_id = pwallet->GetHDChain().seed_id;
if (!seed_id.IsNull()) {
CKey seed;
if (pwallet->GetKey(seed_id, seed)) {
CExtKey masterKey;
masterKey.SetSeed(seed.begin(), seed.size());
file << "# extended private masterkey: " << EncodeExtKey(masterKey)
<< "\n\n";
}
}
for (std::vector<std::pair<int64_t, CKeyID>>::const_iterator it =
vKeyBirth.begin();
it != vKeyBirth.end(); it++) {
const CKeyID &keyid = it->second;
std::string strTime = FormatISO8601DateTime(it->first);
std::string strAddr;
std::string strLabel;
CKey key;
if (pwallet->GetKey(keyid, key)) {
file << strprintf("%s %s ", EncodeSecret(key), strTime);
if (GetWalletAddressesForKey(config, pwallet, keyid, strAddr,
strLabel)) {
file << strprintf("label=%s", strLabel);
} else if (keyid == seed_id) {
file << "hdseed=1";
} else if (mapKeyPool.count(keyid)) {
file << "reserve=1";
} else if (pwallet->mapKeyMetadata[keyid].hdKeypath == "s") {
file << "inactivehdseed=1";
} else {
file << "change=1";
}
file << strprintf(
" # addr=%s%s\n", strAddr,
(pwallet->mapKeyMetadata[keyid].has_key_origin
? " hdkeypath=" +
WriteHDKeypath(
pwallet->mapKeyMetadata[keyid].key_origin.path)
: ""));
}
}
file << "\n";
for (const CScriptID &scriptid : scripts) {
CScript script;
std::string create_time = "0";
std::string address = EncodeDestination(ScriptHash(scriptid), config);
// get birth times for scripts with metadata
auto it = pwallet->m_script_metadata.find(scriptid);
if (it != pwallet->m_script_metadata.end()) {
create_time = FormatISO8601DateTime(it->second.nCreateTime);
}
if (pwallet->GetCScript(scriptid, script)) {
file << strprintf("%s %s script=1",
HexStr(script.begin(), script.end()),
create_time);
file << strprintf(" # addr=%s\n", address);
}
}
file << "\n";
file << "# End of dump\n";
file.close();
UniValue reply(UniValue::VOBJ);
reply.pushKV("filename", filepath.string());
return reply;
}
struct ImportData {
// Input data
//! Provided redeemScript; will be moved to `import_scripts` if relevant.
std::unique_ptr<CScript> redeemscript;
// Output data
std::set<CScript> import_scripts;
//! Import these private keys if available (the value indicates whether the
//! key is required for solvability)
std::map<CKeyID, bool> used_keys;
std::map<CKeyID, std::pair<CPubKey, KeyOriginInfo>> key_origins;
};
enum class ScriptContext {
//! Top-level scriptPubKey
TOP,
//! P2SH redeemScript
P2SH,
};
// Analyse the provided scriptPubKey, determining which keys and which redeem
// scripts from the ImportData struct are needed to spend it, and mark them as
// used. Returns an error string, or the empty string for success.
static std::string RecurseImportData(const CScript &script,
ImportData &import_data,
const ScriptContext script_ctx) {
// Use Solver to obtain script type and parsed pubkeys or hashes:
std::vector<std::vector<uint8_t>> solverdata;
txnouttype script_type = Solver(script, solverdata);
switch (script_type) {
case TX_PUBKEY: {
CPubKey pubkey(solverdata[0].begin(), solverdata[0].end());
import_data.used_keys.emplace(pubkey.GetID(), false);
return "";
}
case TX_PUBKEYHASH: {
CKeyID id = CKeyID(uint160(solverdata[0]));
import_data.used_keys[id] = true;
return "";
}
case TX_SCRIPTHASH: {
if (script_ctx == ScriptContext::P2SH) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Trying to nest P2SH inside another P2SH");
}
assert(script_ctx == ScriptContext::TOP);
CScriptID id = CScriptID(uint160(solverdata[0]));
// Remove redeemscript from import_data to check for superfluous
// script later.
auto subscript = std::move(import_data.redeemscript);
if (!subscript) {
return "missing redeemscript";
}
if (CScriptID(*subscript) != id) {
return "redeemScript does not match the scriptPubKey";
}
import_data.import_scripts.emplace(*subscript);
return RecurseImportData(*subscript, import_data,
ScriptContext::P2SH);
}
case TX_MULTISIG: {
for (size_t i = 1; i + 1 < solverdata.size(); ++i) {
CPubKey pubkey(solverdata[i].begin(), solverdata[i].end());
import_data.used_keys.emplace(pubkey.GetID(), false);
}
return "";
}
case TX_NULL_DATA:
return "unspendable script";
case TX_NONSTANDARD:
default:
return "unrecognized script";
}
}
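// Illustrative sketch, not part of the original diff: given a valid pubkey,
// a P2SH-wrapped 1-of-1 multisig import exercises both recursion levels.
// RecurseImportData matches TX_SCRIPTHASH on the outer scriptPubKey, checks
// the provided redeemscript against the script hash and adds it to
// import_scripts, then recurses into it, where TX_MULTISIG records the
// embedded pubkey as usable (but not strictly required) for solvability.
// Returns "" on success.
static std::string ExampleRecurseImportData(const CPubKey &pubkey) {
    ImportData import_data;
    const CScript redeem_script = GetScriptForMultisig(1, {pubkey});
    import_data.redeemscript = std::make_unique<CScript>(redeem_script);
    const CScript script_pub_key =
        GetScriptForDestination(ScriptHash(CScriptID(redeem_script)));
    return RecurseImportData(script_pub_key, import_data, ScriptContext::TOP);
}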
static UniValue ProcessImportLegacy(
CWallet *const pwallet, ImportData &import_data,
std::map<CKeyID, CPubKey> &pubkey_map, std::map<CKeyID, CKey> &privkey_map,
std::set<CScript> &script_pub_keys, bool &have_solving_data,
const UniValue &data, std::vector<CKeyID> &ordered_pubkeys) {
UniValue warnings(UniValue::VARR);
// First ensure scriptPubKey has either a script or JSON with "address"
// string
const UniValue &scriptPubKey = data["scriptPubKey"];
bool isScript = scriptPubKey.getType() == UniValue::VSTR;
if (!isScript && !(scriptPubKey.getType() == UniValue::VOBJ &&
scriptPubKey.exists("address"))) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"scriptPubKey must be string with script or JSON "
"with address string");
}
const std::string &output =
isScript ? scriptPubKey.get_str() : scriptPubKey["address"].get_str();
// Optional fields.
const std::string &strRedeemScript =
data.exists("redeemscript") ? data["redeemscript"].get_str() : "";
const UniValue &pubKeys =
data.exists("pubkeys") ? data["pubkeys"].get_array() : UniValue();
const UniValue &keys =
data.exists("keys") ? data["keys"].get_array() : UniValue();
const bool internal =
data.exists("internal") ? data["internal"].get_bool() : false;
const bool watchOnly =
data.exists("watchonly") ? data["watchonly"].get_bool() : false;
if (data.exists("range")) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Range should not be specified for a non-descriptor import");
}
// Generate the script and destination for the scriptPubKey provided
CScript script;
if (!isScript) {
CTxDestination dest = DecodeDestination(output, pwallet->chainParams);
if (!IsValidDestination(dest)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid address \"" + output + "\"");
}
script = GetScriptForDestination(dest);
} else {
if (!IsHex(output)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid scriptPubKey \"" + output + "\"");
}
std::vector<uint8_t> vData(ParseHex(output));
script = CScript(vData.begin(), vData.end());
CTxDestination dest;
if (!ExtractDestination(script, dest) && !internal) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Internal must be set to true for "
"nonstandard scriptPubKey imports.");
}
}
script_pub_keys.emplace(script);
// Parse all arguments
if (strRedeemScript.size()) {
if (!IsHex(strRedeemScript)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid redeem script \"" + strRedeemScript +
"\": must be hex string");
}
auto parsed_redeemscript = ParseHex(strRedeemScript);
import_data.redeemscript = std::make_unique<CScript>(
parsed_redeemscript.begin(), parsed_redeemscript.end());
}
for (size_t i = 0; i < pubKeys.size(); ++i) {
const auto &str = pubKeys[i].get_str();
if (!IsHex(str)) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Pubkey \"" + str + "\" must be a hex string");
}
auto parsed_pubkey = ParseHex(str);
CPubKey pubkey(parsed_pubkey.begin(), parsed_pubkey.end());
if (!pubkey.IsFullyValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Pubkey \"" + str +
"\" is not a valid public key");
}
pubkey_map.emplace(pubkey.GetID(), pubkey);
ordered_pubkeys.push_back(pubkey.GetID());
}
for (size_t i = 0; i < keys.size(); ++i) {
const auto &str = keys[i].get_str();
CKey key = DecodeSecret(str);
if (!key.IsValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid private key encoding");
}
CPubKey pubkey = key.GetPubKey();
CKeyID id = pubkey.GetID();
if (pubkey_map.count(id)) {
pubkey_map.erase(id);
}
privkey_map.emplace(id, key);
}
// Verify and process input data
have_solving_data =
import_data.redeemscript || pubkey_map.size() || privkey_map.size();
if (have_solving_data) {
// Match up data in import_data with the scriptPubKey in script.
auto error = RecurseImportData(script, import_data, ScriptContext::TOP);
// Verify whether the watchonly option corresponds to the
// availability of private keys.
bool spendable = std::all_of(
import_data.used_keys.begin(), import_data.used_keys.end(),
[&](const std::pair<CKeyID, bool> &used_key) {
return privkey_map.count(used_key.first) > 0;
});
if (!watchOnly && !spendable) {
warnings.push_back("Some private keys are missing, outputs "
"will be considered watchonly. If this is "
"intentional, specify the watchonly flag.");
}
if (watchOnly && spendable) {
warnings.push_back(
"All private keys are provided, outputs will be considered "
"spendable. If this is intentional, do not specify the "
"watchonly flag.");
}
// Check that all required keys for solvability are provided.
if (error.empty()) {
for (const auto &require_key : import_data.used_keys) {
if (!require_key.second) {
// Not a required key
continue;
}
if (pubkey_map.count(require_key.first) == 0 &&
privkey_map.count(require_key.first) == 0) {
error = "some required keys are missing";
}
}
}
if (!error.empty()) {
warnings.push_back("Importing as non-solvable: " + error +
". If this is intentional, don't provide "
"any keys, pubkeys or redeemscript.");
import_data = ImportData();
pubkey_map.clear();
privkey_map.clear();
have_solving_data = false;
} else {
// RecurseImportData() removes any relevant redeemscript from
// import_data, so we can use that to discover if a superfluous
// one was provided.
if (import_data.redeemscript) {
warnings.push_back(
"Ignoring redeemscript as this is not a P2SH script.");
}
for (auto it = privkey_map.begin(); it != privkey_map.end();) {
auto oldit = it++;
if (import_data.used_keys.count(oldit->first) == 0) {
warnings.push_back("Ignoring irrelevant private key.");
privkey_map.erase(oldit);
}
}
for (auto it = pubkey_map.begin(); it != pubkey_map.end();) {
auto oldit = it++;
auto key_data_it = import_data.used_keys.find(oldit->first);
if (key_data_it == import_data.used_keys.end() ||
!key_data_it->second) {
warnings.push_back("Ignoring public key \"" +
HexStr(oldit->first) +
"\" as it doesn't appear inside P2PKH.");
pubkey_map.erase(oldit);
}
}
}
}
return warnings;
}
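// Illustrative sketch, not part of the original diff: a legacy
// (non-descriptor) importmulti element handled above looks like
// (values are placeholders):
//   {"scriptPubKey": {"address": "<address>"}, "redeemscript": "<hex>",
//    "pubkeys": ["<hex pubkey>"], "keys": ["<wif private key>"],
//    "internal": false, "watchonly": false}
// RecurseImportData then works out which of the supplied keys and scripts
// are actually needed to solve the scriptPubKey; irrelevant material is
// dropped with a warning instead of failing the import.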
static UniValue ProcessImportDescriptor(ImportData &import_data,
std::map<CKeyID, CPubKey> &pubkey_map,
std::map<CKeyID, CKey> &privkey_map,
std::set<CScript> &script_pub_keys,
bool &have_solving_data,
const UniValue &data,
std::vector<CKeyID> &ordered_pubkeys) {
UniValue warnings(UniValue::VARR);
const std::string &descriptor = data["desc"].get_str();
FlatSigningProvider keys;
std::string error;
auto parsed_desc =
Parse(descriptor, keys, error, /* require_checksum = */ true);
if (!parsed_desc) {
- throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
- strprintf("Descriptor is invalid, %s", error));
+ throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, error);
}
have_solving_data = parsed_desc->IsSolvable();
const bool watch_only =
data.exists("watchonly") ? data["watchonly"].get_bool() : false;
int64_t range_start = 0, range_end = 0;
if (!parsed_desc->IsRange() && data.exists("range")) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Range should not be specified for an un-ranged descriptor");
} else if (parsed_desc->IsRange()) {
if (!data.exists("range")) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Descriptor is ranged, please specify the range");
}
std::tie(range_start, range_end) = ParseDescriptorRange(data["range"]);
}
const UniValue &priv_keys =
data.exists("keys") ? data["keys"].get_array() : UniValue();
// Expand all descriptors to get public keys and scripts.
// TODO: get private keys from descriptors too
for (int i = range_start; i <= range_end; ++i) {
FlatSigningProvider out_keys;
std::vector<CScript> scripts_temp;
parsed_desc->Expand(i, keys, scripts_temp, out_keys);
std::copy(scripts_temp.begin(), scripts_temp.end(),
std::inserter(script_pub_keys, script_pub_keys.end()));
for (const auto &key_pair : out_keys.pubkeys) {
ordered_pubkeys.push_back(key_pair.first);
}
for (const auto &x : out_keys.scripts) {
import_data.import_scripts.emplace(x.second);
}
std::copy(out_keys.pubkeys.begin(), out_keys.pubkeys.end(),
std::inserter(pubkey_map, pubkey_map.end()));
import_data.key_origins.insert(out_keys.origins.begin(),
out_keys.origins.end());
}
for (size_t i = 0; i < priv_keys.size(); ++i) {
const auto &str = priv_keys[i].get_str();
CKey key = DecodeSecret(str);
if (!key.IsValid()) {
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,
"Invalid private key encoding");
}
CPubKey pubkey = key.GetPubKey();
CKeyID id = pubkey.GetID();
// Check if this private key corresponds to a public key from the
// descriptor
if (!pubkey_map.count(id)) {
warnings.push_back("Ignoring irrelevant private key.");
} else {
privkey_map.emplace(id, key);
}
}
// Check if all the public keys have corresponding private keys in the
// import for spendability. This does not take into account threshold
// multisigs which could be spendable without all keys. Thus, threshold
// multisigs without all keys will be considered not spendable here, even if
// they are, perhaps triggering a false warning message. This is consistent
// with the current wallet IsMine check.
bool spendable =
std::all_of(pubkey_map.begin(), pubkey_map.end(),
[&](const std::pair<CKeyID, CPubKey> &used_key) {
return privkey_map.count(used_key.first) > 0;
}) &&
std::all_of(
import_data.key_origins.begin(), import_data.key_origins.end(),
[&](const std::pair<CKeyID, std::pair<CPubKey, KeyOriginInfo>>
&entry) { return privkey_map.count(entry.first) > 0; });
if (!watch_only && !spendable) {
warnings.push_back(
"Some private keys are missing, outputs will be considered "
"watchonly. If this is intentional, specify the watchonly flag.");
}
if (watch_only && spendable) {
warnings.push_back("All private keys are provided, outputs will be "
"considered spendable. If this is intentional, do "
"not specify the watchonly flag.");
}
return warnings;
}
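ProcessImportDescriptor expands every index of a ranged descriptor into scriptPubKeys, pubkeys and scripts before handing them to the import step. A hedged sketch of a matching request follows; the descriptor string and checksum are placeholders, not real keys:

# Sketch only: the descriptor below is a placeholder; a real one must end in
# its "#" checksum because Parse() is called with require_checksum = true.
request = {
    "desc": "pkh(tpubD.../0/*)#xxxxxxxx",  # ranged descriptor with checksum
    "range": [0, 4],                       # indices 0..4 are expanded
    "timestamp": "now",
    "watchonly": True,                     # no private keys supplied
}
# node.importmulti([request]) -> [{"success": True, "warnings": [...]}]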
static UniValue ProcessImport(CWallet *const pwallet, const UniValue &data,
const int64_t timestamp)
EXCLUSIVE_LOCKS_REQUIRED(pwallet->cs_wallet) {
UniValue warnings(UniValue::VARR);
UniValue result(UniValue::VOBJ);
try {
const bool internal =
data.exists("internal") ? data["internal"].get_bool() : false;
// Internal addresses should not have a label
if (internal && data.exists("label")) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Internal addresses should not have a label");
}
const std::string &label =
data.exists("label") ? data["label"].get_str() : "";
const bool add_keypool =
data.exists("keypool") ? data["keypool"].get_bool() : false;
// Add to keypool only works with privkeys disabled
if (add_keypool &&
!pwallet->IsWalletFlagSet(WALLET_FLAG_DISABLE_PRIVATE_KEYS)) {
throw JSONRPCError(RPC_INVALID_PARAMETER,
"Keys can only be imported to the keypool when "
"private keys are disabled");
}
ImportData import_data;
std::map<CKeyID, CPubKey> pubkey_map;
std::map<CKeyID, CKey> privkey_map;
std::set<CScript> script_pub_keys;
std::vector<CKeyID> ordered_pubkeys;
bool have_solving_data;
if (data.exists("scriptPubKey") && data.exists("desc")) {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Both a descriptor and a scriptPubKey should not be provided.");
} else if (data.exists("scriptPubKey")) {
warnings = ProcessImportLegacy(
pwallet, import_data, pubkey_map, privkey_map, script_pub_keys,
have_solving_data, data, ordered_pubkeys);
} else if (data.exists("desc")) {
warnings = ProcessImportDescriptor(
import_data, pubkey_map, privkey_map, script_pub_keys,
have_solving_data, data, ordered_pubkeys);
} else {
throw JSONRPCError(
RPC_INVALID_PARAMETER,
"Either a descriptor or scriptPubKey must be provided.");
}
// If private keys are disabled, abort if private keys are being
// imported
if (pwallet->IsWalletFlagSet(WALLET_FLAG_DISABLE_PRIVATE_KEYS) &&
!privkey_map.empty()) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Cannot import private keys to a wallet with "
"private keys disabled");
}
// Check whether we have any work to do
for (const CScript &script : script_pub_keys) {
if (::IsMine(*pwallet, script) & ISMINE_SPENDABLE) {
throw JSONRPCError(RPC_WALLET_ERROR,
"The wallet already contains the private "
"key for this address or script (\"" +
HexStr(script.begin(), script.end()) +
"\")");
}
}
// All good, time to import
pwallet->MarkDirty();
if (!pwallet->ImportScripts(import_data.import_scripts, timestamp)) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Error adding script to wallet");
}
if (!pwallet->ImportPrivKeys(privkey_map, timestamp)) {
throw JSONRPCError(RPC_WALLET_ERROR, "Error adding key to wallet");
}
if (!pwallet->ImportPubKeys(ordered_pubkeys, pubkey_map,
import_data.key_origins, add_keypool,
internal, timestamp)) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Error adding address to wallet");
}
if (!pwallet->ImportScriptPubKeys(label, script_pub_keys,
have_solving_data, !internal,
timestamp)) {
throw JSONRPCError(RPC_WALLET_ERROR,
"Error adding address to wallet");
}
result.pushKV("success", UniValue(true));
} catch (const UniValue &e) {
result.pushKV("success", UniValue(false));
result.pushKV("error", e);
} catch (...) {
result.pushKV("success", UniValue(false));
result.pushKV("error",
JSONRPCError(RPC_MISC_ERROR, "Missing required fields"));
}
if (warnings.size()) {
result.pushKV("warnings", warnings);
}
return result;
}
static int64_t GetImportTimestamp(const UniValue &data, int64_t now) {
if (data.exists("timestamp")) {
const UniValue &timestamp = data["timestamp"];
if (timestamp.isNum()) {
return timestamp.get_int64();
} else if (timestamp.isStr() && timestamp.get_str() == "now") {
return now;
}
throw JSONRPCError(RPC_TYPE_ERROR,
strprintf("Expected number or \"now\" timestamp "
"value for key. got type %s",
uvTypeName(timestamp.type())));
}
throw JSONRPCError(RPC_TYPE_ERROR,
"Missing required timestamp field for key");
}
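GetImportTimestamp accepts either an integer (seconds since epoch) or the string "now"; anything else, or a missing field, is rejected with RPC_TYPE_ERROR. Two illustrative request stubs, using placeholder addresses and the epoch value from the RPCExamples below:

# Sketch only: the two timestamp forms accepted by GetImportTimestamp().
requests = [
    {"scriptPubKey": {"address": "<my address>"}, "timestamp": 1455191478},
    {"scriptPubKey": {"address": "<my 2nd address>"}, "timestamp": "now"},
]
# Omitting "timestamp", or passing e.g. "", fails with code -3 (RPC_TYPE_ERROR).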
UniValue importmulti(const Config &config, const JSONRPCRequest &mainRequest) {
std::shared_ptr<CWallet> const wallet =
GetWalletForJSONRPCRequest(mainRequest);
CWallet *const pwallet = wallet.get();
if (!EnsureWalletIsAvailable(pwallet, mainRequest.fHelp)) {
return NullUniValue;
}
RPCHelpMan{
"importmulti",
"\nImport addresses/scripts (with private or public keys, redeem "
"script (P2SH)), rescanning all addresses in one-shot-only (rescan can "
"be disabled via options). Requires a new wallet backup.\n",
{
{"requests",
RPCArg::Type::ARR,
RPCArg::Optional::NO,
"Data to be imported",
{
{
"",
RPCArg::Type::OBJ,
RPCArg::Optional::OMITTED,
"",
{
{"desc", RPCArg::Type::STR, RPCArg::Optional::OMITTED,
"Descriptor to import. If using descriptor, do not "
"also provide address/scriptPubKey, scripts, or "
"pubkeys"},
{"scriptPubKey",
RPCArg::Type::STR,
RPCArg::Optional::NO,
"Type of scriptPubKey (string for script, json for "
"address). Should not be provided if using a "
"descriptor",
/* oneline_description */ "",
{"\"<script>\" | { \"address\":\"<address>\" }",
"string / json"}},
{"timestamp",
RPCArg::Type::NUM,
RPCArg::Optional::NO,
"Creation time of the key in seconds since epoch "
"(Jan 1 1970 GMT),\n"
" "
" or the string \"now\" to "
"substitute the current synced blockchain time. The "
"timestamp of the oldest\n"
" "
" key will determine how far back "
"blockchain rescans need to begin for missing wallet "
"transactions.\n"
" "
" \"now\" can be specified to "
"bypass scanning, for keys which are known to never "
"have been used, and\n"
" "
" 0 can be specified to scan the "
"entire blockchain. Blocks up to 2 hours before the "
"earliest key\n"
" "
" creation time of all keys being "
"imported by the importmulti call will be scanned.",
/* oneline_description */ "",
{"timestamp | \"now\"", "integer / string"}},
{"redeemscript", RPCArg::Type::STR,
RPCArg::Optional::OMITTED,
"Allowed only if the scriptPubKey is a P2SH "
"address/scriptPubKey"},
{"pubkeys",
RPCArg::Type::ARR,
/* default */ "empty array",
"Array of strings giving pubkeys to import. They "
"must occur in P2PKH scripts. They are not required "
"when the private key is also provided (see the "
"\"keys\" argument).",
{
{"pubKey", RPCArg::Type::STR,
RPCArg::Optional::OMITTED, ""},
}},
{"keys",
RPCArg::Type::ARR,
/* default */ "empty array",
"Array of strings giving private keys to import. The "
"corresponding public keys must occur in the output "
"or redeemscript.",
{
{"key", RPCArg::Type::STR,
RPCArg::Optional::OMITTED, ""},
}},
{"range", RPCArg::Type::RANGE,
RPCArg::Optional::OMITTED,
"If a ranged descriptor is used, this specifies the "
"end or the range (in the form [begin,end]) to "
"import"},
{"internal", RPCArg::Type::BOOL,
/* default */ "false",
"Stating whether matching outputs should be treated "
"as not incoming payments (also known as change)"},
{"watchonly", RPCArg::Type::BOOL,
/* default */ "false",
"Stating whether matching outputs should be "
"considered watched even when not all private keys "
"are provided."},
{"label", RPCArg::Type::STR, /* default */ "''",
"Label to assign to the address, only allowed with "
"internal=false"},
},
},
},
"\"requests\""},
{"options",
RPCArg::Type::OBJ,
RPCArg::Optional::OMITTED_NAMED_ARG,
"",
{
{"rescan", RPCArg::Type::BOOL, /* default */ "true",
"Stating if should rescan the blockchain after all imports"},
},
"\"options\""},
},
RPCResult{"\nResponse is an array with the same size as the input "
"that has the execution result :\n"
" [{\"success\": true}, {\"success\": true, "
"\"warnings\": [\"Ignoring irrelevant private key\"]}, "
"{\"success\": false, \"error\": {\"code\": -1, "
"\"message\": \"Internal Server Error\"}}, ...]\n"},
RPCExamples{
HelpExampleCli(
"importmulti",
"'[{ \"scriptPubKey\": { \"address\": \"<my address>\" }, "
"\"timestamp\":1455191478 }, "
"{ \"scriptPubKey\": { \"address\": \"<my 2nd address>\" "
"}, "
"\"label\": \"example 2\", \"timestamp\": 1455191480 }]'") +
HelpExampleCli(
"importmulti",
"'[{ \"scriptPubKey\": { \"address\": \"<my address>\" }, "
"\"timestamp\":1455191478 }]' '{ \"rescan\": false}'")
},
}
.Check(mainRequest);
RPCTypeCheck(mainRequest.params, {UniValue::VARR, UniValue::VOBJ});
const UniValue &requests = mainRequest.params[0];
// Default options
bool fRescan = true;
if (!mainRequest.params[1].isNull()) {
const UniValue &options = mainRequest.params[1];
if (options.exists("rescan")) {
fRescan = options["rescan"].get_bool();
}
}
WalletRescanReserver reserver(pwallet);
if (fRescan && !reserver.reserve()) {
throw JSONRPCError(
RPC_WALLET_ERROR,
"Wallet is currently rescanning. Abort existing rescan or wait.");
}
int64_t now = 0;
bool fRunScan = false;
int64_t nLowestTimestamp = 0;
UniValue response(UniValue::VARR);
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
EnsureWalletIsUnlocked(pwallet);
// Verify all timestamps are present before importing any keys.
const Optional<int> tip_height = locked_chain->getHeight();
now =
tip_height ? locked_chain->getBlockMedianTimePast(*tip_height) : 0;
for (const UniValue &data : requests.getValues()) {
GetImportTimestamp(data, now);
}
const int64_t minimumTimestamp = 1;
if (fRescan && tip_height) {
nLowestTimestamp = locked_chain->getBlockTime(*tip_height);
} else {
fRescan = false;
}
for (const UniValue &data : requests.getValues()) {
const int64_t timestamp =
std::max(GetImportTimestamp(data, now), minimumTimestamp);
const UniValue result = ProcessImport(pwallet, data, timestamp);
response.push_back(result);
if (!fRescan) {
continue;
}
// If at least one request was successful then allow rescan.
if (result["success"].get_bool()) {
fRunScan = true;
}
// Get the lowest timestamp.
if (timestamp < nLowestTimestamp) {
nLowestTimestamp = timestamp;
}
}
}
if (fRescan && fRunScan && requests.size()) {
int64_t scannedTime = pwallet->RescanFromTime(
nLowestTimestamp, reserver, true /* update */);
{
auto locked_chain = pwallet->chain().lock();
LOCK(pwallet->cs_wallet);
pwallet->ReacceptWalletTransactions(*locked_chain);
}
if (pwallet->IsAbortingRescan()) {
throw JSONRPCError(RPC_MISC_ERROR, "Rescan aborted by user.");
}
if (scannedTime > nLowestTimestamp) {
std::vector<UniValue> results = response.getValues();
response.clear();
response.setArray();
size_t i = 0;
for (const UniValue &request : requests.getValues()) {
// If key creation date is within the successfully scanned
// range, or if the import result already has an error set, let
// the result stand unmodified. Otherwise replace the result
// with an error message.
if (scannedTime <= GetImportTimestamp(request, now) ||
results.at(i).exists("error")) {
response.push_back(results.at(i));
} else {
UniValue result = UniValue(UniValue::VOBJ);
result.pushKV("success", UniValue(false));
result.pushKV(
"error",
JSONRPCError(
RPC_MISC_ERROR,
strprintf(
"Rescan failed for key with creation timestamp "
"%d. There was an error reading a block from "
"time %d, which is after or within %d seconds "
"of key creation, and could contain "
"transactions pertaining to the key. As a "
"result, transactions and coins using this key "
"may not appear in the wallet. This error "
"could be caused by pruning or data corruption "
"(see bitcoind log for details) and could be "
"dealt with by downloading and rescanning the "
"relevant blocks (see -reindex and -rescan "
"options).",
GetImportTimestamp(request, now),
scannedTime - TIMESTAMP_WINDOW - 1,
TIMESTAMP_WINDOW)));
response.push_back(std::move(result));
}
++i;
}
}
}
return response;
}
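The rescan above starts from the lowest key timestamp and only runs if at least one entry imported successfully; it can also be skipped entirely via the options argument, as in the RPCExamples. A hedged usage sketch, with a placeholder address and a hypothetical `node` handle:

# Sketch only: skip the post-import rescan via the options argument
# ("node" stands for a test-framework RPC handle and is not defined here).
requests = [{"scriptPubKey": {"address": "<my address>"},
             "timestamp": 1455191478}]
options = {"rescan": False}
# result = node.importmulti(requests, options)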
// clang-format off
static const CRPCCommand commands[] = {
// category name actor (function) argNames
// ------------------- ------------------------ ---------------------- ----------
{ "wallet", "abortrescan", abortrescan, {} },
{ "wallet", "dumpprivkey", dumpprivkey, {"address"} },
{ "wallet", "dumpwallet", dumpwallet, {"filename"} },
{ "wallet", "importmulti", importmulti, {"requests","options"} },
{ "wallet", "importprivkey", importprivkey, {"privkey","label","rescan"} },
{ "wallet", "importwallet", importwallet, {"filename"} },
{ "wallet", "importaddress", importaddress, {"address","label","rescan","p2sh"} },
{ "wallet", "importprunedfunds", importprunedfunds, {"rawtransaction","txoutproof"} },
{ "wallet", "importpubkey", importpubkey, {"pubkey","label","rescan"} },
{ "wallet", "removeprunedfunds", removeprunedfunds, {"txid"} },
};
// clang-format on
void RegisterDumpRPCCommands(
interfaces::Chain &chain,
std::vector<std::unique_ptr<interfaces::Handler>> &handlers) {
for (unsigned int vcidx = 0; vcidx < ARRAYLEN(commands); vcidx++) {
handlers.emplace_back(chain.handleRpc(commands[vcidx]));
}
}
diff --git a/test/functional/rpc_deriveaddresses.py b/test/functional/rpc_deriveaddresses.py
index ed297bdd4..2becf038c 100755
--- a/test/functional/rpc_deriveaddresses.py
+++ b/test/functional/rpc_deriveaddresses.py
@@ -1,110 +1,110 @@
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the deriveaddresses rpc call."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import assert_equal, assert_raises_rpc_error
class DeriveaddressesTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.supports_cli = 1
def run_test(self):
- assert_raises_rpc_error(-5, "Invalid descriptor",
+ assert_raises_rpc_error(-5, "Missing checksum",
self.nodes[0].deriveaddresses, "a")
descriptor = "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)#rdfjd0a9"
address = "bchreg:qzgrvmwc8vevauc25j86hgfpduz8j98yvvyr0qx0ew"
assert_equal(self.nodes[0].deriveaddresses(descriptor), [address])
descriptor = descriptor[:-9]
assert_raises_rpc_error(-5,
- "Invalid descriptor",
+ "Missing checksum",
self.nodes[0].deriveaddresses,
descriptor)
descriptor_pubkey = "pkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)#7st8eans"
address = "bchreg:qzgrvmwc8vevauc25j86hgfpduz8j98yvvyr0qx0ew"
assert_equal(self.nodes[0].deriveaddresses(
descriptor_pubkey), [address])
ranged_descriptor = "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)#77vpsvm5"
assert_equal(
self.nodes[0].deriveaddresses(ranged_descriptor, [1, 2]),
["bchreg:qz7mjsvr6gglnl389gnfxmqx0asxp0hcvqjx829c6k", "bchreg:qq9q9wefpjzuna7qhuzz7rvck9tuhrzp3gvrzd8kx2"])
assert_equal(
self.nodes[0].deriveaddresses(ranged_descriptor, 2),
[address, "bchreg:qz7mjsvr6gglnl389gnfxmqx0asxp0hcvqjx829c6k", "bchreg:qq9q9wefpjzuna7qhuzz7rvck9tuhrzp3gvrzd8kx2"])
assert_raises_rpc_error(
-8,
"Range should not be specified for an un-ranged descriptor",
self.nodes[0].deriveaddresses,
descsum_create(
"pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"),
[0, 2])
assert_raises_rpc_error(
-8,
"Range must be specified for a ranged descriptor",
self.nodes[0].deriveaddresses,
descsum_create("pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"))
assert_raises_rpc_error(
-8,
"End of range is too high",
self.nodes[0].deriveaddresses,
descsum_create(
"pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"),
10000000000)
assert_raises_rpc_error(
-8,
"Range is too large",
self.nodes[0].deriveaddresses,
descsum_create(
"pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"),
[1000000000, 2000000000])
assert_raises_rpc_error(
-8,
"Range specified as [begin,end] must not have begin after end",
self.nodes[0].deriveaddresses,
descsum_create(
"pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"),
[2, 0])
assert_raises_rpc_error(
-8,
"Range should be greater or equal than 0",
self.nodes[0].deriveaddresses,
descsum_create(
"pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"),
[-1, 0])
combo_descriptor = descsum_create(
"combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)")
assert_equal(self.nodes[0].deriveaddresses(
combo_descriptor), [address, address])
hardened_without_privkey_descriptor = descsum_create(
"pkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1'/1/0)")
assert_raises_rpc_error(-5,
"Cannot derive script without private keys",
self.nodes[0].deriveaddresses,
hardened_without_privkey_descriptor)
bare_multisig_descriptor = descsum_create(
"multi(1, tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0, tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)")
assert_raises_rpc_error(-5,
"Descriptor does not have a corresponding address",
self.nodes[0].deriveaddresses,
bare_multisig_descriptor)
if __name__ == '__main__':
DeriveaddressesTest().main()
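The reworded assertions above reflect that descriptor parsing now demands a checksum, so deriveaddresses reports "Missing checksum" for bare descriptors. A short sketch of the fix on the caller's side, assuming the same descsum_create helper imported by this test and a hypothetical `node` handle:

# Sketch only: append the checksum the RPC now requires before deriving.
from test_framework.descriptors import descsum_create

bare = ("pkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL"
        "4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)")
# node.deriveaddresses(bare)                     -> error -5 "Missing checksum"
# node.deriveaddresses(descsum_create(bare), 2)  -> addresses for indices 0..2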
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py
index 8ea3d62b9..342a6344f 100755
--- a/test/functional/wallet_importmulti.py
+++ b/test/functional/wallet_importmulti.py
@@ -1,741 +1,741 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importmulti RPC.
Test importmulti by generating keys on node0, importing the scriptPubKeys and
addresses on node1 and then testing the address info for the different address
variants.
- `get_key()` and `get_multisig()` are called to generate keys on node0 and
return the privkeys, pubkeys and all variants of scriptPubKey and address.
- `test_importmulti()` is called to send an importmulti call to node1, test
success, and (if unsuccessful) test the error code and error message returned.
- `test_address()` is called to call getaddressinfo for an address on node1
and test the values returned."""
from test_framework.script import (
CScript,
OP_NOP,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
from test_framework.wallet_util import (
get_key,
get_multisig,
test_address,
)
class ImportMultiTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.setup_nodes()
def test_importmulti(self, req, success, error_code=None,
error_message=None, warnings=[]):
"""Run importmulti and assert success"""
result = self.nodes[1].importmulti([req])
observed_warnings = []
if 'warnings' in result[0]:
observed_warnings = result[0]['warnings']
assert_equal(
"\n".join(
sorted(warnings)), "\n".join(
sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
assert_equal(result[0]['error']['code'], error_code)
assert_equal(result[0]['error']['message'], error_message)
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
node0_address1 = self.nodes[0].getaddressinfo(
self.nodes[0].getnewaddress())
# Check only one address
assert_equal(node0_address1['ismine'], True)
# Node 1 sync test
assert_equal(self.nodes[1].getblockcount(), 1)
# Address Test - before import
address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
# RPC importmulti -----------------------------------------------
# Bitcoin Address (implicit non-internal)
self.log.info("Should import an address")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp,
ischange=False)
watchonly_address = key.p2pkh_addr
watchonly_timestamp = timestamp
self.log.info("Should not import an invalid address")
self.test_importmulti({"scriptPubKey": {"address": "not valid address"},
"timestamp": "now"},
success=False,
error_code=-5,
error_message='Invalid address \"not valid address\"')
# ScriptPubKey + internal
self.log.info("Should import a scriptPubKey with internal flag")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"internal": True},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp,
ischange=True)
# ScriptPubKey + internal + label
self.log.info(
"Should not allow a label to be specified when internal is true")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"internal": True,
"label": "Example label"},
success=False,
error_code=-8,
error_message='Internal addresses should not have a label')
# Nonstandard scriptPubKey + !internal
self.log.info(
"Should not import a nonstandard scriptPubKey without internal flag")
nonstandardScriptPubKey = key.p2pkh_script + CScript([OP_NOP]).hex()
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# Address + Public key + !Internal(explicit)
self.log.info("Should import an address with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"pubkeys": [key.pubkey],
"internal": False},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
# ScriptPubKey + Public key + internal
self.log.info(
"Should import a scriptPubKey with internal and with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"pubkeys": [key.pubkey],
"internal": True},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
# Nonstandard scriptPubKey + Public key + !internal
self.log.info(
"Should not import a nonstandard scriptPubKey without internal and with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now",
"pubkeys": [key.pubkey]},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# Address + Private key + !watchonly
self.log.info("Should import an address with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
self.log.info(
"Should not import an address with private key if is already imported")
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=False,
error_code=-4,
error_message='The wallet already contains the private key for this address or script ("' + key.p2pkh_script + '")')
# Address + Private key + watchonly
self.log.info(
"Should import an address with private key and with watchonly")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey],
"watchonly": True},
success=True,
warnings=["All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
# ScriptPubKey + Private key + internal
self.log.info(
"Should import a scriptPubKey with internal and with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"keys": [key.privkey],
"internal": True},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
# Nonstandard scriptPubKey + Private key + !internal
self.log.info(
"Should not import a nonstandard scriptPubKey without internal and with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now",
"keys": [key.privkey]},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# P2SH address
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.log.info("Should import a p2sh")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
isscript=True,
iswatchonly=True,
timestamp=timestamp)
p2shunspent = self.nodes[1].listunspent(
0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], False)
# P2SH + Redeem script
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.log.info("Should import a p2sh with respective redeem script")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(
self.nodes[1],
multisig.p2sh_addr,
timestamp=timestamp,
iswatchonly=True,
ismine=False,
solvable=True)
p2shunspent = self.nodes[1].listunspent(
0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + !Watchonly
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.log.info(
"Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script,
"keys": multisig.privkeys[0:2]},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
multisig.p2sh_addr,
timestamp=timestamp,
ismine=False,
iswatchonly=True,
solvable=True)
p2shunspent = self.nodes[1].listunspent(
0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + Watchonly
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(
self.nodes[1].getbestblockhash())['mediantime']
self.log.info(
"Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script,
"keys": multisig.privkeys[0:2],
"watchonly": True},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
iswatchonly=True,
ismine=False,
solvable=True,
timestamp=timestamp)
# Address + Public key + !Internal + Wrong pubkey
self.log.info(
"Should not import an address with the wrong public key as non-solvable")
key = get_key(self.nodes[0])
wrong_key = get_key(self.nodes[0]).pubkey
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"pubkeys": [wrong_key]},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# ScriptPubKey + Public key + internal + Wrong pubkey
self.log.info(
"Should import a scriptPubKey with internal and with a wrong public key as non-solvable")
key = get_key(self.nodes[0])
wrong_key = get_key(self.nodes[0]).pubkey
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"pubkeys": [wrong_key],
"internal": True},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# Address + Private key + !watchonly + Wrong private key
self.log.info(
"Should import an address with a wrong private key as non-solvable")
key = get_key(self.nodes[0])
wrong_privkey = get_key(self.nodes[0]).privkey
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [wrong_privkey]},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# ScriptPubKey + Private key + internal + Wrong private key
self.log.info(
"Should import a scriptPubKey with internal and with a wrong private key as non-solvable")
key = get_key(self.nodes[0])
wrong_privkey = get_key(self.nodes[0]).privkey
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"keys": [wrong_privkey],
"internal": True},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# Importing existing watch only address with new timestamp should
# replace saved timestamp.
assert_greater_than(timestamp, watchonly_timestamp)
self.log.info("Should replace previously saved watch only timestamp.")
self.test_importmulti({"scriptPubKey": {"address": watchonly_address},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
watchonly_address,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
watchonly_timestamp = timestamp
# restart nodes to check for proper serialization/deserialization of
# watch only address
self.stop_nodes()
self.start_nodes()
test_address(self.nodes[1],
watchonly_address,
iswatchonly=True,
ismine=False,
timestamp=watchonly_timestamp)
# Bad or missing timestamps
self.log.info("Should throw on invalid or missing timestamp values")
assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
self.nodes[1].importmulti, [{"scriptPubKey": key.p2pkh_script}])
assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key, got type string',
self.nodes[1].importmulti, [{
"scriptPubKey": key.p2pkh_script,
"timestamp": ""
}])
# Test that importing of a P2PKH address via descriptor without
# checksum fails
key = get_key(self.nodes[0])
self.log.info(
"Should fail to import a p2pkh address from descriptor with no checksum")
self.test_importmulti({"desc": "pkh(" + key.pubkey + ")",
"timestamp": "now",
"label": "Descriptor import test"},
success=False,
error_code=-5,
- error_message='Descriptor is invalid, Missing checksum')
+ error_message='Missing checksum')
xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
# hdkeypath=m/0'/0'/0' and 1'
addresses = [
"2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf",
"2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"]
desc = "sh(pkh(" + xpriv + "/0'/0'/*'" + "))"
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
success=False, error_code=-8, error_message='Range should be greater or equal than 0')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
success=False, error_code=-8, error_message='Range is too large')
# Test importing of a P2PKH address via descriptor
key = get_key(self.nodes[0])
self.log.info("Should import a p2pkh address from descriptor")
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"label": "Descriptor import test"},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
solvable=True,
ismine=False,
label="Descriptor import test")
# Test import fails if both desc and scriptPubKey are provided
key = get_key(self.nodes[0])
self.log.info(
"Import should fail if both scriptPubKey and desc are provided")
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Both a descriptor and a scriptPubKey should not be provided.')
# Test import fails if neither desc nor scriptPubKey are present
key = get_key(self.nodes[0])
self.log.info(
"Import should fail if neither a descriptor nor a scriptPubKey are provided")
self.test_importmulti({"timestamp": "now"},
success=False,
error_code=-8,
error_message='Either a descriptor or scriptPubKey must be provided.')
# Test importing of a multisig via descriptor
key1 = get_key(self.nodes[0])
key2 = get_key(self.nodes[0])
self.log.info("Should import a 1-of-2 bare multisig from descriptor")
self.test_importmulti({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
"timestamp": "now"},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
self.log.info(
"Should not treat individual keys from the imported bare multisig as watchonly")
test_address(self.nodes[1],
key1.p2pkh_addr,
ismine=False,
iswatchonly=False)
# Import pubkeys with key origin info
self.log.info(
"Addresses should have hd keypath and master key id after import with key origin")
pub_addr = self.nodes[1].getnewaddress()
pub_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(pub_addr)
pub = info['pubkey']
pub_keypath = info['hdkeypath']
pub_fpr = info['hdmasterfingerprint']
result = self.nodes[0].importmulti(
[{
'desc': descsum_create("pkh([" + pub_fpr + pub_keypath[1:] + "]" + pub + ")"),
"timestamp": "now",
}]
)
assert result[0]['success']
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(pub_import_info['hdmasterfingerprint'], pub_fpr)
assert_equal(pub_import_info['pubkey'], pub)
assert_equal(pub_import_info['hdkeypath'], pub_keypath)
# Import privkeys with key origin info
priv_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(priv_addr)
priv = self.nodes[1].dumpprivkey(priv_addr)
priv_keypath = info['hdkeypath']
priv_fpr = info['hdmasterfingerprint']
result = self.nodes[0].importmulti(
[{
'desc': descsum_create("pkh([" + priv_fpr + priv_keypath[1:] + "]" + priv + ")"),
"timestamp": "now",
}]
)
assert result[0]['success']
priv_import_info = self.nodes[0].getaddressinfo(priv_addr)
assert_equal(priv_import_info['hdmasterfingerprint'], priv_fpr)
assert_equal(priv_import_info['hdkeypath'], priv_keypath)
# Make sure the key origin info is still there after a restart
self.stop_nodes()
self.start_nodes()
import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(import_info['hdmasterfingerprint'], pub_fpr)
assert_equal(import_info['hdkeypath'], pub_keypath)
import_info = self.nodes[0].getaddressinfo(priv_addr)
assert_equal(import_info['hdmasterfingerprint'], priv_fpr)
assert_equal(import_info['hdkeypath'], priv_keypath)
# Check legacy import does not import key origin info
self.log.info("Legacy imports don't have key origin info")
pub_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(pub_addr)
pub = info['pubkey']
result = self.nodes[0].importmulti(
[{
'scriptPubKey': {'address': pub_addr},
'pubkeys': [pub],
"timestamp": "now",
}]
)
assert result[0]['success']
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(pub_import_info['pubkey'], pub)
assert 'hdmasterfingerprint' not in pub_import_info
assert 'hdkeypath' not in pub_import_info
# Import some public keys to the keypool of a no privkey wallet
self.log.info("Adding pubkey to keypool of disableprivkey wallet")
self.nodes[1].createwallet(
wallet_name="noprivkeys",
disable_private_keys=True)
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh(' + pub1 + ')'),
'keypool': True,
"timestamp": "now",
},
{
'desc': descsum_create('pkh(' + pub2 + ')'),
'keypool': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert result[1]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 2)
newaddr1 = wrpc.getnewaddress()
assert_equal(addr1, newaddr1)
newaddr2 = wrpc.getnewaddress()
assert_equal(addr2, newaddr2)
# Import some public keys to the internal keypool of a no privkey
# wallet
self.log.info(
"Adding pubkey to internal keypool of disableprivkey wallet")
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh(' + pub1 + ')'),
'keypool': True,
'internal': True,
"timestamp": "now",
},
{
'desc': descsum_create('pkh(' + pub2 + ')'),
'keypool': True,
'internal': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert result[1]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize_hd_internal"], 2)
newaddr1 = wrpc.getrawchangeaddress()
assert_equal(addr1, newaddr1)
newaddr2 = wrpc.getrawchangeaddress()
assert_equal(addr2, newaddr2)
# Import a multisig and make sure the keys don't go into the keypool
self.log.info(
'Imported scripts with pubkeys should not have their pubkeys go into the keypool')
addr1 = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('sh(multi(2,' + pub1 + ',' + pub2 + '))'),
'keypool': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
# Cannot import those pubkeys to keypool of wallet with privkeys
self.log.info(
"Pubkeys cannot be added to the keypool of a wallet with private keys")
wrpc = self.nodes[1].get_wallet_rpc("")
assert wrpc.getwalletinfo()['private_keys_enabled']
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh(' + pub1 + ')'),
'keypool': True,
"timestamp": "now",
}]
)
assert_equal(result[0]['error']['code'], -8)
assert_equal(
result[0]['error']['message'],
"Keys can only be imported to the keypool when private keys are disabled")
# Make sure ranged imports import keys in order
self.log.info('Key ranges should be imported in order')
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
assert_equal(wrpc.getwalletinfo()["private_keys_enabled"], False)
xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
addresses = [
'bchreg:qp0v86h53rc92hjrlpwzpjtdlgzsxu25svryj39hul', # m/0'/0'/0
'bchreg:qqasy0zlkdleqt4pkn8fs4ehm5gnnz6qpgzxm0035q', # m/0'/0'/1
'bchreg:qp0sp4wlhctvprqvdt2dgvqcfdjssu04xgk64mmwew', # m/0'/0'/2
'bchreg:qrhn24tegn04cptfv4ldhtkduxq55zcwryhvnfcm3r', # m/0'/0'/3
'bchreg:qzpqhett2uwltq803vrxv7zkqhft5vsnmca8ds9jjp', # m/0'/0'/4
]
result = wrpc.importmulti(
[{
'desc': descsum_create('pkh([80002067/0h/0h]' + xpub + '/*)'),
'keypool': True,
'timestamp': 'now',
'range': [0, 4],
}]
)
self.log.info(result)
for i in range(0, 5):
addr = wrpc.getnewaddress('')
assert_equal(addr, addresses[i])
if __name__ == '__main__':
ImportMultiTest().main()
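For reference, the key-origin descriptors imported above are plain string concatenations of getaddressinfo fields. A hedged sketch with placeholder values only (not real key material):

# Sketch only: how the "pkh([<fingerprint><keypath>]<pubkey>)" descriptors
# used above are assembled from getaddressinfo fields.  All values below are
# placeholders, not real key material.
fpr = "d34db33f"               # info['hdmasterfingerprint']
keypath = "m/0'/0'/5'"         # info['hdkeypath']
pubkey = "02" + "ab" * 32      # info['pubkey'] (compressed-key hex)
desc = "pkh([" + fpr + keypath[1:] + "]" + pubkey + ")"
# descsum_create(desc) then appends the checksum required by importmulti.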
