Merge pull request #47 from bitfeed-project/v2.3.1

v2.3.1
This commit is contained in:
Mononaut 2022-05-18 15:44:40 +01:00 committed by GitHub
commit 951a3514fe
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 271 additions and 198 deletions

View File

@ -5,8 +5,6 @@ map $sent_http_content_type $expires {
application/javascript max;
}
proxy_cache_path /var/cache/nginx/bitfeed levels=1:2 keys_zone=bitfeed:10m max_size=500m inactive=1w use_temp_path=off;
server {
listen 80;
@ -20,11 +18,6 @@ server {
}
location /api {
proxy_cache bitfeed;
proxy_cache_revalidate on;
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
proxy_cache_background_update on;
proxy_cache_lock on;
proxy_pass http://wsmonobackend;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;

View File

@ -1,6 +1,6 @@
{
"name": "bitfeed-client",
"version": "2.3.0",
"version": "2.3.1",
"scripts": {
"build": "rollup -c",
"dev": "rollup -c -w",

View File

@ -120,7 +120,7 @@ $: {
}
} else inputs = []
if ($detailTx && $detailTx.outputs) {
if ($detailTx.isCoinbase || !$detailTx.is_inflated || !$detailTx.fee) {
if ($detailTx.isCoinbase || $detailTx.fee == null) {
outputs = expandAddresses($detailTx.outputs, truncate)
} else {
outputs = [{address: 'fee', value: $detailTx.fee, fee: true}, ...expandAddresses($detailTx.outputs, truncate)]
@ -515,6 +515,15 @@ async function goToBlock(e) {
animation-iteration-count: infinite;
}
}
&.disabled {
  /* 'normal' is not a valid CSS cursor keyword — 'default' is the
     standard arrow cursor intended for a non-interactive element */
  cursor: default;
  opacity: 0.5;
  .chevron .outline {
    stroke-opacity: 1;
    fill-opacity: 1;
  }
}
}
}
@ -660,7 +669,7 @@ async function goToBlock(e) {
</div>
</div>
{:else}
{#if $detailTx.is_inflated && $detailTx.fee != null && $detailTx.feerate != null}
{#if $detailTx.fee != null && $detailTx.feerate != null}
<div class="pane fields">
<div class="field">
<span class="label">fee</span>
@ -748,8 +757,14 @@ async function goToBlock(e) {
<path d="M 107.628,257.54 327.095,38.078 404,114.989 261.506,257.483 404,399.978 327.086,476.89 Z" class="outline" />
</svg>
</span>
{:else if spends[output.index] == true}
<span class="put-link disabled" in:fade|local={{ duration: 200 }} title="spent">
<svg class="chevron right" height="1.2em" width="1.2em" viewBox="0 0 512 512">
<path d="M 107.628,257.54 327.095,38.078 404,114.989 261.506,257.483 404,399.978 327.086,476.89 Z" class="outline" />
</svg>
</span>
{:else if spends[output.index]}
<a href="/tx/{spends[output.index].vin}:{spends[output.index].txid}" on:click={(e) => goToSpend(e, spends[output.index])} class="put-link" in:fade|local={{ duration: 200 }}>
<a href="/tx/{spends[output.index].vin}:{spends[output.index].txid}" on:click={(e) => goToSpend(e, spends[output.index])} title="spent" class="put-link" in:fade|local={{ duration: 200 }}>
<svg class="chevron right" height="1.2em" width="1.2em" viewBox="0 0 512 512">
<path d="M 107.628,257.54 327.095,38.078 404,114.989 261.506,257.483 404,399.978 327.086,476.89 Z" class="outline" />
</svg>

View File

@ -575,7 +575,7 @@
{/if}
{#if $loading}
<div class="loading-overlay" in:fade={{ delay: 500, duration: 500 }} out:fade={{ duration: 200 }}>
<div class="loading-overlay" in:fade={{ delay: 1000, duration: 500 }} out:fade={{ duration: 200 }}>
<div class="loading-wrapper">
<LoadingAnimation />
<p class="loading-msg">loading</p>

View File

@ -214,7 +214,7 @@ export default class TxController {
for (let i = 0; i < block.txns.length; i++) {
if (this.poolScene.remove(block.txns[i].id)) {
knownCount++
this.txs[block.txns[i].id].setData(block.txns[i])
this.txs[block.txns[i].id].mergeData(block.txns[i])
this.txs[block.txns[i].id].setBlock(block)
this.blockScene.insert(this.txs[block.txns[i].id], 0, false)
} else {

View File

@ -1,6 +1,7 @@
import { serverConnected, serverDelay, lastBlockId } from '../stores.js'
import config from '../config.js'
import api from '../utils/api.js'
import { fetchBlockByHash } from '../utils/search.js'
let mempoolTimer
let lastBlockSeen
@ -121,17 +122,8 @@ class TxStream {
async fetchBlock (id, calledOnLoad) {
if (!id) return
if (id !== lastBlockSeen) {
try {
console.log('downloading block', id)
const response = await fetch(`${api.uri}/api/block/${id}`, {
method: 'GET'
})
let blockData = await response.json()
console.log('downloaded block', id)
window.dispatchEvent(new CustomEvent('bitcoin_block', { detail: { block: blockData, realtime: !calledOnLoad} }))
} catch (err) {
console.log("failed to download block ", id)
}
const blockData = await fetchBlockByHash(id)
window.dispatchEvent(new CustomEvent('bitcoin_block', { detail: { block: blockData, realtime: !calledOnLoad} }))
} else {
console.log('already seen block ', lastBlockSeen)
}

View File

@ -49,6 +49,23 @@ export default class BitcoinTx {
this.view = new TxView(this)
}
// Merges freshly received transaction data into this tx without losing richer
// data we already hold: inflated status is sticky once true, the existing fee
// and input count win over incoming values, and the already-stored input/output
// lists are kept as-is. Delegates the actual field assignment to setData.
// NOTE(review): `this.fee || fee` and `this.numInputs || numInputs` fall back to
// the incoming value when the existing one is 0 — confirm that is intended for
// zero-fee transactions.
// NOTE(review): the destructured `inputs`, `outputs` and the `isCoinbase`
// parameter are accepted but unused here — presumably kept to mirror setData's
// signature; verify against callers.
mergeData ({ version, inflated, preview, id, value, fee, vbytes, numInputs, inputs, outputs, time, block }, isCoinbase=false) {
this.setData({
version,
// once a tx has been inflated, it stays inflated
inflated: this.is_inflated || inflated,
// only remains a preview if both the stored and incoming data are previews
preview: this.is_preview && preview,
id,
value,
// prefer the fee we already know
fee: this.fee || fee,
vbytes,
// prefer the input count we already know
numInputs: this.numInputs || numInputs,
// keep the stored input/output lists rather than the incoming ones
inputs: this.inputs,
outputs: this.outputs,
time,
block
})
}
setData ({ version, inflated, preview, id, value, fee, vbytes, numInputs, inputs, outputs, time, block }, isCoinbase=false) {
this.version = version
this.is_inflated = !!inflated

View File

@ -140,19 +140,31 @@ async function fetchTx (txid) {
async function fetchBlockByHash (hash) {
if (!hash || (currentBlockVal && hash === currentBlockVal.id)) return true
// try to fetch static block
console.log('downloading block', hash)
let response = await fetch(`${api.uri}/api/block/${hash}`, {
method: 'GET'
})
if (!response) throw new Error('null response')
if (!response) {
console.log('failed to download block', hash)
throw new Error('null response')
}
if (response && response.status == 200) {
const blockData = await response.json()
let block
if (blockData) {
if (blockData.id) {
return new BitcoinBlock(blockData)
} else return BitcoinBlock.decompress(blockData)
block = new BitcoinBlock(blockData)
} else block = BitcoinBlock.decompress(blockData)
}
if (block && block.id) {
console.log('downloaded block', block.id)
} else {
console.log('failed to download block', block.id)
}
return block
}
}
export {fetchBlockByHash as fetchBlockByHash}
async function fetchBlockByHeight (height) {
if (height == null) return
@ -178,9 +190,13 @@ async function fetchSpends (txid) {
const result = await response.json()
return result.map(output => {
if (output) {
return {
txid: output[0],
vin: output[1],
if (output === true) {
return true
} else {
return {
txid: output[0],
vin: output[1],
}
}
} else {
return null

View File

@ -11,6 +11,30 @@
#### Installation
Set the `MIX_TARGET` environment variable to choose a build target (defaults to "personal")
"personal" - tailored to low traffic personal deployments. resource-intensive features & dependencies disabled
```shell
export MIX_TARGET=personal
```
or
"public" - tailored to high traffic, high performance public deployments.
```shell
export MIX_TARGET=public
```
Key: ✅ = supported, ❌ = not supported
| feature | "public" | "personal" |
|---|---|---|
| Spend index | ✅ | ❌ |
```shell
mix do deps.get
mix do deps.compile
@ -27,7 +51,6 @@ The API server expects the following environment variables to be set:
| LOG_LEVEL | Tailor logging verbosity. either "error", "info" (default) or "debug" |
| RPC_POOLS | Number of connection pools for RPC requests to Bitcoin Core |
| RPC_POOL_SIZE | Number of connections maintained per pool (RPC_POOLS x RPC_POOL_SIZE should be substantially lower than `rpcworkqueue` in bitcoin.conf) |
| INDEXED | 'true' to build indexes required for certain features (see [INDEXES.md](https://github.com/bitfeed-project/block/master/server/INDEXES.md) for details). Omit this variable to disable indexing |
| BITCOIN_HOST | Bitcoin node host address |
| BITCOIN_ZMQ_RAWBLOCK_PORT | Bitcoin node ZMQ port for block events (to match `zmqpubrawblock` in bitcoin.conf) |
| BITCOIN_ZMQ_RAWTX_PORT | Bitcoin node ZMQ port for transaction events (to match `zmqpubrawtx` in bitcoin.conf) |

View File

@ -175,7 +175,7 @@ defmodule BitcoinStream.RPC do
end
defp do_batch_request(method, batch_params, host, port, creds) do
case Jason.encode(Enum.map(batch_params, fn [params, id] -> %{method: method, params: [params], id: id} end)) do
case Jason.encode(Enum.map(batch_params, fn [params, id] -> %{method: method, params: params, id: id} end)) do
{:ok, body} ->
async_request(body, host, port, creds)

View File

@ -366,97 +366,88 @@ defmodule BitcoinStream.Mempool do
end
defp sync_mempool(pid, txns) do
sync_mempool_txns(pid, txns, 0)
sync_mempool_txns(pid, txns)
end
#
# defp sync_mempool_txn(pid, txid) do
# case :ets.lookup(:mempool_cache, txid) do
# [] ->
# with {:ok, 200, hextx} <- RPC.request(:rpc, "getrawtransaction", [txid]),
# rawtx <- Base.decode16!(hextx, case: :lower),
# {:ok, txn } <- BitcoinTx.decode(rawtx) do
# register(pid, txid, nil, false);
# insert(pid, txid, txn)
# else
# _ -> Logger.debug("sync_mempool_txn failed #{txid}")
# end
#
# [_] -> true
# end
# end
#
# defp sync_mempool_txns(_, [], count) do
# count
# end
#
# defp sync_mempool_txns(pid, [head | tail], count) do
# Logger.debug("Syncing mempool tx #{count}/#{count + length(tail) + 1} | #{head}");
# sync_mempool_txn(pid, head);
# sync_mempool_txns(pid, tail, count + 1)
# end
defp sync_mempool_txn(pid, txid) do
case :ets.lookup(:mempool_cache, txid) do
[] ->
with {:ok, 200, hextx} <- RPC.request(:rpc, "getrawtransaction", [txid]),
rawtx <- Base.decode16!(hextx, case: :lower),
{:ok, txn } <- BitcoinTx.decode(rawtx),
inflated_txn <- BitcoinTx.inflate(txn, false) do
register(pid, txid, nil, false);
insert(pid, txid, inflated_txn)
else
_ -> Logger.debug("sync_mempool_txn failed #{txid}")
defp sync_batch(pid, batch) do
with batch_params <- Enum.map(batch, fn txid -> [[txid], txid] end),
{:ok, 200, txs} <- RPC.batch_request(:rpc, "getrawtransaction", batch_params, true),
failures <- Enum.filter(txs, fn %{"error" => error} -> error != nil end),
successes <- Enum.filter(txs, fn %{"error" => error} -> error == nil end) do
Enum.each(successes, fn tx ->
with %{"error" => nil, "id" => _txid, "result" => hextx} <- tx,
rawtx <- Base.decode16!(hextx, case: :lower),
{:ok, txn } <- BitcoinTx.decode(rawtx) do
register(pid, txn.id, nil, false);
insert(pid, txn.id, txn)
end
end);
case length(failures) do
count when count > 0 ->
Logger.info("Failed to sync #{length(failures)} transactions")
_ -> false
end
[_] -> true
end
end
defp sync_mempool_txns(_, [], count) do
count
end
defp sync_mempool_txns(pid, [head | tail], count) do
Logger.debug("Syncing mempool tx #{count}/#{count + length(tail) + 1} | #{head}");
sync_mempool_txn(pid, head);
sync_mempool_txns(pid, tail, count + 1)
end
# when transaction inflation fails, we fall back to storing deflated inputs in the cache
# the repair function scans the mempool cache for deflated inputs, and attempts to reinflate
def repair(_pid) do
Logger.debug("Checking mempool integrity");
repaired = :ets.foldl(&(repair_mempool_txn/2), 0, :mempool_cache);
if repaired > 0 do
Logger.info("MEMPOOL CHECK COMPLETE #{repaired} REPAIRED");
{:ok, length(successes)}
else
Logger.debug("MEMPOOL REPAIR NOT REQUIRED");
_ ->
:error
end
:ok
catch
err ->
Logger.error("Failed to repair mempool: #{inspect(err)}");
Logger.error("unexpected error syncing batch");
IO.inspect(err);
:error
end
defp repair_mempool_txn(entry, repaired) do
case entry do
# unprocessed
{_, nil, _} ->
repaired
# valid entry, already inflated
{_txid, {_inputs, _total, true}, _} ->
repaired
# valid entry, not inflated
# repair
{txid, {_inputs, _total, false}, status} ->
Logger.debug("repairing #{txid}");
with {:ok, 200, hextx} <- RPC.request(:rpc, "getrawtransaction", [txid]),
rawtx <- Base.decode16!(hextx, case: :lower),
{:ok, txn } <- BitcoinTx.decode(rawtx),
inflated_txn <- BitcoinTx.inflate(txn, false) do
if inflated_txn.inflated do
:ets.insert(:mempool_cache, {txid, { inflated_txn.inputs, inflated_txn.value + inflated_txn.fee, inflated_txn.inflated }, status});
cache_spends(txid, inflated_txn.inputs);
Logger.debug("repaired #{repaired} mempool txns #{txid}");
repaired + 1
else
Logger.debug("failed to inflate transaction for repair #{txid}");
repaired
end
else
_ -> Logger.debug("failed to fetch transaction for repair #{txid}");
repaired
end
# catch all
other ->
Logger.error("unexpected cache entry: #{inspect(other)}");
repaired
end
catch
err ->
Logger.debug("unexpected error repairing transaction: #{inspect(err)}");
repaired
defp sync_mempool_txns(_pid, [], count) do
{:ok, count}
end
defp sync_mempool_txns(pid, [next_chunk | rest], count) do
case sync_batch(pid, next_chunk) do
{:ok, batch_count} ->
Logger.info("synced #{batch_count + count} mempool transactions");
sync_mempool_txns(pid, rest, batch_count + count)
_ ->
:failed
end
end
def sync_mempool_txns(pid, txns) do
sync_mempool_txns(pid, Enum.chunk_every(txns, 100), 0)
end
defp cache_sync_ids(pid, txns) do
:ets.delete_all_objects(:sync_cache);
cache_sync_ids(pid, txns, 0)

View File

@ -83,9 +83,6 @@ defmodule BitcoinStream.Mempool.Sync do
Logger.debug("updated to #{newcount}");
end
# repair transactions with deflated inputs
Mempool.repair(:mempool);
# next check in 1 minute
Process.send_after(self(), :resync, 60 * 1000)
else

View File

@ -11,7 +11,6 @@ defmodule BitcoinStream.Protocol.Block do
"""
alias BitcoinStream.Protocol.Transaction, as: BitcoinTx
alias BitcoinStream.Mempool, as: Mempool
@derive Jason.Encoder
defstruct [
@ -24,7 +23,6 @@ defstruct [
:nonce,
:txn_count,
:txns,
:fees,
:value,
:id
]
@ -34,7 +32,7 @@ def decode(block_binary) do
hex <- Base.encode16(block_binary, case: :lower),
{:ok, raw_block} <- Bitcoinex.Block.decode(hex),
id <- Bitcoinex.Block.block_id(block_binary),
{summarised_txns, total_value, total_fees} <- summarise_txns(raw_block.txns)
{summarised_txns, total_value} <- summarise_txns(raw_block.txns)
do
{:ok, %__MODULE__{
version: raw_block.version,
@ -45,7 +43,6 @@ def decode(block_binary) do
bytes: bytes,
txn_count: raw_block.txn_count,
txns: summarised_txns,
fees: total_fees,
value: total_value,
id: id
}}
@ -64,7 +61,7 @@ def parse(hex) do
bytes <- byte_size(block_binary),
{:ok, raw_block} <- Bitcoinex.Block.decode(hex),
id <- Bitcoinex.Block.block_id(block_binary),
{summarised_txns, total_value, total_fees} <- summarise_txns(raw_block.txns)
{summarised_txns, total_value} <- summarise_txns(raw_block.txns)
do
{:ok, %__MODULE__{
version: raw_block.version,
@ -75,7 +72,6 @@ def parse(hex) do
bytes: bytes,
txn_count: raw_block.txn_count,
txns: summarised_txns,
fees: total_fees,
value: total_value,
id: id
}}
@ -92,8 +88,8 @@ end
defp summarise_txns([coinbase | txns]) do
# Mempool.is_done returns false while the mempool is still syncing
with extended_coinbase <- BitcoinTx.extend(coinbase),
{summarised, total, fees} <- summarise_txns(txns, [], 0, 0, Mempool.is_done(:mempool)) do
{[extended_coinbase | summarised], total + extended_coinbase.value, fees}
{summarised, total} <- summarise_txns(txns, [], 0) do
{[extended_coinbase | summarised], total + extended_coinbase.value}
else
err ->
Logger.error("Failed to inflate block");
@ -102,29 +98,13 @@ defp summarise_txns([coinbase | txns]) do
end
end
defp summarise_txns([], summarised, total, fees, do_inflate) do
if do_inflate do
{Enum.reverse(summarised), total, fees}
else
{Enum.reverse(summarised), total, nil}
end
defp summarise_txns([], summarised, total) do
{Enum.reverse(summarised), total}
end
defp summarise_txns([next | rest], summarised, total, fees, do_inflate) do
extended_txn = BitcoinTx.extend(next)
# if the mempool is still syncing, inflating txs will take too long, so skip it
if do_inflate do
inflated_txn = BitcoinTx.inflate(extended_txn, false)
if (inflated_txn.inflated) do
# Logger.debug("Processing block tx #{length(summarised)}/#{length(summarised) + length(rest) + 1} | #{extended_txn.id}");
summarise_txns(rest, [inflated_txn | summarised], total + inflated_txn.value, fees + inflated_txn.fee, true)
else
summarise_txns(rest, [inflated_txn | summarised], total + inflated_txn.value, nil, false)
end
else
summarise_txns(rest, [extended_txn | summarised], total + extended_txn.value, nil, false)
end
defp summarise_txns([next | rest], summarised, total) do
extended_txn = BitcoinTx.extend(next);
summarise_txns(rest, [extended_txn | summarised], total + extended_txn.value)
end
end

View File

@ -125,7 +125,7 @@ defmodule BitcoinStream.Protocol.Transaction do
end
defp inflate_batch(batch, fail_fast) do
with batch_params <- Enum.map(batch, fn input -> [input.prev_txid, input.prev_txid <> "#{input.prev_vout}"] end),
with batch_params <- Enum.map(batch, fn input -> [[input.prev_txid], input.prev_txid <> "#{input.prev_vout}"] end),
batch_map <- Enum.into(batch, %{}, fn p -> {p.prev_txid <> "#{p.prev_vout}", p} end),
{:ok, 200, txs} <- RPC.batch_request(:rpc, "getrawtransaction", batch_params, fail_fast),
successes <- Enum.filter(txs, fn %{"error" => error} -> error == nil end),
@ -183,27 +183,7 @@ defmodule BitcoinStream.Protocol.Transaction do
# Retrieves cached inputs if available,
# otherwise inflates inputs in batches of up to 100
def inflate_inputs(txid, inputs, fail_fast) do
case :ets.lookup(:mempool_cache, txid) do
# cache miss, actually inflate
[] ->
inflate_inputs(Enum.chunk_every(inputs, 100), [], 0, fail_fast)
# cache hit, but processed inputs not available
[{_, nil, _}] ->
inflate_inputs(Enum.chunk_every(inputs, 100), [], 0, fail_fast)
# cache hit, but inputs not inflated
[{_, {_inputs, _total, false}, _}] ->
inflate_inputs(Enum.chunk_every(inputs, 100), [], 0, fail_fast)
# cache hit, just return the cached values
[{_, {inputs, total, true}, _}] ->
{:ok, inputs, total}
other ->
Logger.error("unexpected mempool cache response while inflating inputs #{inspect(other)}");
inflate_inputs(Enum.chunk_every(inputs, 100), [], 0, fail_fast)
end
inflate_inputs(Enum.chunk_every(inputs, 100), [], 0, fail_fast)
end
end

View File

@ -86,20 +86,14 @@ defmodule BitcoinStream.Router do
end
defp get_block(hash) do
last_id = BlockData.get_block_id(:block_data);
if hash == last_id do
payload = BlockData.get_json_block(:block_data);
{:ok, payload, true}
with {:ok, 200, block} <- RPC.request(:rpc, "getblock", [hash, 2]),
{:ok, cleaned} <- BlockData.clean_block(block),
{:ok, payload} <- Jason.encode(cleaned) do
{:ok, payload, false}
else
with {:ok, 200, block} <- RPC.request(:rpc, "getblock", [hash, 2]),
{:ok, cleaned} <- BlockData.clean_block(block),
{:ok, payload} <- Jason.encode(cleaned) do
{:ok, payload, false}
else
err ->
IO.inspect(err);
:err
end
err ->
IO.inspect(err);
:err
end
end

View File

@ -13,7 +13,7 @@ defmodule BitcoinStream.Server do
{ rpc_pool_size, "" } = Integer.parse(System.get_env("RPC_POOL_SIZE") || "16");
log_level = System.get_env("LOG_LEVEL");
btc_host = System.get_env("BITCOIN_HOST");
indexed = System.get_env("INDEXED")
indexed = System.get_env("TARGET") == "public";
case log_level do
"debug" ->

View File

@ -17,7 +17,7 @@ defmodule BitcoinStream.Index.Spend do
@impl true
def init([indexed]) do
:ets.new(:spend_cache, [:set, :public, :named_table]);
if (indexed != nil) do
if (indexed) do
{:ok, dbref} = :rocksdb.open(String.to_charlist("data/index/spend"), [create_if_missing: true]);
Process.send_after(self(), :sync, 2000);
{:ok, [dbref, indexed, false]}
@ -28,14 +28,14 @@ defmodule BitcoinStream.Index.Spend do
@impl true
def terminate(_reason, [dbref, indexed, _done]) do
if (indexed != nil) do
if (indexed) do
:rocksdb.close(dbref)
end
end
@impl true
def handle_info(:sync, [dbref, indexed, done]) do
if (indexed != nil) do
if (indexed) do
case sync(dbref) do
true ->
{:noreply, [dbref, indexed, true]}
@ -57,7 +57,7 @@ defmodule BitcoinStream.Index.Spend do
@impl true
def handle_call({:get_tx_spends, txid}, _from, [dbref, indexed, done]) do
case get_transaction_spends(dbref, txid, (indexed != nil)) do
case get_transaction_spends(dbref, txid, indexed) do
{:ok, spends} ->
{:reply, {:ok, spends}, [dbref, indexed, done]}
@ -69,7 +69,7 @@ defmodule BitcoinStream.Index.Spend do
@impl true
def handle_cast(:new_block, [dbref, indexed, done]) do
if (indexed != nil and done) do
if (indexed and done) do
case sync(dbref) do
true ->
{:noreply, [dbref, indexed, true]}
@ -86,7 +86,7 @@ defmodule BitcoinStream.Index.Spend do
@impl true
def handle_cast({:block_disconnected, hash}, [dbref, indexed, done]) do
Logger.info("block disconnected: #{hash}");
if (indexed != nil and done) do
if (indexed and done) do
block_disconnected(dbref, hash)
end
{:noreply, [dbref, indexed, done]}
@ -297,12 +297,81 @@ defmodule BitcoinStream.Index.Spend do
end
end
# Checks a batch of output indices of `txid` against Bitcoin Core's UTXO set.
# `batch` is a list of output indices; each is queried via `gettxout`, which
# returns a null result for outputs that are no longer unspent — i.e. spent
# (or nonexistent) outputs.
# Returns {:ok, map} with `index => true` for each spent output (unspent
# outputs and RPC-level per-item errors are omitted), or :error when the
# batch request itself fails.
defp batch_spend_status(txid, batch) do
  batch_params = Enum.map(batch, fn index -> [[txid, index, true], index] end)
  case RPC.batch_request(:rpc, "gettxout", batch_params, false) do
    {:ok, 200, txouts} ->
      # a nil "result" with no error means the output is not in the UTXO set,
      # so it has been spent; all other shapes (still-unspent outputs,
      # per-item errors, unexpected entries) are simply skipped
      spent =
        for %{"error" => nil, "id" => index, "result" => nil} <- txouts, into: %{} do
          {index, true}
        end
      {:ok, spent}
    _ ->
      :error
  end
end
# Walks pre-chunked lists of output indices for `txid`, merging each batch's
# spent-output flags into the `results` map accumulator.
# Returns the accumulated map on success, or the atom :err if any batch fails.
# NOTE(review): on failure this returns :err, but the arity-2 caller passes the
# return value straight into Map.get/2, which would raise BadMapError — confirm
# whether the failure path is ever reachable in practice.
defp get_spend_status(_txid, [], results) do
results
end
defp get_spend_status(txid, [next_batch | rest], results) do
case batch_spend_status(txid, next_batch) do
{:ok, result} ->
# fold this batch's spent flags in, then continue with the remaining chunks
get_spend_status(txid, rest, Map.merge(results, result))
_ -> :err
end
end
# Builds the spent/unspent flag list for all `numOutputs` outputs of `txid`.
# Indices are checked against the UTXO set in chunks of 100 via the arity-3
# helper, then flattened into a list of booleans ordered by output index.
defp get_spend_status(txid, numOutputs) do
  indices = Enum.to_list(0..(numOutputs - 1))
  spent_map = get_spend_status(txid, Enum.chunk_every(indices, 100), %{})
  # spent_map only ever holds `index => true`; anything else maps to false
  Enum.map(indices, &(Map.get(spent_map, &1) == true))
end
# Resolves spend status for every output of `txid` without a spend index:
# fetches and decodes the raw transaction via RPC to learn its output count,
# then delegates to the arity-2 clause to probe each output against the UTXO
# set. Returns a list of booleans (one per output), or [] if the transaction
# lookup or decode fails.
defp get_spend_status(txid) do
with {:ok, 200, hextx} <- RPC.request(:rpc, "getrawtransaction", [txid]),
rawtx <- Base.decode16!(hextx, case: :lower),
{:ok, tx } <- BitcoinTx.decode(rawtx) do
get_spend_status(txid, length(tx.outputs))
else
_ -> []
end
end
defp get_chain_spends(dbref, binary_txid, use_index) do
case (if use_index do :rocksdb.get(dbref, binary_txid, []) else :not_found end) do
case (if use_index do :rocksdb.get(dbref, binary_txid, []) else :unindexed end) do
{:ok, spends} ->
spends
:not_found ->
:unindexed ->
# uninitialized, try to construct on-the-fly from RPC data
txid = Base.encode16(binary_txid);
with {:ok, 200, hextx} <- RPC.request(:rpc, "getrawtransaction", [txid]),
@ -339,23 +408,29 @@ defmodule BitcoinStream.Index.Spend do
end
defp get_transaction_spends(dbref, txid, use_index) do
binary_txid = Base.decode16!(txid, [case: :lower]);
chain_spends = get_chain_spends(dbref, binary_txid, use_index);
spend_list = unpack_spends(chain_spends);
spend_list = add_mempool_spends(txid, spend_list);
{:ok, spend_list}
if (use_index) do
binary_txid = Base.decode16!(txid, [case: :lower]);
chain_spends = get_chain_spends(dbref, binary_txid, use_index);
spend_list = unpack_spends(chain_spends);
spend_list = add_mempool_spends(txid, spend_list);
{:ok, spend_list}
else
spend_list = get_spend_status(txid);
spend_list = add_mempool_spends(txid, spend_list);
{:ok, spend_list}
end
end
# Base case: all outputs processed. Entries were prepended while walking the
# spend list, so reverse once to restore output-index order.
defp add_mempool_spends(_txid, _index, [], added) do
Enum.reverse(added)
end
defp add_mempool_spends(txid, index, [false | rest], added) do
defp add_mempool_spends(txid, index, [spend | rest], added) when is_boolean(spend) do
case :ets.lookup(:spend_cache, [txid, index]) do
[] ->
add_mempool_spends(txid, index + 1, rest, [false | added])
[{[_index, _txid], spend}] ->
add_mempool_spends(txid, index + 1, rest, [spend | added])
[{[_index, _txid], mempool_spend}] ->
add_mempool_spends(txid, index + 1, rest, [mempool_spend | added])
end
end
defp add_mempool_spends(txid, index, [spend | rest], added) do

View File

@ -4,7 +4,7 @@ defmodule BitcoinStream.MixProject do
def project do
[
app: :bitcoin_stream,
version: "2.3.0",
version: "2.3.1",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps(),
@ -42,7 +42,7 @@ defmodule BitcoinStream.MixProject do
{:corsica, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:jason, "~> 1.1"},
{:rocksdb, "~> 1.6"}
{:rocksdb, "~> 1.6", targets: :public}
]
end
end