fix: Update some db inserts

This commit is contained in:
Nathan Woodburn 2025-02-09 22:03:26 +11:00
parent 1e783c9775
commit 0012ecc77f
Signed by: nathanwoodburn
GPG Key ID: 203B000478AD0EF1
2 changed files with 30 additions and 30 deletions

59
main.py
View File

@@ -1,5 +1,6 @@
import json
from clickhouse_driver import Client
import clickhouse_connect
import requests
from time import sleep
import json
@@ -46,7 +47,7 @@ if os.getenv("DB_NAME"):
# Clickhouse Database Setup
dbSave = Client(
dbSave = clickhouse_connect.create_client(
host=DB_HOST,
user=DB_USER,
password=DB_PASSWORD,
@@ -96,23 +97,21 @@ def saveTransactions(txList, blockHeight):
return
# Prepare data for batch insert
txValues = []
for txData in txList:
print('.', end='', flush=True)
txValues.append((
txValues = [
(
txData["hash"], txData["witnessHash"], txData["fee"], txData["rate"],
txData["mtime"], blockHeight, txData["index"], txData["version"],
json.dumps(txData["inputs"]), json.dumps(txData["outputs"]),
txData["locktime"], txData["hex"]
))
)
for txData in txList
]
print(f"Inserting {len(txValues)} transactions...")
return dbSave.insert("transactions", txValues, column_names=[
"hash", "witnessHash", "fee", "rate", "mtime", "block", "tx_index", "version",
"inputs", "outputs", "locktime", "hex"
])
# Bulk insert transactions
query = """
INSERT INTO transactions (hash, witnessHash, fee, rate, mtime, block, tx_index, version,
inputs, outputs, locktime, hex)
VALUES
"""
return dbSave.execute(query, txValues)
def saveBlock(blockData):
@@ -122,22 +121,22 @@ def saveBlock(blockData):
saveTransactions(blockData["txs"], blockData["height"])
# Insert block if it doesn't exist
query = """
INSERT INTO blocks (hash, height, depth, version, prevBlock, merkleRoot, witnessRoot,
treeRoot, reservedRoot, time, bits, nonce, extraNonce, mask, txs)
VALUES
"""
blockValues = (
blockValues = [(
blockData["hash"], blockData["height"], blockData["depth"], blockData["version"],
blockData["prevBlock"], blockData["merkleRoot"], blockData["witnessRoot"],
blockData["treeRoot"], blockData["reservedRoot"], blockData["time"],
blockData["bits"], blockData["nonce"], blockData["extraNonce"],
blockData["mask"], json.dumps(hashes)
)
blockData["mask"], json.dumps(hashes) # Convert tx hashes to JSON string
)]
dbSave.execute(query, blockValues)
print('')
dbSave.insert("blocks", blockValues, column_names=[
"hash", "height", "depth", "version", "prevBlock", "merkleRoot", "witnessRoot",
"treeRoot", "reservedRoot", "time", "bits", "nonce", "extraNonce",
"mask", "txs"
])
print('block saved')
# def setupDB():
# """Creates the database tables"""
@@ -258,12 +257,12 @@ def getNamesFromBlock(height):
json.dumps(nameInfo["bids"])
))
query = """
INSERT INTO names (name, nameHash, state, height, lastRenewal, owner, value, highest, data, transfer, revoked, claimed, renewals, registered, expired, weak, stats, start, txs, bids)
VALUES
"""
return dbSave.execute(query, queryData)
dbSave.insert("names", queryData, column_names=[
"name", "nameHash", "state", "height", "lastRenewal", "owner", "value", "highest",
"data", "transfer", "revoked", "claimed", "renewals", "registered", "expired",
"weak", "stats", "start", "txs", "bids"
])
return 0
def getNodeHeight():

View File

@@ -1,4 +1,5 @@
clickhouse-driver
clickhouse-connect
requests
python-dotenv
flask