feat: Speed up db pushing

This commit is contained in:
Nathan Woodburn 2025-02-06 12:26:44 +11:00
parent fb28968550
commit 78cb528e45
Signed by: nathanwoodburn
GPG Key ID: 203B000478AD0EF1

72
main.py
View File

@ -76,38 +76,58 @@ def indexBlock(blockHeight):
return 0 return 0
def saveTransactions(txList, blockHeight):
    """Bulk-insert a block's transactions into the `transactions` table.

    Args:
        txList: list of transaction dicts (keys: hash, witnessHash, fee, rate,
            mtime, index, version, inputs, outputs, locktime, hex) — presumably
            the node's JSON tx representation; TODO confirm against caller.
        blockHeight: height of the containing block, stored in the `block` column.

    Duplicate rows are silently skipped via `ON DUPLICATE KEY UPDATE hash=hash`
    (a MySQL no-op upsert), so re-indexing a block is idempotent.
    Commits once per batch and prints a single '.' progress marker.
    """
    if not txList:
        return

    # Build all parameter tuples up front so one executemany() round-trip
    # replaces a per-transaction INSERT (the point of this batching).
    txValues = [
        (
            txData["hash"], txData["witnessHash"], txData["fee"], txData["rate"],
            txData["mtime"], blockHeight, txData["index"], txData["version"],
            json.dumps(txData["inputs"]), json.dumps(txData["outputs"]),
            txData["locktime"], txData["hex"],
        )
        for txData in txList
    ]

    # `hash=hash` is a deliberate no-op: it turns duplicate-key errors into
    # silent skips without touching the existing row.
    query = """
        INSERT INTO transactions (hash, witnessHash, fee, rate, mtime, block, `index`, version,
                                  inputs, outputs, locktime, hex)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        ON DUPLICATE KEY UPDATE hash=hash
    """

    with dbSave.cursor() as cursor:
        cursor.executemany(query, txValues)
    dbSave.commit()
    print('.', end='', flush=True)
def saveBlock(blockData):
    """Persist one block and its transactions.

    First bulk-saves the block's transactions via saveTransactions(), then
    upserts the block row itself; `ON DUPLICATE KEY UPDATE hash=hash` makes
    the insert a no-op when the block already exists, so re-runs are safe.
    Commits and prints a newline to terminate the progress line.
    """
    txs = blockData["txs"]
    height = blockData["height"]

    # The txs column stores the list of transaction hashes as JSON.
    hashes = [tx["hash"] for tx in txs]

    # Bulk save transactions
    saveTransactions(txs, height)

    # Insert block if it doesn't exist
    query = """
        INSERT INTO blocks (hash, height, depth, version, prevBlock, merkleRoot, witnessRoot,
                            treeRoot, reservedRoot, time, bits, nonce, extraNonce, mask, txs)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        ON DUPLICATE KEY UPDATE hash=hash
    """

    blockValues = tuple(
        blockData[key] for key in (
            "hash", "height", "depth", "version", "prevBlock", "merkleRoot",
            "witnessRoot", "treeRoot", "reservedRoot", "time", "bits",
            "nonce", "extraNonce", "mask",
        )
    ) + (json.dumps(hashes),)

    with dbSave.cursor() as cursor:
        cursor.execute(query, blockValues)
    dbSave.commit()
    print('')