Skip to content
Snippets Groups Projects
Commit 0000a8e2 authored by MILLER Loic's avatar MILLER Loic
Browse files

Merge branch 'master' of gitlab.imt-atlantique.fr:b23loiso/mining-in-logarithmic-space

parents 2bd0ca8e 09c2cb5d
No related branches found
No related tags found
No related merge requests found
cff-version: 1.2.0
title: Mining in Logarithmic Space with variable difficulty
message: >-
If you use this software, please cite it using the
metadata from this file.
type: software
authors:
- given-names: Benjamin
family-names: Loison
email: benjamin.loison@ens-paris-saclay.fr
affiliation: École Normale Supérieure Paris-Saclay
- given-names: Loïc
family-names: Miller
email: loic.miller@imt-atlantique.fr
affiliation: IMT Atlantique
repository-code: >-
https://gitlab.imt-atlantique.fr/b23loiso/mining-in-logarithmic-space
keywords:
- Blockchain
- Proof of Work
- Non-Interactive Proofs of Proofs of Work
- Bitcoin
license: TO_BE_DETERMINED
commit: COMMIT_TO_UPDATE
version: VERSION_TO_UPDATE_OR_REMOVE
date-released: '2023-08-20'
......@@ -15,6 +15,8 @@ def get_parser():
help="Turn on debugging")
parser.add_argument("--verify-correctness", action="store_true",
help="Turn on correctness comparison")
parser.add_argument("--verify-online-property", action="store_true",
help="Turn on online property verification")
# Bitcoin block loading
parser.add_argument("--load-from-headers", action="store_true", help="Load data from headers")
......
......@@ -30,7 +30,7 @@ def loadHeaders(break_at):
if not LOAD_FROM_HEADERS_FILE:
headersNumber = break_at if break_at else int(cli(['getblockcount']))
headerHashes = pull('getblockhash', list(range(headersNumber)), 2)
headers = pull('getblockheader', headerHashes, 3)
headers = pull('getblockheader', headerHashes, 4)
else:
with open(config.HEADERS_FILE_PATH) as f:
headers = json.load(f)
......@@ -42,7 +42,10 @@ def getBlockByHeight(height):
blockHeader = headers[height]
_hash = int(blockHeader['hash'], 16)
bits = bits_to_target(int(blockHeader['bits'], 16))
block = Block(height, bits, _hash)
# How is `mediantime` computed and is it only increasing?
# Note that `time` isn't only increasing. Cf https://en.bitcoin.it/wiki/Block_timestamp
timestamp = int(blockHeader['time'])
block = Block(height, bits, _hash, timestamp)
return block
def cli(arguments):
......
......@@ -7,13 +7,14 @@ MINING_IN_LOGARITHMIC_SPACE_CONSTANT_DIFFICULTY_LEVEL = False
class Block:
# `target` and `_hash` are `int`s.
def __init__(self, height, target, _hash):
def __init__(self, height, target, _hash, timestamp):
self.height = height
self.target = target
# As penultimate and most minimal difference targets differ only up to the third decimal.
self.score = round(genesisTarget / target, 4) if IS_EPSILON_TARGET_DEFINITION else genesisTarget - target
self.level_min = 0
self.level = level(_hash, target)
self.timestamp = timestamp
def __hash__(self):
return hash((self.height, self.target, self.score, self.level_min, self.level))
......
......@@ -5,10 +5,13 @@ from block import IS_EPSILON_TARGET_DEFINITION
import json
from argparser import get_parser
import config
import copy
# Bitcoin parameters
κ = 256
KEEPING_ALL_BLOCKS_SINCE_LAST_M_HIGHER_BLOCK = False
# Minimal target difference
sanityTargetDifferenceThreshold = 0.03408116026637664
......@@ -51,9 +54,27 @@ def uparrow(C, μ):
return [block for block in C if block.level >= μ and block.level_min <= μ]
def getFirstBlockExceedingBlocksPerLevel(C, maximumBlocksPerLevel, ℓ):
ℓ -= 1
blocksPerLevel = [0] * κ
firstBlockIndexPerLevel = [None] * κ
# Could maybe merge both cases if doesn't unoptimize much.
if KEEPING_ALL_BLOCKS_SINCE_LAST_M_HIGHER_BLOCK:
# Could maybe optimize by only keeping last `m` blocks height per level but due to Python list implementation I have doubts.
# Could maybe get rid of `blocksPerLevel` but only use `blocksHeightPerLevel` if don't apply just above optimization.
# Don't have to first fill data structures and only then check if can remove a block? It seems necessary, for instance if have in this order: 2 * m blocks at level 0, then 2 * m blocks at level 1 (even if this particular case is very unlikely), it won't return the index of the first block at level 0, as have to first ingest half of level 1 blocks and then the considered block height is higher than this middle level 1 block. While it should return the index of the first block at level 0.
blocksHeightPerLevel = [[]] * κ
for CIndex, block in enumerate(C):
for level in range(block.level_min, min(block.level, ℓ) + 1):
blocksPerLevel[level] += 1
blocksHeightPerLevel[level] += [C[CIndex].height]
if firstBlockIndexPerLevel[level] == None:
firstBlockIndexPerLevel[level] = CIndex
for level in range(ℓ):
firstBlockLevelIndex = firstBlockIndexPerLevel[level]
if blocksPerLevel[level] > maximumBlocksPerLevel and blocksPerLevel[level + 1] >= m and C[firstBlockLevelIndex].height < blocksHeightPerLevel[level + 1][-m]:
return firstBlockLevelIndex, level
return None, None
else:
ℓ -= 1
for CIndex, block in enumerate(C):
for level in range(block.level_min, min(block.level, ℓ) + 1):
blocksPerLevel[level] += 1
......@@ -126,7 +147,7 @@ if args.headers != config.HEADERS_FILE_PATH:
print(args)
# Mining in Logarithmic Space parameters
k = args.unstable_part_length # TODO: be able to redo Loïc computation
k = args.unstable_part_length
m = 3 * k
debugging = args.debugging
......@@ -141,12 +162,17 @@ lvls = {}
targets = []
compressSize = []
compressScore = []
timestamps = []
previous_score = 0
previous_ℓ = 0
previous_score, previous_score_online = 0, 0
previous_ℓ, previous_ℓ_online = 0, 0
C = []
Π = []
headersNumber = bitcoin.loadHeaders(args.break_at)
for height in range(headersNumber):
#if height >= 428088:
# debugging = True
# verifyCorrectness = True
lastTimeCheck = time.time()
b = bitcoin.getBlockByHeight(height)
......@@ -154,6 +180,7 @@ for height in range(headersNumber):
targets += [b.target]
compressSize += [len(Π)]
compressScore += [previous_score]
timestamps += [b.timestamp]
#printTimeSinceLastTimeCheck('block retrieval')
isLastIteration = height == headersNumber - 1
......@@ -177,6 +204,19 @@ for height in range(headersNumber):
#printTimeSinceLastTimeCheck('dissolve computation')
for μ in D:
debug(bcolors.WARNING + f'μ: {μ}' + bcolors.ENDC + f', len(Π): {len(Π)}, ' + bcolors.OKGREEN + f'len(D[μ]): {len(D[μ])}' + bcolors.ENDC + f' ({[f"{b.height} ({b.level_min} - {b.level}, {round(b.score, 4)})" for b in D[μ]]})')
if args.verify_online_property:
C += [b]
Π_online, previous_score_online, previous_ℓ_online = Compress(m, k, copy.deepcopy(C), previous_score_online, previous_ℓ_online)
if Π != Π_online:
print("Online property doesn't hold!")
for id_, Π, previous_score, previous_ℓ in [
['Iterative', Π, previous_score, previous_ℓ],
['All of a sudden', Π_online, previous_score_online, previous_ℓ_online]
]:
(D, ℓ, χ, previous_score) = Dissolve(m, k, Π, previous_score, previous_ℓ)
for μ in D:
print(f'{id_}: ' + bcolors.WARNING + f'μ: {μ}' + bcolors.ENDC + f', len(Π): {len(Π)}, ' + bcolors.OKGREEN + f'len(D[μ]): {len(D[μ])}' + bcolors.ENDC + f' ({[f"{b.height} ({b.level_min} - {b.level}, {round(b.score, 4)})" for b in D[μ]]})')
exit(1)
#if ℓ == 2:
# break
if args.break_at:
......@@ -188,7 +228,8 @@ for height in range(headersNumber):
data = {
'targets': targets,
'compressSize': compressSize,
'compressScore': compressScore
'compressScore': compressScore,
'timestamps': timestamps
}
with open('data.json', 'w') as f:
......
......@@ -104,6 +104,7 @@ lvls = {}
targets = []
compressSize = []
compressScore = []
timestamps = []
previous_score = 0
previous_ℓ = 0
......@@ -117,6 +118,7 @@ for height in range(headersNumber):
targets += [b.target]
compressSize += [len(Π)]
compressScore += [previous_score]
timestamps += [b.timestamp]
#printTimeSinceLastTimeCheck('block retrieval')
isLastIteration = height == headersNumber - 1
......@@ -151,7 +153,8 @@ for height in range(headersNumber):
data = {
'targets': targets,
'compressSize': compressSize,
'compressScore': compressScore
'compressScore': compressScore,
'timestamps': timestamps
}
with open('data.json', 'w') as f:
......
import json
import math
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime
SHOW_TARGET_RECOMPUTATION = False
......@@ -10,39 +12,64 @@ with open('data.json') as f:
with open('data_constant_difficulty.json') as f:
dataConstantDifficulty = json.load(f)
with open('data_keeping_all_blocks_since_last_m_higher_block.json') as f:
dataKeepingAllBlocksSinceLastMHigherBlock = json.load(f)
fig, targetsAxis = plt.subplots()
fig.subplots_adjust(right = 0.8)
fig.subplots_adjust(bottom = 0.4)
compressedSizeAxis = targetsAxis.twinx()
compressedScoreAxis = targetsAxis.twinx()
axes = [targetsAxis, targetsAxis.twinx(), compressedScoreAxis, targetsAxis.twinx(), compressedScoreAxis]
axes = [targetsAxis, compressedSizeAxis, compressedScoreAxis, targetsAxis.twinx(), compressedScoreAxis, compressedSizeAxis, compressedScoreAxis]
targets = data['targets']
timestamps = [datetime.datetime.fromtimestamp(timestamp) for timestamp in data['timestamps']]
for x, (target, nextTarget) in enumerate(zip(targets, targets[1:])):
if nextTarget < target:
firstTargetDecreaseCurve = plt.axvline(x = x, color = 'purple', label = 'first target decrease')
firstTargetDecreaseCurve = plt.axvline(x = timestamps[x], color = 'purple', label = 'first target decrease')
break
targetRecomputationCurve = []
if SHOW_TARGET_RECOMPUTATION:
for x in range(0, len(targets), 2016):
targetRecomputationCurve = [plt.axvline(x = x, alpha = 0.1, label = 'target recomputation')]
targetRecomputationCurve = [plt.axvline(x = timestamps[x], alpha = 0.1, label = 'target recomputation')]
for x, (target, nextTarget) in enumerate(zip(targets, targets[1:])):
if nextTarget > target:
targetIncreaseCurve = plt.axvline(x = x, alpha = 0.5, color = 'red', label = 'target increase')
targetIncreaseCurve = plt.axvline(x = timestamps[x], alpha = 0.5, color = 'red', label = 'target increase')
axesColors = ['blue', 'green', 'brown', 'orange', 'brown', 'red', 'purple']
axesLabels = ['targets', 'compressed blockchain size (in blocks)', 'compressed blockchain score', 'constant difficulty compressed blockchain size (in blocks)', 'constant difficulty compressed blockchain score', 'compressed blockchain size keeping all blocks since last m higher block (in blocks)', 'compressed blockchain score keeping all blocks since last m higher block']
axesYValues = [targets, data['compressSize'], data['compressScore'], dataConstantDifficulty['compressSize'], dataConstantDifficulty['compressScore'], dataKeepingAllBlocksSinceLastMHigherBlock['compressSize'], dataKeepingAllBlocksSinceLastMHigherBlock['compressScore']]
blockHeightsCache = {}
class CustomDateFormatter(mdates.ticker.Formatter):
def __call__(self, x, pos=0):
datetime_ = mdates.num2date(x)
if not x in blockHeightsCache:
for timestampsIndex, timestamp in enumerate(timestamps):
if datetime_.timestamp() <= timestamp.timestamp():
blockHeightsCache[x] = timestampsIndex
break
blockHeight = f' - {blockHeightsCache[x]}' if x in blockHeightsCache else ''
result = datetime_.strftime('%m/%Y') + blockHeight
return result
axesColors = ['blue', 'green', 'brown', 'orange', 'brown']
axesLabels = ['targets', 'compressed blockchain size (in blocks)', 'compressed blockchain score', 'constant difficulty compressed blockchain size (in blocks)', 'constant difficulty compressed blockchain score']
axesYValues = [targets, data['compressSize'], data['compressScore'], dataConstantDifficulty['compressSize'], dataConstantDifficulty['compressScore']]
plt.gca().xaxis.set_major_formatter(CustomDateFormatter())
curves = []
for curvesIndex, (axis, color, label, yValues) in enumerate(zip(axes, axesColors, axesLabels, axesYValues)):
dashes = (None, None) if curvesIndex != 4 else [1, 25]
alpha = 1 if curvesIndex != 3 else 0.5
curves += axis.plot(yValues, label = label, color = color, dashes = dashes, alpha = alpha)
# TODO: remove below, could maybe precise file names in scripts not to mess up
# Due to not having exported `timestamps` for every execution:
comparableLength = min(len(timestamps), len(yValues))
curves += axis.plot(timestamps[:comparableLength], yValues[:comparableLength], label = label, color = color, dashes = dashes, alpha = alpha)
plt.gcf().autofmt_xdate()
targetsAxis.set_yscale('log')
targetsAxis.set_yscale('log', base=2)
axes[2].spines.right.set_position(('axes', 1.1))
axes[2].set_yscale('log')
......@@ -55,15 +82,21 @@ curves.insert(-2, legendNewLine[0])
curves += legendNewLine + targetRecomputationCurve + [firstTargetDecreaseCurve] + [targetIncreaseCurve]
labels = [curve.get_label() for curve in curves]
plt.legend(curves, labels, loc='upper center', framealpha=1, bbox_to_anchor=(0.5, -0.125))
plt.legend(curves, labels, loc='upper center', framealpha=1, bbox_to_anchor=(0.5, -0.345))
targetsAxis.set_xlabel('Block height')
targetsAxis.set_xlabel('Block date and height')
for ax, color in zip(axes, axesColors):
ax.tick_params(axis='y', colors=color)
plt.title('Variable difficulty Bitcoin compressed blockchain evolution')
title = 'Variable difficulty Bitcoin compressed blockchain evolution'
plt.title(title)
fig.subplots_adjust(top=0.965,
bottom=0.525,
left=0.125,
right=0.8)
plt.show()
fig.set_size_inches((11.2, 7.5), forward=False)
#plt.savefig('data.svg')
\ No newline at end of file
plt.savefig(f'{title.lower().replace(" ", "_")}.svg')
plt.show()
\ No newline at end of file
import json
import math
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime
SHOW_TARGET_RECOMPUTATION = False
with open('data.json') as f:
data = json.load(f)
fig, targetsAxis = plt.subplots()
compressedScoreAxis = targetsAxis.twinx()
axes = [targetsAxis, targetsAxis.twinx(), compressedScoreAxis]
targets = data['targets']
timestamps = [datetime.datetime.fromtimestamp(timestamp) for timestamp in data['timestamps']]
for x, (target, nextTarget) in enumerate(zip(targets, targets[1:])):
if nextTarget < target:
firstTargetDecreaseCurve = plt.axvline(x = timestamps[x], color = 'purple', label = 'first target decrease')
break
targetRecomputationCurve = []
if SHOW_TARGET_RECOMPUTATION:
for x in range(0, len(targets), 2016):
targetRecomputationCurve = [plt.axvline(x = timestamps[x], alpha = 0.1, label = 'target recomputation')]
for x, (target, nextTarget) in enumerate(zip(targets, targets[1:])):
if nextTarget > target:
targetIncreaseCurve = plt.axvline(x = timestamps[x], alpha = 0.5, color = 'red', label = 'target increase')
axesColors = ['blue', 'green', 'brown']
axesLabels = ['targets', 'compressed blockchain size (in blocks)', 'compressed blockchain score']
axesYValues = [targets, data['compressSize'], data['compressScore']]
blockHeightsCache = {}
class CustomDateFormatter(mdates.ticker.Formatter):
def __call__(self, x, pos=0):
datetime_ = mdates.num2date(x)
if not x in blockHeightsCache:
for timestampsIndex, timestamp in enumerate(timestamps):
if datetime_.timestamp() <= timestamp.timestamp():
blockHeightsCache[x] = timestampsIndex
break
blockHeight = f' - {blockHeightsCache[x]}' if x in blockHeightsCache else ''
result = datetime_.strftime('%m/%Y') + blockHeight
return result
plt.gca().xaxis.set_major_formatter(CustomDateFormatter())
curves = []
for curvesIndex, (axis, color, label, yValues) in enumerate(zip(axes, axesColors, axesLabels, axesYValues)):
curves += axis.plot(timestamps, yValues, label = label, color = color)
plt.gcf().autofmt_xdate()
targetsAxis.set_yscale('log', base=2)
axes[2].spines.right.set_position(('axes', 1.1))
axes[2].set_yscale('log')
legendNewLine = targetsAxis.plot([], [], color='none', label=' ')
for curve in targetRecomputationCurve + [firstTargetDecreaseCurve, targetIncreaseCurve][::-1]:
curves.insert(1, curve)
curves.insert(-2, legendNewLine[0])
labels = [curve.get_label() for curve in curves]
plt.legend(curves, labels, loc='upper center', framealpha=1, bbox_to_anchor=(0.5, -0.18))
fig.subplots_adjust(left = 0.035)
fig.subplots_adjust(right = 0.875)
fig.subplots_adjust(bottom = 0.32)
fig.subplots_adjust(top = 0.965)
targetsAxis.set_xlabel('Block height')
for ax, color in zip(axes, axesColors):
ax.tick_params(axis='y', colors=color)
title = 'Variable difficulty Bitcoin compressed blockchain evolution'
plt.title(title)
plt.show()
fig.set_size_inches((11.2, 7.5), forward=False)
#plt.savefig(f'{title.lower().replace(" ", "_")}.svg')
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment