more cleanup

Daniel van Flymen 2017-12-29 15:38:57 -05:00
parent acb84b81e0
commit 8cd2a13747
8 changed files with 68 additions and 128 deletions

Pipfile.lock (generated)

@@ -1,6 +1,6 @@
 {
     "_meta": {
-        "hash": {
+        "hash_block": {
             "sha256": "12bb22c1e036c3ea23ae6dfcd3682d0ff1f37b0e7817ba87aa3fe22ceb12ed9d"
         },
         "host-environment-markers": {

blockchain.py

@@ -1,8 +1,8 @@
 import logging
 from datetime import datetime
-from hashlib import sha256

 from database import Block, db
+from helpers import hash_block

 logger = logging.getLogger('root.blockchain')
@@ -12,12 +12,11 @@ class Blockchain:
     def __init__(self):
         self.current_transactions = []
         self.difficulty = 4
-        self.current_block = None

-        # Create the genesis block if necessary
+        # Create the genesis block if it doesn't exist
         if not self.last_block:
             block = self.build_block()
-            block['hash'] = self.hash(block)
+            block['hash'] = hash_block(block)
             self.save_block(block)
             logger.info("✨ Created genesis block")
@@ -50,8 +49,8 @@ class Blockchain:
         while current_index < len(chain):
             block = chain[current_index]

-            # Check that the hash of the block is correct
-            if block['previous_hash'] != self.hash(last_block):
+            # Check that the hash of the block is correct
+            if block['previous_hash'] != hash_block(last_block):
                 return False

             # Check that the Proof of Work is correct
@@ -116,16 +115,3 @@ class Blockchain:
         :return: <Block>
         """
         return db.query(Block).order_by(Block.height.desc()).first()
-
-    @staticmethod
-    def hash(b):
-        """
-        Creates a SHA-256 hash of the fields for a Block
-        """
-        byte_array = f"{b['height']}" \
-                     f"{b['timestamp']}" \
-                     f"{b['transactions']}" \
-                     f"{b['previous_hash']}" \
-                     f"{b['proof']}".encode()
-
-        return sha256(byte_array).hexdigest()
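
For context, the linkage this file verifies can be sketched standalone, assuming blocks are plain dicts carrying the five hashed fields shown above (hypothetical example, not this project's API):

    from hashlib import sha256

    def hash_block(block):
        # Concatenate the block's fields in a fixed order and hash them,
        # mirroring the helper this commit introduces.
        fields = (f"{block['height']}{block['timestamp']}{block['transactions']}"
                  f"{block['previous_hash']}{block['proof']}")
        return sha256(fields.encode()).hexdigest()

    genesis = {'height': 0, 'timestamp': 0, 'transactions': [],
               'previous_hash': '0' * 64, 'proof': 0}
    child = {'height': 1, 'timestamp': 1, 'transactions': [],
             'previous_hash': hash_block(genesis), 'proof': 0}

    # A chain is valid only if each block commits to its predecessor's hash.
    assert child['previous_hash'] == hash_block(genesis)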

database.py

@@ -1,6 +1,3 @@
-import json
-from datetime import datetime
-
 from sqlalchemy import Column, DateTime, Integer, PickleType, String, create_engine
 from sqlalchemy.ext.declarative import declarative_base, declared_attr
 from sqlalchemy.orm import scoped_session, sessionmaker

helpers.py

@@ -1,4 +1,4 @@
-from datetime import date, datetime
+from hashlib import sha256

 from sqlalchemy import func
@@ -11,12 +11,12 @@ def set_config(key, value, replace=False):
     if config_value is None:
         db.add(Config(key=key, value=value))
         db.commit()
-        return
+        return value

     if config_value != value and replace is True:
         db.add(Config(key=key, value=value))
         db.commit()
-        return
+        return value

     return config_value
@@ -39,11 +39,14 @@ def get_random_peers(limit=10):
     return db.query(Peer).order_by(func.random()).limit(limit)


-def json_serializer(obj):
+def hash_block(block):
     """
-    JSON serializer for objects not serializable by default json code
+    Creates a SHA-256 hash of the fields for a Block
     """
-    if isinstance(obj, (datetime, date)):
-        return obj.isoformat()
-    raise TypeError("Type %s not serializable" % type(obj))
+    byte_array = f"{block['height']}" \
+                 f"{block['timestamp']}" \
+                 f"{block['transactions']}" \
+                 f"{block['previous_hash']}" \
+                 f"{block['proof']}".encode()
+
+    return sha256(byte_array).hexdigest()
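
The set_config change above makes every branch hand back the stored value, so callers can treat it as a one-shot get-or-set. A minimal sketch of those semantics, with an in-memory dict standing in for the Config table (hypothetical, not this project's code):

    _config = {}  # stands in for the Config table

    def set_config(key, value, replace=False):
        current = _config.get(key)
        if current is None:                # key absent: store and return the new value
            _config[key] = value
            return value
        if current != value and replace:   # key present: overwrite only when asked to
            _config[key] = value
            return value
        return current                     # otherwise return what is already stored

    assert set_config('node_identifier', 'abc') == 'abc'                # first write
    assert set_config('node_identifier', 'xyz') == 'abc'                # no replace: kept
    assert set_config('node_identifier', 'xyz', replace=True) == 'xyz'  # replaced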

mining.py

@@ -1,9 +1,10 @@
+import asyncio
 import logging
 import multiprocessing
 from datetime import datetime
-from hashlib import sha256
-import signal

 from blockchain import Blockchain
+from helpers import hash_block
+from tasks import we_should_still_be_mining

 log = logging.getLogger('root.mining')
@@ -20,7 +21,7 @@ def proof_of_work(current_block, difficulty, event):
     # String of 64 f's replaced with 3 leading zeros (if the difficulty is 3): 000fff...f
     target = str.ljust("0" * difficulty, 64, "f")

-    guess_hash = Blockchain.hash(current_block)
+    guess_hash = hash_block(current_block)

     while guess_hash > target:
         # Check if we should still be mining
@@ -28,18 +29,47 @@ def proof_of_work(current_block, difficulty, event):
             # raise Exception("STOP MINING")

         current_block['timestamp'] = datetime.utcnow()
         current_block['proof'] += 1
-        guess_hash = Blockchain.hash(current_block)
+        guess_hash = hash_block(current_block)

     current_block['hash'] = guess_hash
     return current_block


 def miner(pipe, event):
     while True:
         task = pipe.recv()
-        log.info(f"Received new mining task with difficulty {task['difficulty']}")
+        log.debug(f"Received new mining task with difficulty {task['difficulty']}")

         if task:
             found_block = proof_of_work(task['block'], task['difficulty'], event)
             pipe.send({'found_block': found_block})
+
+
+async def mining_controller(app):
+    pipe, remote_pipe = multiprocessing.Pipe()
+    event = multiprocessing.Event()
+
+    # Spawn a new process consisting of the miner() function
+    # and send the right end of the pipe to it
+    process = multiprocessing.Process(target=miner, args=(remote_pipe, event))
+    process.start()
+
+    pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
+
+    while True:
+        event.set()
+        # We'll check the pipe every 100 ms
+        await asyncio.sleep(0.1)
+
+        # Check if we should still be mining
+        if not we_should_still_be_mining():
+            event.clear()
+
+        if pipe.poll():
+            result = pipe.recv()
+            found_block = result['found_block']
+            app.blockchain.save_block(found_block)
+            log.info(f"Mined Block {found_block['height']} containing {len(found_block['transactions'])} transactions")
+
+            pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
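
The target line above is the proof-of-work threshold: str.ljust pads the difficulty's zeros with "f" up to 64 hex digits, and since equal-length hex strings compare lexicographically, a digest beats the target exactly when it begins with at least that many zeros. A quick standalone illustration (hypothetical values):

    difficulty = 3
    target = str.ljust("0" * difficulty, 64, "f")   # "000fff...f", 64 chars
    assert len(target) == 64 and target.startswith("000")

    # A digest with three leading zeros is at or below the target...
    assert "000a" + "0" * 60 <= target
    # ...while one with only two leading zeros is above it, so mining continues.
    assert "00f0" + "0" * 60 > target

The Event passed to the miner acts as the matching switch: the controller sets it while mining should continue and clears it when we_should_still_be_mining() says otherwise.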

(Sanic application module; filename not captured in this view)

@@ -3,7 +3,8 @@ from sanic.response import json
 from sqlalchemy import func

 from database import Peer, db, reset_db
-from tasks import initiate_node, mining_controller, peer_discovery
+from tasks import initiate_node, peer_discovery
+from mining import mining_controller

 app = Sanic()

tasks.py

@@ -1,15 +1,12 @@
 import asyncio
 import logging
-import multiprocessing
-import time
 from uuid import uuid4

 import aiohttp

 from blockchain import Blockchain
 from database import db
-from helpers import get_config, set_config, get_random_peers
-from mining import miner
+from helpers import get_config, get_random_peers, set_config
 from networking import PortMapper
@@ -18,20 +15,18 @@ log = logging.getLogger('root.tasks')

 def initiate_node(app):
     # Set up TCP Redirect (Port Forwarding)
-    port_mapper = PortMapper()
-    port_mapper.add_portmapping(8080, 8080, 'TCP', 'Electron')
+    # port_mapper = PortMapper()
+    # port_mapper.add_portmapping(8080, 8080, 'TCP', 'Electron')

-    # Set the identifier (unique Id) for our node
-    node_identifier = get_config('node_identifier')
-
-    if not node_identifier:
-        node_identifier = set_config(key='node_identifier', value=uuid4().hex)
+    # Set the identifier (unique Id) for our node (if it doesn't exist)
+    node_identifier = set_config(key='node_identifier', value=uuid4().hex)

-    app.request_headers = {
-        'content-type': 'application/json',
-        'x-node-identifier': node_identifier,
-        'x-node-ip': port_mapper.external_ip,
-        'x-node-port': port_mapper.external_port,
-    }
+    # app.request_headers = {
+    #     'content-type': 'application/json',
+    #     'x-node-identifier': node_identifier,
+    #     'x-node-ip': port_mapper.external_ip,
+    #     'x-node-port': port_mapper.external_port,
+    # }

     log.info('Node Identifier: %s', node_identifier)
@@ -65,77 +60,5 @@ async def watch_blockchain(app):
         await asyncio.sleep(2)


-async def consensus():
-    """
-    Our Consensus Algorithm. It makes sure we have a valid up-to-date chain.
-    """
-    # Asynchronously grab the chain from each peer
-    # Validate it, then replace ours if necessary
-
-
-def resolve_conflicts(self):
-    """
-    This is our consensus algorithm, it resolves conflicts
-    by replacing our chain with the longest one in the network.
-
-    :return: True if our chain was replaced, False if not
-    """
-    neighbours = self.nodes
-    new_chain = None
-
-    # We're only looking for chains longer than ours
-    max_length = len(self.chain)
-
-    # Grab and verify the chains from all the nodes in our network
-    for node in neighbours:
-        response = requests.get(f'http://{node}/chain')
-
-        if response.status_code == 200:
-            length = response.json()['length']
-            chain = response.json()['chain']
-
-            # Check if the length is longer and the chain is valid
-            if length > max_length and self.valid_chain(chain):
-                max_length = length
-                new_chain = chain
-
-    # Replace our chain if we discovered a new, valid chain longer than ours
-    if new_chain:
-        self.chain = new_chain
-        return True
-
-    return False
-
-
 def we_should_still_be_mining():
     return True
-
-
-async def mining_controller(app):
-    pipe, remote_pipe = multiprocessing.Pipe()
-    event = multiprocessing.Event()
-
-    # Spawn a new process consisting of the miner() function
-    # and send the right end of the pipe to it
-    process = multiprocessing.Process(target=miner, args=(remote_pipe, event))
-    process.start()
-
-    pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
-
-    while True:
-        event.set()
-        # We'll check the pipe every 100 ms
-        await asyncio.sleep(0.1)
-
-        # Check if we should still be mining
-        if not we_should_still_be_mining():
-            event.clear()
-
-        if pipe.poll():
-            result = pipe.recv()
-            found_block = result['found_block']
-            app.blockchain.save_block(found_block)
-
-            pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
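
The removed resolve_conflicts() spells out the longest-chain rule that the consensus() coroutine is meant to implement asynchronously. A hedged sketch of what that could look like with aiohttp (valid_chain and the /chain response shape come from the removed code; everything else is an assumption, not this commit's implementation):

    import asyncio

    import aiohttp

    async def fetch_chain(session, node):
        # Each peer is assumed to expose GET /chain returning
        # {'length': <int>, 'chain': [<block>, ...]}.
        async with session.get(f'http://{node}/chain') as response:
            if response.status == 200:
                return await response.json()

    async def consensus(blockchain, peers):
        # Grab every peer's chain concurrently instead of one at a time.
        async with aiohttp.ClientSession() as session:
            results = await asyncio.gather(
                *(fetch_chain(session, node) for node in peers),
                return_exceptions=True)

        new_chain, max_length = None, len(blockchain.chain)
        for result in results:
            if not isinstance(result, dict):
                continue  # unreachable peer or malformed reply
            # Adopt only chains that are longer than ours and valid.
            if result['length'] > max_length and blockchain.valid_chain(result['chain']):
                max_length, new_chain = result['length'], result['chain']

        if new_chain:
            blockchain.chain = new_chain  # a longer valid chain wins
            return True
        return False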

(test suite; filename not captured in this view)

@@ -101,4 +101,4 @@ class TestHashingAndProofs(BlockchainTestCase):
         new_hash = hashlib.sha256(new_block_json).hexdigest()

         assert len(new_hash) == 64
-        assert new_hash == self.blockchain.hash(new_block)
+        assert new_hash == hash_block(new_block)
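
With hash_block now a module-level helper rather than a Blockchain method, it can also be exercised directly. A minimal determinism check, assuming the block-dict shape used throughout this commit (a sketch, not part of the commit):

    from helpers import hash_block

    def test_hash_block_is_deterministic():
        block = {'height': 1, 'timestamp': 0, 'transactions': [],
                 'previous_hash': '0' * 64, 'proof': 42}

        first, second = hash_block(block), hash_block(dict(block))
        assert first == second      # same fields, same digest
        assert len(first) == 64     # hex-encoded SHA-256

        block['proof'] += 1         # changing any field changes the hash
        assert hash_block(block) != first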