From 8cd2a13747fd6df791a735dde91bf56fe26cfa81 Mon Sep 17 00:00:00 2001
From: Daniel van Flymen
Date: Fri, 29 Dec 2017 15:38:57 -0500
Subject: [PATCH] more cleanup

---
 blockchain.py            | 24 ++--------
 database.py              |  3 --
 helpers.py               | 19 ++++----
 mining.py                | 44 +++++++++++++++---
 node.py                  |  3 +-
 tasks.py                 | 99 +++++----------------------------------
 tests/test_blockchain.py |  2 +-
 7 files changed, 67 insertions(+), 127 deletions(-)

diff --git a/blockchain.py b/blockchain.py
index 8f7ed2f..2f0f22d 100644
--- a/blockchain.py
+++ b/blockchain.py
@@ -1,8 +1,8 @@
 import logging
 from datetime import datetime
-from hashlib import sha256
 
 from database import Block, db
+from helpers import hash_block
 
 logger = logging.getLogger('root.blockchain')
 
@@ -12,12 +12,11 @@ class Blockchain:
     def __init__(self):
         self.current_transactions = []
         self.difficulty = 4
-        self.current_block = None
 
-        # Create the genesis block if necessary
+        # Create the genesis block if it doesn't exist
         if not self.last_block:
             block = self.build_block()
-            block['hash'] = self.hash(block)
+            block['hash'] = hash_block(block)
             self.save_block(block)
             logger.info("✨ Created genesis block")
 
@@ -50,8 +49,8 @@ class Blockchain:
         while current_index < len(chain):
             block = chain[current_index]
 
-            # Check that the hash of the block is correct
-            if block['previous_hash'] != self.hash(last_block):
+            # Check that the block's previous_hash matches the hash of the last block
+            if block['previous_hash'] != hash_block(last_block):
                 return False
 
             # Check that the Proof of Work is correct
@@ -116,16 +115,3 @@ class Blockchain:
         :return:
         """
         return db.query(Block).order_by(Block.height.desc()).first()
-
-    @staticmethod
-    def hash(b):
-        """
-        Creates a SHA-256 hash of the fields for a Block
-        """
-        byte_array = f"{b['height']}" \
-                     f"{b['timestamp']}" \
-                     f"{b['transactions']}" \
-                     f"{b['previous_hash']}" \
-                     f"{b['proof']}".encode()
-
-        return sha256(byte_array).hexdigest()
diff --git a/database.py b/database.py
index d288ca0..ea11481 100644
--- a/database.py
+++ b/database.py
@@ -1,6 +1,3 @@
-import json
-from datetime import datetime
-
 from sqlalchemy import Column, DateTime, Integer, PickleType, String, create_engine
 from sqlalchemy.ext.declarative import declarative_base, declared_attr
 from sqlalchemy.orm import scoped_session, sessionmaker
diff --git a/helpers.py b/helpers.py
index d461cb0..1b22c92 100644
--- a/helpers.py
+++ b/helpers.py
@@ -1,4 +1,4 @@
-from datetime import date, datetime
+from hashlib import sha256
 
 from sqlalchemy import func
 
@@ -11,12 +11,12 @@ def set_config(key, value, replace=False):
     if config_value is None:
         db.add(Config(key=key, value=value))
         db.commit()
-        return
+        return value
 
     if config_value != value and replace is True:
         db.add(Config(key=key, value=value))
         db.commit()
-        return
+        return value
 
     return config_value
 
@@ -39,11 +39,14 @@ def get_random_peers(limit=10):
     return db.query(Peer).order_by(func.random()).limit(limit)
 
 
-def json_serializer(obj):
+def hash_block(block):
     """
-    JSON serializer for objects not serializable by default json code
+    Creates a SHA-256 hash of the fields for a Block
     """
+    byte_array = f"{block['height']}" \
+                 f"{block['timestamp']}" \
+                 f"{block['transactions']}" \
+                 f"{block['previous_hash']}" \
+                 f"{block['proof']}".encode()
 
-    if isinstance(obj, (datetime, date)):
-        return obj.isoformat()
-    raise TypeError("Type %s not serializable" % type(obj))
+    return sha256(byte_array).hexdigest()
diff --git a/mining.py b/mining.py
index 31806cf..5b1653d 100644
--- a/mining.py
+++ b/mining.py
@@ -1,9 +1,10 @@
+import asyncio
 import logging
+import multiprocessing
 from datetime import datetime
-from hashlib import sha256
-import signal
 
-from blockchain import Blockchain
+from helpers import hash_block
+from tasks import we_should_still_be_mining
 
 log = logging.getLogger('root.mining')
 
@@ -20,7 +21,7 @@
     # String of 64 f's replaced with 3 leading zeros (if the difficulty is 3): 000fff...f
     target = str.ljust("0" * difficulty, 64, "f")
 
-    guess_hash = Blockchain.hash(current_block)
+    guess_hash = hash_block(current_block)
 
     while guess_hash > target:
         # Check if we should still be mining
@@ -28,18 +29,47 @@
         # raise Exception("STOP MINING")
         current_block['timestamp'] = datetime.utcnow()
         current_block['proof'] += 1
-        guess_hash = Blockchain.hash(current_block)
+        guess_hash = hash_block(current_block)
 
     current_block['hash'] = guess_hash
     return current_block
 
 
 def miner(pipe, event):
-    while True:
         task = pipe.recv()
-        log.info(f"Received new mining task with difficulty {task['difficulty']}")
+        log.debug(f"Received new mining task with difficulty {task['difficulty']}")
 
         if task:
             found_block = proof_of_work(task['block'], task['difficulty'], event)
             pipe.send({'found_block': found_block})
+
+
+async def mining_controller(app):
+    pipe, remote_pipe = multiprocessing.Pipe()
+    event = multiprocessing.Event()
+
+    # Spawn a new process consisting of the miner() function
+    # and send the right end of the pipe to it
+    process = multiprocessing.Process(target=miner, args=(remote_pipe, event))
+    process.start()
+
+    pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
+
+    while True:
+        event.set()
+
+        # We'll check the pipe every 100 ms
+        await asyncio.sleep(0.1)
+
+        # Check if we should still be mining
+        if not we_should_still_be_mining():
+            event.clear()
+
+        if pipe.poll():
+            result = pipe.recv()
+            found_block = result['found_block']
+
+            app.blockchain.save_block(found_block)
+            log.info(f"Mined Block {found_block['height']} containing {len(found_block['transactions'])} transactions")
+            pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
diff --git a/node.py b/node.py
index dab521e..540f113 100644
--- a/node.py
+++ b/node.py
@@ -3,7 +3,8 @@
 from sanic.response import json
 from sqlalchemy import func
 
 from database import Peer, db, reset_db
-from tasks import initiate_node, mining_controller, peer_discovery
+from tasks import initiate_node, peer_discovery
+from mining import mining_controller
 
 app = Sanic()
diff --git a/tasks.py b/tasks.py
index f6329e0..e73fb1d 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,15 +1,12 @@
 import asyncio
 import logging
-import multiprocessing
-import time
 from uuid import uuid4
 
 import aiohttp
 
 from blockchain import Blockchain
 from database import db
-from helpers import get_config, set_config, get_random_peers
-from mining import miner
+from helpers import get_config, get_random_peers, set_config
 from networking import PortMapper
 
 
@@ -18,20 +15,18 @@ log = logging.getLogger('root.tasks')
 def initiate_node(app):
     # Set up TCP Redirect (Port Forwarding)
-    port_mapper = PortMapper()
-    port_mapper.add_portmapping(8080, 8080, 'TCP', 'Electron')
+    # port_mapper = PortMapper()
+    # port_mapper.add_portmapping(8080, 8080, 'TCP', 'Electron')
 
-    # Set the identifier (unique Id) for our node
-    node_identifier = get_config('node_identifier')
-    if not node_identifier:
-        node_identifier = set_config(key='node_identifier', value=uuid4().hex)
+    # Set the identifier (unique Id) for our node (if it doesn't exist)
+    node_identifier = set_config(key='node_identifier', value=uuid4().hex)
 
-    app.request_headers = {
-        'content-type': 'application/json',
-        'x-node-identifier': node_identifier,
-        'x-node-ip': port_mapper.external_ip,
-        'x-node-port': port_mapper.external_port,
-    }
+    # app.request_headers = {
+    #     'content-type': 'application/json',
+    #     'x-node-identifier': node_identifier,
+    #     'x-node-ip': port_mapper.external_ip,
+    #     'x-node-port': port_mapper.external_port,
+    # }
 
     log.info('Node Identifier: %s', node_identifier)
 
 
@@ -65,77 +60,5 @@ async def watch_blockchain(app):
         await asyncio.sleep(2)
 
 
-async def consensus():
-    """
-    Our Consensus Algorithm. It makes sure we have a valid up-to-date chain.
-    """
-
-    # Asynchronously grab the chain from each peer
-    # Validate it, then replace ours if necessary
-    def resolve_conflicts(self):
-        """
-        This is our consensus algorithm, it resolves conflicts
-        by replacing our chain with the longest one in the network.
-
-        :return: True if our chain was replaced, False if not
-        """
-
-        neighbours = self.nodes
-        new_chain = None
-
-        # We're only looking for chains longer than ours
-        max_length = len(self.chain)
-
-        # Grab and verify the chains from all the nodes in our network
-        for node in neighbours:
-            response = requests.get(f'http://{node}/chain')
-
-            if response.status_code == 200:
-                length = response.json()['length']
-                chain = response.json()['chain']
-
-                # Check if the length is longer and the chain is valid
-                if length > max_length and self.valid_chain(chain):
-                    max_length = length
-                    new_chain = chain
-
-        # Replace our chain if we discovered a new, valid chain longer than ours
-        if new_chain:
-            self.chain = new_chain
-            return True
-
-        return False
-
-
 def we_should_still_be_mining():
     return True
-
-
-async def mining_controller(app):
-    pipe, remote_pipe = multiprocessing.Pipe()
-    event = multiprocessing.Event()
-
-    # Spawn a new process consisting of the miner() function
-    # and send the right end of the pipe to it
-    process = multiprocessing.Process(target=miner, args=(remote_pipe, event))
-    process.start()
-
-    pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
-
-    while True:
-        event.set()
-
-        # We'll check the pipe every 100 ms
-        await asyncio.sleep(0.1)
-
-        # Check if we should still be mining
-        if not we_should_still_be_mining():
-            event.clear()
-
-        if pipe.poll():
-            result = pipe.recv()
-            found_block = result['found_block']
-
-            app.blockchain.save_block(found_block)
-
-            pipe.send({'block': app.blockchain.build_block(), 'difficulty': 5})
diff --git a/tests/test_blockchain.py b/tests/test_blockchain.py
index 750f7ae..1434756 100644
--- a/tests/test_blockchain.py
+++ b/tests/test_blockchain.py
@@ -101,4 +101,4 @@
         new_hash = hashlib.sha256(new_block_json).hexdigest()
 
         assert len(new_hash) == 64
-        assert new_hash == self.blockchain.hash(new_block)
+        assert new_hash == hash_block(new_block)
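
A quick way to sanity-check the relocated helper after applying the patch. This is an illustrative sketch only, not part of the change: the block dict below is hand-built with placeholder values, its keys simply mirror the fields that hash_block reads.

# Illustrative sketch: exercise helpers.hash_block on a hand-built block dict.
# The keys mirror the fields referenced inside hash_block; the values are dummies.
from datetime import datetime

from helpers import hash_block

block = {
    'height': 1,
    'timestamp': datetime.utcnow(),
    'transactions': [],
    'previous_hash': None,
    'proof': 0,
}

digest = hash_block(block)

assert len(digest) == 64             # hex digest of a 256-bit hash
assert digest == hash_block(block)   # same field values -> same hash (deterministic)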