Updating for collaborators

This commit is contained in:
Daniel van Flymen 2017-12-28 15:52:11 -05:00
parent 0df3b84428
commit 5021b69937
10 changed files with 295 additions and 205 deletions

2
.gitignore vendored
View File

@ -102,3 +102,5 @@ ENV/
# PyCharm
.idea/
*.db

View File

@ -20,4 +20,5 @@ flask = "==0.12.2"
requests = "==2.18.4"
sqlalchemy = "*"
aioodbc = "*"
sqlalchemy-aio = "*"
sqlalchemy-aio = "*"
sanic = "*"

65
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "d2951f633be558eed649ff12d93c9acbeb9ada5f228abff3e5a873891a6a78dc"
"sha256": "8b56e0c14f675dc0d54950a061f31508a06a5103afce16f4d1e3d7002b72463d"
},
"host-environment-markers": {
"implementation_name": "cpython",
@ -29,6 +29,13 @@
]
},
"default": {
"aiofiles": {
"hashes": [
"sha256:25c66ea3872d05d53292a6b3f7fa0f86691512076446d83a505d227b5e76f668",
"sha256:852a493a877b73e11823bfd4e8e5ef2610d70d12c9eaed961bcd9124d8de8c10"
],
"version": "==0.3.2"
},
"aioodbc": {
"hashes": [
"sha256:1a859a4ac7de85bb7a743e22da3942fb046c18fed27fb68bb2ac01750ed259a7"
@ -63,6 +70,12 @@
],
"version": "==0.12.2"
},
"httptools": {
"hashes": [
"sha256:f50dcb27178416c3a4113e9e1b392be5d1ff56ae1e474fe80869ed8530505e4c"
],
"version": "==0.0.10"
},
"idna": {
"hashes": [
"sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4",
@ -121,6 +134,13 @@
],
"version": "==2.18.4"
},
"sanic": {
"hashes": [
"sha256:18a3bd729093ac93a245849c44045c505a11e6d36da5bf231cb986bfb1e3c14c",
"sha256:22b1a6f1dc55db8a136335cb0961afa95040ca78aa8c78425a40d91e8618e60e"
],
"version": "==0.7.0"
},
"six": {
"hashes": [
"sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb",
@ -130,9 +150,9 @@
},
"sqlalchemy": {
"hashes": [
"sha256:8b79a5ed91cdcb5abe97b0045664c55c140aec09e5dd5c01303e23de5fe7a95a"
"sha256:7dda3e0b1b12215e3bb05368d1abbf7d747112a43738e0a4e6deb466b83fd88e"
],
"version": "==1.1.15"
"version": "==1.2.0"
},
"sqlalchemy-aio": {
"hashes": [
@ -141,6 +161,12 @@
],
"version": "==0.11.0"
},
"ujson": {
"hashes": [
"sha256:f66073e5506e91d204ab0c614a148d5aa938bdbf104751be66f8ad7a222f5f86"
],
"version": "==1.35"
},
"urllib3": {
"hashes": [
"sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
@ -148,6 +174,39 @@
],
"version": "==1.22"
},
"uvloop": {
"hashes": [
"sha256:01cf7199728867f406ba5af78cc47c80acd663ccc52cae105e737a997f1b2bca",
"sha256:e7c871ba3edd5fcf2afb756de88a9a65245070161e24f75abe79c0a241bb8c76",
"sha256:b057ef2b0d0162c1ef257f43a95f59bfec37ee9a75cc5412d6b7f9ac6d1d69cb",
"sha256:89c3bfaad77625490c42a6b99af1879234767ab0c31dd193486a909506e5e549",
"sha256:68574150720a380509a3409bf2941be0199cfdacff144a97502fb29c250ba927",
"sha256:7fba5f390db607b2f026bc598df6b2a2a9e062bffe82910b5ffe2b88560135e5",
"sha256:7ee14835a75c72227d3f8a3f370519a3106a6f02e5453f275f16437ebdb92953"
],
"version": "==0.9.1"
},
"websockets": {
"hashes": [
"sha256:f5192da704535a7cbf76d6e99c1ec4af7e8d1288252bf5a2385d414509ded0cf",
"sha256:0c31bc832d529dc7583d324eb6c836a4f362032a1902723c112cf57883488d8c",
"sha256:da7610a017f5343fdf765f4e0eb6fd0dfd08264ca1565212b110836d9367fc9c",
"sha256:fd81af8cf3e69f9a97f3a6c0623a0527de0f922c2df725f00cd7646d478af632",
"sha256:3d425ae081fb4ba1eef9ecf30472ffd79f8e868297ccc7a47993c96dbf2a819c",
"sha256:ebdd4f18fe7e3bea9bd3bf446b0f4117739478caa2c76e4f0fb72cc45b03cbd7",
"sha256:3859ca16c229ddb0fa21c5090e4efcb037c08ce69b0c1dfed6122c3f98cd0c22",
"sha256:d1a0572b6edb22c9208e3e5381064e09d287d2a915f90233fef994ee7a14a935",
"sha256:80188abdadd23edaaea05ce761dc9a2e1df31a74a0533967f0dcd9560c85add0",
"sha256:fecf51c13195c416c22422353b306dddb9c752e4b80b21e0fa1fccbe38246677",
"sha256:367ff945bc0950ad9634591e2afe50bf2222bc4fad1088a386c4bb700888026e",
"sha256:6df87698022aef2596bffdfecc96d656db59c8d719708c8a471daa815ee61656",
"sha256:341824d8c9ad53fc43cca3fa9407f294125fa258592f7676640396501448e57e",
"sha256:64896a6b3368c959b8096b655e46f03dfa65b96745249f374bd6a35705cc3489",
"sha256:1f3e5a52cab6daa3d432c7b0de0a14109be39d2bfaad033ee5de4a3d3e11dcdf",
"sha256:da4d4fbe059b0453e726d6d993760065d69b823a27efc3040402a6fcfe6a1ed9"
],
"version": "==4.0.1"
},
"werkzeug": {
"hashes": [
"sha256:f3000aa146ce8a9da8ca3e978e0e931c2a58eb56c323a5efb6b4307f7832b549",

View File

@ -2,6 +2,10 @@ import hashlib
from datetime import datetime
from database import Block, db
import logging
logger = logging.getLogger('root.blockchain')
class Blockchain:
@ -9,10 +13,15 @@ class Blockchain:
self.current_transactions = []
self.difficulty = 4
# Create the genesis block with a height of 0
self.new_block(previous_hash='1', proof=100, height=0)
# Create the genesis block if necessary
if not self.get_blocks(0):
self.new_block(previous_hash='1', proof=100, height=0)
logger.info("✨ Created genesis block")
def get_blocks(self, height=0):
logger.info("Blockchain Initiated")
@staticmethod
def get_blocks(height=0):
"""
Returns all blocks from a given height
@ -110,8 +119,8 @@ class Blockchain:
:param block: Block
"""
block_string = block.to_json().encode()
return hashlib.sha256(block_string).hexdigest()
block_bytes = block.to_json().encode()
return hashlib.sha256(block_bytes).hexdigest()
def proof_of_work(self, last_proof):
"""
@ -141,94 +150,3 @@ class Blockchain:
guess_hash = hashlib.sha256(guess).hexdigest()
return guess_hash[:self.difficulty] == '0' * self.difficulty # In Python, '0' * 4 gives '0000'
# @app.route('/mine', methods=['GET'])
# def mine():
# # We run the proof of work algorithm to get the next proof...
# last_block = blockchain.last_block
# last_proof = last_block['proof']
# proof = blockchain.proof_of_work(last_proof)
#
# # We must receive a reward for finding the proof.
# # The sender is "0" to signify that this node has mined a new coin.
# blockchain.new_transaction(
# sender="0",
# recipient=node_identifier,
# amount=1,
# )
#
# # Forge the new Block by adding it to the chain
# previous_hash = blockchain.hash(last_block)
# block = blockchain.new_block(proof, previous_hash)
#
# response = {
# 'message': "New Block Forged",
# 'index': block['index'],
# 'transactions': block['transactions'],
# 'proof': block['proof'],
# 'previous_hash': block['previous_hash'],
# }
# return jsonify(response), 200
# @app.route('/transactions/new', methods=['POST'])
# def new_transaction():
# values = request.get_json()
#
# # Check that the required fields are in the POST'ed data
# required = ['sender', 'recipient', 'amount']
# if not all(k in values for k in required):
# return 'Missing values', 400
#
# # Create a new Transaction
# index = blockchain.new_transaction(values['sender'], values['recipient'], values['amount'])
#
# response = {'message': f'Transaction will be added to Block {index}'}
# return jsonify(response), 201
#
#
# @app.route('/chain', methods=['GET'])
# def full_chain():
# response = {
# 'chain': blockchain.chain,
# 'length': len(blockchain.chain),
# }
# return jsonify(response), 200
#
#
# @app.route('/nodes/register', methods=['POST'])
# def register_nodes():
# values = request.get_json()
#
# nodes = values.get('nodes')
# if nodes is None:
# return "Error: Please supply a valid list of nodes", 400
#
# for node in nodes:
# blockchain.register_node(node)
#
# response = {
# 'message': 'New nodes have been added',
# 'total_nodes': list(blockchain.nodes),
# }
# return jsonify(response), 201
#
#
# @app.route('/nodes/resolve', methods=['GET'])
# def consensus():
# replaced = blockchain.resolve_conflicts()
#
# if replaced:
# response = {
# 'message': 'Our chain was replaced',
# 'new_chain': blockchain.chain
# }
# else:
# response = {
# 'message': 'Our chain is authoritative',
# 'chain': blockchain.chain
# }
#
# return jsonify(response), 200
#

View File

@ -13,31 +13,30 @@ db = scoped_session(sessionmaker(bind=engine))
class BaseModel(object):
@declared_attr
def __tablename__(self):
return self.__name__.lower() # Ensures all tables have the same name as their models (below)
def to_json(self):
"""
Convenience method to convert any database row to JSON
:return: <JSON>
Ensures all tables have the same name as their models (below)
"""
return json.dumps({c.name: getattr(self, c.name) for c in self.__table__.columns}, sort_keys=True)
return self.__name__.lower()
def to_dict(self):
"""
Convenience method to convert any database row to dict
:return: <JSON>
Helper method to convert any database row to dict
"""
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def to_json(self):
"""
Helper method to convert any database row to JSON
"""
return json.dumps(self.to_dict(), sort_keys=True)
Base = declarative_base(cls=BaseModel)
class Peer(Base):
identifier = Column(String(32), primary_key=True)
ip = Column(String, index=True, unique=True)
hostname = Column(String, index=True, unique=True)
class Block(Base):
@ -56,9 +55,7 @@ class Config(Base):
def reset_db():
"""
Deletes and Re-creates the Database
:return:
Drops and Re-creates the Database
"""
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)

View File

@ -1,4 +1,6 @@
from database import db, Config
from sqlalchemy import func
from database import Config, Peer, db
def set_config(key, value, replace=False):
@ -14,6 +16,8 @@ def set_config(key, value, replace=False):
db.commit()
return
return config_value
def get_config(key, default=None):
config = db.query(Config).filter_by(key=key).first()
@ -21,3 +25,13 @@ def get_config(key, default=None):
return config.value
else:
return default
def get_random_peers(limit=10):
    """
    Fetch a random selection of known peers.

    :param limit: Maximum number of peers to return
    :return: Query yielding up to `limit` Peer rows in random order
    """
    shuffled = db.query(Peer).order_by(func.random())
    return shuffled.limit(limit)

54
mining.py Normal file
View File

@ -0,0 +1,54 @@
import logging
from hashlib import sha256
import signal
log = logging.getLogger('root.mining')
def valid_proof(last_hash, proof, target):
    """
    Check whether a candidate proof satisfies the target difficulty.

    :param last_hash: Hash of the previous block
    :param proof: Current Proof
    :param target: Target Difficulty
    :return: True if correct, False if not.
    """
    candidate = (str(last_hash) + str(proof)).encode()
    return sha256(candidate).hexdigest() < target
def proof_of_work(last_hash, difficulty, event):
    """
    Simple Proof of Work Algorithm.

    Increments a counter until the hash of (previous hash + counter)
    falls below the difficulty target.

    :param last_hash: The hash of the previous block
    :param difficulty: The minimum number of leading zeros
    :param event: Multiprocessing event (reserved for cancelling an
        in-progress search; not consulted yet)
    :return: The first proof value that satisfies the target
    """
    # e.g. difficulty 3 gives "000fff...f" (64 hex chars in total)
    target = ("0" * difficulty).ljust(64, "f")
    proof = 0
    # A guess wins as soon as its hex digest sorts below the target
    while sha256(f'{last_hash}{proof}'.encode()).hexdigest() >= target:
        proof += 1
    return proof
def miner(right, event):
    """
    Worker-process loop: receive mining tasks over a pipe, send back proofs.

    Blocks on the pipe until a task dict arrives, runs Proof of Work for it,
    and writes the resulting proof back through the same pipe end.

    :param right: Connection end of a multiprocessing.Pipe; tasks are read
        from it and proofs written to it
    :param event: multiprocessing.Event forwarded to proof_of_work (reserved
        for cancelling a search in progress)
    """
    while True:
        log.info('Waiting for task')
        latest_task = right.recv()
        # Guard BEFORE touching the payload: previously the task's fields
        # were logged ahead of this check, so a falsy/malformed task would
        # crash the worker with a TypeError/KeyError.
        if not latest_task:
            continue
        # Lazy %-style args so formatting is skipped when INFO is disabled
        log.info("Received new task for hash %s with difficulty %s",
                 latest_task['last_hash'], latest_task['difficulty'])
        proof = proof_of_work(latest_task['last_hash'], latest_task['difficulty'], event)
        right.send(proof)

View File

@ -1,42 +1,65 @@
import miniupnpc
import logging
upnpc = miniupnpc.UPnP()
upnpc.discoverdelay = 200
ndevices = upnpc.discover()
print('%d UPNP device(s) detected', ndevices)
upnpc.selectigd()
external_ip = upnpc.externalipaddress()
print('external ip: %s', external_ip)
print('status: %s, connection type: %s',
upnpc.statusinfo(),
upnpc.connectiontype())
log = logging.getLogger('root.networking')
# find a free port for the redirection
port = 8080
external_port = port
found = False
while True:
redirect = upnpc.getspecificportmapping(external_port, 'TCP')
if redirect is None:
found = True
break
if external_port >= 65535:
break
external_port = external_port + 1
class PortMapper(object):
def __init__(self):
client = miniupnpc.UPnP()
client.discoverdelay = 200
print('No redirect candidate %s TCP => %s port %u TCP',
external_ip, upnpc.lanaddr, port)
try:
log.info('Searching for Internet Gateway Devices... (timeout: %sms)', client.discoverdelay)
device_count = client.discover()
print('trying to redirect %s port %u TCP => %s port %u TCP',
external_ip, external_port, upnpc.lanaddr, port)
log.info('Found %s devices', device_count)
res = upnpc.addportmapping(external_port, 'TCP',
upnpc.lanaddr, port,
'pyethereum p2p port %u' % external_port,
'')
self.client = client
self.device_count = device_count
self.internal_ip = None
self.external_ip = None
self.external_port = None
print('Success to redirect %s port %u TCP => %s port %u TCP',
external_ip, external_port, upnpc.lanaddr, port)
except Exception as e:
log.error('An unexpected error occurred: %s', e)
self.client = None
def add_portmapping(self, internal_port, external_port, protocol, label=''):
    """
    Redirect an external port on the gateway to a local port.

    Selects the Internet Gateway Device, resolves the internal/external
    IPs, searches for a free external port at or above `external_port`,
    and installs the mapping. On success the chosen port is stored in
    `self.external_port`.

    :param internal_port: Port on this machine to forward traffic to
    :param external_port: First external port to try on the gateway
    :param protocol: 'TCP' or 'UDP'
    :param label: Description stored with the mapping on the gateway
    """
    if self.client is None:
        log.error('No uPnP devices were found on the network')
        return
    try:
        self.client.selectigd()
        self.internal_ip = self.client.lanaddr
        self.external_ip = self.client.externalipaddress()
        log.info('Internal IP: %s', self.internal_ip)
        log.info('External IP: %s', self.external_ip)
        log.info('Attempting %s redirect: %s:%s -> %s:%s', protocol,
                 self.external_ip, external_port,
                 self.internal_ip, internal_port)
        # Find a free external port. getspecificportmapping() returns an
        # existing mapping, or None when the port is unused — the previous
        # condition (`while port_mapping is None`) was inverted: it skipped
        # past free ports and stopped on an occupied one (or ran past the
        # valid port range to 65536).
        port_mapping = self.client.getspecificportmapping(external_port, protocol)
        while port_mapping is not None and external_port < 65535:
            external_port += 1
            port_mapping = self.client.getspecificportmapping(external_port, protocol)
        success = self.client.addportmapping(external_port, protocol, self.internal_ip, internal_port, label, '')
        if success:
            log.info('Successful %s redirect: %s:%s -> %s:%s', protocol,
                     self.external_ip, external_port,
                     self.internal_ip, internal_port)
            self.external_port = external_port
        else:
            log.error('Failed to map a port')
    except Exception as e:
        log.error('An unexpected error occurred: %s', e)

34
node.py
View File

@ -1,31 +1,18 @@
from uuid import uuid4
from sanic import Sanic
from sanic.response import json
from sqlalchemy import func
from blockchain import Blockchain
from database import Peer, db, reset_db
from helpers import get_config, set_config
from tasks import populate_peers, watch_blockchain, add_stuff, mining_controller
from tasks import initiate_node, mining_controller, peer_discovery
app = Sanic()
app.debug = True
# reset_db()
initiate_node(app)
@app.listener('before_server_start')
async def set_node_identifier(_app, loop):
node_identifier = get_config('node_identifier')
if not node_identifier:
set_config(key='node_identifier', value=uuid4().hex)
reset_db()
app.blockchain = Blockchain()
app.add_task(populate_peers(app))
# app.add_task(watch_blockchain(app))
# app.add_task(add_stuff(app))
app.add_task(mining_controller)
app.add_task(peer_discovery(app))
app.add_task(mining_controller(app))
@app.route("/")
@ -36,15 +23,18 @@ async def peers(request):
@app.route("/transactions")
async def current_transactions(request):
return json(app.blockchain.current_transactions())
if request.method == 'GET':
return json(app.blockchain.current_transactions)
elif request.method == 'POST':
return json({"text": "thanks for your transaction"})
@app.route("/blocks")
async def blocks(request):
# height = request.parsed_args['height']
blocks = app.blockchain.get_blocks()
return json(blocks)
random_blocks = app.blockchain.get_blocks()
return json(random_blocks)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8080)
app.run(debug=False, host="0.0.0.0", port=8080)

104
tasks.py
View File

@ -1,57 +1,70 @@
import time
import asyncio
import logging
import multiprocessing
import time
from uuid import uuid4
import aiohttp
from sqlalchemy import func
from database import Peer, db
from blockchain import Blockchain
from database import db
from helpers import get_config, set_config, get_random_peers
from mining import miner
from networking import PortMapper
def get_random_peers(limit=10):
"""
Returns random peers
:param limit: How many peers to return
:return:
"""
return db.query(Peer).order_by(func.random()).limit(limit)
log = logging.getLogger('root.tasks')
async def populate_peers(app):
def initiate_node(app):
    """
    One-time node setup: port forwarding, identity, request headers,
    and the Blockchain helper, all attached to the app.
    """
    # Set up TCP redirect (port forwarding)
    mapper = PortMapper()
    mapper.add_portmapping(8080, 8080, 'TCP', 'Electron')

    # Ensure this node has a unique identifier, creating one if missing
    identifier = get_config('node_identifier')
    if not identifier:
        identifier = set_config(key='node_identifier', value=uuid4().hex)
    log.info('Node Identifier: %s', identifier)

    # Headers attached to every outgoing peer request
    app.request_headers = {
        'content-type': 'application/json',
        'x-node-identifier': identifier,
        'x-node-ip': mapper.external_ip,
        'x-node-port': mapper.external_port,
    }

    # Add the Blockchain helper class to the app
    app.blockchain = Blockchain()
async def peer_discovery(app):
"""
Ask random peers to return peers they know about
"""
print(app)
while True:
peers = get_random_peers()
async with aiohttp.ClientSession() as session:
for peer in peers:
try:
async with session.get(f'http://{peer.ip}:8000', timeout=3) as resp:
print(resp.status)
print(await resp.text())
except asyncio.TimeoutError:
db.delete(peer)
db.commit()
print(f"{peer.ip}: Deleting node")
for peer in peers:
try:
response = await aiohttp.request('GET', 'peer.hostname', headers=app.request_headers)
print(f'Made request: {response.status}')
except asyncio.TimeoutError:
db.delete(peer)
db.commit()
print(f'{peer.hostname}: Deleted node')
await asyncio.sleep(10)
async def watch_blockchain(app):
while True:
print(f"TXN: {app.blockchain.current_transactions}")
print(f'TXN: {app.blockchain.current_transactions}')
await asyncio.sleep(2)
async def add_stuff(app):
while True:
await asyncio.sleep(1.5)
app.blockchain.current_transactions.append("a")
async def consensus():
"""
Our Consensus Algorithm. It makes sure we have a valid up-to-date chain.
@ -94,12 +107,31 @@ async def consensus():
return False
def miner():
def we_should_still_be_mining():
return True
async def mining_controller(app):
left, right = multiprocessing.Pipe()
event = multiprocessing.Event()
# Spawn a new process consisting of the miner() function
# and send the right end of the pipe to it
process = multiprocessing.Process(target=miner, args=(right, event))
process.start()
left.send({'last_hash': 123, 'difficulty': 6})
while True:
time.sleep(2)
print('Hey! Mining is happening!')
event.set()
# We'll check the pipe every 100 ms
await asyncio.sleep(1)
async def mining_controller():
p = multiprocessing.Process(target=miner, args=())
p.start()
# Check if we should still be mining
if not we_should_still_be_mining():
event.clear()
if left.poll():
result = left.recv()
print(f'A new block was found with proof: {result}')