Merge branch 'refactoring2' of gitlab.com:beardog/Onionr
commit 45dcfb9e08

@@ -12,3 +12,4 @@ run.sh
onionr/data-encrypted.dat
onionr/.onionr-lock
core
+ .vscode/*
@@ -22,16 +22,17 @@ from flask import request, Response, abort
from multiprocessing import Process
from gevent.wsgi import WSGIServer
import sys, random, threading, hmac, hashlib, base64, time, math, os, logger, config

from core import Core
+ from onionrblockapi import Block
import onionrutils, onionrcrypto

class API:
'''
Main HTTP API (Flask)
'''
def validateToken(self, token):
'''
- Validate that the client token (hmac) matches the given token
+ Validate that the client token matches the given token
'''
try:
if not hmac.compare_digest(self.clientToken, token):
@@ -51,7 +52,7 @@ class API:

config.reload()

- if config.get('devmode', True):
+ if config.get('dev_mode', True):
self._developmentMode = True
logger.set_level(logger.LEVEL_DEBUG)
else:
@@ -64,29 +65,26 @@ class API:
self._crypto = onionrcrypto.OnionrCrypto(self._core)
self._utils = onionrutils.OnionrUtils(self._core)
app = flask.Flask(__name__)
- bindPort = int(config.get('client')['port'])
+ bindPort = int(config.get('client.port', 59496))
self.bindPort = bindPort
- self.clientToken = config.get('client')['client_hmac']
+ self.clientToken = config.get('client.hmac')
self.timeBypassToken = base64.b16encode(os.urandom(32)).decode()

- self.i2pEnabled = config.get('i2p')['host']
+ self.i2pEnabled = config.get('i2p.host', False)

self.mimeType = 'text/plain'

with open('data/time-bypass.txt', 'w') as bypass:
bypass.write(self.timeBypassToken)

- if not os.environ.get("WERKZEUG_RUN_MAIN") == "true":
- logger.debug('Your web password (KEEP SECRET): ' + logger.colors.underline + self.clientToken)

if not debug and not self._developmentMode:
- hostNums = [random.randint(1, 255), random.randint(1, 255), random.randint(1, 255)]
- self.host = '127.' + str(hostNums[0]) + '.' + str(hostNums[1]) + '.' + str(hostNums[2])
+ hostOctets = [127, random.randint(0x02, 0xFF), random.randint(0x02, 0xFF), random.randint(0x02, 0xFF)]
+ self.host = '.'.join(hostOctets)
else:
self.host = '127.0.0.1'
- hostFile = open('data/host.txt', 'w')
- hostFile.write(self.host)
- hostFile.close()
+ with open('data/host.txt', 'w') as file:
+ file.write(self.host)

@app.before_request
def beforeReq():
@@ -127,7 +125,7 @@ class API:
except:
data = ''
startTime = math.floor(time.time())
- # we should keep a hash DB of requests (with hmac) to prevent replays
action = request.args.get('action')
#if not self.debug:
token = request.args.get('token')
@@ -192,8 +190,6 @@ class API:
pass
elif action == 'ping':
resp = Response("pong!")
- elif action == 'getHMAC':
- resp = Response(self._crypto.generateSymmetric())
elif action == 'getSymmetric':
resp = Response(self._crypto.generateSymmetric())
elif action == 'getDBHash':
@@ -213,13 +209,12 @@ class API:
resp = Response('')
# setData should be something the communicator initiates, not this api
elif action == 'getData':
+ resp = ''
if self._utils.validateHash(data):
if not os.path.exists('data/blocks/' + data + '.db'):
- try:
- resp = base64.b64encode(self._core.getData(data))
- except TypeError:
- resp = ""
- if resp == False:
+ block = Block(hash=data.encode(), core=self._core)
+ resp = base64.b64encode(block.getRaw().encode()).decode()
+ if len(resp) == 0:
abort(404)
resp = ""
resp = Response(resp)
@@ -258,7 +253,7 @@ class API:

return resp
if not os.environ.get("WERKZEUG_RUN_MAIN") == "true":
- logger.info('Starting client on ' + self.host + ':' + str(bindPort) + '...', timestamp=True)
+ logger.info('Starting client on ' + self.host + ':' + str(bindPort) + '...', timestamp=False)

try:
self.http_server = WSGIServer((self.host, bindPort), app)
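
validateToken() above compares the stored client token against the supplied one with hmac.compare_digest, which runs in constant time. A minimal, self-contained sketch of that pattern (illustrative names only, not part of the commit):

import base64, hmac, os

# a per-run secret, generated the same way the API builds its web token
clientToken = base64.b16encode(os.urandom(32)).decode()

def validate_token(token):
    # constant-time comparison avoids leaking how many leading characters matched
    return hmac.compare_digest(clientToken, token)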
@@ -21,6 +21,7 @@
'''
import sqlite3, requests, hmac, hashlib, time, sys, os, math, logger, urllib.parse, base64, binascii, random, json, threading
import core, onionrutils, onionrcrypto, netcontroller, onionrproofs, config, onionrplugins as plugins
+ from onionrblockapi import Block

class OnionrCommunicate:
def __init__(self, debug, developmentMode):
@@ -73,6 +74,10 @@ class OnionrCommunicate:
# Loads in and starts the enabled plugins
plugins.reload()

+ # Print nice header thing :)
+ if config.get('general.display_header', True):
+ self.header()
+
while True:
command = self._core.daemonQueue()
# Process blocks based on a timer
@@ -122,16 +127,16 @@ class OnionrCommunicate:
announceAttempts = 3
announceAttemptCount = 0
announceVal = False
- logger.info('Announcing node to ' + command[1], timestamp=True)
+ logger.info('Announcing node to %s...' % command[1], timestamp=True)
while not announceVal:
announceAttemptCount += 1
announceVal = self.performGet('announce', command[1], data=self._core.hsAdder.replace('\n', ''), skipHighFailureAddress=True)
- logger.info(announceVal)
+ # logger.info(announceVal)
if announceAttemptCount >= announceAttempts:
- logger.warn('Unable to announce to ' + command[1])
+ logger.warn('Unable to announce to %s' % command[1])
break
elif command[0] == 'runCheck':
- logger.info('Status check; looks good.')
+ logger.debug('Status check; looks good.')
open('data/.runcheck', 'w+').close()
elif command[0] == 'kex':
self.pexCount = pexTimer - 1
@@ -144,7 +149,7 @@ class OnionrCommunicate:

logger.info('Checking for callbacks with connection %s...' % data['id'])

- self.check_callbacks(data, config.get('dc_execcallbacks', True))
+ self.check_callbacks(data, config.get('general.dc_execcallbacks', True))

events.event('incoming_direct_connection', data = {'callback' : True, 'communicator' : self, 'data' : data})
except Exception as e:
@@ -187,13 +192,17 @@ class OnionrCommunicate:
id_peer_cache = {}

def registerTimer(self, timerName, rate, timerFunc=None):
- '''Register a communicator timer'''
+ '''
+ Register a communicator timer
+ '''
self.communicatorTimers[timerName] = rate
self.communicatorTimerCounts[timerName] = 0
self.communicatorTimerFuncs[timerName] = timerFunc

def timerTick(self):
- '''Increments timers "ticks" and calls funcs if applicable'''
+ '''
+ Increments timers "ticks" and calls funcs if applicable
+ '''
tName = ''
for i in self.communicatorTimers.items():
tName = i[0]
@@ -340,7 +349,7 @@ class OnionrCommunicate:
If yet another callback is requested, it can be put in the `callback` parameter.
'''

- if config.get('dc_response', True):
+ if config.get('general.dc_response', True):
data['id'] = identifier
data['sender'] = open('data/hs/hostname').read()
data['callback'] = True
@@ -475,9 +484,9 @@ class OnionrCommunicate:
lastDB = self._core.getAddressInfo(i, 'DBHash')

if lastDB == None:
- logger.debug('Fetching hash from %s, no previous known.' % str(i))
+ logger.debug('Fetching db hash from %s, no previous known.' % str(i))
else:
- logger.debug('Fetching hash from %s, %s last known' % (str(i), str(lastDB)))
+ logger.debug('Fetching db hash from %s, %s last known' % (str(i), str(lastDB)))

currentDB = self.performGet('getDBHash', i)

@@ -616,7 +625,9 @@ class OnionrCommunicate:
return

def removeBlockFromProcessingList(self, block):
- '''Remove a block from the processing list'''
+ '''
+ Remove a block from the processing list
+ '''
try:
self.blocksProcessing.remove(block)
except ValueError:
@@ -723,7 +734,8 @@ class OnionrCommunicate:
r = requests.get(url, headers=headers, proxies=proxies, allow_redirects=False, timeout=(15, 30))
retData = r.text
except requests.exceptions.RequestException as e:
- logger.debug("%s failed with peer %s" % (action, peer))
+ logger.debug('%s failed with peer %s' % (action, peer))
+ logger.debug('Error: %s' % str(e))
retData = False

if not retData:
@@ -746,10 +758,17 @@ class OnionrCommunicate:
pass
return False

+ def header(self, message = logger.colors.fg.pink + logger.colors.bold + 'Onionr' + logger.colors.reset + logger.colors.fg.pink + ' has started.'):
+ if os.path.exists('static-data/header.txt'):
+ with open('static-data/header.txt', 'rb') as file:
+ # only to stdout, not file or log or anything
+ print(file.read().decode().replace('P', logger.colors.fg.pink).replace('W', logger.colors.reset + logger.colors.bold).replace('G', logger.colors.fg.green).replace('\n', logger.colors.reset + '\n'))
+ logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n')
+
shouldRun = False
debug = True
developmentMode = False
- if config.get('devmode', True):
+ if config.get('general.dev_mode', True):
developmentMode = True
try:
if sys.argv[1] == 'run':
@@ -0,0 +1,365 @@
#!/usr/bin/env python3
'''
Onionr - P2P Microblogging Platform & Social network.

This file contains both the OnionrCommunicate class for communcating with peers
and code to operate as a daemon, getting commands from the command queue database (see core.Core.daemonQueue)
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
import sys, os, core, config, json, onionrblockapi as block, requests, time, logger, threading, onionrplugins as plugins, base64
import onionrexceptions
from defusedxml import minidom

class OnionrCommunicatorDaemon:
def __init__(self, debug, developmentMode):
logger.warn('New (unstable) communicator is being used.')

self.timers = []
self._core = core.Core(torPort=sys.argv[2])
self.nistSaltTimestamp = 0
self.powSalt = 0
self.delay = 1
self.proxyPort = sys.argv[2]
self.startTime = self._core._utils.getEpoch()

self.onlinePeers = []
self.offlinePeers = []

self.threadCounts = {}

self.shutdown = False

self.blockQueue = [] # list of new blocks to download

# Clear the daemon queue for any dead messages
if os.path.exists(self._core.queueDB):
self._core.clearDaemonQueue()

# Loads in and starts the enabled plugins
plugins.reload()

# Print nice header thing :)
if config.get('general.display_header', True):
self.header()

if debug or developmentMode:
OnionrCommunicatorTimers(self, self.heartbeat, 10)

self.getOnlinePeers()
OnionrCommunicatorTimers(self, self.daemonCommands, 5)
OnionrCommunicatorTimers(self, self.detectAPICrash, 5)
OnionrCommunicatorTimers(self, self.getOnlinePeers, 60)
OnionrCommunicatorTimers(self, self.lookupBlocks, 7)
OnionrCommunicatorTimers(self, self.getBlocks, 10)
OnionrCommunicatorTimers(self, self.clearOfflinePeer, 120)
OnionrCommunicatorTimers(self, self.lookupKeys, 125)
OnionrCommunicatorTimers(self, self.lookupAdders, 600)

# Main daemon loop, mainly for calling timers, do not do any complex operations here
while not self.shutdown:
for i in self.timers:
i.processTimer()
time.sleep(self.delay)
logger.info('Goodbye.')

def lookupKeys(self):
'''Lookup new keys'''
logger.info('LOOKING UP NEW KEYS')
tryAmount = 1
for i in range(tryAmount):
peer = self.pickOnlinePeer()
newKeys = self.peerAction(peer, action='kex')
self._core._utils.mergeKeys(newKeys)

self.decrementThreadCount('lookupKeys')
return

def lookupAdders(self):
'''Lookup new peer addresses'''
logger.info('LOOKING UP NEW ADDRESSES')
tryAmount = 1
for i in range(tryAmount):
peer = self.pickOnlinePeer()
newAdders = self.peerAction(peer, action='pex')
self._core._utils.mergeAdders(newAdders)

self.decrementThreadCount('lookupKeys')
def lookupBlocks(self):
'''Lookup new blocks'''
logger.info('LOOKING UP NEW BLOCKS')
tryAmount = 2
newBlocks = ''
for i in range(tryAmount):
peer = self.pickOnlinePeer()
newDBHash = self.peerAction(peer, 'getDBHash')
if newDBHash == False:
continue
if newDBHash != self._core.getAddressInfo(peer, 'DBHash'):
self._core.setAddressInfo(peer, 'DBHash', newDBHash)
newBlocks = self.peerAction(peer, 'getBlockHashes')
if newBlocks != False:
# if request was a success
for i in newBlocks.split('\n'):
if self._core._utils.validateHash(i):
# if newline seperated string is valid hash
if not os.path.exists('data/blocks/' + i + '.db'):
# if block does not exist on disk and is not already in block queue
if i not in self.blockQueue:
self.blockQueue.append(i)
self.decrementThreadCount('lookupBlocks')
return

def getBlocks(self):
'''download new blocks'''
for blockHash in self.blockQueue:
logger.info("ATTEMPTING TO DOWNLOAD " + blockHash)
content = self.peerAction(self.pickOnlinePeer(), 'getData', data=blockHash)
if content != False:
try:
content = content.encode()
except AttributeError:
pass
content = base64.b64decode(content)
if self._core._crypto.sha3Hash(content) == blockHash:
content = content.decode() # decode here because sha3Hash needs bytes above
metas = self._core._utils.getBlockMetadataFromData(content) # returns tuple(metadata, meta), meta is also in metadata
metadata = metas[0]
meta = metas[1]
if self._core._utils.validateMetadata(metadata):
if self._core._crypto.verifyPow(metas[2], metadata):
logger.info('Block passed proof, saving.')
self._core.setData(content)
self._core.addToBlockDB(blockHash, dataSaved=True)
else:
logger.warn('POW failed for block ' + blockHash)
else:
logger.warn('Metadata for ' + blockHash + ' is invalid.')
self.blockQueue.remove(blockHash)
else:
logger.warn('Block hash validation failed for ' + blockHash + ' got ' + self._core._crypto.sha3Hash(content))
self.decrementThreadCount('getBlocks')
return

def pickOnlinePeer(self):
'''randomly picks peer from pool without bias (using secrets module)'''
retData = ''
while True:
peerLength = len(self.onlinePeers)
if peerLength <= 0:
break
try:
# get a random online peer, securely. May get stuck in loop if network is lost or if all peers in pool magically disconnect at once
retData = self.onlinePeers[self._core._crypto.secrets.randbelow(peerLength)]
except IndexError:
pass
else:
break
return retData

def decrementThreadCount(self, threadName):
'''Decrement amount of a thread name if more than zero, called when a function meant to be run in a thread ends'''
try:
if self.threadCounts[threadName] > 0:
self.threadCounts[threadName] -= 1
except KeyError:
pass

def clearOfflinePeer(self):
'''Removes the longest offline peer to retry later'''
try:
removed = self.offlinePeers.pop(0)
except IndexError:
pass
else:
logger.debug('removed ' + removed + ' from offline list to try them again.')
self.decrementThreadCount('clearOfflinePeer')

def getOnlinePeers(self):
'''Manages the self.onlinePeers attribute list'''
logger.info('Refreshing peer pool.')
maxPeers = 4
needed = maxPeers - len(self.onlinePeers)

for i in range(needed):
self.connectNewPeer()
self.decrementThreadCount('getOnlinePeers')

def connectNewPeer(self, peer=''):
'''Adds a new random online peer to self.onlinePeers'''
retData = False
tried = self.offlinePeers
if peer != '':
if self._core._utils.validateID(peer):
peerList = [peer]
else:
raise onionrexceptions.InvalidAddress('Will not attempt connection test to invalid address')
else:
peerList = self._core.listAdders()

if len(peerList) == 0:
peerList.extend(self._core.bootstrapList)

for address in peerList:
if len(address) == 0 or address in tried or address in self.onlinePeers:
continue
if self.peerAction(address, 'ping') == 'pong!':
logger.info('connected to ' + address)
self.onlinePeers.append(address)
retData = address
break
else:
tried.append(address)
logger.debug('failed to connect to ' + address)
else:
if len(self.onlinePeers) == 0:
logger.warn('Could not connect to any peer')
return retData

def printOnlinePeers(self):
'''logs online peer list'''
if len(self.onlinePeers) == 0:
logger.warn('No online peers')
return
for i in self.onlinePeers:
logger.info(self.onlinePeers[i])

def peerAction(self, peer, action, data=''):
'''Perform a get request to a peer'''
if len(peer) == 0:
return False
logger.info('Performing ' + action + ' with ' + peer + ' on port ' + str(self.proxyPort))
url = 'http://' + peer + '/public/?action=' + action
if len(data) > 0:
url += '&data=' + data
retData = self._core._utils.doGetRequest(url, port=self.proxyPort)
if retData == False:
try:
self.onlinePeers.remove(peer)
self.getOnlinePeers() # Will only add a new peer to pool if needed
except ValueError:
pass
return retData

def heartbeat(self):
'''Show a heartbeat debug message'''
currentTime = self._core._utils.getEpoch() - self.startTime
logger.debug('heartbeat, running seconds: ' + str(currentTime))
self.decrementThreadCount('heartbeat')

def daemonCommands(self):
'''process daemon commands from daemonQueue'''
cmd = self._core.daemonQueue()

if cmd is not False:
if cmd[0] == 'shutdown':
self.shutdown = True
elif cmd[0] == 'announceNode':
self.announce(cmd[1])
elif cmd[0] == 'runCheck':
logger.debug('Status check; looks good.')
open('data/.runcheck', 'w+').close()
elif cmd[0] == 'connectedPeers':
self.printOnlinePeers()
else:
logger.info('Recieved daemonQueue command:' + cmd[0])
self.decrementThreadCount('daemonCommands')

def announce(self, peer):
'''Announce to peers'''
announceCount = 0
announceAmount = 2
for peer in self._core.listAdders():
announceCount += 1
if self.peerAction(peer, 'announce', self._core.hsAdder) == 'Success':
logger.info('Successfully introduced node to ' + peer)
break
else:
if announceCount == announceAmount:
logger.warn('Could not introduce node. Try again soon')
break

def detectAPICrash(self):
'''exit if the api server crashes/stops'''
if self._core._utils.localCommand('ping', silent=False) != 'pong':
for i in range(5):
if self._core._utils.localCommand('ping') == 'pong':
break # break for loop
time.sleep(1)
else:
# This executes if the api is NOT detected to be running
logger.error('Daemon detected API crash (or otherwise unable to reach API after long time), stopping...')
self.shutdown = True
self.decrementThreadCount('detectAPICrash')

def header(self, message = logger.colors.fg.pink + logger.colors.bold + 'Onionr' + logger.colors.reset + logger.colors.fg.pink + ' has started.'):
if os.path.exists('static-data/header.txt'):
with open('static-data/header.txt', 'rb') as file:
# only to stdout, not file or log or anything
print(file.read().decode().replace('P', logger.colors.fg.pink).replace('W', logger.colors.reset + logger.colors.bold).replace('G', logger.colors.fg.green).replace('\n', logger.colors.reset + '\n'))
logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n')

class OnionrCommunicatorTimers:
def __init__(self, daemonInstance, timerFunction, frequency, makeThread=True, threadAmount=1, maxThreads=5):
self.timerFunction = timerFunction
self.frequency = frequency
self.threadAmount = threadAmount
self.makeThread = makeThread
self.daemonInstance = daemonInstance
self.maxThreads = maxThreads
self._core = self.daemonInstance._core

self.daemonInstance.timers.append(self)
self.count = 0

def processTimer(self):
self.count += 1
try:
self.daemonInstance.threadCounts[self.timerFunction.__name__]
except KeyError:
self.daemonInstance.threadCounts[self.timerFunction.__name__] = 0

if self.count == self.frequency:
if self.makeThread:
for i in range(self.threadAmount):
if self.daemonInstance.threadCounts[self.timerFunction.__name__] >= self.maxThreads:
logger.warn(self.timerFunction.__name__ + ' has too many current threads to start anymore.')
else:
self.daemonInstance.threadCounts[self.timerFunction.__name__] += 1
newThread = threading.Thread(target=self.timerFunction)
newThread.start()
else:
self.timerFunction()
self.count = 0


shouldRun = False
debug = True
developmentMode = False
if config.get('general.dev_mode', True):
developmentMode = True
try:
if sys.argv[1] == 'run':
shouldRun = True
except IndexError:
pass
if shouldRun:
try:
OnionrCommunicatorDaemon(debug, developmentMode)
except KeyboardInterrupt:
sys.exit(1)
pass
except Exception as e:
logger.error('Error occured in Communicator', error = e, timestamp = False)
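
OnionrCommunicatorTimers above fires its function once every `frequency` passes of the daemon loop and resets its counter afterwards. A stripped-down, self-contained sketch of that counting logic, without the threading and per-function thread accounting (illustrative only, not part of the commit):

class SimpleTimer:
    def __init__(self, func, frequency):
        self.func = func
        self.frequency = frequency
        self.count = 0

    def process(self):
        # called once per loop pass; fires when the counter reaches the frequency
        self.count += 1
        if self.count == self.frequency:
            self.func()
            self.count = 0

beat = SimpleTimer(lambda: print('heartbeat'), 10)
for _ in range(30):
    beat.process()  # prints 'heartbeat' three times over 30 passes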
@@ -28,9 +28,20 @@ def get(key, default = None):
Gets the key from configuration, or returns `default`
'''

- if is_set(key):
- return get_config()[key]
+ key = str(key).split('.')
+ data = _config
+
+ last = key.pop()
+
+ for item in key:
+ if (not item in data) or (not type(data[item]) == dict):
return default
+ data = data[item]
+
+ if not last in data:
+ return default
+
+ return data[last]

def set(key, value = None, savefile = False):
'''
@@ -38,16 +49,40 @@ def set(key, value = None, savefile = False):
'''

global _config

+ key = str(key).split('.')
+ data = _config
+
+ last = key.pop()
+
+ for item in key:
+ if (not item in data) or (not type(data[item]) == dict):
+ data[item] = dict()
+ data = data[item]
+
if value is None:
- del _config[key]
+ del data[last]
else:
- _config[key] = value
+ data[last] = value

if savefile:
save()

def is_set(key):
- return key in get_config() and not get_config()[key] is None
+ key = str(key).split('.')
+ data = _config
+
+ last = key.pop()
+
+ for item in key:
+ if (not item in data) or (not type(data[item]) == dict):
+ return False
+ data = data[item]
+
+ if not last in data:
+ return False
+
+ return True

def check():
'''
@@ -71,7 +106,7 @@ def save():
check()
try:
with open(get_config_file(), 'w', encoding="utf8") as configfile:
- json.dump(get_config(), configfile, indent=2, sort_keys=True)
+ json.dump(get_config(), configfile, indent=2)
except:
logger.warn('Failed to write to configuration file.')
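
The rewritten get(), set() and is_set() above accept dotted keys such as 'client.port' and walk nested dictionaries. A self-contained sketch of the same lookup logic, using a plain dict in place of the module's _config (the values here are only examples):

_config = {'client': {'port': 59496}, 'i2p': {'host': False}}

def get(key, default=None):
    key = str(key).split('.')
    data = _config
    last = key.pop()
    for item in key:
        if (not item in data) or (not type(data[item]) == dict):
            return default
        data = data[item]
    if not last in data:
        return default
    return data[last]

print(get('client.port'))             # 59496
print(get('general.dev_mode', True))  # True, falls back to the default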
onionr/core.py
@@ -18,10 +18,9 @@
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
import sqlite3, os, sys, time, math, base64, tarfile, getpass, simplecrypt, hashlib, nacl, logger, json, netcontroller, math, config
- #from Crypto.Cipher import AES
- #from Crypto import Random
+ from onionrblockapi import Block

- import onionrutils, onionrcrypto, onionrproofs, onionrevents as events
+ import onionrutils, onionrcrypto, onionrproofs, onionrevents as events, onionrexceptions, onionrvalues

if sys.version_info < (3, 6):
try:
@@ -31,7 +30,7 @@ if sys.version_info < (3, 6):
sys.exit(1)

class Core:
- def __init__(self):
+ def __init__(self, torPort=0):
'''
Initialize Core Onionr library
'''
@@ -45,6 +44,7 @@ class Core:

self.bootstrapFileLocation = 'static-data/bootstrap-nodes.txt'
self.bootstrapList = []
+ self.requirements = onionrvalues.OnionrValues()

if not os.path.exists('data/'):
os.mkdir('data/')
@@ -111,7 +111,8 @@ class Core:
'''
Add an address to the address database (only tor currently)
'''
- if address == config.get('i2p')['ownAddr']:
+ if address == config.get('i2p.ownAddr', None):
+
return False
if self._utils.validateID(address):
conn = sqlite3.connect(self.addressDB)
@@ -139,6 +140,7 @@ class Core:

return True
else:
+ logger.debug('Invalid ID')
return False

def removeAddress(self, address):
@@ -193,6 +195,7 @@ class Core:
speed int,
success int,
DBHash text,
+ powValue text,
failure int,
lastConnect int
);
@@ -368,18 +371,17 @@ class Core:
'''
retData = False
if not os.path.exists(self.queueDB):
- conn = sqlite3.connect(self.queueDB)
- c = conn.cursor()
- # Create table
- c.execute('''CREATE TABLE commands
- (id integer primary key autoincrement, command text, data text, date text)''')
- conn.commit()
+ self.makeDaemonDB()
else:
conn = sqlite3.connect(self.queueDB)
c = conn.cursor()
+ try:
for row in c.execute('SELECT command, data, date, min(ID) FROM commands group by id'):
retData = row
break
+ except sqlite3.OperationalError:
+ self.makeDaemonDB()
+ else:
if retData != False:
c.execute('DELETE FROM commands WHERE id=?;', (retData[3],))
conn.commit()
@@ -389,6 +391,16 @@ class Core:

return retData

+ def makeDaemonDB(self):
+ '''generate the daemon queue db'''
+ conn = sqlite3.connect(self.queueDB)
+ c = conn.cursor()
+ # Create table
+ c.execute('''CREATE TABLE commands
+ (id integer primary key autoincrement, command text, data text, date text)''')
+ conn.commit()
+ conn.close()
+
def daemonQueueAdd(self, command, data=''):
'''
Add a command to the daemon queue, used by the communication daemon (communicator.py)
@@ -484,6 +496,7 @@ class Core:
trust int 6
pubkeyExchanged int 7
hashID text 8
+ pow text 9
'''
conn = sqlite3.connect(self.peerDB)
c = conn.cursor()
@@ -656,65 +669,81 @@ class Core:
conn.close()
return True

- def insertBlock(self, data, header='txt', sign=False):
+ def insertBlock(self, data, header='txt', sign=False, encryptType='', symKey='', asymPeer='', meta = {}):
'''
Inserts a block into the network
+ encryptType must be specified to encrypt a block
'''

- powProof = onionrproofs.POW(data)
- powToken = ''
- # wait for proof to complete
- try:
- while True:
- powToken = powProof.getResult()
- if powToken == False:
- time.sleep(0.3)
- continue
- powHash = powToken[0]
- powToken = base64.b64encode(powToken[1])
- try:
- powToken = powToken.decode()
- except AttributeError:
- pass
- finally:
- break
- except KeyboardInterrupt:
- logger.warn("Got keyboard interrupt while working on inserting block, stopping.")
- powProof.shutdown()
- return ''

try:
data.decode()
except AttributeError:
data = data.encode()

retData = ''
- metadata = {'type': header, 'powHash': powHash, 'powToken': powToken}
- sig = {}

- metadata = json.dumps(metadata)
- metadata = metadata.encode()
signature = ''
+ signer = ''
+ metadata = {}

- if sign:
- signature = self._crypto.edSign(metadata + b'\n' + data, self._crypto.privKey, encodeResult=True)
- ourID = self._crypto.pubKeyHashID()
- # Convert from bytes on some py versions?
+ # only use header if not set in provided meta
try:
- ourID = ourID.decode()
+ meta['type']
+ except KeyError:
+ meta['type'] = header # block type
+
+ jsonMeta = json.dumps(meta)
+
+ if encryptType in ('asym', 'sym', ''):
+ metadata['encryptType'] = encryptType
+ else:
+ raise onionrexceptions.InvalidMetadata('encryptType must be asym or sym, or blank')
+
+ # sign before encrypt, as unauthenticated crypto should not be a problem here
+ if sign:
+ signature = self._crypto.edSign(jsonMeta + data, key=self._crypto.privKey, encodeResult=True)
+ signer = self._crypto.pubKeyHashID()
+
+ if len(jsonMeta) > 1000:
+ raise onionrexceptions.InvalidMetadata('meta in json encoded form must not exceed 1000 bytes')
+
+ # encrypt block metadata/sig/content
+ if encryptType == 'sym':
+ if len(symKey) < self.requirements.passwordLength:
+ raise onionrexceptions.SecurityError('Weak encryption key')
+ jsonMeta = self._crypto.symmetricEncrypt(jsonMeta, key=symKey, returnEncoded=True)
+ data = self._crypto.symmetricEncrypt(data, key=symKey, returnEncoded=True)
+ signature = self._crypto.symmetricEncrypt(signature, key=symKey, returnEncoded=True)
+ signer = self._crypto.symmetricEncrypt(signer, key=symKey, returnEncoded=True)
+ elif encryptType == 'asym':
+ if self._utils.validatePubKey(asymPeer):
+ jsonMeta = self._crypto.pubKeyEncrypt(jsonMeta, asymPeer, encodedData=True)
+ data = self._crypto.pubKeyEncrypt(data, asymPeer, encodedData=True)
+ signature = self._crypto.pubKeyEncrypt(signature, asymPeer, encodedData=True)
+ else:
+ raise onionrexceptions.InvalidPubkey(asymPeer + ' is not a valid base32 encoded ed25519 key')
+
+ powProof = onionrproofs.POW(data)
+
+ # wait for proof to complete
+ powToken = powProof.waitForResult()
+
+ powToken = base64.b64encode(powToken[1])
+ try:
+ powToken = powToken.decode()
except AttributeError:
pass
- metadata = {'sig': signature, 'meta': metadata.decode()}
- metadata = json.dumps(metadata)
- metadata = metadata.encode()

- if len(data) == 0:
- logger.error('Will not insert empty block')
- else:
- addedHash = self.setData(metadata + b'\n' + data)
- self.addToBlockDB(addedHash, selfInsert=True)
- self.setBlockType(addedHash, header)
- retData = addedHash
+ # compile metadata
+ metadata['meta'] = jsonMeta
+ metadata['sig'] = signature
+ metadata['signer'] = signer
+ metadata['powRandomToken'] = powToken
+ metadata['time'] = str(self._utils.getEpoch())
+
+ payload = json.dumps(metadata).encode() + b'\n' + data
+ retData = self.setData(payload)
+ self.addToBlockDB(retData, selfInsert=True, dataSaved=True)

return retData

def introduceNode(self):
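
The reworked insertBlock() above ends by serialising a block as a JSON metadata header, a newline, then the (possibly encrypted) data. A minimal sketch of just that payload layout, with placeholder values standing in for the real signature, signer and proof-of-work token:

import json, time

data = b'hello world'
metadata = {
    'meta': json.dumps({'type': 'txt'}),  # block type, as insertBlock stores it
    'sig': '',                            # placeholder for the ed25519 signature
    'signer': '',                         # placeholder for the signer key hash
    'powRandomToken': '',                 # placeholder for the PoW token
    'time': str(int(time.time())),
}
payload = json.dumps(metadata).encode() + b'\n' + data
print(payload)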
@ -0,0 +1,331 @@
|
||||||
|
"""Generate cryptographically strong pseudo-random numbers suitable for
|
||||||
|
managing secrets such as account authentication, tokens, and similar.
|
||||||
|
|
||||||
|
See PEP 506 for more information.
|
||||||
|
https://www.python.org/dev/peps/pep-0506/
|
||||||
|
|
||||||
|
|
||||||
|
A. HISTORY OF THE SOFTWARE
|
||||||
|
==========================
|
||||||
|
|
||||||
|
Python was created in the early 1990s by Guido van Rossum at Stichting
|
||||||
|
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
||||||
|
as a successor of a language called ABC. Guido remains Python's
|
||||||
|
principal author, although it includes many contributions from others.
|
||||||
|
|
||||||
|
In 1995, Guido continued his work on Python at the Corporation for
|
||||||
|
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
||||||
|
in Reston, Virginia where he released several versions of the
|
||||||
|
software.
|
||||||
|
|
||||||
|
In May 2000, Guido and the Python core development team moved to
|
||||||
|
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
||||||
|
year, the PythonLabs team moved to Digital Creations, which became
|
||||||
|
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
|
||||||
|
https://www.python.org/psf/) was formed, a non-profit organization
|
||||||
|
created specifically to own Python-related Intellectual Property.
|
||||||
|
Zope Corporation was a sponsoring member of the PSF.
|
||||||
|
|
||||||
|
All Python releases are Open Source (see http://www.opensource.org for
|
||||||
|
the Open Source Definition). Historically, most, but not all, Python
|
||||||
|
releases have also been GPL-compatible; the table below summarizes
|
||||||
|
the various releases.
|
||||||
|
|
||||||
|
Release Derived Year Owner GPL-
|
||||||
|
from compatible? (1)
|
||||||
|
|
||||||
|
0.9.0 thru 1.2 1991-1995 CWI yes
|
||||||
|
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
||||||
|
1.6 1.5.2 2000 CNRI no
|
||||||
|
2.0 1.6 2000 BeOpen.com no
|
||||||
|
1.6.1 1.6 2001 CNRI yes (2)
|
||||||
|
2.1 2.0+1.6.1 2001 PSF no
|
||||||
|
2.0.1 2.0+1.6.1 2001 PSF yes
|
||||||
|
2.1.1 2.1+2.0.1 2001 PSF yes
|
||||||
|
2.1.2 2.1.1 2002 PSF yes
|
||||||
|
2.1.3 2.1.2 2002 PSF yes
|
||||||
|
2.2 and above 2.1.1 2001-now PSF yes
|
||||||
|
|
||||||
|
Footnotes:
|
||||||
|
|
||||||
|
(1) GPL-compatible doesn't mean that we're distributing Python under
|
||||||
|
the GPL. All Python licenses, unlike the GPL, let you distribute
|
||||||
|
a modified version without making your changes open source. The
|
||||||
|
GPL-compatible licenses make it possible to combine Python with
|
||||||
|
other software that is released under the GPL; the others don't.
|
||||||
|
|
||||||
|
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
||||||
|
because its license has a choice of law clause. According to
|
||||||
|
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
||||||
|
is "not incompatible" with the GPL.
|
||||||
|
|
||||||
|
Thanks to the many outside volunteers who have worked under Guido's
|
||||||
|
direction to make these releases possible.
|
||||||
|
|
||||||
|
|
||||||
|
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
||||||
|
===============================================================
|
||||||
|
|
||||||
|
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||||
|
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||||
|
otherwise using this software ("Python") in source or binary form and
|
||||||
|
its associated documentation.
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||||
|
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||||
|
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||||
|
distribute, and otherwise use Python alone or in any derivative version,
|
||||||
|
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||||
|
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||||
|
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation; All
|
||||||
|
Rights Reserved" are retained in Python alone or in any derivative version
|
||||||
|
prepared by Licensee.
|
||||||
|
|
||||||
|
3. In the event Licensee prepares a derivative work that is based on
|
||||||
|
or incorporates Python or any part thereof, and wants to make
|
||||||
|
the derivative work available to others as provided herein, then
|
||||||
|
Licensee hereby agrees to include in any such work a brief summary of
|
||||||
|
the changes made to Python.
|
||||||
|
|
||||||
|
4. PSF is making Python available to Licensee on an "AS IS"
|
||||||
|
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||||
|
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||||
|
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||||
|
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
6. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
7. Nothing in this License Agreement shall be deemed to create any
|
||||||
|
relationship of agency, partnership, or joint venture between PSF and
|
||||||
|
Licensee. This License Agreement does not grant permission to use PSF
|
||||||
|
trademarks or trade name in a trademark sense to endorse or promote
|
||||||
|
products or services of Licensee, or any third party.
|
||||||
|
|
||||||
|
8. By copying, installing or otherwise using Python, Licensee
|
||||||
|
agrees to be bound by the terms and conditions of this License
|
||||||
|
Agreement.
|
||||||
|
|
||||||
|
|
||||||
|
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
||||||
|
-------------------------------------------
|
||||||
|
|
||||||
|
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
||||||
|
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
||||||
|
Individual or Organization ("Licensee") accessing and otherwise using
|
||||||
|
this software in source or binary form and its associated
|
||||||
|
documentation ("the Software").
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this BeOpen Python License
|
||||||
|
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
||||||
|
royalty-free, world-wide license to reproduce, analyze, test, perform
|
||||||
|
and/or display publicly, prepare derivative works, distribute, and
|
||||||
|
otherwise use the Software alone or in any derivative version,
|
||||||
|
provided, however, that the BeOpen Python License is retained in the
|
||||||
|
Software, alone or in any derivative version prepared by Licensee.
|
||||||
|
|
||||||
|
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
||||||
|
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
||||||
|
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
||||||
|
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
||||||
|
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
5. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
6. This License Agreement shall be governed by and interpreted in all
|
||||||
|
respects by the law of the State of California, excluding conflict of
|
||||||
|
law provisions. Nothing in this License Agreement shall be deemed to
|
||||||
|
create any relationship of agency, partnership, or joint venture
|
||||||
|
between BeOpen and Licensee. This License Agreement does not grant
|
||||||
|
permission to use BeOpen trademarks or trade names in a trademark
|
||||||
|
sense to endorse or promote products or services of Licensee, or any
|
||||||
|
third party. As an exception, the "BeOpen Python" logos available at
|
||||||
|
http://www.pythonlabs.com/logos.html may be used according to the
|
||||||
|
permissions granted on that web page.
|
||||||
|
|
||||||
|
7. By copying, installing or otherwise using the software, Licensee
|
||||||
|
agrees to be bound by the terms and conditions of this License
|
||||||
|
Agreement.
|
||||||
|
|
||||||
|
|
||||||
|
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between the Corporation for National
|
||||||
|
Research Initiatives, having an office at 1895 Preston White Drive,
|
||||||
|
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
||||||
|
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
||||||
|
source or binary form and its associated documentation.
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this License Agreement, CNRI
|
||||||
|
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
||||||
|
license to reproduce, analyze, test, perform and/or display publicly,
|
||||||
|
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
||||||
|
alone or in any derivative version, provided, however, that CNRI's
|
||||||
|
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
||||||
|
1995-2001 Corporation for National Research Initiatives; All Rights
|
||||||
|
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
||||||
|
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.

4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.

8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.

        ACCEPT


CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------

Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.

STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
__all__ = ['choice', 'randbelow', 'randbits', 'SystemRandom',
           'token_bytes', 'token_hex', 'token_urlsafe',
           'compare_digest',
           ]


import base64
import binascii
import os

from hmac import compare_digest
from random import SystemRandom

_sysrand = SystemRandom()

randbits = _sysrand.getrandbits
choice = _sysrand.choice

def randbelow(exclusive_upper_bound):
    """Return a random int in the range [0, n)."""
    if exclusive_upper_bound <= 0:
        raise ValueError("Upper bound must be positive.")
    return _sysrand._randbelow(exclusive_upper_bound)

DEFAULT_ENTROPY = 32  # number of bytes to return by default

def token_bytes(nbytes=None):
    """Return a random byte string containing *nbytes* bytes.

    If *nbytes* is ``None`` or not supplied, a reasonable
    default is used.

    >>> token_bytes(16)  #doctest:+SKIP
    b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b'

    """
    if nbytes is None:
        nbytes = DEFAULT_ENTROPY
    return os.urandom(nbytes)

def token_hex(nbytes=None):
    """Return a random text string, in hexadecimal.

    The string has *nbytes* random bytes, each byte converted to two
    hex digits. If *nbytes* is ``None`` or not supplied, a reasonable
    default is used.

    >>> token_hex(16)  #doctest:+SKIP
    'f9bf78b9a18ce6d46a0cd2b0b86df9da'

    """
    return binascii.hexlify(token_bytes(nbytes)).decode('ascii')

def token_urlsafe(nbytes=None):
    """Return a random URL-safe text string, in Base64 encoding.

    The string has *nbytes* random bytes. If *nbytes* is ``None``
    or not supplied, a reasonable default is used.

    >>> token_urlsafe(16)  #doctest:+SKIP
    'Drmhze6EPcv0fN_81Bj-nA'

    """
    tok = token_bytes(nbytes)
    return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii')
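The module above is the vendored fallback for the standard secrets API. The snippet below is an illustrative usage sketch, not part of the commit; the calls behave the same whether they come from this vendored copy or from Python 3.6+'s standard library.

# Illustrative usage of the secrets helpers above (not part of the diff).
import secrets

web_token = secrets.token_hex(32)        # 64 hex chars, e.g. for an HMAC key
url_id = secrets.token_urlsafe(16)       # URL-safe Base64 with padding stripped
pick = secrets.choice(['tor', 'i2p'])    # uniform choice from a sequence
n = secrets.randbelow(100)               # integer in [0, 100)
print(web_token, url_id, pick, n)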
@@ -19,6 +19,7 @@
 '''
 
 import subprocess, os, random, sys, logger, time, signal
+from onionrblockapi import Block
 
 class NetController:
     '''
@@ -102,7 +103,7 @@ DataDirectory data/tordata/
             logger.fatal("Got keyboard interrupt")
             return False
 
-        logger.info('Finished starting Tor', timestamp=True)
+        logger.debug('Finished starting Tor.', timestamp=True)
         self.readyState = True
 
         myID = open('data/hs/hostname', 'r')
onionr/onionr.py (174 lines changed)

@@ -31,6 +31,8 @@ import api, core, config, logger, onionrplugins as plugins, onionrevents as even
 import onionrutils
 from onionrutils import OnionrUtils
 from netcontroller import NetController
+from onionrblockapi import Block
+import onionrproofs
 
 try:
     from urllib3.contrib.socks import SOCKSProxyManager
@@ -38,8 +40,9 @@ except ImportError:
     raise Exception("You need the PySocks module (for use with socks5 proxy to use Tor)")
 
 ONIONR_TAGLINE = 'Anonymous P2P Platform - GPLv3 - https://Onionr.VoidNet.Tech'
-ONIONR_VERSION = '0.0.0' # for debugging and stuff
-API_VERSION = '2' # increments of 1; only change when something fundemental about how the API works changes. This way other nodes knows how to communicate without learning too much information about you.
+ONIONR_VERSION = '0.1.0' # for debugging and stuff
+ONIONR_VERSION_TUPLE = tuple(ONIONR_VERSION.split('.')) # (MAJOR, MINOR, VERSION)
+API_VERSION = '3' # increments of 1; only change when something fundemental about how the API works changes. This way other nodes knows how to communicate without learning too much information about you.
 
 class Onionr:
     def __init__(self):
@@ -64,22 +67,22 @@ class Onionr:
             config.set_config(json.loads(open('static-data/default_config.json').read())) # this is the default config, it will be overwritten if a config file already exists. Else, it saves it
         else:
             # the default config file doesn't exist, try hardcoded config
-            config.set_config({'devmode': True, 'log': {'file': {'output': True, 'path': 'data/output.log'}, 'console': {'output': True, 'color': True}}})
+            config.set_config({'dev_mode': True, 'log': {'file': {'output': True, 'path': 'data/output.log'}, 'console': {'output': True, 'color': True}}})
         if not data_exists:
             config.save()
         config.reload() # this will read the configuration file into memory
 
         settings = 0b000
-        if config.get('log', {'console': {'color': True}})['console']['color']:
+        if config.get('log.console.color', True):
             settings = settings | logger.USE_ANSI
-        if config.get('log', {'console': {'output': True}})['console']['output']:
+        if config.get('log.console.output', True):
             settings = settings | logger.OUTPUT_TO_CONSOLE
-        if config.get('log', {'file': {'output': True}})['file']['output']:
+        if config.get('log.file.output', True):
             settings = settings | logger.OUTPUT_TO_FILE
-        logger.set_file(config.get('log', {'file': {'path': 'data/output.log'}})['file']['path'])
+        logger.set_file(config.get('log.file.path', '/tmp/onionr.log'))
         logger.set_settings(settings)
 
-        if str(config.get('devmode', True)).lower() == 'true':
+        if str(config.get('general.dev_mode', True)).lower() == 'true':
            self._developmentMode = True
            logger.set_level(logger.LEVEL_DEBUG)
         else:
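Several hunks in this commit replace nested-dict config access with dotted keys plus a default, e.g. config.get('log.console.color', True). A minimal sketch of how such a lookup can work over a plain nested dict is shown below; this is an assumed illustration, not Onionr's actual config module.

# Sketch: dotted-key lookup with a default over a nested dict (assumed behaviour).
def get(conf, key, default=None):
    node = conf
    for part in key.split('.'):
        if isinstance(node, dict) and part in node:
            node = node[part]
        else:
            return default
    return node

conf = {'log': {'console': {'color': True, 'output': True}}}
print(get(conf, 'log.console.color', True))          # True
print(get(conf, 'log.file.path', '/tmp/onionr.log'))  # missing key falls back to the default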
@@ -89,6 +92,8 @@ class Onionr:
         self.onionrCore = core.Core()
         self.onionrUtils = OnionrUtils(self.onionrCore)
 
+        self.userOS = platform.system()
+
         # Handle commands
 
         self.debug = False # Whole application debugging
@@ -144,7 +149,7 @@ class Onionr:
                 randomPort = random.randint(1024, 65535)
                 if self.onionrUtils.checkPort(randomPort):
                     break
-            config.set('client', {'participate': 'true', 'client_hmac': base64.b16encode(os.urandom(32)).decode('utf-8'), 'port': randomPort, 'api_version': API_VERSION}, True)
+            config.set('client', {'participate': True, 'hmac': base64.b16encode(os.urandom(32)).decode('utf-8'), 'port': randomPort, 'api_version': API_VERSION}, True)
 
         self.cmds = {
             '': self.showHelpSuggestion,
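The hunk above generates the client token (now stored under client.hmac) as 32 random bytes, base16-encoded. The sketch below shows that generation line together with an assumed constant-time comparison of a presented token; only the generation expression is taken from the diff.

# Sketch: generate a client token as the diff does and compare it in constant time.
import base64, os, hmac

client_token = base64.b16encode(os.urandom(32)).decode('utf-8')  # as in config.set('client', ...)

def validate(presented, expected=client_token):
    # hmac.compare_digest avoids leaking information through timing
    return hmac.compare_digest(presented, expected)

print(validate(client_token))   # True
print(validate('DEADBEEF'))     # False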
@@ -191,12 +196,18 @@ class Onionr:
             'add-addr': self.addAddress,
             'addaddr': self.addAddress,
             'addaddress': self.addAddress,
-            'addfile': self.addFile,
+
+            'add-file': self.addFile,
+            'addfile': self.addFile,
+            'listconn': self.listConn,
+
+            'import-blocks': self.onionrUtils.importNewBlocks,
             'importblocks': self.onionrUtils.importNewBlocks,
+
             'introduce': self.onionrCore.introduceNode,
-            'connect': self.addAddress
+            'connect': self.addAddress,
+
+            'getpassword': self.getWebPassword
         }
 
         self.cmdhelp = {
@@ -206,6 +217,7 @@ class Onionr:
             'start': 'Starts the Onionr daemon',
             'stop': 'Stops the Onionr daemon',
             'stats': 'Displays node statistics',
+            'getpassword': 'Displays the web password',
             'enable-plugin': 'Enables and starts a plugin',
             'disable-plugin': 'Disables and stops a plugin',
             'reload-plugin': 'Reloads a plugin',
@@ -215,8 +227,9 @@ class Onionr:
             'add-msg': 'Broadcasts a message to the Onionr network',
             'pm': 'Adds a private message to block',
             'get-pms': 'Shows private messages sent to you',
-            'addfile': 'Create an Onionr block from a file',
-            'importblocks': 'import blocks from the disk (Onionr is transport-agnostic!)',
+            'add-file': 'Create an Onionr block from a file',
+            'import-blocks': 'import blocks from the disk (Onionr is transport-agnostic!)',
+            'listconn': 'list connected peers',
             'introduce': 'Introduce your node to the public Onionr network',
         }
 
@@ -245,6 +258,12 @@ class Onionr:
     def getCommands(self):
         return self.cmds
 
+    def listConn(self):
+        self.onionrCore.daemonQueueAdd('connectedPeers')
+
+    def getWebPassword(self):
+        return config.get('client.hmac')
+
     def getHelp(self):
         return self.cmdhelp
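The cmds dict above maps each CLI verb to a bound method and cmdhelp carries the matching help text. A stripped-down sketch of that dispatch pattern follows; the command names and handlers here are illustrative stand-ins, not Onionr's.

# Sketch of dict-based command dispatch (illustrative names only).
import sys

def list_conn():
    print('listing connections...')

def get_password():
    print('(web password would be printed here)')

cmds = {'listconn': list_conn, 'getpassword': get_password}
cmdhelp = {'listconn': 'list connected peers', 'getpassword': 'Displays the web password'}

command = sys.argv[1] if len(sys.argv) >= 2 else ''
if command in cmds:
    cmds[command]()
else:
    for name, desc in cmdhelp.items():
        print('%s: %s' % (name, desc))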
@@ -348,14 +367,31 @@ class Onionr:
         '''
             Adds a peer (?)
         '''
 
         try:
             newPeer = sys.argv[2]
         except:
             pass
         else:
+            if self.onionrUtils.hasKey(newPeer):
+                logger.info('We already have that key')
+                return
+            if not '-' in newPeer:
+                logger.info('Since no POW token was supplied for that key, one is being generated')
+                proof = onionrproofs.POW(newPeer)
+                while True:
+                    result = proof.getResult()
+                    if result == False:
+                        time.sleep(0.5)
+                    else:
+                        break
+                newPeer += '-' + base64.b64encode(result[1]).decode()
+                logger.info(newPeer)
+
             logger.info("Adding peer: " + logger.colors.underline + newPeer)
-            self.onionrCore.addPeer(newPeer)
+            if self.onionrUtils.mergeKeys(newPeer):
+                logger.info('Successfully added key')
+            else:
+                logger.error('Failed to add key')
 
         return
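In the addPeer hunk, a key supplied without a '-' separator gets a proof-of-work token computed for it and appended as key + '-' + base64(proof). The sketch below only shows composing and splitting that combined string; the key and proof bytes are stand-ins, not real Onionr values.

# Sketch: compose and split the "<pubkey>-<base64 proof>" form used above (stand-in data).
import base64, os

pub_key = 'EXAMPLEPUBKEYBASE32XXXXXXXXXXXXX'   # stand-in for a real peer key
proof_bytes = os.urandom(8)                     # stand-in for POW(newPeer).getResult()[1]

combined = pub_key + '-' + base64.b64encode(proof_bytes).decode()
key_part, token_part = combined.split('-', 1)   # standard base64 never contains '-'

assert key_part == pub_key
assert base64.b64decode(token_part) == proof_bytes
print(combined)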
@@ -390,12 +426,12 @@ class Onionr:
         except KeyboardInterrupt:
             return
 
-        #addedHash = self.onionrCore.setData(messageToAdd)
-        addedHash = self.onionrCore.insertBlock(messageToAdd, header='txt')
-        #self.onionrCore.addToBlockDB(addedHash, selfInsert=True)
-        #self.onionrCore.setBlockType(addedHash, 'txt')
-        if addedHash != '':
+        #addedHash = Block(type = 'txt', content = messageToAdd).save()
+        addedHash = self.onionrCore.insertBlock(messageToAdd)
+        if addedHash != None:
             logger.info("Message inserted as as block %s" % addedHash)
+        else:
+            logger.error('Failed to insert block.', timestamp = False)
         return
 
     def getPMs(self):
@@ -463,7 +499,12 @@ class Onionr:
 
             os.makedirs(plugins.get_plugins_folder(plugin_name))
             with open(plugins.get_plugins_folder(plugin_name) + '/main.py', 'a') as main:
-                main.write(open('static-data/default_plugin.py').read().replace('$user', os.getlogin()).replace('$date', datetime.datetime.now().strftime('%Y-%m-%d')).replace('$name', plugin_name))
+                contents = ''
+                with open('static-data/default_plugin.py', 'rb') as file:
+                    contents = file.read().decode()
+
+                # TODO: Fix $user. os.getlogin() is B U G G Y
+                main.write(contents.replace('$user', 'some random developer').replace('$date', datetime.datetime.now().strftime('%Y-%m-%d')).replace('$name', plugin_name))
 
             with open(plugins.get_plugins_folder(plugin_name) + '/info.json', 'a') as main:
                 main.write(json.dumps({'author' : 'anonymous', 'description' : 'the default description of the plugin', 'version' : '1.0'}))
@@ -494,12 +535,12 @@ class Onionr:
 
         logger.info('Do ' + logger.colors.bold + sys.argv[0] + ' --help' + logger.colors.reset + logger.colors.fg.green + ' for Onionr help.')
 
-    def start(self, input = False):
+    def start(self, input = False, override = False):
         '''
             Starts the Onionr daemon
         '''
 
-        if os.path.exists('.onionr-lock'):
+        if os.path.exists('.onionr-lock') and not override:
             logger.fatal('Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).')
         else:
             if not self.debug and not self._developmentMode:
|
||||||
'''
|
'''
|
||||||
Starts the Onionr communication daemon
|
Starts the Onionr communication daemon
|
||||||
'''
|
'''
|
||||||
|
communicatorDaemon = './communicator.py'
|
||||||
if not os.environ.get("WERKZEUG_RUN_MAIN") == "true":
|
if not os.environ.get("WERKZEUG_RUN_MAIN") == "true":
|
||||||
if self._developmentMode:
|
if self._developmentMode:
|
||||||
logger.warn('DEVELOPMENT MODE ENABLED (THIS IS LESS SECURE!)')
|
logger.warn('DEVELOPMENT MODE ENABLED (THIS IS LESS SECURE!)', timestamp = False)
|
||||||
net = NetController(config.get('client')['port'])
|
net = NetController(config.get('client.port', 59496))
|
||||||
logger.info('Tor is starting...')
|
logger.info('Tor is starting...')
|
||||||
if not net.startTor():
|
if not net.startTor():
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
logger.info('Started Tor .onion service: ' + logger.colors.underline + net.myID)
|
logger.info('Started .onion service: ' + logger.colors.underline + net.myID)
|
||||||
logger.info('Our Public key: ' + self.onionrCore._crypto.pubKey)
|
logger.info('Our Public key: ' + self.onionrCore._crypto.pubKey)
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
subprocess.Popen(["./communicator.py", "run", str(net.socksPort)])
|
try:
|
||||||
|
if config.get('general.newCommunicator', False):
|
||||||
|
communicatorDaemon = './communicator2.py'
|
||||||
|
logger.info('Using new communicator')
|
||||||
|
except NameError:
|
||||||
|
pass
|
||||||
|
#TODO make runable on windows
|
||||||
|
subprocess.Popen([communicatorDaemon, "run", str(net.socksPort)])
|
||||||
logger.debug('Started communicator')
|
logger.debug('Started communicator')
|
||||||
events.event('daemon_start', onionr = self)
|
events.event('daemon_start', onionr = self)
|
||||||
api.API(self.debug)
|
api.API(self.debug)
|
||||||
|
@ -542,7 +590,7 @@ class Onionr:
|
||||||
logger.warn('Killing the running daemon...', timestamp = False)
|
logger.warn('Killing the running daemon...', timestamp = False)
|
||||||
try:
|
try:
|
||||||
events.event('daemon_stop', onionr = self)
|
events.event('daemon_stop', onionr = self)
|
||||||
net = NetController(config.get('client')['port'])
|
net = NetController(config.get('client.port', 59496))
|
||||||
try:
|
try:
|
||||||
self.onionrUtils.localCommand('shutdown')
|
self.onionrUtils.localCommand('shutdown')
|
||||||
except requests.exceptions.ConnectionError:
|
except requests.exceptions.ConnectionError:
|
||||||
@@ -561,11 +609,16 @@ class Onionr:
 
         try:
             # define stats messages here
+            totalBlocks = len(Block.getBlocks())
+            signedBlocks = len(Block.getBlocks(signed = True))
+            powToken = self.onionrCore._crypto.pubKeyPowToken
             messages = {
                 # info about local client
                 'Onionr Daemon Status' : ((logger.colors.fg.green + 'Online') if self.onionrUtils.isCommunicatorRunning(timeout = 2) else logger.colors.fg.red + 'Offline'),
                 'Public Key' : self.onionrCore._crypto.pubKey,
-                'Address' : self.get_hostname(),
+                'POW Token' : powToken,
+                'Combined' : self.onionrCore._crypto.pubKey + '-' + powToken,
+                'Node Address' : self.get_hostname(),
 
                 # file and folder size stats
                 'div1' : True, # this creates a solid line across the screen, a div
@@ -576,7 +629,9 @@ class Onionr:
                 # count stats
                 'div2' : True,
                 'Known Peers Count' : str(len(self.onionrCore.listPeers()) - 1),
-                'Enabled Plugins Count' : str(len(config.get('plugins')['enabled'])) + ' / ' + str(len(os.listdir('data/plugins/')))
+                'Enabled Plugins Count' : str(len(config.get('plugins.enabled', list()))) + ' / ' + str(len(os.listdir('data/plugins/'))),
+                'Known Blocks Count' : str(totalBlocks),
+                'Percent Blocks Signed' : str(round(100 * signedBlocks / max(totalBlocks, 1), 2)) + '%'
             }
 
             # color configuration
@@ -591,19 +646,27 @@ class Onionr:
 
             # pre-processing
             maxlength = 0
+            width = self.getConsoleWidth()
             for key, val in messages.items():
                 if not (type(val) is bool and val is True):
                     maxlength = max(len(key), maxlength)
+            prewidth = maxlength + len(' | ')
+            groupsize = width - prewidth - len('[+] ')
 
             # generate stats table
             logger.info(colors['title'] + 'Onionr v%s Statistics' % ONIONR_VERSION + colors['reset'])
-            logger.info(colors['border'] + '─' * (maxlength + 1) + '┐' + colors['reset'])
+            logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
             for key, val in messages.items():
                 if not (type(val) is bool and val is True):
-                    logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' │ ' + colors['reset'] + colors['val'] + str(val) + colors['reset'])
+                    val = [str(val)[i:i + groupsize] for i in range(0, len(str(val)), groupsize)]
+
+                    logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(val.pop(0)) + colors['reset'])
+
+                    for value in val:
+                        logger.info(' ' * maxlength + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(value) + colors['reset'])
                 else:
-                    logger.info(colors['border'] + '─' * (maxlength + 1) + '┤' + colors['reset'])
+                    logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
-            logger.info(colors['border'] + '─' * (maxlength + 1) + '┘' + colors['reset'])
+            logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
         except Exception as e:
             logger.error('Failed to generate statistics table.', error = e, timestamp = False)
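The rewritten table code wraps long values (public keys, addresses) to the console width by slicing the value into groupsize-character pieces: the first piece prints on the key's row and the rest on aligned continuation rows. A standalone sketch of that wrapping is below; the column widths are made up.

# Sketch of the fixed-width value wrapping used in the stats table (widths invented).
maxlength = 12          # widest key
groupsize = 24          # characters of value per row

key = 'Public Key'
val = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghij'

pieces = [val[i:i + groupsize] for i in range(0, len(val), groupsize)]
print(key.rjust(maxlength) + ' | ' + pieces.pop(0))
for piece in pieces:
    print(' ' * maxlength + ' | ' + piece)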
@@ -637,19 +700,40 @@ class Onionr:
         except Exception:
             return None
 
-    def addFile(self):
-        '''command to add a file to the onionr network'''
-        if len(sys.argv) >= 2:
-            newFile = sys.argv[2]
-            logger.info('Attempting to add file...')
-            try:
-                with open(newFile, 'rb') as new:
-                    new = new.read()
-            except FileNotFoundError:
-                logger.warn('That file does not exist. Improper path?')
-            else:
-                logger.debug(new)
-                logger.info(self.onionrCore.insertBlock(new, header='bin'))
+    def getConsoleWidth(self):
+        '''
+            Returns an integer, the width of the terminal/cmd window
+        '''
+
+        columns = 80
+
+        try:
+            columns = int(os.popen('stty size', 'r').read().split()[1])
+        except:
+            # if it errors, it's probably windows, so default to 80.
+            pass
+
+        return columns
+
+    def addFile(self):
+        '''
+            Adds a file to the onionr network
+        '''
+
+        if len(sys.argv) >= 3:
+            filename = sys.argv[2]
+            contents = None
+
+            if not os.path.exists(filename):
+                logger.warn('That file does not exist. Improper path?')
+
+            try:
+                blockhash = Block.createChain(file = filename)
+                logger.info('File %s saved in block %s.' % (filename, blockhash))
+            except:
+                logger.error('Failed to save file in block.', timestamp = False)
+        else:
+            logger.error('%s add-file <filename>' % sys.argv[0], timestamp = False)
 
 Onionr()
@@ -18,35 +18,25 @@
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''
 
-import core as onionrcore, logger
-import json, os, datetime
+import core as onionrcore, logger, config
+import json, os, sys, datetime, base64
 
 class Block:
-    def __init__(self, hash = None, core = None):
-        '''
-            Initializes Onionr
-
-            Inputs:
-            - hash (str): the hash of the block to be imported, if any
-            - core (Core/str):
-              - if (Core): this is the Core instance to be used, don't create a new one
-              - if (str): treat `core` as the block content, and instead, treat `hash` as the block type
-
-            Outputs:
-            - (Block): the new Block instance
-        '''
-
-        # input from arguments
-        if (type(hash) == str) and (type(core) == str):
-            self.btype = hash
-            self.bcontent = core
-            self.hash = None
-            self.core = None
-        else:
-            self.btype = ''
-            self.bcontent = ''
-            self.hash = hash
-            self.core = core
+    blockCacheOrder = list() # NEVER write your own code that writes to this!
+    blockCache = dict() # should never be accessed directly, look at Block.getCache()
+
+    def __init__(self, hash = None, core = None, type = None, content = None):
+        # take from arguments
+        # sometimes people input a bytes object instead of str in `hash`
+        try:
+            hash = hash.decode()
+        except AttributeError:
+            pass
+
+        self.hash = hash
+        self.core = core
+        self.btype = type
+        self.bcontent = content
 
         # initialize variables
         self.valid = True
@@ -57,14 +47,20 @@ class Block:
         self.signature = None
         self.signedData = None
         self.blockFile = None
+        self.parent = None
         self.bheader = {}
         self.bmetadata = {}
 
         # handle arguments
         if self.getCore() is None:
             self.core = onionrcore.Core()
 
+        # update the blocks' contents if it exists
         if not self.getHash() is None:
-            self.update()
+            if not self.update():
+                logger.debug('Failed to open block %s.' % self.getHash())
+        else:
+            logger.debug('Did not update block')
 
         # logic
@@ -92,13 +88,23 @@ class Block:
             if blockdata is None:
                 filelocation = file
 
+                readfile = True
+
                 if filelocation is None:
                     if self.getHash() is None:
                         return False
+                    elif self.getHash() in Block.getCache():
+                        # get the block from cache, if it's in it
+                        blockdata = Block.getCache(self.getHash())
+                        readfile = False
+
+                    # read from file if it's still None
+                    if blockdata is None:
                         filelocation = 'data/blocks/%s.dat' % self.getHash()
 
-                blockdata = open(filelocation, 'rb').read().decode('utf-8')
+                if readfile:
+                    with open(filelocation, 'rb') as f:
+                        blockdata = f.read().decode()
 
                 self.blockFile = filelocation
             else:
@@ -108,12 +114,13 @@ class Block:
             self.raw = str(blockdata)
             self.bheader = json.loads(self.getRaw()[:self.getRaw().index('\n')])
             self.bcontent = self.getRaw()[self.getRaw().index('\n') + 1:]
-            self.bmetadata = json.loads(self.getHeader('meta'))
-            self.btype = self.getMetadata('type')
-            self.powHash = self.getMetadata('powHash')
-            self.powToken = self.getMetadata('powToken')
+            self.bmetadata = json.loads(self.getHeader('meta', None))
+            self.parent = self.getMetadata('parent', None)
+            self.btype = self.getMetadata('type', None)
+            self.powHash = self.getMetadata('powHash', None)
+            self.powToken = self.getMetadata('powToken', None)
             self.signed = ('sig' in self.getHeader() and self.getHeader('sig') != '')
-            self.signature = (None if not self.isSigned() else self.getHeader('sig'))
+            self.signature = self.getHeader('sig', None)
             self.signedData = (None if not self.isSigned() else self.getHeader('meta') + '\n' + self.getContent())
             self.date = self.getCore().getBlockDate(self.getHash())
@@ -121,6 +128,10 @@ class Block:
                 self.date = datetime.datetime.fromtimestamp(self.getDate())
 
             self.valid = True
+
+            if len(self.getRaw()) <= config.get('allocations.blockCache', 500000):
+                self.cache()
+
             return True
         except Exception as e:
             logger.error('Failed to update block data.', error = e, timestamp = False)
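The update() hunk above shows the on-disk block layout: the first line is a JSON header whose meta field is itself JSON (type, parent, powHash, powToken) and whose sig field holds the signature; everything after the first newline is the content. A minimal parse of that layout follows; the sample block string is fabricated for illustration.

# Sketch: parse a block laid out as "<json header>\n<content>" (sample data invented).
import json

raw = '{"meta": "{\\"type\\": \\"txt\\", \\"parent\\": null}", "sig": ""}\nhello world'

header = json.loads(raw[:raw.index('\n')])
content = raw[raw.index('\n') + 1:]
meta = json.loads(header['meta'])

print(meta.get('type'))         # txt
print(content)                  # hello world
print(bool(header.get('sig')))  # False, so the block is unsigned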
@@ -163,7 +174,7 @@ class Block:
             else:
                 self.hash = self.getCore().insertBlock(self.getContent(), header = self.getType(), sign = sign)
                 self.update()
-                return True
+                return self.getHash()
             else:
                 logger.warn('Not writing block; it is invalid.')
         except Exception as e:
@@ -212,7 +223,7 @@ class Block:
 
         return str(self.raw)
 
-    def getHeader(self, key = None):
+    def getHeader(self, key = None, default = None):
         '''
             Returns the header information
@@ -224,11 +235,12 @@ class Block:
         '''
 
         if not key is None:
+            if key in self.getHeader():
                 return self.getHeader()[key]
-        else:
+            return default
         return self.bheader
 
-    def getMetadata(self, key = None):
+    def getMetadata(self, key = None, default = None):
         '''
             Returns the metadata information
@@ -240,8 +252,9 @@ class Block:
         '''
 
         if not key is None:
+            if key in self.getMetadata():
                 return self.getMetadata()[key]
-        else:
+            return default
         return self.bmetadata
 
     def getContent(self):
@@ -254,6 +267,24 @@ class Block:
 
         return str(self.bcontent)
 
+    def getParent(self):
+        '''
+            Returns the Block's parent Block, or None
+
+            Outputs:
+            - (Block): the Block's parent
+        '''
+
+        if type(self.parent) == str:
+            if self.parent == self.getHash():
+                self.parent = self
+            elif Block.exists(self.parent):
+                self.parent = Block(self.getMetadata('parent'), core = self.getCore())
+            else:
+                self.parent = None
+
+        return self.parent
+
     def getDate(self):
         '''
             Returns the date that the block was received, if loaded from file
@@ -344,12 +375,32 @@ class Block:
             - btype (str): the type of block to be set to
 
             Outputs:
-            - (Block): the block instance
+            - (Block): the Block instance
         '''
 
         self.btype = btype
         return self
 
+    def setMetadata(self, key, val):
+        '''
+            Sets a custom metadata value
+
+            Metadata should not store block-specific data structures.
+
+            Inputs:
+            - key (str): the key
+            - val: the value (type is irrelevant)
+
+            Outputs:
+            - (Block): the Block instance
+        '''
+
+        if key == 'parent' and (not val is None) and (not val == self.getParent().getHash()):
+            self.setParent(val)
+        else:
+            self.bmetadata[key] = val
+        return self
+
     def setContent(self, bcontent):
         '''
             Sets the contents of the block
@@ -358,13 +409,31 @@ class Block:
             - bcontent (str): the contents to be set to
 
             Outputs:
-            - (Block): the block instance
+            - (Block): the Block instance
         '''
 
         self.bcontent = str(bcontent)
         return self
 
-    # static
+    def setParent(self, parent):
+        '''
+            Sets the Block's parent
+
+            Inputs:
+            - parent (Block/str): the Block's parent, to be stored in metadata
+
+            Outputs:
+            - (Block): the Block instance
+        '''
+
+        if type(parent) == str:
+            parent = Block(parent, core = self.getCore())
+
+        self.parent = parent
+        self.setMetadata('parent', (None if parent is None else self.getParent().getHash()))
+        return self
+
+    # static functions
 
     def getBlocks(type = None, signer = None, signed = None, reverse = False, core = None):
         '''
|
||||||
|
|
||||||
if relevant:
|
if relevant:
|
||||||
relevant_blocks.append(block)
|
relevant_blocks.append(block)
|
||||||
|
|
||||||
if bool(reverse):
|
if bool(reverse):
|
||||||
relevant_blocks.reverse()
|
relevant_blocks.reverse()
|
||||||
|
|
||||||
|
@@ -420,6 +488,156 @@ class Block:
 
         return list()
 
+    def mergeChain(child, file = None, maximumFollows = 32, core = None):
+        '''
+            Follows a child Block to its root parent Block, merging content
+
+            Inputs:
+            - child (str/Block): the child Block to be followed
+            - file (str/file): the file to write the content to, instead of returning it
+            - maximumFollows (int): the maximum number of Blocks to follow
+        '''
+
+        # validate data and instantiate Core
+        core = (core if not core is None else onionrcore.Core())
+        maximumFollows = max(0, maximumFollows)
+
+        # type conversions
+        if type(child) == list:
+            child = child[-1]
+        if type(child) == str:
+            child = Block(child)
+        if (not file is None) and (type(file) == str):
+            file = open(file, 'ab')
+
+        # only store hashes to avoid intensive memory usage
+        blocks = [child.getHash()]
+
+        # generate a list of parent Blocks
+        while True:
+            # end if the maximum number of follows has been exceeded
+            if len(blocks) - 1 >= maximumFollows:
+                break
+
+            block = Block(blocks[-1], core = core).getParent()
+
+            # end if there is no parent Block
+            if block is None:
+                break
+
+            # end if the Block is pointing to a previously parsed Block
+            if block.getHash() in blocks:
+                break
+
+            # end if the block is not valid
+            if not block.isValid():
+                break
+
+            blocks.append(block.getHash())
+
+        buffer = ''
+
+        # combine block contents
+        for hash in blocks:
+            block = Block(hash, core = core)
+            contents = block.getContent()
+            contents = base64.b64decode(contents.encode())
+
+            if file is None:
+                buffer += contents.decode()
+            else:
+                file.write(contents)
+
+        return (None if not file is None else buffer)
+
+    def createChain(data = None, chunksize = 99800, file = None, type = 'chunk', sign = True, encrypt = False, verbose = False):
+        '''
+            Creates a chain of blocks to store larger amounts of data
+
+            The chunksize is set to 99800 because it provides the least amount of PoW for the most amount of data.
+
+            Inputs:
+            - data (*): if `file` is None, the data to be stored in blocks
+            - file (file/str): the filename or file object to read from (or None to read `data` instead)
+            - chunksize (int): the number of bytes per block chunk
+            - type (str): the type header for each of the blocks
+            - sign (bool): whether or not to sign each block
+            - encrypt (str): the public key to encrypt to, or False to disable encryption
+            - verbose (bool): whether or not to return a tuple containing more info
+
+            Outputs:
+            - if `verbose`:
+              - (tuple):
+                - (str): the child block hash
+                - (list): all block hashes associated with storing the file
+            - if not `verbose`:
+              - (str): the child block hash
+        '''
+
+        blocks = list()
+
+        # initial datatype checks
+        if data is None and file is None:
+            return blocks
+        elif not (file is None or (isinstance(file, str) and os.path.exists(file))):
+            return blocks
+        elif isinstance(file, str):
+            file = open(file, 'rb')
+        if not isinstance(data, str):
+            data = str(data)
+
+        if not file is None:
+            filesize = os.stat(file.name).st_size
+            offset = filesize % chunksize
+            maxtimes = int(filesize / chunksize)
+
+            for times in range(0, maxtimes + 1):
+                # read chunksize bytes from the file (end -> beginning)
+                if times < maxtimes:
+                    file.seek(- ((times + 1) * chunksize), 2)
+                    content = file.read(chunksize)
+                else:
+                    file.seek(0, 0)
+                    content = file.read(offset)
+
+                # encode it- python is really bad at handling certain bytes that
+                # are often present in binaries.
+                content = base64.b64encode(content).decode()
+
+                # if it is the end of the file, exit
+                if not content:
+                    break
+
+                # create block
+                block = Block()
+                block.setType(type)
+                block.setContent(content)
+                block.setParent((blocks[-1] if len(blocks) != 0 else None))
+                hash = block.save(sign = sign)
+
+                # remember the hash in cache
+                blocks.append(hash)
+        elif not data is None:
+            for content in reversed([data[n:n + chunksize] for n in range(0, len(data), chunksize)]):
+                # encode chunk with base64
+                content = base64.b64encode(content.encode()).decode()
+
+                # create block
+                block = Block()
+                block.setType(type)
+                block.setContent(content)
+                block.setParent((blocks[-1] if len(blocks) != 0 else None))
+                hash = block.save(sign = sign)
+
+                # remember the hash in cache
+                blocks.append(hash)
+
+        # return different things depending on verbosity
+        if verbose:
+            return (blocks[-1], blocks)
+        return blocks[-1]
+
     def exists(hash):
         '''
             Checks if a block is saved to file or not
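createChain above splits data into chunks (reading the file from its end toward its beginning), base64-encodes each chunk, and stores each one in a block whose parent metadata points at the previously saved block; mergeChain then walks child to parent and concatenates the decoded chunks. Below is a self-contained simulation of that scheme with an in-memory block store; every name here is a stand-in, not the real Block or Core API.

# Self-contained sketch of the chunked, parent-linked chain (in-memory stand-in only).
import base64

store = {}   # hash -> (parent_hash, base64 content)

def save_block(content_b64, parent):
    h = 'blk%d' % len(store)          # stand-in for a real block hash
    store[h] = (parent, content_b64)
    return h

def create_chain(data, chunksize=4):
    parent = None
    # store chunks last-to-first so the final block (the "child") holds the first chunk
    for chunk in reversed([data[n:n + chunksize] for n in range(0, len(data), chunksize)]):
        parent = save_block(base64.b64encode(chunk.encode()).decode(), parent)
    return parent                     # child hash; following parents reaches the rest

def merge_chain(child):
    out, h = '', child
    while h is not None:
        parent, content_b64 = store[h]
        out += base64.b64decode(content_b64).decode()
        h = parent
    return out

child = create_chain('hello onionr chains')
assert merge_chain(child) == 'hello onionr chains'
print(child, '->', merge_chain(child))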
@@ -433,11 +651,54 @@ class Block:
             - (bool): whether or not the block file exists
         '''
 
+        # no input data? scrap it.
         if hash is None:
             return False
-        elif type(hash) == Block:
+
+        if type(hash) == Block:
             blockfile = hash.getBlockFile()
         else:
             blockfile = 'data/blocks/%s.dat' % hash
 
         return os.path.exists(blockfile) and os.path.isfile(blockfile)
 
+    def getCache(hash = None):
+        # give a list of the hashes of the cached blocks
+        if hash is None:
+            return list(Block.blockCache.keys())
+
+        # if they inputted self or a Block, convert to hash
+        if type(hash) == Block:
+            hash = hash.getHash()
+
+        # just to make sure someone didn't put in a bool or something lol
+        hash = str(hash)
+
+        # if it exists, return its content
+        if hash in Block.getCache():
+            return Block.blockCache[hash]
+
+        return None
+
+    def cache(block, override = False):
+        # why even bother if they're giving bad data?
+        if not type(block) == Block:
+            return False
+
+        # only cache if written to file
+        if block.getHash() is None:
+            return False
+
+        # if it's already cached, what are we here for?
+        if block.getHash() in Block.getCache() and not override:
+            return False
+
+        # dump old cached blocks if the size exeeds the maximum
+        if sys.getsizeof(Block.blockCacheOrder) >= config.get('allocations.blockCacheTotal', 50000000): # 50MB default cache size
+            del Block.blockCache[blockCacheOrder.pop(0)]
+
+        # cache block content
+        Block.blockCache[block.getHash()] = block.getRaw()
+        Block.blockCacheOrder.append(block.getHash())
+
+        return True
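getCache and cache above keep two class-level structures: a dict from block hash to raw block data and a list recording insertion order, with the oldest entry evicted once the cache outgrows its allocation. The miniature below shows that FIFO behaviour; it evicts by entry count, which is a simplification of the diff's size-based budget.

# Miniature FIFO cache along the lines of Block.cache()/Block.getCache()
# (evicts by entry count here; the diff budgets by size instead).
block_cache = {}         # hash -> raw block data
block_cache_order = []   # hashes, oldest first
MAX_ENTRIES = 3

def cache(block_hash, raw):
    if block_hash in block_cache:
        return False
    if len(block_cache_order) >= MAX_ENTRIES:
        oldest = block_cache_order.pop(0)   # drop the oldest cached block
        del block_cache[oldest]
    block_cache[block_hash] = raw
    block_cache_order.append(block_hash)
    return True

for i in range(5):
    cache('hash%d' % i, 'raw block %d' % i)
print(block_cache_order)   # ['hash2', 'hash3', 'hash4']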
@@ -17,7 +17,13 @@
     You should have received a copy of the GNU General Public License
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''
-import nacl.signing, nacl.encoding, nacl.public, nacl.hash, nacl.secret, os, binascii, base64, hashlib, logger, onionrproofs, time, math
+import nacl.signing, nacl.encoding, nacl.public, nacl.hash, nacl.secret, os, binascii, base64, hashlib, logger, onionrproofs, time, math, sys
+
+# secrets module was added into standard lib in 3.6+
+if sys.version_info[0] == 3 and sys.version_info[1] < 6:
+    from dependencies import secrets
+elif sys.version_info[0] == 3 and sys.version_info[1] >= 6:
+    import secrets
 
 class OnionrCrypto:
     def __init__(self, coreInstance):
@@ -27,6 +33,8 @@ class OnionrCrypto:
         self.pubKey = None
         self.privKey = None
 
+        self.secrets = secrets
+
         self.pubKeyPowToken = None
         #self.pubKeyPowHash = None
@@ -102,7 +110,7 @@ class OnionrCrypto:
         retData = key.sign(data).signature
         return retData
 
-    def pubKeyEncrypt(self, data, pubkey, anonymous=False, encodedData=False):
+    def pubKeyEncrypt(self, data, pubkey, anonymous=True, encodedData=False):
         '''Encrypt to a public key (Curve25519, taken from base32 Ed25519 pubkey)'''
         retVal = ''
@@ -247,29 +255,28 @@ class OnionrCrypto:
         '''
         retData = False
 
-        if not (('powToken' in metadata) and ('powHash' in metadata)):
+        if not 'powRandomToken' in metadata:
+            logger.warn('No powRandomToken')
             return False
 
         dataLen = len(blockContent)
 
-        expectedHash = self.blake2bHash(base64.b64decode(metadata['powToken']) + self.blake2bHash(blockContent.encode()))
+        expectedHash = self.blake2bHash(base64.b64decode(metadata['powRandomToken']) + self.blake2bHash(blockContent.encode()))
         difficulty = 0
         try:
             expectedHash = expectedHash.decode()
         except AttributeError:
             pass
-        if metadata['powHash'] == expectedHash:
-            difficulty = math.floor(dataLen / 1000000)
+        difficulty = math.floor(dataLen / 1000000)
 
-            mainHash = '0000000000000000000000000000000000000000000000000000000000000000'#nacl.hash.blake2b(nacl.utils.random()).decode()
-            puzzle = mainHash[:difficulty]
+        mainHash = '0000000000000000000000000000000000000000000000000000000000000000'#nacl.hash.blake2b(nacl.utils.random()).decode()
+        puzzle = mainHash[:difficulty]
 
-            if metadata['powHash'][:difficulty] == puzzle:
+        if metadata['powRandomToken'][:difficulty] == puzzle:
             # logger.debug('Validated block pow')
             retData = True
         else:
-            logger.debug("Invalid token (#1)")
-        else:
-            logger.debug('Invalid token (#2): Expected hash %s, got hash %s...' % (metadata['powHash'], expectedHash))
+            logger.debug("Invalid token, bad proof")
 
         return retData
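The proof scheme used in these hunks hashes the block content, combines it with a random token via blake2b, and requires a run of leading zero hex digits whose length is derived as floor(len(content) / 1000000), so small blocks effectively face difficulty 0 on the verification side. The sketch below recomputes and checks that proof; it uses hashlib.blake2b instead of nacl.hash.blake2b and simplified names so it stands alone, so treat it as an approximation of the intent rather than the exact Onionr routine.

# Sketch of the PoW check: recompute blake2b(rand + blake2b(content)) and require
# a leading run of zero hex digits (hashlib used so the example is dependency-free).
import base64, hashlib, math

def verify_pow(block_content, pow_random_token_b64, difficulty=None):
    if difficulty is None:
        difficulty = math.floor(len(block_content) / 1000000)   # as derived in the diff
    rand = base64.b64decode(pow_random_token_b64)
    inner = hashlib.blake2b(block_content.encode()).hexdigest()
    token = hashlib.blake2b(rand + inner.encode()).hexdigest()
    return token[:difficulty] == '0' * difficulty

# A tiny block gets difficulty 0, so any token passes:
print(verify_pow('hello', base64.b64encode(b'\x00' * 8).decode()))   # True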
@@ -0,0 +1,41 @@
+'''
+    Onionr - P2P Microblogging Platform & Social network.
+
+    This file contains exceptions for onionr
+'''
+'''
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+'''
+
+# general exceptions
+class NotFound(Exception):
+    pass
+class Unknown(Exception):
+    pass
+class Invalid(Exception):
+    pass
+
+# crypto exceptions
+class InvalidPubkey(Exception):
+    pass
+
+# block exceptions
+class InvalidMetadata(Exception):
+    pass
+
+# network level exceptions
+class MissingPort(Exception):
+    pass
+class InvalidAddress(Exception):
+    pass
|
||||||
|
'''
|
||||||
|
Onionr - P2P Microblogging Platform & Social network.
|
||||||
|
|
||||||
|
This file contains both the OnionrCommunicate class for communcating with peers
|
||||||
|
'''
|
||||||
|
'''
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
'''
|
|
@@ -64,9 +64,7 @@ def enable(name, onionr = None, start_event = True):
         enabled_plugins = get_enabled_plugins()
         if not name in enabled_plugins:
             enabled_plugins.append(name)
-            config_plugins = config.get('plugins')
-            config_plugins['enabled'] = enabled_plugins
-            config.set('plugins', config_plugins, True)
+            config.set('plugins.enabled', enabled_plugins, True)
 
             events.call(get_plugin(name), 'enable', onionr)
 
@@ -77,7 +75,7 @@ def enable(name, onionr = None, start_event = True):
         else:
             return False
     else:
-        logger.error('Failed to enable plugin \"' + name + '\", disabling plugin.')
+        logger.error('Failed to enable plugin \"%s\", disabling plugin.' % name)
         disable(name)
 
         return False
@@ -93,9 +91,7 @@ def disable(name, onionr = None, stop_event = True):
     if is_enabled(name):
         enabled_plugins = get_enabled_plugins()
         enabled_plugins.remove(name)
-        config_plugins = config.get('plugins')
-        config_plugins['enabled'] = enabled_plugins
-        config.set('plugins', config_plugins, True)
+        config.set('plugins.enabled', enabled_plugins, True)
 
     if exists(name):
         events.call(get_plugin(name), 'disable', onionr)
@@ -121,9 +117,9 @@ def start(name, onionr = None):
 
             return plugin
         except:
-            logger.error('Failed to start module \"' + name + '\".')
+            logger.error('Failed to start module \"%s\".' % name)
     else:
-        logger.error('Failed to start nonexistant module \"' + name + '\".')
+        logger.error('Failed to start nonexistant module \"%s\".' % name)
 
     return None
@@ -145,9 +141,9 @@ def stop(name, onionr = None):
 
             return plugin
         except:
-            logger.error('Failed to stop module \"' + name + '\".')
+            logger.error('Failed to stop module \"%s\".' % name)
     else:
-        logger.error('Failed to stop nonexistant module \"' + name + '\".')
+        logger.error('Failed to stop nonexistant module \"%s\".' % name)
 
     return None
@@ -187,7 +183,7 @@ def get_enabled_plugins():
 
     config.reload()
 
-    return config.get('plugins')['enabled']
+    return config.get('plugins.enabled', list())
 
 def is_enabled(name):
     '''
@ -22,7 +22,37 @@ import nacl.encoding, nacl.hash, nacl.utils, time, math, threading, binascii, lo
|
||||||
import core
|
import core
|
||||||
|
|
||||||
class POW:
|
class POW:
|
||||||
def pow(self, reporting = False):
|
def __init__(self, data, threadCount = 5):
|
||||||
|
self.foundHash = False
|
||||||
|
self.difficulty = 0
|
||||||
|
self.data = data
|
||||||
|
self.threadCount = threadCount
|
||||||
|
|
||||||
|
dataLen = sys.getsizeof(data)
|
||||||
|
self.difficulty = math.floor(dataLen / 1000000)
|
||||||
|
if self.difficulty <= 2:
|
||||||
|
self.difficulty = 4
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.data = self.data.encode()
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.data = nacl.hash.blake2b(self.data)
|
||||||
|
|
||||||
|
logger.info('Computing POW (difficulty: %s)...' % self.difficulty)
|
||||||
|
|
||||||
|
self.mainHash = '0' * 70
|
||||||
|
self.puzzle = self.mainHash[0:min(self.difficulty, len(self.mainHash))]
|
||||||
|
|
||||||
|
myCore = core.Core()
|
||||||
|
for i in range(max(1, threadCount)):
|
||||||
|
t = threading.Thread(name = 'thread%s' % i, target = self.pow, args = (True,myCore))
|
||||||
|
t.start()
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
def pow(self, reporting = False, myCore = None):
|
||||||
startTime = math.floor(time.time())
|
startTime = math.floor(time.time())
|
||||||
self.hashing = True
|
self.hashing = True
|
||||||
self.reporting = reporting
|
self.reporting = reporting
|
||||||
|
@ -30,7 +60,7 @@ class POW:
|
||||||
answer = ''
|
answer = ''
|
||||||
heartbeat = 200000
|
heartbeat = 200000
|
||||||
hbCount = 0
|
hbCount = 0
|
||||||
myCore = core.Core()
|
|
||||||
while self.hashing:
|
while self.hashing:
|
||||||
rand = nacl.utils.random()
|
rand = nacl.utils.random()
|
||||||
token = nacl.hash.blake2b(rand + self.data).decode()
|
token = nacl.hash.blake2b(rand + self.data).decode()
|
||||||
|
@ -39,45 +69,14 @@ class POW:
|
||||||
self.hashing = False
|
self.hashing = False
|
||||||
iFound = True
|
iFound = True
|
||||||
break
|
break
|
||||||
else:
|
|
||||||
logger.debug('POW thread exiting, another thread found result')
|
|
||||||
if iFound:
|
if iFound:
|
||||||
endTime = math.floor(time.time())
|
endTime = math.floor(time.time())
|
||||||
if self.reporting:
|
if self.reporting:
|
||||||
logger.info('Found token ' + token, timestamp=True)
|
logger.debug('Found token after %s seconds: %s' % (endTime - startTime, token), timestamp=True)
|
||||||
logger.info('rand value: ' + base64.b64encode(rand).decode())
|
logger.debug('Random value was: %s' % base64.b64encode(rand).decode())
|
||||||
logger.info('took ' + str(endTime - startTime) + ' seconds', timestamp=True)
|
|
||||||
self.result = (token, rand)
|
self.result = (token, rand)
|
||||||
|
|
||||||
def __init__(self, data):
|
|
||||||
self.foundHash = False
|
|
||||||
self.difficulty = 0
|
|
||||||
self.data = data
|
|
||||||
|
|
||||||
dataLen = sys.getsizeof(data)
|
|
||||||
self.difficulty = math.floor(dataLen/1000000)
|
|
||||||
if self.difficulty <= 2:
|
|
||||||
self.difficulty = 4
|
|
||||||
|
|
||||||
try:
|
|
||||||
self.data = self.data.encode()
|
|
||||||
except AttributeError:
|
|
||||||
pass
|
|
||||||
self.data = nacl.hash.blake2b(self.data)
|
|
||||||
|
|
||||||
logger.debug('Computing difficulty of ' + str(self.difficulty))
|
|
||||||
|
|
||||||
self.mainHash = '0000000000000000000000000000000000000000000000000000000000000000'#nacl.hash.blake2b(nacl.utils.random()).decode()
|
|
||||||
self.puzzle = self.mainHash[0:self.difficulty]
|
|
||||||
#logger.debug('trying to find ' + str(self.mainHash))
|
|
||||||
tOne = threading.Thread(name='one', target=self.pow, args=(True,))
|
|
||||||
tTwo = threading.Thread(name='two', target=self.pow, args=(True,))
|
|
||||||
tThree = threading.Thread(name='three', target=self.pow, args=(True,))
|
|
||||||
tOne.start()
|
|
||||||
tTwo.start()
|
|
||||||
tThree.start()
|
|
||||||
return
|
|
||||||
|
|
||||||
def shutdown(self):
|
def shutdown(self):
|
||||||
self.hashing = False
|
self.hashing = False
|
||||||
self.puzzle = ''
|
self.puzzle = ''
|
||||||
|
@ -89,9 +88,28 @@ class POW:
|
||||||
'''
|
'''
|
||||||
Returns the result, then resets it to False so repeated calls automatically clear it
|
Returns the result, then resets it to False so repeated calls automatically clear it
|
||||||
'''
|
'''
|
||||||
|
|
||||||
try:
|
try:
|
||||||
retVal = self.result
|
retVal = self.result
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
retVal = False
|
retVal = False
|
||||||
|
|
||||||
self.result = False
|
self.result = False
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
|
def waitForResult(self):
|
||||||
|
'''
|
||||||
|
Blocks until the result has been found; returns False if hashing stopped without finding one
|
||||||
|
'''
|
||||||
|
result = False
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
result = self.getResult()
|
||||||
|
if not self.hashing:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
time.sleep(2)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
self.shutdown()
|
||||||
|
logger.warn('Got keyboard interrupt while waiting for POW result, stopping')
|
||||||
|
return result
|
|
@ -18,8 +18,11 @@
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
'''
|
'''
|
||||||
# Misc functions that do not fit in the main api, but are useful
|
# Misc functions that do not fit in the main api, but are useful
|
||||||
import getpass, sys, requests, os, socket, hashlib, logger, sqlite3, config, binascii, time, base64, json, glob, shutil, math
|
import getpass, sys, requests, os, socket, hashlib, logger, sqlite3, config, binascii, time, base64, json, glob, shutil, math
|
||||||
import nacl.signing, nacl.encoding
|
import nacl.signing, nacl.encoding
|
||||||
|
from onionrblockapi import Block
|
||||||
|
import onionrexceptions
|
||||||
|
from defusedxml import minidom
|
||||||
|
|
||||||
if sys.version_info < (3, 6):
|
if sys.version_info < (3, 6):
|
||||||
try:
|
try:
|
||||||
|
@ -77,6 +80,13 @@ class OnionrUtils:
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
|
def getCurrentHourEpoch(self):
|
||||||
|
'''
|
||||||
|
Returns the current epoch, rounded down to the hour
|
||||||
|
'''
|
||||||
|
epoch = self.getEpoch()
|
||||||
|
return epoch - (epoch % 3600)
|
||||||
|
|
||||||
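A quick worked example of the rounding, with an illustrative timestamp:

    epoch = 1527182345              # some moment inside an hour
    print(epoch - (epoch % 3600))   # 1527181200, the top of that hour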
def incrementAddressSuccess(self, address):
|
def incrementAddressSuccess(self, address):
|
||||||
'''
|
'''
|
||||||
Increase the recorded successes for an address
|
Increase the recorded successes for an address
|
||||||
|
@ -95,7 +105,7 @@ class OnionrUtils:
|
||||||
|
|
||||||
def mergeKeys(self, newKeyList):
|
def mergeKeys(self, newKeyList):
|
||||||
'''
|
'''
|
||||||
Merge ed25519 key list to our database
|
Merge an ed25519 key list (a comma-separated string) into our database
|
||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
retVal = False
|
retVal = False
|
||||||
|
@ -122,7 +132,8 @@ class OnionrUtils:
|
||||||
if self._core.addPeer(key[0], key[1]):
|
if self._core.addPeer(key[0], key[1]):
|
||||||
retVal = True
|
retVal = True
|
||||||
else:
|
else:
|
||||||
logger.warn(powHash)
|
logger.warn("Failed to add key")
|
||||||
|
else:
|
||||||
logger.warn('%s pow failed' % key[0])
|
logger.warn('%s pow failed' % key[0])
|
||||||
return retVal
|
return retVal
|
||||||
except Exception as error:
|
except Exception as error:
|
||||||
|
@ -165,8 +176,11 @@ class OnionrUtils:
|
||||||
config.reload()
|
config.reload()
|
||||||
self.getTimeBypassToken()
|
self.getTimeBypassToken()
|
||||||
# TODO: URL encode parameters, just as an extra measure. May not be needed, but should be added regardless.
|
# TODO: URL encode parameters, just as an extra measure. May not be needed, but should be added regardless.
|
||||||
|
with open('data/host.txt', 'r') as host:
|
||||||
|
hostname = host.read()
|
||||||
|
payload = 'http://%s:%s/client/?action=%s&token=%s&timingToken=%s' % (hostname, config.get('client.port'), command, config.get('client.hmac'), self.timingToken)
|
||||||
try:
|
try:
|
||||||
retData = requests.get('http://' + open('data/host.txt', 'r').read() + ':' + str(config.get('client')['port']) + '/client/?action=' + command + '&token=' + str(config.get('client')['client_hmac']) + '&timingToken=' + self.timingToken).text
|
retData = requests.get(payload).text
|
||||||
except Exception as error:
|
except Exception as error:
|
||||||
if not silent:
|
if not silent:
|
||||||
logger.error('Failed to make local request (command: %s).' % command, error=error)
|
logger.error('Failed to make local request (command: %s).' % command, error=error)
|
||||||
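For reference, the URL assembled above has the following shape; the angle-bracket fields are placeholders filled from data/host.txt, the config, and the timing token:

    # http://<host.txt contents>:<client.port>/client/?action=<command>&token=<client.hmac>&timingToken=<timing token>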
|
@ -195,6 +209,22 @@ class OnionrUtils:
|
||||||
|
|
||||||
return pass1
|
return pass1
|
||||||
|
|
||||||
|
def getBlockMetadataFromData(self, blockData):
|
||||||
|
'''
|
||||||
|
accepts block contents as a string and returns a tuple of (metadata, meta, data), where meta is the parsed internal metadata
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
blockData = blockData.encode()
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
metadata = json.loads(blockData[:blockData.find(b'\n')].decode())
|
||||||
|
data = blockData[blockData.find(b'\n'):].decode()
|
||||||
|
try:
|
||||||
|
meta = json.loads(metadata['meta'])
|
||||||
|
except KeyError:
|
||||||
|
meta = {}
|
||||||
|
return (metadata, meta, data)
|
||||||
|
|
||||||
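A hedged sketch of the block layout this helper expects and what it hands back (utils is assumed to be an OnionrUtils instance; the field values are illustrative):

    raw = '{"meta": "{}", "time": "1527181200"}\nhello world'
    metadata, meta, data = utils.getBlockMetadataFromData(raw)
    # metadata -> {'meta': '{}', 'time': '1527181200'}, the header parsed from the first line
    # meta     -> {}, the inner dict parsed from metadata['meta'] (empty when absent)
    # data     -> '\nhello world', everything from the first newline on (the newline is kept)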
def checkPort(self, port, host=''):
|
def checkPort(self, port, host=''):
|
||||||
'''
|
'''
|
||||||
Checks if a port is available, returns bool
|
Checks if a port is available, returns bool
|
||||||
|
@ -280,6 +310,38 @@ class OnionrUtils:
|
||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
|
def validateMetadata(self, metadata):
|
||||||
|
'''Validate that metadata meets the Onionr spec (does not validate the proof value computation); accepts either a dictionary or a JSON string'''
|
||||||
|
# TODO, make this check sane sizes
|
||||||
|
retData = False
|
||||||
|
|
||||||
|
# convert to dict if it is json string
|
||||||
|
if type(metadata) is str:
|
||||||
|
try:
|
||||||
|
metadata = json.loads(metadata)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Validate the metadata dict: reject unknown keys and values that exceed their maximum sizes
|
||||||
|
if type(metadata) is dict:
|
||||||
|
for i in metadata:
|
||||||
|
try:
|
||||||
|
self._core.requirements.blockMetadataLengths[i]
|
||||||
|
except KeyError:
|
||||||
|
logger.warn('Block has invalid metadata key ' + i)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if self._core.requirements.blockMetadataLengths[i] < len(metadata[i]):
|
||||||
|
logger.warn('Block metadata key ' + i + ' exceeded maximum size')
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# the loop's else clause only runs when no break occurred, so every key was recognized and within its size limit
|
||||||
|
retData = True
|
||||||
|
else:
|
||||||
|
logger.warn('In call to utils.validateMetadata, metadata must be a JSON string or a dictionary object')
|
||||||
|
|
||||||
|
return retData
|
||||||
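To make the accepted and rejected cases concrete, a hedged sketch against the limits defined in onionrvalues later in this commit (utils is assumed to be an OnionrUtils instance whose core exposes those requirements):

    utils.validateMetadata({'meta': '{}', 'time': '1527181200'})    # True: known keys, each within its limit
    utils.validateMetadata({'bogus': 'x'})                          # False: unknown key is logged and rejected
    utils.validateMetadata('{"sig": "' + 'A' * 200 + '"}')          # False: 'sig' exceeds its 88-character limit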
|
|
||||||
def validatePubKey(self, key):
|
def validatePubKey(self, key):
|
||||||
'''
|
'''
|
||||||
Validate if a string is a valid base32 encoded Ed25519 key
|
Validate if a string is a valid base32 encoded Ed25519 key
|
||||||
|
@ -347,18 +409,12 @@ class OnionrUtils:
|
||||||
'''
|
'''
|
||||||
Find, decrypt, and return array of PMs (array of dictionary, {from, text})
|
Find, decrypt, and return array of PMs (array of dictionary, {from, text})
|
||||||
'''
|
'''
|
||||||
#blocks = self._core.getBlockList()
|
blocks = Block.getBlocks(type = 'pm', core = self._core)
|
||||||
blocks = self._core.getBlocksByType('pm')
|
|
||||||
message = ''
|
message = ''
|
||||||
sender = ''
|
sender = ''
|
||||||
for i in blocks:
|
for i in blocks:
|
||||||
if len (i) == 0:
|
|
||||||
continue
|
|
||||||
try:
|
try:
|
||||||
with open('data/blocks/' + i + '.dat', 'r') as potentialMessage:
|
blockContent = i.getContent()
|
||||||
potentialMessage = potentialMessage.read()
|
|
||||||
blockMetadata = json.loads(potentialMessage[:potentialMessage.find('\n')])
|
|
||||||
blockContent = potentialMessage[potentialMessage.find('\n') + 1:]
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
message = self._core._crypto.pubKeyDecrypt(blockContent, encodedData=True, anonymous=True)
|
message = self._core._crypto.pubKeyDecrypt(blockContent, encodedData=True, anonymous=True)
|
||||||
|
@ -375,7 +431,7 @@ class OnionrUtils:
|
||||||
except json.decoder.JSONDecodeError:
|
except json.decoder.JSONDecodeError:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
logger.info('Decrypted %s:' % i)
|
logger.debug('Decrypted %s:' % i.getHash())
|
||||||
logger.info(message["msg"])
|
logger.info(message["msg"])
|
||||||
|
|
||||||
signer = message["id"]
|
signer = message["id"]
|
||||||
|
@ -480,6 +536,50 @@ class OnionrUtils:
|
||||||
'''returns epoch'''
|
'''returns epoch'''
|
||||||
return math.floor(time.time())
|
return math.floor(time.time())
|
||||||
|
|
||||||
|
def doGetRequest(self, url, port=0, proxyType='tor'):
|
||||||
|
'''
|
||||||
|
Do a GET request through a local Tor or I2P instance
|
||||||
|
'''
|
||||||
|
if proxyType == 'tor':
|
||||||
|
if port == 0:
|
||||||
|
raise onionrexceptions.MissingPort('Socks port required for Tor HTTP get request')
|
||||||
|
proxies = {'http': 'socks5h://127.0.0.1:' + str(port), 'https': 'socks5h://127.0.0.1:' + str(port)}
|
||||||
|
elif proxyType == 'i2p':
|
||||||
|
proxies = {'http': 'http://127.0.0.1:4444'}
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
headers = {'user-agent': 'PyOnionr'}
|
||||||
|
try:
|
||||||
|
|
||||||
|
r = requests.get(url, headers=headers, proxies=proxies, allow_redirects=False, timeout=(15, 30))
|
||||||
|
retData = r.text
|
||||||
|
except requests.exceptions.RequestException as e:
|
||||||
|
logger.debug('Error: %s' % str(e))
|
||||||
|
retData = False
|
||||||
|
return retData
|
||||||
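A hedged usage sketch; the URLs are placeholders and 9050 is only the conventional Tor SOCKS port, not a value taken from this diff:

    page = utils.doGetRequest('http://example.onion/', port=9050)       # through the local Tor SOCKS proxy
    eep = utils.doGetRequest('http://example.i2p/', proxyType='i2p')    # through the local I2P HTTP proxy on 4444
    if page is False:
        logger.debug('Tor request failed; see the debug line logged above')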
|
|
||||||
|
def getNistBeaconSalt(self, torPort=0):
|
||||||
|
'''
|
||||||
|
Get the token for the current hour from the NIST randomness beacon
|
||||||
|
'''
|
||||||
|
if torPort == 0:
|
||||||
|
try:
|
||||||
|
torPort = int(sys.argv[2])
|
||||||
|
except IndexError:
|
||||||
|
raise onionrexceptions.MissingPort('Missing Tor socks port')
|
||||||
|
retData = ''
|
||||||
|
curTime = self._core._utils.getCurrentHourEpoch()
|
||||||
|
self.nistSaltTimestamp = curTime
|
||||||
|
data = self.doGetRequest('https://beacon.nist.gov/rest/record/' + str(curTime), port=torPort)
|
||||||
|
dataXML = minidom.parseString(data, forbid_dtd=True, forbid_entities=True, forbid_external=True)
|
||||||
|
try:
|
||||||
|
retData = dataXML.getElementsByTagName('outputValue')[0].childNodes[0].data
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
logger.warn('Could not get NIST beacon value')
|
||||||
|
else:
|
||||||
|
self.powSalt = retData
|
||||||
|
return retData
|
||||||
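Concretely, for an hour epoch of 1527181200 the record is fetched (over Tor) from https://beacon.nist.gov/rest/record/1527181200 and the salt is the text of the record's outputValue element. A hedged calling sketch:

    salt = utils.getNistBeaconSalt(torPort=9050)   # 9050 is illustrative; any local Tor SOCKS port works
    if salt != '':
        logger.info('Current beacon salt: %s' % salt)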
|
|
||||||
def size(path='.'):
|
def size(path='.'):
|
||||||
'''
|
'''
|
||||||
Returns the size of a folder's contents in bytes
|
Returns the size of a folder's contents in bytes
|
||||||
|
|
|
@ -0,0 +1,24 @@
|
||||||
|
'''
|
||||||
|
Onionr - P2P Microblogging Platform & Social network
|
||||||
|
|
||||||
|
This file defines values and requirements used by Onionr
|
||||||
|
'''
|
||||||
|
'''
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
'''
|
||||||
|
|
||||||
|
class OnionrValues:
|
||||||
|
def __init__(self):
|
||||||
|
self.passwordLength = 20
|
||||||
|
self.blockMetadataLengths = {'meta': 1000, 'sig': 88, 'signer': 64, 'time': 10, 'powRandomToken': 1000}
|
|
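A note on the 'sig' limit: a base64-encoded ed25519 signature (64 raw bytes) is exactly 88 characters, so that field admits one signature and nothing more. A small self-check, not taken from the diff:

    import base64, os
    assert len(base64.b64encode(os.urandom(64))) == 88   # matches blockMetadataLengths['sig']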
@ -0,0 +1,104 @@
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
# celery beat schedule file
|
||||||
|
celerybeat-schedule
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
|
@ -0,0 +1,674 @@
|
||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users. We, the Free Software Foundation, use the
|
||||||
|
GNU General Public License for most of our software; it applies also to
|
||||||
|
any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you
|
||||||
|
these rights or asking you to surrender the rights. Therefore, you have
|
||||||
|
certain responsibilities if you distribute copies of the software, or if
|
||||||
|
you modify it: responsibilities to respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether
|
||||||
|
gratis or for a fee, you must pass on to the recipients the same
|
||||||
|
freedoms that you received. You must make sure that they, too, receive
|
||||||
|
or can get the source code. And you must show them these terms so they
|
||||||
|
know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps:
|
||||||
|
(1) assert copyright on the software, and (2) offer you this License
|
||||||
|
giving you legal permission to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains
|
||||||
|
that there is no warranty for this free software. For both users' and
|
||||||
|
authors' sake, the GPL requires that modified versions be marked as
|
||||||
|
changed, so that their problems will not be attributed erroneously to
|
||||||
|
authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run
|
||||||
|
modified versions of the software inside them, although the manufacturer
|
||||||
|
can do so. This is fundamentally incompatible with the aim of
|
||||||
|
protecting users' freedom to change the software. The systematic
|
||||||
|
pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we
|
||||||
|
have designed this version of the GPL to prohibit the practice for those
|
||||||
|
products. If such problems arise substantially in other domains, we
|
||||||
|
stand ready to extend this provision to those domains in future versions
|
||||||
|
of the GPL, as needed to protect the freedom of users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents.
|
||||||
|
States should not allow patents to restrict development and use of
|
||||||
|
software on general-purpose computers, but in those that do, we wish to
|
||||||
|
avoid the special danger that patents applied to a free program could
|
||||||
|
make it effectively proprietary. To prevent this, the GPL assures that
|
||||||
|
patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Use with the GNU Affero General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU Affero General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the special requirements of the GNU Affero General Public License,
|
||||||
|
section 13, concerning interaction through a network will apply to the
|
||||||
|
combination as such.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU General Public License from time to time. Such new versions will
|
||||||
|
be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  16. Limitation of Liability.

  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

  17. Interpretation of Sections 15 and 16.

  If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program.  It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

  If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:

    <program>  Copyright (C) <year>  <name of author>
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License.  Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".

  You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.

  The GNU General Public License does not permit incorporating your program
into proprietary programs.  If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library.  If this is what you want to do, use the GNU Lesser General
Public License instead of this License.  But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
@ -0,0 +1,2 @@
+# pluginmanager
+Onionr's plugin manager source code
@ -34,7 +34,7 @@ def writeKeys():
         Serializes and writes the keystore in memory to file
     '''

-    file = open(keys_file, 'w')
+    with open(keys_file, 'w') as file:
         file.write(json.dumps(keys_data, indent=4, sort_keys=True))
         file.close()
@ -44,7 +44,8 @@ def readKeys():
     '''

     global keys_data
-    keys_data = json.loads(open(keys_file).read())
+    with open(keys_file) as file:
+        keys_data = json.loads(file.read())
     return keys_data

 def getKey(plugin):
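
Note: the keystore these helpers serialize is a plain JSON file. A minimal sketch of its assumed layout follows; the section names are taken from the functions in this diff (getKey, getRepositories/addRepository), while the sample plugin name and keys are made up:

    import json

    # Hypothetical keystore contents; writeKeys() would serialize it exactly like this.
    example_keys_data = {
        'plugins': {'example-plugin': 'EXAMPLE-DISTRIBUTOR-PUBKEY'},
        'repositories': {'exampleblockhash': {'example-plugin': 'EXAMPLE-DISTRIBUTOR-PUBKEY'}}
    }
    print(json.dumps(example_keys_data, indent=4, sort_keys=True))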
@ -106,27 +107,37 @@ def getRepositories():
     readKeys()
     return keys_data['repositories']

-def addRepository(repositories, data):
+def addRepository(blockhash, data):
     '''
         Saves the plugin name, to remember that it was installed by the pluginmanager
     '''

     global keys_data
     readKeys()
-    keys_data['repositories'][repositories] = data
+    keys_data['repositories'][blockhash] = data
     writeKeys()

-def removeRepository(repositories):
+def removeRepository(blockhash):
     '''
         Removes the plugin name from the pluginmanager's records
     '''

     global keys_data
     readKeys()
-    if plugin in keys_data['repositories']:
-        del keys_data['repositories'][repositories]
+    if blockhash in keys_data['repositories']:
+        del keys_data['repositories'][blockhash]
     writeKeys()

+def createRepository(plugins):
+    contents = {'plugins' : plugins, 'author' : getpass.getuser(), 'compiled-by' : plugin_name}
+
+    block = Block(core = pluginapi.get_core())
+
+    block.setType('repository')
+    block.setContent(json.dumps(contents))
+
+    return block.save(True)
+
 def check():
     '''
         Checks to make sure the keystore file still exists
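
Note: createRepository() above wraps its data in a Block rather than calling insertBlock directly. Below is a sketch of the JSON payload such a repository block would carry; the plugin/distributor pair is a placeholder, and commandCreateRepository() (later in this diff) builds plugins as [name, distributor] pairs:

    import json, getpass

    # Illustrative repository payload mirroring the 'contents' dict in createRepository().
    plugins = [['example-plugin', 'EXAMPLE-DISTRIBUTOR-PUBKEY']]
    contents = {'plugins': plugins, 'author': getpass.getuser(), 'compiled-by': 'pluginmanager'}
    print(json.dumps(contents))  # this JSON string becomes the block content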
@ -144,7 +155,7 @@ def sanitize(name):

 def blockToPlugin(block):
     try:
-        block = Block(block)
+        block = Block(block, core = pluginapi.get_core())
         blockContent = json.loads(block.getContent())

         name = sanitize(blockContent['name'])
@ -194,14 +205,19 @@ def pluginToBlock(plugin, import_block = True):
         shutil.rmtree(directory + '__pycache__')

     shutil.make_archive(zipfile[:-4], 'zip', directory)
-    data = base64.b64encode(open(zipfile, 'rb').read())
+    data = ''
+    with open(zipfile, 'rb') as file:
+        data = base64.b64encode(file.read())

     author = getpass.getuser()
     description = 'Default plugin description'
     info = {"name" : plugin}
     try:
         if os.path.exists(directory + 'info.json'):
-            info = json.loads(open(directory + 'info.json').read())
+            info = ''
+            with open(directory + 'info.json') as file:
+                info = json.loads(file.read())

             if 'author' in info:
                 author = info['author']
             if 'description' in info:
@ -211,7 +227,13 @@ def pluginToBlock(plugin, import_block = True):

         metadata = {'author' : author, 'date' : str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')), 'name' : plugin, 'info' : info, 'compiled-by' : plugin_name, 'content' : data.decode('utf-8'), 'description' : description}

-        hash = pluginapi.get_core().insertBlock(json.dumps(metadata), header = 'plugin', sign = True)
+        block = Block(core = pluginapi.get_core())
+
+        block.setType('plugin')
+        block.setContent(json.dumps(metadata))
+
+        hash = block.save(True)
+        # hash = pluginapi.get_core().insertBlock(, header = 'plugin', sign = True)

         if import_block:
             pluginapi.get_utils().importNewBlocks()
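
Note: this hunk and the ones around it move block creation from core.insertBlock to the onionrblockapi Block object. A rough sketch of that round trip, assuming a core instance is available (plugins obtain one via pluginapi.get_core()):

    from onionrblockapi import Block

    def publish_and_read(core, payload):
        # Write side: build a block, tag its type, attach content, then save it.
        block = Block(core = core)
        block.setType('plugin')
        block.setContent(payload)
        block_hash = block.save(True)  # mirrors block.save(True) in the diff

        # Read side: reopen the block by hash and pull its content back out.
        fetched = Block(block_hash, core = core)
        return block_hash, fetched.getContent()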
@ -226,7 +248,7 @@ def pluginToBlock(plugin, import_block = True):

 def installBlock(block):
     try:
-        block = Block(block)
+        block = Block(block, core = pluginapi.get_core())
         blockContent = json.loads(block.getContent())

         name = sanitize(blockContent['name'])
@ -353,7 +375,8 @@ def commandInstallPlugin():
             choice = logger.readline('Select the number of the key to use, from 1 to %s, or press Ctrl+C to cancel:' % (index - 1))

             try:
-                if int(choice) < index and int(choice) >= 1:
+                choice = int(choice)
+                if choice <= index and choice >= 1:
                     distributor = distributors[int(choice)]
                     valid = True
             except KeyboardInterrupt:
@ -368,9 +391,11 @@ def commandInstallPlugin():
                 logger.warn('Failed to lookup plugin in repositories.', timestamp = False)
                 logger.error('asdf', error = e, timestamp = False)

+                return True
+
     if pkobh is None:
-        logger.error('No key for this plugin found in keystore or repositories, please specify.')
-        help()
+        logger.error('No key for this plugin found in keystore or repositories, please specify.', timestamp = False)
         return True

     valid_hash = pluginapi.get_utils().validateHash(pkobh)
@ -386,7 +411,7 @@ def commandInstallPlugin():
     blockhash = None

     if valid_hash and not real_block:
-        logger.error('Block hash not found. Perhaps it has not been synced yet?')
+        logger.error('Block hash not found. Perhaps it has not been synced yet?', timestamp = False)
         logger.debug('Is valid hash, but does not belong to a known block.')

         return True
@ -396,7 +421,7 @@ def commandInstallPlugin():

         installBlock(blockhash)
     elif valid_key and not real_key:
-        logger.error('Public key not found. Try adding the node by address manually, if possible.')
+        logger.error('Public key not found. Try adding the node by address manually, if possible.', timestamp = False)
         logger.debug('Is valid key, but the key is not a known one.')
     elif valid_key and real_key:
         publickey = str(pkobh)
@ -432,10 +457,11 @@ def commandInstallPlugin():
             except Exception as e:
                 pass

-            logger.warn('Only continue the installation is you are absolutely certain that you trust the plugin distributor. Public key of plugin distributor: %s' % publickey, timestamp = False)
+            logger.warn('Only continue the installation if you are absolutely certain that you trust the plugin distributor. Public key of plugin distributor: %s' % publickey, timestamp = False)
+            logger.debug('Most recent block matching parameters is %s' % mostRecentVersionBlock)
             installBlock(mostRecentVersionBlock)
         else:
-            logger.error('Unknown data "%s"; must be public key or block hash.' % str(pkobh))
+            logger.error('Unknown data "%s"; must be public key or block hash.' % str(pkobh), timestamp = False)
             return
     else:
         logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' <plugin> [public key/block hash]')
@ -463,11 +489,11 @@ def commandAddRepository():
         if pluginapi.get_utils().validateHash(blockhash):
             if Block.exists(blockhash):
                 try:
-                    blockContent = json.loads(Block(blockhash).getContent())
+                    blockContent = json.loads(Block(blockhash, core = pluginapi.get_core()).getContent())

                     pluginslist = dict()

-                    for pluginname, distributor in blockContent['plugins'].items():
+                    for pluginname, distributor in blockContent['plugins']:
                         if pluginapi.get_utils().validatePubKey(distributor):
                             pluginslist[pluginname] = distributor

@ -477,14 +503,14 @@ def commandAddRepository():
                         addRepository(blockhash, pluginslist)
                         logger.info('Successfully added repository.')
                     else:
-                        logger.error('Repository contains no records, not importing.')
+                        logger.error('Repository contains no records, not importing.', timestamp = False)
                 except Exception as e:
                     logger.error('Failed to parse block.', error = e)
             else:
-                logger.error('Block hash not found. Perhaps it has not been synced yet?')
+                logger.error('Block hash not found. Perhaps it has not been synced yet?', timestamp = False)
                 logger.debug('Is valid hash, but does not belong to a known block.')
         else:
-            logger.error('Unknown data "%s"; must be block hash.' % str(pkobh))
+            logger.error('Unknown data "%s"; must be block hash.' % str(pkobh), timestamp = False)
     else:
         logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [block hash]')

|
@ -500,10 +526,11 @@ def commandRemoveRepository():
|
||||||
if blockhash in getRepositories():
|
if blockhash in getRepositories():
|
||||||
try:
|
try:
|
||||||
removeRepository(blockhash)
|
removeRepository(blockhash)
|
||||||
|
logger.info('Successfully removed repository.')
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error('Failed to parse block.', error = e)
|
logger.error('Failed to parse block.', error = e)
|
||||||
else:
|
else:
|
||||||
logger.error('Repository has not been imported, nothing to remove.')
|
logger.error('Repository has not been imported, nothing to remove.', timestamp = False)
|
||||||
else:
|
else:
|
||||||
logger.error('Unknown data "%s"; must be block hash.' % str(pkobh))
|
logger.error('Unknown data "%s"; must be block hash.' % str(pkobh))
|
||||||
else:
|
else:
|
||||||
|
@ -526,6 +553,48 @@ def commandPublishPlugin():
     else:
         logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' <plugin>')

+def commandCreateRepository():
+    if len(sys.argv) >= 3:
+        check()
+
+        plugins = list()
+        script = sys.argv[0]
+
+        del sys.argv[:2]
+        success = True
+        for pluginname in sys.argv:
+            distributor = None
+
+            if ':' in pluginname:
+                split = pluginname.split(':')
+                pluginname = split[0]
+                distributor = split[1]
+
+            pluginname = sanitize(pluginname)
+
+            if distributor is None:
+                distributor = getKey(pluginname)
+            if distributor is None:
+                logger.error('No distributor key was found for the plugin %s.' % pluginname, timestamp = False)
+                success = False
+
+            plugins.append([pluginname, distributor])
+
+        if not success:
+            logger.error('Please correct the above errors, then recreate the repository.')
+            return True
+
+        blockhash = createRepository(plugins)
+        print(blockhash)
+        if not blockhash is None:
+            logger.info('Successfully created repository. Execute the following command to add the repository:\n    ' + logger.colors.underline + '%s --add-repository %s' % (script, blockhash))
+        else:
+            logger.error('Failed to create repository, an unknown error occurred.')
+    else:
+        logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [plugins...]')
+
+    return True
+
 # event listeners

 def on_init(api, data = None):
@ -540,6 +609,7 @@ def on_init(api, data = None):
     api.commands.register(['add-repo', 'add-repository', 'addrepo', 'addrepository', 'repository-add', 'repo-add', 'repoadd', 'addrepository', 'add-plugin-repository', 'add-plugin-repo', 'add-pluginrepo', 'add-pluginrepository', 'addpluginrepo', 'addpluginrepository'], commandAddRepository)
     api.commands.register(['remove-repo', 'remove-repository', 'removerepo', 'removerepository', 'repository-remove', 'repo-remove', 'reporemove', 'removerepository', 'remove-plugin-repository', 'remove-plugin-repo', 'remove-pluginrepo', 'remove-pluginrepository', 'removepluginrepo', 'removepluginrepository', 'rm-repo', 'rm-repository', 'rmrepo', 'rmrepository', 'repository-rm', 'repo-rm', 'reporm', 'rmrepository', 'rm-plugin-repository', 'rm-plugin-repo', 'rm-pluginrepo', 'rm-pluginrepository', 'rmpluginrepo', 'rmpluginrepository'], commandRemoveRepository)
     api.commands.register(['publish-plugin', 'plugin-publish', 'publishplugin', 'pluginpublish', 'publish'], commandPublishPlugin)
+    api.commands.register(['create-repository', 'create-repo', 'createrepo', 'createrepository', 'repocreate'], commandCreateRepository)

     # add help menus once the features are actually implemented

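
Note: the newly registered create-repo command accepts plugin names with an optional inline distributor key ('pluginname:distributorkey'). A small standalone sketch of that argument handling; the plugin names and key below are invented:

    def parse_plugin_args(args):
        # Mirrors the split(':') handling in commandCreateRepository().
        plugins = []
        for pluginname in args:
            distributor = None
            if ':' in pluginname:
                pluginname, distributor = pluginname.split(':', 1)
            plugins.append([pluginname, distributor])
        return plugins

    print(parse_plugin_args(['clock', 'flow:EXAMPLE-PUBKEY']))
    # -> [['clock', None], ['flow', 'EXAMPLE-PUBKEY']]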
@ -1,6 +1,17 @@
 {
-    "devmode": true,
+    "general" : {
+        "dev_mode": true,
+        "display_header" : true,
+
+        "newCommunicator": false,
+
         "dc_response": true,
+        "dc_execcallbacks" : true
+    },
+
+    "client" : {
+
+    },

     "log": {
         "file": {
@ -13,13 +24,21 @@
             "color": true
         }
     },

+    "tor" : {
+
+    },
+
     "i2p":{
         "host": false,
         "connect": true,
         "ownAddr": ""
     },

     "allocations":{
         "disk": 1000000000,
-        "netTotal": 1000000000
+        "netTotal": 1000000000,
+        "blockCache" : 5000000,
+        "blockCacheTotal" : 50000000
     }
 }
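
Note: with the configuration now nested into sections ('general', 'client', 'tor', 'i2p', ...), individual settings are naturally addressed by a dotted path. The helper below is a hypothetical resolver for such paths over plain nested dicts, not the project's actual config module:

    def get_setting(conf, path, default = None):
        # Walk a nested dict using a dot-separated key, e.g. 'general.dev_mode'.
        node = conf
        for part in path.split('.'):
            if not isinstance(node, dict) or part not in node:
                return default
            node = node[part]
        return node

    conf = {'general': {'dev_mode': True}, 'allocations': {'disk': 1000000000}}
    print(get_setting(conf, 'general.dev_mode', False))   # True
    print(get_setting(conf, 'allocations.netTotal', 0))   # 0 (missing, falls back)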
@ -5,6 +5,7 @@

 # Imports some useful libraries
 import logger, config
+from onionrblockapi import Block

 plugin_name = '$name'

@ -0,0 +1,25 @@
+[25 lines added: ASCII-art Onionr logo banner]
@ -117,6 +117,44 @@ class OnionrTests(unittest.TestCase):

         self.assertTrue(True)

+    def testBlockAPI(self):
+        logger.debug('-'*26 + '\n')
+        logger.info('Running BlockAPI test #1...')
+
+        content = 'Onionr test block'
+
+        from onionrblockapi import Block
+        hash = Block(type = 'test', content = content).save()
+        block = Block(hash) # test init
+
+        if len(Block.getBlocks(type = 'test')) == 0:
+            logger.warn('Failed to find test block.')
+            self.assertTrue(False)
+        if not block.getContent() == content:
+            logger.warn('Test block content is invalid! (%s != %s)' % (block.getContent(), content))
+            self.assertTrue(False)
+
+        logger.debug('-'*26 + '\n')
+        logger.info('Running BlockAPI test #2...')
+
+        original_content = 'onionr'
+
+        logger.debug('original: %s' % original_content)
+
+        blocks = Block.createChain(data = original_content, chunksize = 2, verbose = True)
+
+        logger.debug(blocks[1])
+
+        child = blocks[0]
+        merged = Block.mergeChain(child)
+
+        logger.debug('merged blocks (child: %s): %s' % (child, merged))
+
+        if merged != original_content:
+            self.assertTrue(False)
+        self.assertTrue(True)
+
     def testBitcoinNode(self):
         # temporarily disabled- this takes a lot of time the CI doesn't have
         self.assertTrue(True)
@ -234,6 +272,6 @@ class OnionrTests(unittest.TestCase):
             else:
                 self.assertTrue(False)
         else:
-            self.assertTrue(False)
+            self.assertTrue(False) # <- annoying :(

 unittest.main()
@ -1,50 +0,0 @@
-'''
-    Onionr - P2P Microblogging Platform & Social network. Run with 'help' for usage.
-
-    This program is free software: you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation, either version 3 of the License, or
-    (at your option) any later version.
-
-    This program is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-    GNU General Public License for more details.
-    You should have received a copy of the GNU General Public License
-    along with this program. If not, see <https://www.gnu.org/licenses/>.
-'''
-
-import hmac, base64, time, math
-
-class TimedHMAC:
-    def __init__(self, base64Key, data, hashAlgo):
-        '''
-            base64Key = base64 encoded key
-            data = data to hash
-            expire = time expiry in epoch
-            hashAlgo = string in hashlib.algorithms_available
-
-            Maximum of 10 seconds grace period
-        '''
-
-        self.data = data
-        self.expire = math.floor(time.time())
-        self.hashAlgo = hashAlgo
-        self.b64Key = base64Key
-        generatedHMAC = hmac.HMAC(base64.b64decode(base64Key).decode(), digestmod=self.hashAlgo)
-        generatedHMAC.update(data + expire)
-        self.HMACResult = generatedHMAC.hexdigest()
-
-        return
-
-    def check(self, data):
-        '''
-            Check a hash (and verify time is sane)
-        '''
-
-        testHash = hmac.HMAC(base64.b64decode(base64Key).decode(), digestmod=self.hashAlgo)
-        testHash.update(data + math.floor(time.time()))
-        testHash = testHash.hexdigest()
-        if hmac.compare_digest(testHash, self.HMACResult):
-            return true
-
-        return false
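
Note: the deleted TimedHMAC class above was unused and had several problems (an undefined expire variable, lowercase true/false, mixing str and bytes). For reference only, here is a corrected standalone sketch of the idea it was aiming for, an HMAC bound to a coarse time window; this is not part of the commit:

    import hmac, hashlib, time, math

    def timed_hmac(key: bytes, data: bytes, window: int = 10) -> str:
        # Bind the MAC to the current time window so it expires automatically.
        epoch_window = str(math.floor(time.time() / window)).encode()
        return hmac.new(key, data + epoch_window, hashlib.sha256).hexdigest()

    def check_timed_hmac(key: bytes, data: bytes, mac: str, window: int = 10) -> bool:
        # Accept the current or previous window as a short grace period.
        now = math.floor(time.time() / window)
        for candidate in (now, now - 1):
            expected = hmac.new(key, data + str(candidate).encode(), hashlib.sha256).hexdigest()
            if hmac.compare_digest(expected, mac):
                return True
        return False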
@ -7,5 +7,6 @@ sha3==0.2.1
 simple_crypt==4.1.7
 ecdsa==0.13
 requests==2.12.4
+defusedxml==0.5.0
 SocksiPy_branch==1.01
 sphinx_rtd_theme==0.3.0