improved network stability by adding automatic Tor restarts

master
Kevin Froman 2019-08-09 20:04:56 -05:00
parent daff149acc
commit 75ec108496
11 changed files with 48 additions and 16 deletions
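
This commit wires automatic Tor restarts into the communicator daemon: the periodic net check restarts Tor when connectivity fails, and a new 'restartTor' daemon-queue command lets other components request the same restart. A combined sketch of the flow, using only names visible in the hunks below (the new helper itself only does the kill/start part; its callers clear the peer state):

import netcontroller

def restart_tor(comm_inst):
    # Fetch the shared NetController instance and bounce the Tor subprocess
    net = comm_inst.shared_state.get(netcontroller.NetController)
    net.killTor()
    net.startTor()  # gen_torrc defaults to True, so the torrc is regenerated
    # Callers also reset connectivity state so peers are retried after the restart
    comm_inst.offlinePeers = []
    comm_inst.isOnline = False

After a restart, the next run of the net-check timer (now every 500 seconds, capped at one thread) re-evaluates connectivity and sets isOnline back to True once the check succeeds.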

View File

@@ -138,7 +138,7 @@ class OnionrCommunicatorDaemon:
deniableBlockTimer.count = (deniableBlockTimer.frequency - 175)
# Timer to check for connectivity, through Tor to various high-profile onion services
netCheckTimer = OnionrCommunicatorTimers(self, netcheck.net_check, 600, myArgs=[self])
netCheckTimer = OnionrCommunicatorTimers(self, netcheck.net_check, 500, myArgs=[self], maxThreads=1)
# Announce the public API server transport address to other nodes if security level allows
if config.get('general.security_level', 1) == 0 and config.get('general.announce_node', True):
@@ -185,7 +185,10 @@ class OnionrCommunicatorDaemon:
for server in self.service_greenlets:
server.stop()
localcommand.local_command('shutdown') # shutdown the api
try:
time.sleep(0.5)
except KeyboardInterrupt:
pass
def lookupAdders(self):
'''Lookup new peer addresses'''

View File

@@ -22,6 +22,7 @@ import onionrevents as events
from onionrutils import localcommand
from coredb import daemonqueue
import filepaths
from . import restarttor
def handle_daemon_commands(comm_inst):
cmd = daemonqueue.daemon_queue()
response = ''
@@ -43,6 +44,11 @@ def handle_daemon_commands(comm_inst):
response = 'none'
elif cmd[0] == 'localCommand':
response = localcommand.local_command(cmd[1])
elif cmd[0] == 'clearOffline':
comm_inst.offlinePeers = []
elif cmd[0] == 'restartTor':
restarttor.restart(comm_inst)
comm_inst.offlinePeers = []
elif cmd[0] == 'pex':
for i in comm_inst.timers:
if i.timerFunction.__name__ == 'lookupAdders':
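
Any other part of the code can trigger the new handlers by pushing the command name onto the daemon queue, which is exactly what the helper added later in this commit does. A minimal usage sketch:

from coredb import daemonqueue

# Ask the running communicator to restart Tor and forget the offline-peer list;
# handled by the 'restartTor' branch in handle_daemon_commands above.
daemonqueue.daemon_queue_add('restartTor')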

View File

@@ -21,6 +21,7 @@
import logger
from utils import netutils
from onionrutils import localcommand, epoch
from . import restarttor
def net_check(comm_inst):
'''Check if we are connected to the internet or not when we can't connect to any peers'''
rec = False # for detecting if we have received incoming connections recently
@@ -33,7 +34,9 @@ def net_check(comm_inst):
pass
if not rec and not netutils.checkNetwork(torPort=comm_inst.proxyPort):
if not comm_inst.shutdown:
logger.warn('Network check failed, are you connected to the Internet, and is Tor working?')
logger.warn('Network check failed, are you connected to the Internet, and is Tor working?', terminal=True)
restarttor.restart(comm_inst)
comm_inst.offlinePeers = []
comm_inst.isOnline = False
else:
comm_inst.isOnline = True

View File

@@ -0,0 +1,5 @@
import netcontroller
def restart(comm_inst):
net = comm_inst.shared_state.get(netcontroller.NetController)
net.killTor()
net.startTor()

View File

@@ -26,8 +26,8 @@ ONIONR_VERSION_TUPLE = tuple(ONIONR_VERSION.split('.')) # (MAJOR, MINOR, VERSION
API_VERSION = '0' # increments of 1; only change when something fundamental about how the API works changes. This way other nodes know how to communicate without learning too much information about you.
MIN_PY_VERSION = 6
DEVELOPMENT_MODE = True
MAX_BLOCK_TYPE_LENGTH = 15
MAX_BLOCK_CLOCK_SKEW = 120
# Begin OnionrValues migrated values
ANNOUNCE_POW = 5

View File

@@ -88,11 +88,11 @@ HiddenServicePort 80 ''' + self.apiServerIP + ''':''' + str(self.hsPort)
torrc.close()
return
def startTor(self):
def startTor(self, gen_torrc=True):
'''
Start Tor with onion service on port 80 & socks proxy on random port
'''
if gen_torrc:
self.generateTorrc()
if os.path.exists('./tor'):

View File

@@ -0,0 +1,5 @@
import time
from coredb import daemonqueue
def rebuild():
daemonqueue.daemon_queue_add('restartTor')

View File

@@ -0,0 +1,8 @@
from stem.control import Controller
import config
def get_controller():
c = Controller.from_port(port=config.get('tor.controlPort'))
c.authenticate(config.get('tor.controlpassword'))
return c
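
The new control-port helper above returns an authenticated stem Controller built from the configured tor.controlPort and control password. The commit only adds the helper, so how it gets used is an assumption; a sketch against stem's documented API (with a hypothetical torcontrol import path for the module above) could look like:

from stem import Signal
import torcontrol  # hypothetical module path for the helper above

controller = torcontrol.get_controller()
# Ask Tor whether it has managed to build a circuit yet
print(controller.get_info('status/circuit-established'))
# Request fresh circuits without a full process restart
controller.signal(Signal.NEWNYM)
controller.close()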

View File

@@ -22,11 +22,11 @@ import logger, onionrexceptions
from etc import onionrvalues
from onionrutils import stringvalidators, epoch, bytesconverter
import config, filepaths, onionrcrypto
def validate_metadata(metadata, blockData):
def validate_metadata(metadata, block_data) -> bool:
'''Validate that metadata meets the Onionr spec (does not validate proof value computation); takes either a dictionary or a JSON string'''
ret_data = False
maxClockDifference = 120
max_clock_difference = onionrvalues.MAX_BLOCK_CLOCK_SKEW
# convert to dict if it is json string
if type(metadata) is str:
@@ -36,7 +36,7 @@ def validate_metadata(metadata, blockData):
pass
# Validate metadata dict for invalid keys to sizes that are too large
maxAge = config.get("general.max_block_age", onionrvalues.DEFAULT_EXPIRE)
maxAge = min(config.get("general.max_block_age", onionrvalues.DEFAULT_EXPIRE), onionrvalues.DEFAULT_EXPIRE)
if type(metadata) is dict:
for i in metadata:
try:
@@ -58,8 +58,8 @@ def validate_metadata(metadata, blockData):
logger.warn('Block metadata time stamp is not integer string or int')
break
isFuture = (metadata[i] - epoch.get_epoch())
if isFuture > maxClockDifference:
logger.warn('Block timestamp is skewed to the future over the max %s: %s' % (maxClockDifference, isFuture))
if isFuture > max_clock_difference:
logger.warn('Block timestamp is skewed to the future over the max %s: %s' % (max_clock_difference, isFuture))
break
if (epoch.get_epoch() - metadata[i]) > maxAge:
logger.warn('Block is outdated: %s' % (metadata[i],))
@@ -79,7 +79,7 @@ def validate_metadata(metadata, blockData):
else:
# if metadata loop gets no errors, it does not break, therefore metadata is valid
# make sure we do not have another block with the same data content (prevent data duplication and replay attacks)
nonce = bytesconverter.bytes_to_str(onionrcrypto.hashers.sha3_hash(blockData))
nonce = bytesconverter.bytes_to_str(onionrcrypto.hashers.sha3_hash(block_data))
try:
with open(filepaths.data_nonce_file, 'r') as nonceFile:
if nonce in nonceFile.read():
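
The timestamp rules in validate_metadata now come from shared constants: a block may be dated at most MAX_BLOCK_CLOCK_SKEW seconds into the future, and the configured maximum block age is clamped so it can never exceed the built-in default expiry. A standalone sketch of just those two checks (the DEFAULT_EXPIRE value is assumed for illustration; it is not shown in this diff):

import time

MAX_BLOCK_CLOCK_SKEW = 120   # from onionrvalues; seconds of allowed future skew
DEFAULT_EXPIRE = 2592000     # assumed ~30 days for illustration only

def timestamp_ok(block_time: int, configured_max_age: int) -> bool:
    now = int(time.time())
    # Never trust a configured max age larger than the built-in default
    max_age = min(configured_max_age, DEFAULT_EXPIRE)
    if block_time - now > MAX_BLOCK_CLOCK_SKEW:
        return False  # skewed too far into the future
    if now - block_time > max_age:
        return False  # block is outdated
    return True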

View File

@@ -18,13 +18,14 @@
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
from onionrutils import basicrequests
from . import readstatic
from onionrcrypto import cryptoutils
def checkNetwork(torPort=0):
'''Check if we are connected to the internet (through Tor)'''
retData = False
connectURLs = []
try:
with open('static-data/connect-check.txt', 'r') as connectTest:
connectURLs = connectTest.read().split(',')
connectURLs = cryptoutils.random_shuffle(readstatic.read_static('connect-check.txt').split(','))
for url in connectURLs:
if basicrequests.do_get_request(url, port=torPort, ignoreAPI=True) != False:

View File

@@ -9,4 +9,5 @@ def read_static(file, ret_bin=False):
else:
mode = 'r'
with open(static_file, mode) as f:
return f.read()
data = f.read()
return data