added network merger test

branch master
author Kevin Froman 2019-07-26 00:55:20 -05:00
parent 19fa128710
commit c7efc1ba08
3 changed files with 50 additions and 23 deletions


@@ -0,0 +1,31 @@
#!/usr/bin/env python3
import sys, os
sys.path.append(".")
import unittest, uuid

# Point ONIONR_HOME at a throwaway directory before importing any Onionr modules
TEST_DIR = 'testdata/%s-%s' % (uuid.uuid4(), os.path.basename(__file__)) + '/'
print("Test directory:", TEST_DIR)
os.environ["ONIONR_HOME"] = TEST_DIR

from utils import networkmerger, createdirs
from coredb import keydb
import setupconfig

createdirs.create_dirs()
setupconfig.setup_config()


class NetworkMergerTest(unittest.TestCase):
    def test_valid_merge(self):
        adders = 'facebookcorewwwi.onion,mporbyyjhmz2c62shctbi3ngrslne5lpcyav6uzhxok45iblodhgjoad.onion'
        networkmerger.mergeAdders(adders)
        added = keydb.listkeys.list_adders()
        self.assertIn('mporbyyjhmz2c62shctbi3ngrslne5lpcyav6uzhxok45iblodhgjoad.onion', added)
        self.assertNotIn('inwalidkcorewwi.onion', added)
        self.assertIn('facebookcorewwwi.onion', added)

    def test_invalid_merge(self):
        adders = 'facebookc0rewwi.onion,sdfsdfsdf.onion, ssdf324, null, \n'
        networkmerger.mergeAdders(adders)
        added = keydb.listkeys.list_adders()
        # Check each submitted address, not each character of the joined string
        for adder in adders.split(','):
            self.assertNotIn(adder.strip(), added)


unittest.main()
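
The ordering at the top of the new test is deliberate: ONIONR_HOME is pointed at a throwaway directory before any Onionr module is imported, presumably because those modules resolve their data paths when first loaded. A stand-alone sketch of that isolation pattern, with all names below illustrative rather than Onionr's API:

# Illustration only, not part of the commit: isolate state by setting the
# home directory before importing anything that reads it.
import os, tempfile, uuid

test_home = os.path.join(tempfile.gettempdir(), 'onionr-test-%s' % uuid.uuid4(), '')
os.environ["ONIONR_HOME"] = test_home  # set the environment variable first...
# ...then import the modules that depend on it, e.g.:
# from utils import createdirs
# createdirs.create_dirs()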


@@ -14,7 +14,7 @@ class OnionrValidations(unittest.TestCase):
         valid = ['facebookcorewwwi.onion', 'vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd.onion',
         '5bvb5ncnfr4dlsfriwczpzcvo65kn7fnnlnt2ln7qvhzna2xaldq.b32.i2p']
-        invalid = [None, 'dsfewjirji0ejipdfs', '', ' ', '\n', '\r\n', 'f$ce%^okc+rewwwi.onion']
+        invalid = [None, 'dsfewjirji0ejipdfs', '', ' ', '\n', '\r\n', 'f$ce%^okc+rewwwi.onion', 'facebookc0rewwi.onion']
         for x in valid:
             print('testing', x)
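
For context on why the new 'facebookc0rewwi.onion' entry belongs in the invalid list: v2 onion addresses are 16 base32 characters plus '.onion' and v3 addresses are 56, while this string is only 15 characters and contains '0', which is outside the base32 alphabet. A minimal, self-contained format check is sketched below as an illustration; it is not Onionr's actual validator:

# Illustration only: a naive format check for Tor onion addresses.
import re

ONION_RE = re.compile(r'^(?:[a-z2-7]{16}|[a-z2-7]{56})\.onion$')

def looks_like_onion(addr):
    return bool(addr) and bool(ONION_RE.match(addr.strip()))

assert looks_like_onion('facebookcorewwwi.onion')     # 16-char v2 format
assert not looks_like_onion('facebookc0rewwi.onion')  # 15 chars, '0' is not base32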


@@ -26,25 +26,21 @@ def mergeAdders(newAdderList):
         Merge peer adders list to our database
     '''
     blacklist = onionrblacklist.OnionrBlackList()
-    try:
-        retVal = False
-        if newAdderList != False:
-            for adder in newAdderList.split(','):
-                adder = adder.strip()
-                if not adder in keydb.listkeys.list_adders(randomOrder = False) and adder != gettransports.transports[0] and not blacklist.inBlacklist(adder):
-                    if not config.get('tor.v3onions') and len(adder) == 62:
-                        continue
-                    if keydb.addkeys.add_address(adder):
-                        # Check if we have the maximum amount of allowed stored peers
-                        if config.get('peers.max_stored_peers') > len(keydb.listkeys.list_adders()):
-                            logger.info('Added %s to db.' % adder, timestamp = True)
-                            retVal = True
-                        else:
-                            logger.warn('Reached the maximum amount of peers in the net database as allowed by your config.')
-                else:
-                    pass
-                    #logger.debug('%s is either our address or already in our DB' % adder)
-        return retVal
-    except Exception as error:
-        logger.error('Failed to merge adders.', error = error)
-        return False
+    retVal = False
+    if newAdderList != False:
+        for adder in newAdderList.split(','):
+            adder = adder.strip()
+            if not adder in keydb.listkeys.list_adders(randomOrder = False) and not adder in gettransports.get() and not blacklist.inBlacklist(adder):
+                if not config.get('tor.v3onions', True) and len(adder) == 62:
+                    continue
+                if keydb.addkeys.add_address(adder):
+                    # Check if we have the maximum amount of allowed stored peers
+                    if config.get('peers.max_stored_peers') > len(keydb.listkeys.list_adders()):
+                        logger.info('Added %s to db.' % adder, timestamp = True)
+                        retVal = True
+                    else:
+                        logger.warn('Reached the maximum amount of peers in the net database as allowed by your config.')
+            else:
+                pass
+                #logger.debug('%s is either our address or already in our DB' % adder)
+    return retVal
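
The functional changes in this hunk are small but easy to miss: the blanket try/except around the merge is gone, the self-address check now tests membership in gettransports.get() rather than comparing against gettransports.transports[0], and config.get('tor.v3onions') gained a default of True. A minimal sketch of why that default matters, using a plain dict in place of Onionr's real config store (illustrative only):

# Illustration only: a dict stands in for the config backend.
conf = {}  # 'tor.v3onions' has not been written to the config yet

def get(key, default=None):
    return conf.get(key, default)

v3_adder = 'mporbyyjhmz2c62shctbi3ngrslne5lpcyav6uzhxok45iblodhgjoad.onion'
assert len(v3_adder) == 62

assert not get('tor.v3onions')      # old call: a missing key is falsy, so 62-char v3 onions were skipped
assert get('tor.v3onions', True)    # new call: v3 onions are kept unless explicitly disabled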