fixes in board, refactoring core databases to not use core anymore
parent 1ced21f40c
commit 000538ddc8
17 changed files with 43 additions and 67 deletions
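The hunks below replace calls that went through comm_inst._core (getBlockList, getBlocksByType) with direct calls into a coredb.blockmetadb module. A rough sketch of the interface those call sites imply is given here, assuming a SQLite metadata database; the file name, table name, and column name are guesses for illustration, and only the two function names actually appear in the diff:

    import sqlite3

    BLOCK_META_DB = 'block-metadata.db'  # assumed database file name

    def get_block_list():
        # Return every known block hash (callers below assume oldest-first order).
        conn = sqlite3.connect(BLOCK_META_DB)
        rows = conn.execute('SELECT hash FROM hashes').fetchall()  # table name assumed
        conn.close()
        return [row[0] for row in rows]

    def get_blocks_by_type(block_type):
        # Return hashes of blocks whose stored type matches block_type.
        conn = sqlite3.connect(BLOCK_META_DB)
        rows = conn.execute('SELECT hash FROM hashes WHERE dataType = ?',
                            (block_type,)).fetchall()  # column name assumed
        conn.close()
        return [row[0] for row in rows]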
@@ -17,9 +17,10 @@
     You should have received a copy of the GNU General Public License
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''
+from coredb import blockmetadb
 def should_download(comm_inst, block_hash):
     ret_data = True
-    if block_hash in comm_inst._core.getBlockList(): # Dont download block we have
+    if block_hash in blockmetadb.get_block_list(): # Dont download block we have
         ret_data = False
     else:
         if comm_inst._core._blacklist.inBlacklist(block_hash): # Dont download blacklisted block
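Assembled, the refactored should_download reads roughly as below; the final return and the body of the blacklist branch are not shown in the hunk above and are assumptions here:

    from coredb import blockmetadb

    def should_download(comm_inst, block_hash):
        ret_data = True
        if block_hash in blockmetadb.get_block_list():  # Dont download block we have
            ret_data = False
        else:
            if comm_inst._core._blacklist.inBlacklist(block_hash):  # Dont download blacklisted block
                ret_data = False  # assumed: blacklisted blocks are skipped as well
        return ret_data  # assumed: the flag is returned to the caller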
@@ -21,6 +21,7 @@ import sqlite3
 import logger
 from onionrusers import onionrusers
 from onionrutils import epoch
+from coredb import blockmetadb
 def clean_old_blocks(comm_inst):
     '''Delete old blocks if our disk allocation is full/near full, and also expired blocks'''
 
@@ -31,7 +32,7 @@ def clean_old_blocks(comm_inst):
         logger.info('Deleted block: %s' % (bHash,))
 
     while comm_inst._core.storage_counter.isFull():
-        oldest = comm_inst._core.getBlockList()[0]
+        oldest = blockmetadb.get_block_list()[0]
         comm_inst._core._blacklist.addToDB(oldest)
         comm_inst._core.removeBlock(oldest)
         logger.info('Deleted block: %s' % (oldest,))
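The loop touched above is a simple eviction policy: while the disk allocation is full, drop the oldest block and blacklist it so it is not fetched again. As a sketch with the new call in place (assuming get_block_list returns hashes oldest-first, which indexing [0] for the oldest implies):

    while comm_inst._core.storage_counter.isFull():
        oldest = blockmetadb.get_block_list()[0]      # oldest stored block hash (assumed ordering)
        comm_inst._core._blacklist.addToDB(oldest)    # blacklist it so it is not re-downloaded
        comm_inst._core.removeBlock(oldest)           # delete it to free disk allocation
        logger.info('Deleted block: %s' % (oldest,))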
@@ -20,12 +20,12 @@
 import logger, onionrproofs
 from onionrutils import stringvalidators, epoch
 from communicator import peeraction, onlinepeers
-
+from coredb import blockmetadb
 def lookup_blocks_from_communicator(comm_inst):
     logger.info('Looking up new blocks...')
     tryAmount = 2
     newBlocks = ''
-    existingBlocks = comm_inst._core.getBlockList()
+    existingBlocks = blockmetadb.get_block_list()
     triedPeers = [] # list of peers we've tried this time around
     maxBacklog = 1560 # Max amount of *new* block hashes to have already in queue, to avoid memory exhaustion
     lastLookupTime = 0 # Last time we looked up a particular peer's list
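maxBacklog above caps how many not-yet-downloaded hashes may sit in the queue at once. A hypothetical helper showing that cap in action; the block_queue list and this function name are illustration-only, not the communicator's real attributes:

    def queue_new_hashes(block_queue, new_hashes, existing_blocks, max_backlog=1560):
        # Append unseen hashes to the download queue, stopping at the backlog
        # cap to avoid memory exhaustion from an unbounded queue.
        for block_hash in new_hashes:
            if len(block_queue) >= max_backlog:
                break
            if block_hash not in existing_blocks and block_hash not in block_queue:
                block_queue.append(block_hash)
        return block_queue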
@@ -19,14 +19,14 @@
 '''
 import communicator, onionrblockapi
 from onionrutils import stringvalidators, bytesconverter
-
+from coredb import blockmetadb
 def service_creator(daemon):
     assert isinstance(daemon, communicator.OnionrCommunicatorDaemon)
     core = daemon._core
 
     # Find socket connection blocks
     # TODO cache blocks and only look at recently received ones
-    con_blocks = core.getBlocksByType('con')
+    con_blocks = blockmetadb.get_blocks_by_type('con')
     for b in con_blocks:
         if not b in daemon.active_services:
             bl = onionrblockapi.Block(b, core=core, decrypt=True)
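The TODO above notes that service_creator rescans every 'con' block on each pass. One hypothetical way to honour it while keeping the new blockmetadb call; the seen_con_blocks set and the handle_connection_block callable are illustration-only names, not existing Onionr APIs:

    from coredb import blockmetadb

    seen_con_blocks = set()

    def scan_connection_blocks(daemon, handle_connection_block):
        # Skip blocks already examined or already backing an active service,
        # so each pass only processes newly received connection blocks.
        for block_hash in blockmetadb.get_blocks_by_type('con'):
            if block_hash in seen_con_blocks or block_hash in daemon.active_services:
                continue
            seen_con_blocks.add(block_hash)
            handle_connection_block(block_hash)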