switched to upload event api

master
Kevin Froman 2020-01-04 06:13:10 -06:00
parent 1ba8b4c707
commit c975d27906
8 changed files with 67 additions and 27 deletions

View File

@@ -13,3 +13,4 @@
 * localization support
 * add BCC support to mail
 * prevent local insertion success of duplicate block content
+* truncate last N blocks when sharing list

View File

@@ -8,11 +8,14 @@ from typing import TYPE_CHECKING
 from gevent import sleep
+from communicatorutils.uploadblocks import mixmate
+
 if TYPE_CHECKING:
     from toomanyobjs import TooMany
     from communicator import OnionrCommunicatorDaemon
     from httpapi.daemoneventsapi import DaemonEventsBP
     from onionrtypes import BlockHash
+    from apiservers import PublicAPI
 """
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -37,16 +40,28 @@ def daemon_event_handlers(shared_state: 'TooMany'):
         except KeyError:
             sleep(0.2)

     comm_inst = _get_inst('OnionrCommunicatorDaemon')
+    public_api: 'PublicAPI' = _get_inst('PublicAPI')
     events_api: 'DaemonEventsBP' = _get_inst('DaemonEventsBP')

     def remove_from_insert_queue_wrapper(block_hash: 'BlockHash'):
-        print(f'removed {block_hash} from upload')
         remove_from_insert_queue(comm_inst, block_hash)
+        return "removed"

     def print_test(text=''):
         print("It works!", text)
         return f"It works! {text}"

+    def upload_event(block: 'BlockHash' = ''):
+        if not block:
+            raise ValueError
+        public_api.hideBlocks.append(block)
+        try:
+            mixmate.block_mixer(comm_inst.blocksToUpload, block)
+        except ValueError:
+            pass
+        return "removed"
+
     events_api.register_listener(remove_from_insert_queue_wrapper)
     events_api.register_listener(print_test)
+    events_api.register_listener(upload_event)
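
The new upload_event listener is registered on the daemon events blueprint, so local callers reach it over the client API at /daemon-event/upload_event with a JSON body carrying the block hash. Below is a rough sketch of such a call; the address and the direct use of requests are assumptions for illustration only, since Onionr itself goes through onionrutils.localcommand.local_command().

```python
# Sketch: ask the daemon to hide a freshly stored block from the public API
# and queue it for mixed upload by POSTing to the new daemon event route.
# The address and the use of requests here are illustrative assumptions.
import requests

CLIENT_API = "http://127.0.0.1:8080"  # assumed local client API address


def trigger_upload_event(block_hash: str) -> str:
    resp = requests.post(
        f"{CLIENT_API}/daemon-event/upload_event",
        json={"block": block_hash},  # same shape as postData={'block': blockHash}
        timeout=30,
    )
    resp.raise_for_status()
    return resp.text  # the handler returns the string "removed"


if __name__ == "__main__":
    print(trigger_upload_event("exampleblockhashvalue"))
```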

View File

@@ -4,6 +4,7 @@ Class to remember blocks that need to be uploaded
 and not shared on startup/shutdown
 """
 import atexit
+import os
 from typing import TYPE_CHECKING

 import deadsimplekv

View File

@@ -6,6 +6,9 @@
 from typing import TYPE_CHECKING
 if TYPE_CHECKING:
     from communicator import OnionrCommunicatorDaemon
+from gevent import spawn
 import onionrexceptions
 import logger
 import onionrpeers
@@ -16,6 +19,7 @@ from onionrutils import blockmetadata
 from onionrutils import validatemetadata
 from coredb import blockmetadb
 from coredb import daemonqueue
+from onionrutils.localcommand import local_command
 import onionrcrypto
 import onionrstorage
 from onionrblocks import onionrblacklist
@@ -82,7 +86,7 @@ def download_blocks_from_communicator(comm_inst: "OnionrCommunicatorDaemon"):
         logger.info("Attempting to download %s from %s..." % (blockHash[:12], peerUsed))
         content = peeraction.peer_action(comm_inst, peerUsed, 'getdata/' + blockHash, max_resp_size=3000000)  # block content from random peer (includes metadata)
-        if content != False and len(content) > 0:
+        if content is not False and len(content) > 0:
             try:
                 content = content.encode()
             except AttributeError:
@@ -98,7 +102,8 @@ def download_blocks_from_communicator(comm_inst: "OnionrCommunicatorDaemon"):
             metas = blockmetadata.get_block_metadata_from_data(content)  # returns tuple(metadata, meta), meta is also in metadata
             metadata = metas[0]
             try:
-                metadata_validation_result = validatemetadata.validate_metadata(metadata, metas[2])
+                metadata_validation_result = \
+                    validatemetadata.validate_metadata(metadata, metas[2])
             except onionrexceptions.DataExists:
                 metadata_validation_result = False
             if metadata_validation_result:  # check if metadata is valid, and verify nonce
@@ -113,7 +118,14 @@ def download_blocks_from_communicator(comm_inst: "OnionrCommunicatorDaemon"):
                     removeFromQueue = False
                 else:
                     blockmetadb.add_to_block_DB(blockHash, dataSaved=True)  # add block to meta db
-                    daemonqueue.daemon_queue_add('uploadEvent', blockHash)
+                    spawn(
+                        local_command,
+                        f'/daemon-event/upload_event',
+                        post=True,
+                        is_json=True,
+                        postData={'block': blockHash}
+                    )
                     blockmetadata.process_block_metadata(blockHash)  # caches block metadata values to block database
             else:
                 logger.warn('POW failed for block %s.' % (blockHash,))
@@ -134,14 +146,17 @@ def download_blocks_from_communicator(comm_inst: "OnionrCommunicatorDaemon"):
             onionrpeers.PeerProfiles(peerUsed).addScore(-50)
             if tempHash != 'ed55e34cb828232d6c14da0479709bfa10a0923dca2b380496e6b2ed4f7a0253':
                 # Dumb hack for 404 response from peer. Don't log it if 404 since its likely not malicious or a critical error.
-                logger.warn('Block hash validation failed for ' + blockHash + ' got ' + tempHash)
+                logger.warn(
+                    'Block hash validation failed for ' +
+                    blockHash + ' got ' + tempHash)
             else:
                 removeFromQueue = False  # Don't remove from queue if 404
         if removeFromQueue:
             try:
                 del comm_inst.blockQueue[blockHash]  # remove from block queue both if success or false
                 if count == LOG_SKIP_COUNT:
-                    logger.info('%s blocks remaining in queue' % [len(comm_inst.blockQueue)], terminal=True)
+                    logger.info('%s blocks remaining in queue' %
+                                [len(comm_inst.blockQueue)], terminal=True)
                 count = 0
             except KeyError:
                 pass
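
Here the download loop fires the upload event without waiting on it: spawn() hands the HTTP call to a greenlet and the loop moves on. A minimal sketch of that fire-and-forget pattern follows, with a stand-in notify() in place of onionrutils.localcommand.local_command().

```python
# Sketch of the fire-and-forget notification used above: spawn() returns a
# Greenlet immediately, so the block download loop is not held up by the
# local HTTP call. notify() is only a stand-in for local_command.
from gevent import sleep, spawn


def notify(endpoint: str, post: bool = False, is_json: bool = False,
           postData: dict = None):
    # Stand-in for an HTTP POST to the local client API.
    print(f"POST {endpoint} -> {postData}")


def on_block_stored(block_hash: str):
    # Hand the notification off to a greenlet and keep downloading.
    spawn(
        notify,
        '/daemon-event/upload_event',
        post=True,
        is_json=True,
        postData={'block': block_hash},
    )


if __name__ == "__main__":
    on_block_stored('exampleblockhashvalue')
    sleep(0)  # yield once so the spawned greenlet actually runs
```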

View File

@@ -122,4 +122,4 @@ class BlockUploadSessionManager:
                 comm_inst.blocksToUpload.remove(sess.block_hash)
             except ValueError:
                 pass
-            localcommand.local_command('waitforshare/{session.block_hash}')
+            localcommand.local_command(f'waitforshare/{session.block_hash}')
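
The old call sent the literal text waitforshare/{session.block_hash} to the API because the string was missing its f prefix; with the prefix the block hash is interpolated. A two-line illustration with a stand-in session object:

```python
# Without the f prefix the braces are sent literally; with it the session's
# block hash is interpolated into the waitforshare command.
class _Session:  # stand-in for the upload session object
    block_hash = 'exampleblockhashvalue'


session = _Session()
print('waitforshare/{session.block_hash}')   # waitforshare/{session.block_hash}
print(f'waitforshare/{session.block_hash}')  # waitforshare/exampleblockhashvalue
```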

View File

@@ -48,8 +48,7 @@ class DaemonEventsBP:
                 json_data = {}
             for handler in self.listeners:
                 if handler.__name__ == name:
-                    return Response(
-                        spawn(handler, **json_data).get(timeout=120))
+                    return Response(handler(**json_data))
             abort(404)

     def register_listener(self, listener: Callable):
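
The blueprint no longer spawns a greenlet and waits up to 120 seconds for it; the matching listener is called directly and its return value is wrapped in a Flask Response. A minimal, self-contained sketch of that dispatch pattern follows; the app, port, and ping listener are illustrative, not Onionr's actual blueprint wiring.

```python
# Minimal sketch of the simplified dispatch: find a registered listener by
# name and return its result directly, instead of spawning a greenlet.
from typing import Callable, List

from flask import Flask, Response, abort, request

app = Flask(__name__)
listeners: List[Callable] = []


def register_listener(listener: Callable):
    listeners.append(listener)


def ping(text=''):
    # Example listener; Onionr registers handlers like upload_event this way.
    return f"pong {text}"


register_listener(ping)


@app.route('/daemon-event/<name>', methods=['POST'])
def daemon_event(name):
    json_data = request.get_json(silent=True) or {}
    for handler in listeners:
        if handler.__name__ == name:
            return Response(handler(**json_data))
    abort(404)


if __name__ == '__main__':
    app.run(port=5000)  # assumed port, for demonstration only
```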

View File

@@ -197,7 +197,14 @@ def insert_block(data: Union[str, bytes], header: str = 'txt',
         retData = False
     else:
         # Tell the api server through localCommand to wait for the daemon to upload this block to make statistical analysis more difficult
-        coredb.daemonqueue.daemon_queue_add('uploadEvent', retData)
+        #coredb.daemonqueue.daemon_queue_add('uploadEvent', retData)
+        spawn(
+            localcommand.local_command,
+            f'/daemon-event/upload_event',
+            post=True,
+            is_json=True,
+            postData={'block': retData}
+        ).get(timeout=5)
         coredb.blockmetadb.add.add_to_block_DB(retData, selfInsert=True, dataSaved=True)

     if expire is None:
if expire is None: if expire is None:
@@ -213,10 +220,10 @@ def insert_block(data: Union[str, bytes], header: str = 'txt',
             events.event('insertdeniable', {'content': plaintext, 'meta': plaintextMeta, 'hash': retData, 'peer': bytesconverter.bytes_to_str(asymPeer)}, threaded = True)
         else:
             events.event('insertblock', {'content': plaintext, 'meta': plaintextMeta, 'hash': retData, 'peer': bytesconverter.bytes_to_str(asymPeer)}, threaded = True)
-    #coredb.daemonqueue.daemon_queue_add('remove_from_insert_list', data= dataNonce)
     spawn(
         localcommand.local_command,
         '/daemon-event/remove_from_insert_queue_wrapper',
-        post=True, timeout=10
-    )
+        post=True
+    ).get(timeout=5)
     return retData
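
Unlike the download path, insert_block() blocks on its notifications: spawn(...).get(timeout=5) waits up to five seconds for the local API call to complete before the function returns. A small sketch of that spawn-then-wait pattern, with a stand-in do_local_command() in place of Onionr's localcommand.local_command():

```python
# Sketch of the spawn-then-wait pattern: the call still runs in a greenlet,
# but .get(timeout=5) blocks until it finishes (or raises gevent.Timeout),
# so the daemon has acknowledged the event before the caller returns.
import gevent
from gevent import spawn


def do_local_command(endpoint, post=False, is_json=False, postData=None):
    return "removed"  # stand-in for the HTTP round trip to the client API


def notify_upload(block_hash: str):
    try:
        return spawn(
            do_local_command,
            '/daemon-event/upload_event',
            post=True,
            is_json=True,
            postData={'block': block_hash},
        ).get(timeout=5)  # wait up to 5 seconds for acknowledgement
    except gevent.Timeout:
        return None


if __name__ == "__main__":
    print(notify_upload('exampleblockhashvalue'))
```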

View File

@@ -1,9 +1,19 @@
-'''
+"""
 Onionr - Private P2P Communication

 send a command to the local API server
-'''
-'''
+"""
+import urllib, time
+import json
+import functools
+from typing import TYPE_CHECKING, Callable
+import requests
+import logger, config, deadsimplekv
+from . import getclientapiserver
+import filepaths
+"""
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
@@ -16,15 +26,7 @@
 You should have received a copy of the GNU General Public License
 along with this program. If not, see <https://www.gnu.org/licenses/>.
-'''
-import urllib, time
-import json
-import requests
-import logger, config, deadsimplekv
-from . import getclientapiserver
-import filepaths
+"""

 config.reload()
 cache = deadsimplekv.DeadSimpleKV(filepaths.cached_storage, refresh_seconds=1000)
@@ -54,9 +56,9 @@ def local_command(command, data='', silent = True, post=False,
                   postData = {}, maxWait=20,
                   is_json=False
                   ):
-    '''
+    """
     Send a command to the local http API server, securely. Intended for local clients, DO NOT USE for remote peers.
-    '''
+    """
     # TODO: URL encode parameters, just as an extra measure. May not be needed, but should be added regardless.
     hostname = get_hostname()
     if hostname == False: return False
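
For reference, the signature kept in this file is local_command(command, data='', silent=True, post=False, postData={}, maxWait=20, is_json=False). A usage sketch matching the calls made elsewhere in this commit; it assumes the Onionr src directory is on the import path, and the block hash is a dummy value.

```python
# Usage sketch for local_command(); assumes the Onionr src directory is on
# sys.path so the onionrutils package imports. Dummy block hash throughout.
from onionrutils.localcommand import local_command

# Plain GET-style command, as BlockUploadSessionManager does after an upload:
local_command('waitforshare/exampleblockhashvalue')

# JSON POST to a daemon event endpoint, as the insert and download paths now do:
local_command(
    '/daemon-event/upload_event',
    post=True,
    is_json=True,
    postData={'block': 'exampleblockhashvalue'},
)
```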