punish peers for overloading get responses

master
Kevin Froman 2019-07-10 02:29:05 -05:00
parent 52c7ba860d
commit be318f2403
3 changed files with 16 additions and 10 deletions

View File

@@ -20,6 +20,7 @@
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''
 import sys, os, time
+import streamedrequests
 import core, config, logger, onionr
 import onionrexceptions, onionrpeers, onionrevents as events, onionrplugins as plugins, onionrblockapi as block
 from communicatorutils import servicecreator, onionrcommunicatortimers
@@ -303,6 +304,7 @@ class OnionrCommunicatorDaemon:
     def peerAction(self, peer, action, data='', returnHeaders=False, max_resp_size=5242880):
         '''Perform a get request to a peer'''
+        penalty_score = -10
         if len(peer) == 0:
             return False
         #logger.debug('Performing ' + action + ' with ' + peer + ' on port ' + str(self.proxyPort))
@@ -311,12 +313,15 @@ class OnionrCommunicatorDaemon:
             url += '&data=' + data
         self._core.setAddressInfo(peer, 'lastConnectAttempt', epoch.get_epoch()) # mark the time we're trying to request this peer
-        retData = basicrequests.do_get_request(self._core, url, port=self.proxyPort)
+        try:
+            retData = basicrequests.do_get_request(self._core, url, port=self.proxyPort, max_size=max_resp_size)
+        except streamedrequests.exceptions.ResponseLimitReached:
+            retData = False
+            penalty_score = -100
         # if request failed, (error), mark peer offline
         if retData == False:
             try:
-                self.getPeerProfileInstance(peer).addScore(-10)
+                self.getPeerProfileInstance(peer).addScore(penalty_score)
                 self.removeOnlinePeer(peer)
                 if action != 'ping' and not self.shutdown:
                     logger.warn('Lost connection to ' + peer, terminal=True)
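
The scoring change above, in isolation: an ordinary failed request still costs the peer 10 points, while a peer that sends more data than max_resp_size allows loses 100. A minimal sketch of that decision (not part of the commit; PeerProfile is a hypothetical stand-in for the object returned by getPeerProfileInstance):

class PeerProfile:
    '''Hypothetical stand-in for the communicator's peer profile object.'''
    def __init__(self, address):
        self.address = address
        self.score = 0

    def addScore(self, amount):
        self.score += amount

def penalize(profile, request_failed, oversized_response):
    penalty_score = -10        # ordinary failed request
    if oversized_response:
        penalty_score = -100   # peer tried to push past the response size limit
    if request_failed:
        profile.addScore(penalty_score)

profile = PeerProfile('example.onion')  # placeholder address
penalize(profile, request_failed=True, oversized_response=True)
print(profile.score)  # -100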

View File

@@ -19,7 +19,7 @@
 '''
 import requests, streamedrequests
 import logger, onionrexceptions
-def do_post_request(core_inst, url, data={}, port=0, proxyType='tor'):
+def do_post_request(core_inst, url, data={}, port=0, proxyType='tor', max_size=10000):
     '''
     Do a POST request through a local tor or i2p instance
     '''
@@ -34,8 +34,9 @@ def do_post_request(core_inst, url, data={}, port=0, proxyType='tor'):
     headers = {'user-agent': 'PyOnionr', 'Connection':'close'}
     try:
         proxies = {'http': 'socks4a://127.0.0.1:' + str(port), 'https': 'socks4a://127.0.0.1:' + str(port)}
-        r = requests.post(url, data=data, headers=headers, proxies=proxies, allow_redirects=False, timeout=(15, 30))
-        retData = r.text
+        #r = requests.post(url, data=data, headers=headers, proxies=proxies, allow_redirects=False, timeout=(15, 30))
+        r = streamedrequests.post(url, post_data=data, request_headers=headers, proxy=proxies, connect_timeout=15, stream_timeout=30, max_size=max_size, allow_redirects=False)
+        retData = r[1]
     except KeyboardInterrupt:
         raise KeyboardInterrupt
     except requests.exceptions.RequestException as e:
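
do_post_request now uses the size-capped streamedrequests.post instead of requests.post. As used in this diff, it returns a (response, body) tuple rather than a requests.Response, so the body is read from r[1] instead of r.text. A minimal sketch of the call shape (not part of the commit; the URL is a placeholder and the omitted proxy and header arguments are assumed to have usable defaults):

import streamedrequests

url = 'http://example.onion/apipoint'  # placeholder endpoint for illustration
# Streams the reply, stops after max_size bytes, and returns a (response, body) tuple.
r = streamedrequests.post(url, post_data={'data': 'x'}, connect_timeout=15,
                          stream_timeout=30, max_size=10000, allow_redirects=False)
retData = r[1]  # response body, where requests.post(...).text was used before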
@@ -61,7 +62,7 @@ def do_get_request(core_inst, url, port=0, proxyType='tor', ignoreAPI=False, ret
     response_headers = dict()
     try:
         proxies = {'http': 'socks4a://127.0.0.1:' + str(port), 'https': 'socks4a://127.0.0.1:' + str(port)}
-        r = streamedrequests.get(url, request_headers=headers, allow_redirects=False, proxy=proxies, connect_timeout=15, max_size=max_size)
+        r = streamedrequests.get(url, request_headers=headers, allow_redirects=False, proxy=proxies, connect_timeout=15, stream_timeout=120, max_size=max_size)
         # Check server is using same API version as us
         if not ignoreAPI:
             try:
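
do_get_request gains a stream_timeout and keeps its max_size cap; when a peer keeps sending past that cap, streamedrequests raises ResponseLimitReached, which can propagate up to peerAction and trigger the harsher -100 penalty shown earlier. A minimal sketch of a bounded GET along the same lines (not part of the commit; proxy and headers are omitted and assumed to default):

import streamedrequests

def bounded_get(url, max_size=5242880):
    '''Fetch url, refusing to read more than max_size bytes of response body.'''
    try:
        response, body = streamedrequests.get(url, connect_timeout=15,
                                              stream_timeout=120,
                                              max_size=max_size,
                                              allow_redirects=False)
    except streamedrequests.exceptions.ResponseLimitReached:
        return False  # caller treats this like a failed request and punishes the peer
    return body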

View File

@@ -48,7 +48,7 @@ function openReply(bHash, quote, subject){
             entry = inbox[i]
         }
     }
-    if (entry.getAttribute('data-nameSet') == 'true'){
+    if (entry.getAttribute('data-nameset') == 'true'){
         document.getElementById('friendSelect').value = entry.getElementsByTagName('input')[0].value
     }
     key = entry.getAttribute('data-pubkey')
@@ -62,7 +62,7 @@ function openReply(bHash, quote, subject){
     }
     quote = '\n' + key.substring(0, 12) + ' wrote:' + '\n' + splitQuotes.join('\n')
     document.getElementById('draftText').value = quote
-    setActiveTab('send message')
+    setActiveTab('compose')
 }

 function openThread(bHash, sender, date, sigBool, pubkey, subjectLine){
@@ -185,7 +185,7 @@ function loadInboxEntries(bHash){
         senderInput.value = resp['meta']['signer'] || 'Anonymous'
         entry.setAttribute('data-nameSet', false)
     }
-    bHashDisplay.innerText = bHash.substring(0, 10)
+    //bHashDisplay.innerText = bHash.substring(0, 10)
     entry.setAttribute('data-hash', bHash)
     entry.setAttribute('data-pubkey', resp['meta']['signer'])
     senderInput.readOnly = true