onionr/src/onionrstorage/__init__.py

"""Onionr - Private P2P Communication.
Handle block storage, providing an abstraction for
storing blocks either on the file system or in the database
"""
import sys
import sqlite3
import os
from onionrutils import bytesconverter
from onionrutils import stringvalidators
from coredb import dbfiles
from filepaths import block_data_location
import onionrexceptions
from onionrcrypto import hashers
from . import setdata
from etc.onionrvalues import DATABASE_LOCK_TIMEOUT
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
DB_ENTRY_SIZE_LIMIT = 10000 # Will be a config option
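# Blocks whose in-memory size is at or below this limit are stored as rows in
# the block data DB; larger blocks are written out as .dat files on disk.
# See store() below, where this threshold is applied via sys.getsizeof().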

set_data = setdata.set_data


def _dbInsert(block_hash, data):
    conn = sqlite3.connect(dbfiles.block_data_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    c = conn.cursor()
    data = (block_hash, data)
    c.execute('INSERT INTO blockData (hash, data) VALUES(?, ?);', data)
    conn.commit()
    conn.close()
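
# The queries above and below assume a blockData table with hash and data
# columns; a rough equivalent of the implied schema (the table itself is
# created elsewhere, so treat this as a sketch rather than the actual DDL):
#
#   CREATE TABLE blockData (hash TEXT NOT NULL, data BLOB NOT NULL);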


def _dbFetch(block_hash):
    conn = sqlite3.connect(dbfiles.block_data_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    c = conn.cursor()
    c.execute('SELECT data from blockData where hash = ?', (block_hash,))
    row = c.fetchone()
    # Close the connection whether or not a row was found
    conn.close()
    if row is None:
        return None
    return row[0]


def deleteBlock(block_hash):
    # Call removeblock.remove_block if the storage byte count should be
    # updated automatically as well
    if os.path.exists(f'{block_data_location}/{block_hash}.dat'):
        os.remove(f'{block_data_location}/{block_hash}.dat')
        return True
    conn = sqlite3.connect(dbfiles.block_data_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    c = conn.cursor()
    data = (block_hash,)
    c.execute('DELETE FROM blockData where hash = ?', data)
    conn.commit()
    conn.close()
    return True


def store(data, block_hash=''):
    if not stringvalidators.validate_hash(block_hash):
        raise ValueError
    ourHash = hashers.sha3_hash(data)
    if block_hash != '':
        if not ourHash == block_hash:
            raise ValueError(
                'Hash specified does not meet internal hash check')
    else:
        block_hash = ourHash

    if DB_ENTRY_SIZE_LIMIT >= sys.getsizeof(data):
        _dbInsert(block_hash, data)
    else:
        with open(
                f'{block_data_location}/{block_hash}.dat', 'wb') as blck_file:
            blck_file.write(data)
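
# store() does not return the block hash, so callers that need it afterwards
# should compute it up front (e.g. with hashers.sha3_hash) and pass it in;
# see the usage sketch at the end of this file.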


def getData(bHash):
    if not stringvalidators.validate_hash(bHash):
        raise ValueError

    bHash = bytesconverter.bytes_to_str(bHash)
    bHash = bHash.strip()

    # First check the disk for a data file for this hash;
    # if there is no file, check the block data DB.
    # If the data is in neither place, raise an exception.
    ret_data = None
    fileLocation = '%s/%s.dat' % (block_data_location, bHash)
    not_found_msg = "Block data not found for: "

    if os.path.exists(fileLocation):
        with open(fileLocation, 'rb') as block:
            ret_data = block.read()
    else:
        ret_data = _dbFetch(bHash)
        if ret_data is None:
            raise onionrexceptions.NoDataAvailable(not_found_msg + str(bHash))
    return ret_data
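

# Usage sketch (illustrative only; assumes the Onionr data directory and the
# block data DB have already been initialized elsewhere):
#
#   from onionrcrypto import hashers
#   import onionrstorage
#
#   payload = b'example block data'
#   block_hash = hashers.sha3_hash(payload)
#   onionrstorage.store(payload, block_hash=block_hash)
#   assert onionrstorage.getData(block_hash) == payload
#   onionrstorage.deleteBlock(block_hash)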