Merge pull request #9 from beardog108/communicator-multithreading-newline

Add newline delimiter to metadata+content

commit 71f4318719
@@ -517,7 +517,7 @@ class OnionrCommunicate:
         '''
         if isThread:
             self.processBlocksThreads += 1
-        for i in self._core.getBlockList(unsaved=True).split("\n"):
+        for i in self._core.getBlockList(unsaved = True):
             if i != "":
                 if i in self.blocksProcessing or i in self.ignoredHashes:
                     #logger.debug('already processing ' + i)
@@ -553,13 +553,13 @@ class OnionrCommunicate:
                     pass
                 try:
                     #blockMetadata = json.loads(self._core.getData(i)).split('}')[0] + '}'
-                    blockMetadata = json.loads(blockContent[:blockContent.rfind(b'}') + 1].decode())
+                    blockMetadata = json.loads(blockContent[:blockContent.find(b'\n')].decode())
                     try:
                         blockMeta2 = json.loads(blockMetadata['meta'])
                     except KeyError:
                         blockMeta2 = {'type': ''}
                         pass
-                    blockContent = blockContent[blockContent.rfind(b'}') + 1:]
+                    blockContent = blockContent[blockContent.find(b'\n') + 1:]
                     try:
                         blockContent = blockContent.decode()
                     except AttributeError:
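The two changed lines above define the new block layout: metadata JSON, a single b'\n' delimiter, then the raw content, with the reader splitting on the first newline instead of searching backwards for '}' (which misfired whenever the content itself contained a closing brace). A minimal sketch of that parsing rule, using a hypothetical parse_block helper rather than Onionr's own code:

import json

def parse_block(raw: bytes):
    delim = raw.find(b'\n')                       # first newline is the delimiter
    if delim == -1:
        raise ValueError('block has no metadata/content delimiter')
    metadata = json.loads(raw[:delim].decode())   # metadata JSON sits before the newline
    content = raw[delim + 1:]                     # content may contain further newlines or braces
    return metadata, content

# hypothetical payload in the same shape the communicator reads
meta, content = parse_block(b'{"meta": "{\\"type\\": \\"pm\\"}"}\nhello world')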
@@ -678,7 +678,7 @@ class Core:
         signature = ''

         if sign:
-            signature = self._crypto.edSign(metadata + data, self._crypto.privKey, encodeResult=True)
+            signature = self._crypto.edSign(metadata + b'\n' + data, self._crypto.privKey, encodeResult=True)
             ourID = self._crypto.pubKeyHashID()
             # Convert from bytes on some py versions?
             try:
@@ -692,7 +692,7 @@ class Core:
         if len(data) == 0:
             logger.error('Will not insert empty block')
         else:
-            addedHash = self.setData(metadata + data)
+            addedHash = self.setData(metadata + b'\n' + data)
             self.addToBlockDB(addedHash, selfInsert=True)
             self.setBlockType(addedHash, header)
             retData = addedHash
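On the insertion side, the same delimiter is written, and when signing is requested the signature is computed over the exact delimited payload that setData stores, so verifiers split the same bytes the writer signed. A hypothetical sketch under those assumptions (build_block and sign_fn are illustrative stand-ins, not Onionr APIs):

import json

def build_block(meta: dict, data: bytes, sign_fn=None):
    # serialize metadata, then join with the single newline delimiter,
    # mirroring setData(metadata + b'\n' + data) in the hunk above
    metadata = json.dumps(meta).encode()
    payload = metadata + b'\n' + data
    # sign the exact bytes that will be stored; in the diff this is done
    # with self._crypto.edSign over the same delimited payload
    signature = sign_fn(payload) if sign_fn is not None else None
    return payload, signature

payload, sig = build_block({'type': 'txt'}, b'hello\nworld')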
@@ -334,7 +334,7 @@ class OnionrUtils:
         '''
             Find, decrypt, and return array of PMs (array of dictionary, {from, text})
         '''
-        #blocks = self._core.getBlockList().split('\n')
+        #blocks = self._core.getBlockList()
         blocks = self._core.getBlocksByType('pm')
         message = ''
         sender = ''
@@ -344,8 +344,8 @@ class OnionrUtils:
             try:
                 with open('data/blocks/' + i + '.dat', 'r') as potentialMessage:
                     potentialMessage = potentialMessage.read()
-                    blockMetadata = json.loads(potentialMessage[:potentialMessage.rfind('}') + 1])
-                    blockContent = potentialMessage[potentialMessage.rfind('}') + 1:]
+                    blockMetadata = json.loads(potentialMessage[:potentialMessage.find('\n')])
+                    blockContent = potentialMessage[potentialMessage.find('\n') + 1:]

                     try:
                         message = self._core._crypto.pubKeyDecrypt(blockContent, encodedData=True, anonymous=True)
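For the text-mode PM path above, a quick round-trip check (hypothetical, not part of the PR) that the first-newline split recovers exactly what was written, even when the message body itself contains newlines:

import json

# build a block string the same way the insertion side now does
raw = json.dumps({'type': 'pm'}) + '\n' + 'line one\nline two'

# split it the same way getPMs now does
block_metadata = json.loads(raw[:raw.find('\n')])
block_content = raw[raw.find('\n') + 1:]

assert block_metadata == {'type': 'pm'}
assert block_content == 'line one\nline two'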