fix computing hashes for duplicate messages. See #8258
commit 580cdb2723
parent c5c1e5d235
@@ -1449,15 +1449,16 @@ class DecryptedMessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
         # ignore message duplicates
         if self.msgtxt and self.id_ and self.jid:
-            self.msghash = hashlib.sha256("%s|%s|%s" % (
-                hashlib.sha256(str(self.msgtxt)).hexdigest(),
-                hashlib.sha256(str(self.id_)).hexdigest(),
-                hashlib.sha256(str(self.jid)).hexdigest())).digest()
+            self.msghash = hashlib.sha256(("%s|%s|%s" % (
+                hashlib.sha256(self.msgtxt.encode('utf-8')).hexdigest(),
+                hashlib.sha256(self.id_.encode('utf-8')).hexdigest(),
+                hashlib.sha256(self.jid.encode('utf-8')).hexdigest())).encode(
+                    'utf-8')).digest()
             if self.msghash in self.conn.received_message_hashes:
-                log.info("Ignoring duplicated message from '%s' with id '%s'" % (str(self.jid), str(self.id_)))
+                log.info("Ignoring duplicated message from '%s' with id '%s'" % (self.jid, self.id_))
                 return False
             else:
-                log.debug("subhashes: msgtxt, id_, jid = ('%s', '%s', '%s')" % (hashlib.sha256(str(self.msgtxt)).hexdigest(), hashlib.sha256(str(self.id_)).hexdigest(), hashlib.sha256(str(self.jid)).hexdigest()))
+                log.debug("subhashes: msgtxt, id_, jid = ('%s', '%s', '%s')" % (hashlib.sha256(self.msgtxt.encode('utf-8')).hexdigest(), hashlib.sha256(self.id_.encode('utf-8')).hexdigest(), hashlib.sha256(self.jid.encode('utf-8')).hexdigest()))
                 self.conn.received_message_hashes.append(self.msghash)
                 # only record the last 20000 hashes (should be about 1MB [32 bytes per hash]
                 # and about 24 hours if you receive a message every 5 seconds)
 
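For context, a minimal standalone sketch of the duplicate-detection scheme after this fix. message_hash and the sample values below are illustrative (not part of the commit), and received_message_hashes stands in for self.conn.received_message_hashes from the diff. The underlying reason for the change: on Python 3, hashlib.sha256() only accepts bytes, so each component must be encoded to UTF-8 before hashing.

import hashlib

# Sketch of the fixed hashing scheme: hash each component's UTF-8 bytes,
# join the hex digests, and hash the joined string (encoded again) to get
# a 32-byte digest identifying the (msgtxt, id_, jid) triple.
def message_hash(msgtxt, id_, jid):
    subhashes = "%s|%s|%s" % (
        hashlib.sha256(msgtxt.encode('utf-8')).hexdigest(),
        hashlib.sha256(id_.encode('utf-8')).hexdigest(),
        hashlib.sha256(jid.encode('utf-8')).hexdigest())
    return hashlib.sha256(subhashes.encode('utf-8')).digest()

# Stand-in for self.conn.received_message_hashes: a repeated delivery of the
# same triple produces the same digest, so the second copy is recognized.
received_message_hashes = []
for _ in range(2):
    h = message_hash("hello", "msg-1", "user@example.org")
    if h in received_message_hashes:
        print("Ignoring duplicated message")
    else:
        received_message_hashes.append(h)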