Merge branch 'master' into 'master'

Add MAM for MUCs

See merge request gajim/gajim!152
Philipp Hörist 2017-11-19 23:14:10 +01:00
commit 3683f23877
14 changed files with 616 additions and 235 deletions

View file

@@ -1,6 +1,6 @@
 import subprocess
-__version__ = "0.16.11.1"
+__version__ = "0.16.11.2"
 try:
     node = subprocess.Popen('git rev-parse --short=12 HEAD', shell=True,

View file

@@ -33,10 +33,12 @@ through ClientCaps objects which are hold by contact instances.
 import base64
 import hashlib
+from collections import namedtuple
 import logging
 log = logging.getLogger('gajim.c.caps_cache')
 
+import nbxmpp
 from nbxmpp import (NS_XHTML_IM, NS_ESESSION, NS_CHATSTATES,
     NS_JINGLE_ICE_UDP, NS_JINGLE_RTP_AUDIO, NS_JINGLE_RTP_VIDEO,
     NS_JINGLE_FILE_TRANSFER_5)
@@ -44,7 +46,7 @@ from nbxmpp import (NS_XHTML_IM, NS_ESESSION, NS_CHATSTATES,
 FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_ESESSION,
     NS_JINGLE_ICE_UDP, NS_JINGLE_RTP_AUDIO, NS_JINGLE_RTP_VIDEO,
     NS_JINGLE_FILE_TRANSFER_5]
+from gajim.common import app
 
 # Query entry status codes
 NEW = 0
 QUERIED = 1
@@ -56,12 +58,15 @@ FAKED = 3 # allow NullClientCaps to behave as it has a cached item
 ################################################################################
 capscache = None
+muc_caps_cache = None
 
 def initialize(logger):
     """
     Initialize this module
     """
     global capscache
+    global muc_caps_cache
     capscache = CapsCache(logger)
+    muc_caps_cache = MucCapsCache()
 
 def client_supports(client_caps, requested_feature):
     lookup_item = client_caps.get_cache_lookup_strategy()
@@ -438,3 +443,51 @@ class CapsCache(object):
         key = (hash_method, hash)
         if key in self.__cache:
             del self.__cache[key]
+
+
+class MucCapsCache:
+
+    DiscoInfo = namedtuple('DiscoInfo', ['identities', 'features', 'data'])
+
+    def __init__(self):
+        self.cache = {}
+
+    def append(self, stanza):
+        jid = stanza.getFrom()
+        identities, features, data = [], [], []
+        query_childs = stanza.getQueryChildren()
+        if not query_childs:
+            app.log('gajim.muc').warning('%s returned empty disco info', jid)
+            return
+
+        for child in query_childs:
+            if child.getName() == 'identity':
+                attr = {}
+                for key in child.getAttrs().keys():
+                    attr[key] = child.getAttr(key)
+                identities.append(attr)
+            elif child.getName() == 'feature':
+                features.append(child.getAttr('var'))
+            elif child.getName() == 'x':
+                if child.getNamespace() == nbxmpp.NS_DATA:
+                    data.append(nbxmpp.DataForm(node=child))
+
+        self.cache[jid] = self.DiscoInfo(identities, features, data)
+
+    def is_cached(self, jid):
+        return jid in self.cache
+
+    def supports(self, jid, feature):
+        if jid in self.cache:
+            if feature in self.cache[jid].features:
+                return True
+        return False
+
+    def has_mam(self, jid):
+        try:
+            if nbxmpp.NS_MAM_2 in self.cache[jid].features:
+                return True
+            if nbxmpp.NS_MAM_1 in self.cache[jid].features:
+                return True
+        except (KeyError, AttributeError):
+            return False

View file

@@ -65,6 +65,7 @@ def create_log_db():
             CREATE TABLE logs(
                     log_line_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
+                    account_id INTEGER,
                     jid_id INTEGER,
                     contact_name TEXT,
                     time INTEGER,
@@ -80,6 +81,13 @@
                     marker INTEGER
             );
 
+            CREATE TABLE last_archive_message(
+                    jid_id INTEGER PRIMARY KEY UNIQUE,
+                    last_mam_id TEXT,
+                    oldest_mam_timestamp TEXT,
+                    last_muc_timestamp TEXT
+            );
+
             CREATE INDEX idx_logs_jid_id_time ON logs (jid_id, time DESC);
             '''
         )

View file

@@ -409,8 +409,6 @@ class Config:
         'oauth2_client_id': [ opt_str, '0000000044077801', _('client_id for OAuth 2.0 authentication.')],
         'oauth2_redirect_url': [ opt_str, 'https%3A%2F%2Fgajim.org%2Fmsnauth%2Findex.cgi', _('redirect_url for OAuth 2.0 authentication.')],
         'opened_chat_controls': [opt_str, '', _('Space separated list of JIDs for which we want to re-open a chat window on next startup.')],
-        'last_mam_id': [opt_str, '', _('Last MAM id we are syncronized with')],
-        'mam_start_date': [opt_int, 0, _('The earliest date we requested MAM history for')],
     }, {}),
     'statusmsg': ({
         'message': [ opt_str, '' ],

View file

@@ -42,6 +42,7 @@ import locale
 import hmac
 import hashlib
 import json
+from functools import partial
 
 try:
     randomsource = random.SystemRandom()
@@ -432,7 +433,7 @@ class CommonConnection:
         if obj.message is None:
             return
 
-        app.logger.insert_into_logs(jid, obj.timestamp, obj.kind,
+        app.logger.insert_into_logs(self.name, jid, obj.timestamp, obj.kind,
                                     message=obj.message,
                                     subject=obj.subject,
                                     additional_data=obj.additional_data,
@@ -1913,8 +1914,6 @@ class Connection(CommonConnection, ConnectionHandlers):
             self.archiving_namespace = nbxmpp.NS_MAM_2
         elif nbxmpp.NS_MAM_1 in obj.features:
             self.archiving_namespace = nbxmpp.NS_MAM_1
-        elif nbxmpp.NS_MAM in obj.features:
-            self.archiving_namespace = nbxmpp.NS_MAM
         if self.archiving_namespace:
             self.archiving_supported = True
             self.archiving_313_supported = True
@@ -2583,6 +2582,11 @@ class Connection(CommonConnection, ConnectionHandlers):
             # Never join a room when invisible
             return
 
+        self.discoverMUC(
+            room_jid, partial(self._join_gc, nick, show, room_jid,
+                              password, change_nick, rejoin))
+
+    def _join_gc(self, nick, show, room_jid, password, change_nick, rejoin):
         # Check time first in the FAST table
         last_date = app.logger.get_room_last_message_time(
             self.name, room_jid)
@@ -2599,11 +2603,19 @@
         if app.config.get('send_sha_in_gc_presence'):
             p = self.add_sha(p)
         self.add_lang(p)
-        if not change_nick:
-            t = p.setTag(nbxmpp.NS_MUC + ' x')
+        if change_nick:
+            self.connection.send(p)
+            return
+
+        t = p.setTag(nbxmpp.NS_MUC + ' x')
+        if muc_caps_cache.has_mam(room_jid):
+            # The room is MAM capable dont get MUC History
+            t.setTag('history', {'maxchars': '0'})
+        else:
+            # Request MUC History (not MAM)
             tags = {}
             timeout = app.config.get_per('rooms', room_jid,
                                          'muc_restore_timeout')
             if timeout is None or timeout == -2:
                 timeout = app.config.get('muc_restore_timeout')
             if last_date == 0 and timeout >= 0:
@@ -2621,8 +2633,9 @@
                 tags['maxstanzas'] = nb
             if tags:
                 t.setTag('history', tags)
-            if password:
-                t.setTagData('password', password)
+
+        if password:
+            t.setTagData('password', password)
         self.connection.send(p)
 
     def _nec_gc_message_outgoing(self, obj):

View file

@@ -47,6 +47,7 @@ from gajim.common import helpers
 from gajim.common import app
 from gajim.common import dataforms
 from gajim.common import jingle_xtls
+from gajim.common.caps_cache import muc_caps_cache
 from gajim.common.commands import ConnectionCommands
 from gajim.common.pubsub import ConnectionPubSub
 from gajim.common.protocol.caps import ConnectionCaps
@@ -96,6 +97,29 @@ class ConnectionDisco:
         id_ = self._discover(nbxmpp.NS_DISCO_INFO, jid, node, id_prefix)
         self.disco_info_ids.append(id_)
 
+    def discoverMUC(self, jid, callback):
+        disco_info = nbxmpp.Iq(typ='get', to=jid, queryNS=nbxmpp.NS_DISCO_INFO)
+        self.connection.SendAndCallForResponse(
+            disco_info, self.received_muc_info, {'callback': callback})
+
+    def received_muc_info(self, conn, stanza, callback):
+        if nbxmpp.isResultNode(stanza):
+            app.log('gajim.muc').info(
+                'Received MUC DiscoInfo for %s', stanza.getFrom())
+            muc_caps_cache.append(stanza)
+            callback()
+        else:
+            error = stanza.getError()
+            if error == 'item-not-found':
+                # Groupchat does not exist
+                callback()
+                return
+            app.nec.push_incoming_event(
+                InformationEvent(None, conn=self,
+                                 level='error',
+                                 pri_txt=_('Unable to join Groupchat'),
+                                 sec_txt=error))
+
     def request_register_agent_info(self, agent):
         if not self.connection or self.connected < 2:
             return None
@@ -760,6 +784,8 @@ class ConnectionHandlersBase:
             self._nec_message_received)
         app.ged.register_event_handler('mam-message-received', ged.CORE,
             self._nec_message_received)
+        app.ged.register_event_handler('mam-gc-message-received', ged.CORE,
+            self._nec_message_received)
         app.ged.register_event_handler('decrypted-message-received', ged.CORE,
             self._nec_decrypted_message_received)
         app.ged.register_event_handler('gc-message-received', ged.CORE,
@@ -776,6 +802,8 @@ class ConnectionHandlersBase:
             self._nec_message_received)
         app.ged.remove_event_handler('mam-message-received', ged.CORE,
             self._nec_message_received)
+        app.ged.remove_event_handler('mam-gc-message-received', ged.CORE,
+            self._nec_message_received)
         app.ged.remove_event_handler('decrypted-message-received', ged.CORE,
             self._nec_decrypted_message_received)
         app.ged.remove_event_handler('gc-message-received', ged.CORE,
@@ -918,7 +946,8 @@ class ConnectionHandlersBase:
                 app.config.should_log(self.name, obj.jid):
             show = app.logger.convert_show_values_to_db_api_values(obj.show)
             if show is not None:
-                app.logger.insert_into_logs(nbxmpp.JID(obj.jid).getStripped(),
+                app.logger.insert_into_logs(self.name,
+                                            nbxmpp.JID(obj.jid).getStripped(),
                                             time_time(),
                                             KindConstant.STATUS,
                                             message=obj.status,
@@ -1044,7 +1073,8 @@ class ConnectionHandlersBase:
             # if not obj.nick, it means message comes from room itself
             # usually it hold description and can be send at each connection
             # so don't store it in logs
-            app.logger.insert_into_logs(obj.jid,
+            app.logger.insert_into_logs(self.name,
+                                        obj.jid,
                                         obj.timestamp,
                                         KindConstant.GC_MSG,
                                         message=obj.msgtxt,
@@ -1064,7 +1094,8 @@ class ConnectionHandlersBase:
         subject = msg.getSubject()
 
         if session.is_loggable():
-            app.logger.insert_into_logs(nbxmpp.JID(frm).getStripped(),
+            app.logger.insert_into_logs(self.name,
+                                        nbxmpp.JID(frm).getStripped(),
                                         tim,
                                         KindConstant.ERROR,
                                         message=error_msg,
@@ -1266,10 +1297,6 @@ ConnectionHandlersBase, ConnectionJingle, ConnectionIBBytestream):
         app.nec.register_incoming_event(ArchivingErrorReceivedEvent)
         app.nec.register_incoming_event(
             Archiving313PreferencesChangedReceivedEvent)
-        app.nec.register_incoming_event(
-            ArchivingFinishedLegacyReceivedEvent)
-        app.nec.register_incoming_event(
-            ArchivingFinishedReceivedEvent)
         app.nec.register_incoming_event(NotificationEvent)
 
         app.ged.register_event_handler('http-auth-received', ged.CORE,
@@ -2210,7 +2237,6 @@ ConnectionHandlersBase, ConnectionJingle, ConnectionIBBytestream):
         con.RegisterHandler('iq', self._IqPingCB, 'get', nbxmpp.NS_PING)
         con.RegisterHandler('iq', self._SearchCB, 'result', nbxmpp.NS_SEARCH)
         con.RegisterHandler('iq', self._PrivacySetCB, 'set', nbxmpp.NS_PRIVACY)
-        con.RegisterHandler('iq', self._ArchiveCB, ns=nbxmpp.NS_MAM)
        con.RegisterHandler('iq', self._ArchiveCB, ns=nbxmpp.NS_MAM_1)
        con.RegisterHandler('iq', self._ArchiveCB, ns=nbxmpp.NS_MAM_2)
        con.RegisterHandler('iq', self._PubSubCB, 'result')

View file

@@ -954,7 +954,8 @@ class GcPresenceReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
             show = app.logger.convert_show_values_to_db_api_values(self.show)
             if show is not None:
                 fjid = nbxmpp.JID(self.fjid)
-                app.logger.insert_into_logs(fjid.getStripped(),
+                app.logger.insert_into_logs(self.conn.name,
+                                            fjid.getStripped(),
                                             time_time(),
                                             KindConstant.GCSTATUS,
                                             contact_name=fjid.getResource(),
@@ -1052,36 +1053,33 @@ class MamMessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
         :stanza: Complete stanza Node
         :forwarded: Forwarded Node
         :result: Result Node
-        :unique_id: The unique stable id
         '''
         self._set_base_event_vars_as_attributes(base_event)
         self.additional_data = {}
         self.encrypted = False
         self.groupchat = False
+        self.nick = None
 
     def generate(self):
         archive_jid = self.stanza.getFrom()
-        own_jid = self.conn.get_own_jid()
+        own_jid = self.conn.get_own_jid().getStripped()
         if archive_jid and not archive_jid.bareMatch(own_jid):
             # MAM Message not from our Archive
+            log.info('MAM message not from our user archive')
             return False
 
         self.msg_ = self.forwarded.getTag('message', protocol=True)
 
         if self.msg_.getType() == 'groupchat':
+            log.info('Received groupchat message from user archive')
            return False
 
         # use stanza-id as unique-id
         self.unique_id, origin_id = self.get_unique_id()
 
         # Check for duplicates
-        if app.logger.find_stanza_id(self.unique_id, origin_id):
+        if app.logger.find_stanza_id(own_jid, self.unique_id, origin_id):
             return
 
         self.msgtxt = self.msg_.getTagData('body')
-        self.query_id = self.result.getAttr('queryid')
 
         frm = self.msg_.getFrom()
         # Some servers dont set the 'to' attribute when
@@ -1123,26 +1121,83 @@ class MamMessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
         return True
 
     def get_unique_id(self):
+        stanza_id = self.get_stanza_id(self.result, query=True)
+
         if self.conn.get_own_jid().bareMatch(self.msg_.getFrom()):
-            # On our own Messages we have to check for both
-            # stanza-id and origin-id, because other resources
-            # maybe not support origin-id
-            stanza_id = None
-            if self.result.getNamespace() == nbxmpp.NS_MAM_2:
-                # Only mam:2 ensures valid stanza-id
-                stanza_id = self.get_stanza_id(self.result, query=True)
-            # try always to get origin-id because its a message
-            # we sent.
+            # message we sent
             origin_id = self.msg_.getOriginID()
             return stanza_id, origin_id
 
         # A message we received
-        elif self.result.getNamespace() == nbxmpp.NS_MAM_2:
-            # Only mam:2 ensures valid stanza-id
-            return self.get_stanza_id(self.result, query=True), None
-        return None, None
+        return stanza_id, None
+
+
+class MamGcMessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
+    name = 'mam-gc-message-received'
+    base_network_events = ['raw-mam-message-received']
+
+    def __init__(self, name, base_event):
+        '''
+        Pre-Generated attributes on self:
+
+        :conn: Connection instance
+        :stanza: Complete stanza Node
+        :forwarded: Forwarded Node
+        :result: Result Node
+        '''
+        self._set_base_event_vars_as_attributes(base_event)
+        self.additional_data = {}
+        self.encrypted = False
+        self.groupchat = True
+        self.kind = KindConstant.GC_MSG
+
+    def generate(self):
+        self.msg_ = self.forwarded.getTag('message', protocol=True)
+        if self.msg_.getType() != 'groupchat':
+            return False
+
+        self.room_jid = self.stanza.getFrom().getStripped()
+        self.unique_id = self.get_stanza_id(self.result, query=True)
+
+        # Check for duplicates
+        if app.logger.find_stanza_id(self.room_jid, self.unique_id,
+                                     groupchat=True):
+            return
+
+        self.msgtxt = self.msg_.getTagData('body')
+        self.with_ = self.msg_.getFrom().getStripped()
+        self.nick = self.msg_.getFrom().getResource()
+
+        # Get the real jid if we have it
+        self.real_jid = None
+        muc_user = self.msg_.getTag('x', namespace=nbxmpp.NS_MUC_USER)
+        if muc_user is not None:
+            self.real_jid = muc_user.getTagAttr('item', 'jid')
+
+        delay = self.forwarded.getTagAttr(
+            'delay', 'stamp', namespace=nbxmpp.NS_DELAY2)
+        if delay is None:
+            log.error('Received MAM message without timestamp')
+            return
+
+        self.timestamp = helpers.parse_datetime(
+            delay, check_utc=True, epoch=True)
+        if self.timestamp is None:
+            log.error('Received MAM message with invalid timestamp: %s', delay)
+            return
+
+        # Save timestamp added by the user
+        user_delay = self.msg_.getTagAttr(
+            'delay', 'stamp', namespace=nbxmpp.NS_DELAY2)
+        if user_delay is not None:
+            self.user_timestamp = helpers.parse_datetime(
+                user_delay, check_utc=True, epoch=True)
+            if self.user_timestamp is None:
+                log.warning('Received MAM message with '
+                            'invalid user timestamp: %s', user_delay)
+
+        log.debug('Received mam-gc-message: unique id: %s', self.unique_id)
+        return True
+
 
 class MamDecryptedMessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
     name = 'mam-decrypted-message-received'
@@ -1156,6 +1211,9 @@ class MamDecryptedMessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
         self.get_oob_data(self.msg_)
 
+        if self.groupchat:
+            return True
+
         self.is_pm = app.logger.jid_is_room_jid(self.with_.getStripped())
         if self.is_pm is None:
             # Check if this event is triggered after a disco, so we dont
@@ -1226,7 +1284,9 @@ class MessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
             # Check groupchat messages for duplicates,
             # We do this because of MUC History messages
             if self.stanza.getType() == 'groupchat':
-                if app.logger.find_stanza_id(self.unique_id):
+                if app.logger.find_stanza_id(self.stanza.getFrom().getStripped(),
+                                             self.unique_id,
+                                             groupchat=True):
                     return
 
         address_tag = self.stanza.getTag('addresses',
@@ -1285,9 +1345,14 @@ class MessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
             self.forwarded = True
 
             result = self.stanza.getTag('result', protocol=True)
-            if result and result.getNamespace() in (nbxmpp.NS_MAM,
-                                                    nbxmpp.NS_MAM_1,
+            if result and result.getNamespace() in (nbxmpp.NS_MAM_1,
                                                     nbxmpp.NS_MAM_2):
+
+                if result.getAttr('queryid') not in self.conn.mam_query_ids:
+                    log.warning('Invalid MAM Message: unknown query id')
+                    log.debug(self.stanza)
+                    return
+
                 forwarded = result.getTag('forwarded',
                                           namespace=nbxmpp.NS_FORWARD,
                                           protocol=True)
@@ -1300,8 +1365,7 @@ class MessageReceivedEvent(nec.NetworkIncomingEvent, HelperEvent):
                     conn=self.conn,
                     stanza=self.stanza,
                     forwarded=forwarded,
-                    result=result,
-                    stanza_id=self.unique_id))
+                    result=result))
                 return
 
         # Mediated invitation?
@@ -1799,6 +1863,27 @@ class ArchivingErrorReceivedEvent(nec.NetworkIncomingEvent):
         self.error_msg = self.stanza.getErrorMsg()
         return True
 
+
+class ArchivingCountReceived(nec.NetworkIncomingEvent):
+    name = 'archiving-count-received'
+    base_network_events = []
+
+    def generate(self):
+        return True
+
+
+class ArchivingIntervalFinished(nec.NetworkIncomingEvent):
+    name = 'archiving-interval-finished'
+    base_network_events = []
+
+    def generate(self):
+        return True
+
+
+class ArchivingQueryID(nec.NetworkIncomingEvent):
+    name = 'archiving-query-id'
+    base_network_events = []
+
+    def generate(self):
+        return True
+
 class Archiving313PreferencesChangedReceivedEvent(nec.NetworkIncomingEvent):
     name = 'archiving-313-preferences-changed-received'
     base_network_events = ['archiving-received']
@@ -1826,43 +1911,6 @@ class Archiving313PreferencesChangedReceivedEvent(nec.NetworkIncomingEvent):
         return True
 
-class ArchivingFinishedReceivedEvent(nec.NetworkIncomingEvent):
-    name = 'archiving-finished'
-    base_network_events = ['archiving-received']
-
-    def generate(self):
-        self.conn = self.base_event.conn
-        self.stanza = self.base_event.stanza
-        self.type_ = self.base_event.type_
-
-        self.fin = self.stanza.getTag('fin')
-        if self.type_ != 'result' or not self.fin:
-            return
-
-        self.query_id = self.fin.getAttr('queryid')
-        if not self.query_id:
-            return
-
-        return True
-
-class ArchivingFinishedLegacyReceivedEvent(nec.NetworkIncomingEvent):
-    name = 'archiving-finished-legacy'
-    base_network_events = ['raw-message-received']
-
-    def generate(self):
-        self.conn = self.base_event.conn
-        self.stanza = self.base_event.stanza
-
-        self.fin = self.stanza.getTag('fin', namespace=nbxmpp.NS_MAM)
-        if not self.fin:
-            return
-
-        self.query_id = self.fin.getAttr('queryid')
-        if not self.query_id:
-            return
-
-        return True
-
 class AccountCreatedEvent(nec.NetworkIncomingEvent):
     name = 'account-created'
     base_network_events = []

View file

@@ -36,6 +36,10 @@ class AvatarSize(IntEnum):
     TOOLTIP = 125
     VCARD = 200
 
+
+class ArchiveState(IntEnum):
+    NEVER = 0
+    ALL = 1
+
 THANKS = u"""\
 Alexander Futász

View file

@@ -282,6 +282,10 @@ class Logger:
                 return [user['jid'] for user in family]
         return [jid]
 
+    def get_account_id(self, account):
+        jid = app.get_jid_from_account(account)
+        return self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)
+
     def get_jid_id(self, jid, kind=None, type_=None):
         """
         Get the jid id from a jid.
@@ -1084,12 +1088,19 @@ class Logger:
                 return True
         return False
 
-    def find_stanza_id(self, stanza_id, origin_id=None):
+    def find_stanza_id(self, archive_jid, stanza_id, origin_id=None,
+                       groupchat=False):
         """
         Checks if a stanza-id is already in the `logs` table
 
+        :param archive_jid: The jid of the archive the stanza-id belongs to
         :param stanza_id: The stanza-id
+        :param origin_id: The origin-id
+        :param groupchat: stanza-id is from a groupchat
 
         return True if the stanza-id was found
         """
         ids = []
@@ -1101,12 +1112,19 @@ class Logger:
         if not ids:
             return False
 
+        archive_id = self.get_jid_id(archive_jid)
+
+        if groupchat:
+            column = 'jid_id'
+        else:
+            column = 'account_id'
+
         sql = '''
             SELECT stanza_id FROM logs
-            WHERE stanza_id IN ({values}) LIMIT 1
-            '''.format(values=', '.join('?' * len(ids)))
+            WHERE stanza_id IN ({values}) AND {archive} = ? LIMIT 1
+            '''.format(values=', '.join('?' * len(ids)),
+                       archive=column)
 
-        result = self.con.execute(sql, tuple(ids)).fetchone()
+        result = self.con.execute(sql, tuple(ids) + (archive_id,)).fetchone()
 
         if result is not None:
             log.info('Found duplicated message, stanza-id: %s, origin-id: %s',
@@ -1127,7 +1145,8 @@ class Logger:
         """
         return self.get_jid_id(jid, kind, type_)
 
-    def insert_into_logs(self, jid, time_, kind, unread=True, **kwargs):
+    def insert_into_logs(self, account, jid, time_, kind,
+                         unread=True, **kwargs):
         """
         Insert a new message into the `logs` table
@@ -1144,20 +1163,22 @@ class Logger:
            a field in the `logs` table
         """
         jid_id = self.get_jid_id(jid, kind=kind)
+        account_id = self.get_account_id(account)
 
         if 'additional_data' in kwargs:
             if not kwargs['additional_data']:
                 del kwargs['additional_data']
             else:
                 kwargs['additional_data'] = json.dumps(kwargs["additional_data"])
 
         sql = '''
-              INSERT INTO logs (jid_id, time, kind, {columns})
-              VALUES (?, ?, ?, {values})
+              INSERT INTO logs (account_id, jid_id, time, kind, {columns})
+              VALUES (?, ?, ?, ?, {values})
               '''.format(columns=', '.join(kwargs.keys()),
                          values=', '.join('?' * len(kwargs)))
 
-        lastrowid = self.con.execute(sql, (jid_id, time_, kind) + tuple(kwargs.values())).lastrowid
+        lastrowid = self.con.execute(
+            sql, (account_id, jid_id, time_, kind) + tuple(kwargs.values())).lastrowid
 
         log.info('Insert into DB: jid: %s, time: %s, kind: %s, stanza_id: %s',
                  jid, time_, kind, kwargs.get('stanza_id', None))
@@ -1192,3 +1213,45 @@ class Logger:
             '''
         self.con.execute(sql, (sha, account_jid_id, jid_id))
         self._timeout_commit()
+
+    def get_archive_timestamp(self, jid, type_=None):
+        """
+        Get the last archive id/timestamp for a jid
+
+        :param jid: The jid that belongs to the avatar
+        """
+        jid_id = self.get_jid_id(jid, type_=type_)
+        sql = '''SELECT * FROM last_archive_message WHERE jid_id = ?'''
+        return self.con.execute(sql, (jid_id,)).fetchone()
+
+    def set_archive_timestamp(self, jid, **kwargs):
+        """
+        Set the last archive id/timestamp
+
+        :param jid: The jid that belongs to the avatar
+        :param last_mam_id: The last MAM result id
+        :param oldest_mam_timestamp: The oldest date we requested MAM
+                                     history for
+        :param last_muc_timestamp: The timestamp of the last message we
+                                   received in a MUC
+        """
+        jid_id = self.get_jid_id(jid)
+        exists = self.get_archive_timestamp(jid)
+        if not exists:
+            sql = '''INSERT INTO last_archive_message VALUES (?, ?, ?, ?)'''
+            self.con.execute(sql, (jid_id,
+                                   kwargs.get('last_mam_id', None),
+                                   kwargs.get('oldest_mam_timestamp', None),
+                                   kwargs.get('last_muc_timestamp', None)))
+        else:
+            args = ' = ?, '.join(kwargs.keys()) + ' = ?'
+            sql = '''UPDATE last_archive_message SET {}
+                     WHERE jid_id = ?'''.format(args)
+            self.con.execute(sql, tuple(kwargs.values()) + (jid_id,))
+        log.info('Save archive timestamps: %s', kwargs)
+        self._timeout_commit()

View file

@@ -26,6 +26,7 @@ import nbxmpp
 from gajim.common import app
 from gajim.common import ged
 from gajim.common.logger import KindConstant, JIDConstant
+from gajim.common.const import ArchiveState
 import gajim.common.connection_handlers_events as ev
 
 log = logging.getLogger('gajim.c.message_archiving')
@@ -36,13 +37,9 @@ class ConnectionArchive313:
         self.archiving_313_supported = False
         self.mam_awaiting_disco_result = {}
         self.iq_answer = []
-        self.mam_query_date = None
-        self.mam_query_id = None
+        self.mam_query_ids = []
         app.nec.register_incoming_event(ev.MamMessageReceivedEvent)
-        app.ged.register_event_handler('archiving-finished-legacy', ged.CORE,
-                                       self._nec_result_finished)
-        app.ged.register_event_handler('archiving-finished', ged.CORE,
-                                       self._nec_result_finished)
+        app.nec.register_incoming_event(ev.MamGcMessageReceivedEvent)
         app.ged.register_event_handler('agent-info-error-received', ged.CORE,
                                        self._nec_agent_info_error)
         app.ged.register_event_handler('agent-info-received', ged.CORE,
@@ -54,10 +51,6 @@ class ConnectionArchive313:
             self._nec_archiving_313_preferences_changed_received)
 
     def cleanup(self):
-        app.ged.remove_event_handler('archiving-finished-legacy', ged.CORE,
-                                     self._nec_result_finished)
-        app.ged.remove_event_handler('archiving-finished', ged.CORE,
-                                     self._nec_result_finished)
         app.ged.remove_event_handler('agent-info-error-received', ged.CORE,
                                      self._nec_agent_info_error)
         app.ged.remove_event_handler('agent-info-received', ged.CORE,
@@ -101,28 +94,111 @@ class ConnectionArchive313:
                 None, disco=True, **vars(msg_obj)))
             del self.mam_awaiting_disco_result[obj.jid]
 
-    def _nec_result_finished(self, obj):
-        if obj.conn.name != self.name:
-            return
-
-        if obj.query_id != self.mam_query_id:
-            return
-
-        set_ = obj.fin.getTag('set', namespace=nbxmpp.NS_RSM)
-        if set_:
-            last = set_.getTagData('last')
-            complete = obj.fin.getAttr('complete')
-            if last:
-                app.config.set_per('accounts', self.name, 'last_mam_id', last)
-                if complete != 'true':
-                    self.request_archive(self.get_query_id(), after=last)
-            if complete == 'true':
-                self.mam_query_id = None
-                if self.mam_query_date:
-                    app.config.set_per(
-                        'accounts', self.name,
-                        'mam_start_date', self.mam_query_date.timestamp())
-                    self.mam_query_date = None
+    @staticmethod
+    def parse_iq(stanza, query_id):
+        if not nbxmpp.isResultNode(stanza):
+            log.error('Error on MAM query: %s', stanza.getError())
+            raise InvalidMamIQ
+
+        fin = stanza.getTag('fin')
+        if fin is None:
+            log.error('Malformed MAM query result received: %s', stanza)
+            raise InvalidMamIQ
+
+        if fin.getAttr('queryid') != query_id:
+            log.error('Result with unknown query id received')
+            raise InvalidMamIQ
+
+        set_ = fin.getTag('set', namespace=nbxmpp.NS_RSM)
+        if set_ is None:
+            log.error(
+                'Malformed MAM query result received (no "set" Node): %s',
+                stanza)
+            raise InvalidMamIQ
+        return fin, set_
+
+    def parse_from_jid(self, stanza):
+        jid = stanza.getFrom()
+        if jid is None:
+            # No from means, iq from our own archive
+            jid = self.get_own_jid().getStripped()
+        else:
+            jid = jid.getStripped()
+        return jid
+
+    def _result_finished(self, conn, stanza, query_id, start_date, groupchat):
+        try:
+            fin, set_ = self.parse_iq(stanza, query_id)
+        except InvalidMamIQ:
+            return
+
+        last = set_.getTagData('last')
+        if last is None:
+            log.info('End of MAM query, no items retrieved')
+            return
+
+        jid = self.parse_from_jid(stanza)
+        complete = fin.getAttr('complete')
+        app.logger.set_archive_timestamp(jid, last_mam_id=last)
+        if complete != 'true':
+            self.mam_query_ids.remove(query_id)
+            query_id = self.get_query_id()
+            query = self.get_archive_query(query_id, jid=jid, after=last)
+            self._send_archive_query(query, query_id, groupchat=groupchat)
+        else:
+            self.mam_query_ids.remove(query_id)
+            if start_date is not None:
+                app.logger.set_archive_timestamp(
+                    jid,
+                    last_mam_id=last,
+                    oldest_mam_timestamp=start_date.timestamp())
+            log.info('End of MAM query, last mam id: %s', last)
+
+    def _intervall_result_finished(self, conn, stanza, query_id,
+                                   start_date, end_date, event_id):
+        try:
+            fin, set_ = self.parse_iq(stanza, query_id)
+        except InvalidMamIQ:
+            return
+
+        self.mam_query_ids.remove(query_id)
+        jid = self.parse_from_jid(stanza)
+        if start_date:
+            timestamp = start_date.timestamp()
+        else:
+            timestamp = ArchiveState.ALL
+
+        last = set_.getTagData('last')
+        if last is None:
+            app.nec.push_incoming_event(ev.ArchivingIntervalFinished(
+                None, event_id=event_id))
+            app.logger.set_archive_timestamp(
+                jid, oldest_mam_timestamp=timestamp)
+            log.info('End of MAM query, no items retrieved')
+            return
+
+        complete = fin.getAttr('complete')
+        if complete != 'true':
+            self.request_archive_interval(event_id, start_date, end_date, last)
+        else:
+            log.info('query finished')
+            app.logger.set_archive_timestamp(
+                jid, oldest_mam_timestamp=timestamp)
+            app.nec.push_incoming_event(ev.ArchivingIntervalFinished(
+                None, event_id=event_id, stanza=stanza))
+
+    def _received_count(self, conn, stanza, query_id, event_id):
+        try:
+            _, set_ = self.parse_iq(stanza, query_id)
+        except InvalidMamIQ:
+            return
+
+        self.mam_query_ids.remove(query_id)
+
+        count = set_.getTagData('count')
+        log.info('message count received: %s', count)
+        app.nec.push_incoming_event(ev.ArchivingCountReceived(
+            None, event_id=event_id, count=count))
 
     def _nec_mam_decrypted_message_received(self, obj):
         if obj.conn.name != self.name:
@@ -132,33 +208,96 @@ class ConnectionArchive313:
         duplicate = app.logger.search_for_duplicate(
             obj.with_, obj.timestamp, obj.msgtxt)
         if duplicate:
-            return
-        app.logger.insert_into_logs(
-            obj.with_, obj.timestamp, obj.kind,
-            unread=False,
-            message=obj.msgtxt,
-            additional_data=obj.additional_data,
-            stanza_id=obj.unique_id)
+            # dont propagate the event further
+            return True
+        app.logger.insert_into_logs(self.name,
+                                    obj.with_,
+                                    obj.timestamp,
+                                    obj.kind,
+                                    unread=False,
+                                    message=obj.msgtxt,
+                                    contact_name=obj.nick,
+                                    additional_data=obj.additional_data,
+                                    stanza_id=obj.unique_id)
 
     def get_query_id(self):
-        self.mam_query_id = self.connection.getAnID()
-        return self.mam_query_id
+        query_id = self.connection.getAnID()
+        self.mam_query_ids.append(query_id)
+        return query_id
 
     def request_archive_on_signin(self):
-        mam_id = app.config.get_per('accounts', self.name, 'last_mam_id')
+        own_jid = self.get_own_jid().getStripped()
+        archive = app.logger.get_archive_timestamp(own_jid)
+
+        # Migration of last_mam_id from config to DB
+        if archive is not None:
+            mam_id = archive.last_mam_id
+        else:
+            mam_id = app.config.get_per('accounts', self.name, 'last_mam_id')
+
+        start_date = None
         query_id = self.get_query_id()
         if mam_id:
-            self.request_archive(query_id, after=mam_id)
+            log.info('MAM query after: %s', mam_id)
+            query = self.get_archive_query(query_id, after=mam_id)
         else:
             # First Start, we request the last week
-            self.mam_query_date = datetime.utcnow() - timedelta(days=7)
-            log.info('First start: query archive start: %s', self.mam_query_date)
-            self.request_archive(query_id, start=self.mam_query_date)
-
-    def request_archive(self, query_id, start=None, end=None, with_=None,
-                        after=None, max_=30):
+            start_date = datetime.utcnow() - timedelta(days=7)
+            log.info('First start: query archive start: %s', start_date)
+            query = self.get_archive_query(query_id, start=start_date)
+        self._send_archive_query(query, query_id, start_date)
+
+    def request_archive_on_muc_join(self, jid):
+        archive = app.logger.get_archive_timestamp(
+            jid, type_=JIDConstant.ROOM_TYPE)
+        query_id = self.get_query_id()
+        start_date = None
+        if archive is not None:
+            log.info('Query Groupchat MAM Archive %s after %s:',
+                     jid, archive.last_mam_id)
+            query = self.get_archive_query(
+                query_id, jid=jid, after=archive.last_mam_id)
+        else:
+            # First Start, we dont request history
+            # Depending on what a MUC saves, there could be thousands
+            # of Messages even in just one day.
+            start_date = datetime.utcnow() - timedelta(days=1)
+            log.info('First join: query archive %s from: %s', jid, start_date)
+            query = self.get_archive_query(query_id, jid=jid, start=start_date)
+        self._send_archive_query(query, query_id, start_date, groupchat=True)
+
+    def request_archive_count(self, event_id, start_date, end_date):
+        query_id = self.get_query_id()
+        query = self.get_archive_query(
+            query_id, start=start_date, end=end_date, max_=0)
+        self.connection.SendAndCallForResponse(
+            query, self._received_count, {'query_id': query_id,
+                                          'event_id': event_id})
+
+    def request_archive_interval(self, event_id, start_date,
+                                 end_date, after=None):
+        query_id = self.get_query_id()
+        query = self.get_archive_query(query_id, start=start_date,
+                                       end=end_date, after=after, max_=30)
+        app.nec.push_incoming_event(ev.ArchivingQueryID(
+            None, event_id=event_id, query_id=query_id))
+        self.connection.SendAndCallForResponse(
+            query, self._intervall_result_finished, {'query_id': query_id,
+                                                     'start_date': start_date,
+                                                     'end_date': end_date,
+                                                     'event_id': event_id})
+
+    def _send_archive_query(self, query, query_id, start_date=None,
+                            groupchat=False):
+        self.connection.SendAndCallForResponse(
+            query, self._result_finished, {'query_id': query_id,
+                                           'start_date': start_date,
+                                           'groupchat': groupchat})
+
+    def get_archive_query(self, query_id, jid=None, start=None, end=None, with_=None,
+                          after=None, max_=30):
         namespace = self.archiving_namespace
-        iq = nbxmpp.Iq('set')
+        iq = nbxmpp.Iq('set', to=jid)
         query = iq.addChild('query', namespace=namespace)
         form = query.addChild(node=nbxmpp.DataForm(typ='submit'))
         field = nbxmpp.DataField(typ='hidden',
@@ -184,9 +323,7 @@ class ConnectionArchive313:
         if after:
             set_.setTagData('after', after)
         query.setAttr('queryid', query_id)
-        id_ = self.connection.getAnID()
-        iq.setID(id_)
-        self.connection.send(iq)
+        return iq
 
     def request_archive_preferences(self):
         if not app.account_is_connected(self.name):
@@ -219,3 +356,7 @@ class ConnectionArchive313:
         app.nec.push_incoming_event(ev.ArchivingReceivedEvent(None, conn=self,
             stanza=iq_obj))
         raise nbxmpp.NodeProcessed
+
+
+class InvalidMamIQ(Exception):
+    pass

View file

@@ -242,6 +242,8 @@ class OptionsParser:
             self.update_config_to_016105()
         if old < [0, 16, 11, 1] and new >= [0, 16, 11, 1]:
             self.update_config_to_016111()
+        if old < [0, 16, 11, 2] and new >= [0, 16, 11, 2]:
+            self.update_config_to_016112()
 
         app.logger.init_vars()
         app.logger.attach_cache_database()
@@ -1029,3 +1031,24 @@ class OptionsParser:
             log.exception('Error')
         con.close()
         app.config.set('version', '0.16.11.1')
+
+    def update_config_to_016112(self):
+        con = sqlite.connect(logger.LOG_DB_PATH)
+        cur = con.cursor()
+        try:
+            cur.executescript(
+                '''
+                CREATE TABLE IF NOT EXISTS last_archive_message(
+                    jid_id INTEGER PRIMARY KEY UNIQUE,
+                    last_mam_id TEXT,
+                    oldest_mam_timestamp TEXT,
+                    last_muc_timestamp TEXT
+                );
+                ALTER TABLE logs ADD COLUMN 'account_id' INTEGER;
+                '''
+            )
+            con.commit()
+        except sqlite.OperationalError:
+            log.exception('Error')
+        con.close()
+        app.config.set('version', '0.16.11.2')

View file

@@ -47,6 +47,7 @@ from gajim import vcard
 from gajim import cell_renderer_image
 from gajim import dataforms_widget
 from gajim.common.const import AvatarSize
+from gajim.common.caps_cache import muc_caps_cache
 
 import nbxmpp
 from enum import IntEnum, unique
@@ -478,6 +479,8 @@ class GroupchatControl(ChatControlBase):
             self._nec_gc_presence_received)
         app.ged.register_event_handler('gc-message-received', ged.GUI1,
             self._nec_gc_message_received)
+        app.ged.register_event_handler('mam-decrypted-message-received',
+            ged.GUI1, self._nec_mam_decrypted_message_received)
         app.ged.register_event_handler('vcard-published', ged.GUI1,
             self._nec_vcard_published)
         app.ged.register_event_handler('update-gc-avatar', ged.GUI1,
@@ -1053,6 +1056,17 @@ class GroupchatControl(ChatControlBase):
                 obj.contact.name, obj.contact.avatar_sha)
             self.draw_avatar(obj.contact)
 
+    def _nec_mam_decrypted_message_received(self, obj):
+        if not obj.groupchat:
+            return
+        if obj.room_jid != self.room_jid:
+            return
+        self.print_conversation(
+            obj.msgtxt, contact=obj.nick,
+            tim=obj.timestamp, encrypted=obj.encrypted,
+            msg_stanza_id=obj.unique_id,
+            additional_data=obj.additional_data)
+
     def _nec_gc_message_received(self, obj):
         if obj.room_jid != self.room_jid or obj.conn.name != self.account:
             return
@@ -1455,6 +1469,11 @@ class GroupchatControl(ChatControlBase):
             GLib.source_remove(self.autorejoin)
         self.autorejoin = None
 
+        if muc_caps_cache.has_mam(self.room_jid):
+            # Request MAM
+            app.connections[self.account].request_archive_on_muc_join(
+                self.room_jid)
+
         app.gc_connected[self.account][self.room_jid] = True
         ChatControlBase.got_connected(self)
         self.list_treeview.set_model(self.model)

View file

@@ -19,7 +19,7 @@
 import logging
 from enum import IntEnum
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta
 
 import nbxmpp
 from gi.repository import Gtk, GLib
@@ -27,17 +27,16 @@ from gi.repository import Gtk, GLib
 from gajim.common import app
 from gajim.common import ged
 from gajim.gtkgui_helpers import get_icon_pixmap
+from gajim.common.const import ArchiveState
 
 log = logging.getLogger('gajim.c.message_archiving')
 
 class Pages(IntEnum):
     TIME = 0
     SYNC = 1
     SUMMARY = 2
 
-class ArchiveState(IntEnum):
-    NEVER = 0
-    ALL = 1
 
 class HistorySyncAssistant(Gtk.Assistant):
     def __init__(self, account, parent):
@@ -52,13 +51,22 @@ class HistorySyncAssistant(Gtk.Assistant):
         self.timedelta = None
         self.now = datetime.utcnow()
         self.query_id = None
-        self.count_query_id = None
         self.start = None
         self.end = None
         self.next = None
         self.hide_buttons()
+        self.event_id = id(self)
+
+        own_jid = self.con.get_own_jid().getStripped()
+
+        archive = app.logger.get_archive_timestamp(own_jid)
+        if archive is not None:
+            mam_start = float(archive.oldest_mam_timestamp)
+        else:
+            # Migration from old config value
+            mam_start = app.config.get_per(
+                'accounts', account, 'mam_start_date')
 
-        mam_start = app.config.get_per('accounts', account, 'mam_start_date')
         if not mam_start or mam_start == ArchiveState.NEVER:
             self.current_start = self.now
         elif mam_start == ArchiveState.ALL:
@@ -72,7 +80,8 @@
         self.download_history = DownloadHistoryPage(self)
         self.append_page(self.download_history)
-        self.set_page_type(self.download_history, Gtk.AssistantPageType.PROGRESS)
+        self.set_page_type(self.download_history,
+                           Gtk.AssistantPageType.PROGRESS)
         self.set_page_complete(self.download_history, True)
 
         self.summary = SummaryPage(self)
@@ -80,12 +89,18 @@
         self.set_page_type(self.summary, Gtk.AssistantPageType.SUMMARY)
         self.set_page_complete(self.summary, True)
 
-        app.ged.register_event_handler('archiving-finished',
-                                       ged.PRECORE,
-                                       self._nec_archiving_finished)
+        app.ged.register_event_handler('archiving-count-received',
+                                       ged.GUI1,
+                                       self._received_count)
+        app.ged.register_event_handler('archiving-query-id',
+                                       ged.GUI1,
+                                       self._new_query_id)
+        app.ged.register_event_handler('archiving-interval-finished',
+                                       ged.GUI1,
+                                       self._received_finished)
         app.ged.register_event_handler('raw-mam-message-received',
                                        ged.PRECORE,
                                        self._nec_mam_message_received)
 
         self.connect('prepare', self.on_page_change)
         self.connect('destroy', self.on_destroy)
@@ -96,9 +111,9 @@
             self.set_current_page(Pages.SUMMARY)
             self.summary.nothing_to_do()
 
-        if self.con.mam_query_id:
-            self.set_current_page(Pages.SUMMARY)
-            self.summary.query_already_running()
+        # if self.con.mam_query_ids:
+        #     self.set_current_page(Pages.SUMMARY)
+        #     self.summary.query_already_running()
 
         self.show_all()
@@ -129,27 +144,42 @@
             self.start = self.now - self.timedelta
         self.end = self.current_start
 
-        log.info('config: get mam_start_date: %s', self.current_start)
+        log.info('get mam_start_date: %s', self.current_start)
         log.info('now: %s', self.now)
         log.info('start: %s', self.start)
         log.info('end: %s', self.end)
 
-        self.query_count()
-
-    def query_count(self):
-        self.count_query_id = self.con.connection.getAnID()
-        self.con.request_archive(self.count_query_id,
-                                 start=self.start,
-                                 end=self.end,
-                                 max_=0)
-
-    def query_messages(self, last=None):
-        self.query_id = self.con.connection.getAnID()
-        self.con.request_archive(self.query_id,
-                                 start=self.start,
-                                 end=self.end,
-                                 after=last,
-                                 max_=30)
+        self.con.request_archive_count(self.event_id, self.start, self.end)
+
+    def _received_count(self, event):
+        if event.event_id != self.event_id:
+            return
+        if event.count is not None:
+            self.download_history.count = int(event.count)
+        self.con.request_archive_interval(self.event_id, self.start, self.end)
+
+    def _received_finished(self, event):
+        if event.event_id != self.event_id:
+            return
+        log.info('query finished')
+        GLib.idle_add(self.download_history.finished)
+        self.set_current_page(Pages.SUMMARY)
+        self.summary.finished()
+
+    def _new_query_id(self, event):
+        if event.event_id != self.event_id:
+            return
+        self.query_id = event.query_id
+
+    def _nec_mam_message_received(self, obj):
+        if obj.conn.name != self.account:
+            return
+        if obj.result.getAttr('queryid') != self.query_id:
+            return
+        log.debug('received message')
+        GLib.idle_add(self.download_history.set_fraction)
 
     def on_row_selected(self, listbox, row):
         self.timedelta = row.get_child().get_delta()
@@ -164,71 +194,23 @@
             self.prepare_query()
 
     def on_destroy(self, *args):
-        app.ged.remove_event_handler('archiving-finished',
-                                     ged.PRECORE,
-                                     self._nec_archiving_finished)
+        app.ged.remove_event_handler('archiving-count-received',
+                                     ged.GUI1,
+                                     self._received_count)
+        app.ged.remove_event_handler('archiving-query-id',
+                                     ged.GUI1,
+                                     self._new_query_id)
+        app.ged.remove_event_handler('archiving-interval-finished',
+                                     ged.GUI1,
+                                     self._received_finished)
         app.ged.remove_event_handler('raw-mam-message-received',
                                      ged.PRECORE,
                                      self._nec_mam_message_received)
         del app.interface.instances[self.account]['history_sync']
 
     def on_close_clicked(self, *args):
         self.destroy()
 
-    def _nec_mam_message_received(self, obj):
-        if obj.conn.name != self.account:
-            return
-
-        if obj.result.getAttr('queryid') != self.query_id:
-            return
-
-        log.debug('received message')
-        GLib.idle_add(self.download_history.set_fraction)
-
-    def _nec_archiving_finished(self, obj):
-        if obj.conn.name != self.account:
-            return
-
-        if obj.query_id not in (self.query_id, self.count_query_id):
-            return
-
-        set_ = obj.fin.getTag('set', namespace=nbxmpp.NS_RSM)
-        if not set_:
-            log.error('invalid result')
-            log.error(obj.fin)
-            return
-
-        if obj.query_id == self.count_query_id:
-            count = set_.getTagData('count')
-            log.info('message count received: %s', count)
-            if count:
-                self.download_history.count = int(count)
-            self.query_messages()
-            return
-
-        if obj.query_id == self.query_id:
-            last = set_.getTagData('last')
-            complete = obj.fin.getAttr('complete')
-            if not last and complete != 'true':
-                log.error('invalid result')
-                log.error(obj.fin)
-                return
-
-            if complete != 'true':
-                self.query_messages(last)
-            else:
-                log.info('query finished')
-                GLib.idle_add(self.download_history.finished)
-                if self.start:
-                    timestamp = self.start.timestamp()
-                else:
-                    timestamp = ArchiveState.ALL
-                app.config.set_per('accounts', self.account,
-                                   'mam_start_date', timestamp)
-                log.debug('config: set mam_start_date: %s', timestamp)
-                self.set_current_page(Pages.SUMMARY)
-                self.summary.finished()
 
 class SelectTimePage(Gtk.Box):
     def __init__(self, assistant):
@@ -257,6 +239,7 @@ class SelectTimePage(Gtk.Box):
         self.pack_start(label, True, True, 0)
         self.pack_start(listbox, False, False, 0)
 
+
 class DownloadHistoryPage(Gtk.Box):
     def __init__(self, assistant):
         super().__init__(orientation=Gtk.Orientation.VERTICAL)
@@ -292,6 +275,7 @@ class DownloadHistoryPage(Gtk.Box):
     def finished(self):
         self.progress.set_fraction(1)
 
+
 class SummaryPage(Gtk.Box):
     def __init__(self, assistant):
         super().__init__(orientation=Gtk.Orientation.VERTICAL)
@@ -307,25 +291,26 @@ class SummaryPage(Gtk.Box):
     def finished(self):
         received = self.assistant.download_history.received
         finished = _('''
        Finshed synchronising your History.
        {received} Messages downloaded.
        '''.format(received=received))
         self.label.set_text(finished)
 
     def nothing_to_do(self):
         nothing_to_do = _('''
        Gajim is fully synchronised
        with the Archive.
        ''')
         self.label.set_text(nothing_to_do)
 
     def query_already_running(self):
         already_running = _('''
        There is already a synchronisation in
        progress. Please try later.
        ''')
         self.label.set_text(already_running)
 
+
 class TimeOption(Gtk.Label):
     def __init__(self, label, months=None):
         super().__init__(label=label)

View file

@@ -117,7 +117,7 @@ class ChatControlSession(stanza_session.EncryptedStanzaSession):
         jid = obj.jid
 
         obj.msg_log_id = app.logger.insert_into_logs(
-            jid, obj.timestamp, log_type,
+            self.conn.name, jid, obj.timestamp, log_type,
             message=msg_to_log,
             subject=obj.subject,
             additional_data=obj.additional_data,