# -*- coding:utf-8 -*-
## src/common/message_archiving.py
##
## Copyright (C) 2009 Anaël Verrier <elghinn AT free.fr>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##

import logging
from datetime import datetime, timedelta

import nbxmpp

from gajim.common import app
from gajim.common import ged
from gajim.common import helpers
from gajim.common.const import ArchiveState, JIDConstant
from gajim.common.caps_cache import muc_caps_cache
import gajim.common.connection_handlers_events as ev

log = logging.getLogger('gajim.c.message_archiving')


class ConnectionArchive313:
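    """Message Archive Management (XEP-0313) support for a connection.

    Written as a mixin: it expects the hosting connection class to
    provide self.name, self.connection, self.get_own_jid() and
    self.archiving_namespace.
    """
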
    def __init__(self):
        self.archiving_313_supported = False
        self.mam_awaiting_disco_result = {}
        self.iq_answer = []
        self.mam_query_ids = []
        app.nec.register_incoming_event(ev.MamMessageReceivedEvent)
        app.nec.register_incoming_event(ev.MamGcMessageReceivedEvent)
        app.ged.register_event_handler('agent-info-error-received', ged.CORE,
            self._nec_agent_info_error)
        app.ged.register_event_handler('agent-info-received', ged.CORE,
            self._nec_agent_info)
        app.ged.register_event_handler('mam-decrypted-message-received',
            ged.CORE, self._nec_mam_decrypted_message_received)
        app.ged.register_event_handler(
            'archiving-313-preferences-changed-received', ged.CORE,
            self._nec_archiving_313_preferences_changed_received)

    def cleanup(self):
        app.ged.remove_event_handler('agent-info-error-received', ged.CORE,
            self._nec_agent_info_error)
        app.ged.remove_event_handler('agent-info-received', ged.CORE,
            self._nec_agent_info)
        app.ged.remove_event_handler('mam-decrypted-message-received',
            ged.CORE, self._nec_mam_decrypted_message_received)
        app.ged.remove_event_handler(
            'archiving-313-preferences-changed-received', ged.CORE,
            self._nec_archiving_313_preferences_changed_received)

    def _nec_archiving_313_preferences_changed_received(self, obj):
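        # Flag preference-change events that answer one of our own IQs
        # (their ids were remembered in set_archive_preferences)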
        if obj.id in self.iq_answer:
            obj.answer = True

    def _nec_agent_info_error(self, obj):
        if obj.jid in self.mam_awaiting_disco_result:
            log.warning('Unable to discover %s, ignoring its archived '
                        'messages', obj.jid)
            del self.mam_awaiting_disco_result[obj.jid]

    def _nec_agent_info(self, obj):
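        """Replay MAM messages that were queued until disco#info told us
        whether their source JID is a groupchat or a contact.
        """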
        if obj.jid not in self.mam_awaiting_disco_result:
            return

        for identity in obj.identities:
            if identity['category'] != 'conference':
                continue
            # It's a groupchat
            for msg_obj in self.mam_awaiting_disco_result[obj.jid]:
                app.logger.insert_jid(msg_obj.with_.getStripped(),
                                      type_=JIDConstant.ROOM_TYPE)
                app.nec.push_incoming_event(
                    ev.MamDecryptedMessageReceivedEvent(
                        None, disco=True, **vars(msg_obj)))
            del self.mam_awaiting_disco_result[obj.jid]
            return
        # It's not a groupchat
        for msg_obj in self.mam_awaiting_disco_result[obj.jid]:
            app.logger.insert_jid(msg_obj.with_.getStripped())
            app.nec.push_incoming_event(
                ev.MamDecryptedMessageReceivedEvent(
                    None, disco=True, **vars(msg_obj)))
        del self.mam_awaiting_disco_result[obj.jid]

    @staticmethod
    def parse_iq(stanza):
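        """Validate a MAM result IQ and return its 'fin' and RSM 'set'
        nodes, raising InvalidMamIQ on any malformed stanza.
        """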
        if not nbxmpp.isResultNode(stanza):
            log.error('Error on MAM query: %s', stanza.getError())
            raise InvalidMamIQ

        fin = stanza.getTag('fin')
        if fin is None:
            log.error('Malformed MAM query result received: %s', stanza)
            raise InvalidMamIQ

        set_ = fin.getTag('set', namespace=nbxmpp.NS_RSM)
        if set_ is None:
            log.error(
                'Malformed MAM query result received (no "set" Node): %s',
                stanza)
            raise InvalidMamIQ
        return fin, set_

    def parse_from_jid(self, stanza):
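        """Return the bare JID a result IQ came from; a missing 'from'
        attribute means it originates from our own archive.
        """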
        jid = stanza.getFrom()
        if jid is None:
            # No 'from' attribute means the IQ is from our own archive
            jid = self.get_own_jid().getStripped()
        else:
            jid = jid.getStripped()
        return jid

    def _result_finished(self, conn, stanza, query_id, start_date, groupchat):
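        """Handle one page of a catch-up query: remember the last mam id
        and keep paging via RSM until the server marks the result complete.
        """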
        try:
            fin, set_ = self.parse_iq(stanza)
        except InvalidMamIQ:
            return

        last = set_.getTagData('last')
        if last is None:
            log.info('End of MAM query, no items retrieved')
            return

        jid = self.parse_from_jid(stanza)
        complete = fin.getAttr('complete')
        app.logger.set_archive_timestamp(jid, last_mam_id=last)
        if complete != 'true':
            self.mam_query_ids.remove(query_id)
            query_id = self.get_query_id()
            query = self.get_archive_query(query_id, jid=jid, after=last)
            self._send_archive_query(query, query_id, groupchat=groupchat)
        else:
            self.mam_query_ids.remove(query_id)
            if start_date is not None:
                app.logger.set_archive_timestamp(
                    jid,
                    last_mam_id=last,
                    oldest_mam_timestamp=start_date.timestamp())
            log.info('End of MAM query, last mam id: %s', last)

    def _interval_result_finished(self, conn, stanza, query_id,
                                  start_date, end_date, event_id):
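        """Handle results of a user-requested interval query and record
        the oldest archive timestamp once the interval is exhausted.
        """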
        try:
            fin, set_ = self.parse_iq(stanza)
        except InvalidMamIQ:
            return

        self.mam_query_ids.remove(query_id)
        jid = self.parse_from_jid(stanza)
        if start_date:
            timestamp = start_date.timestamp()
        else:
            timestamp = ArchiveState.ALL

        last = set_.getTagData('last')
        if last is None:
            app.nec.push_incoming_event(ev.ArchivingIntervalFinished(
                None, event_id=event_id))
            app.logger.set_archive_timestamp(
                jid, oldest_mam_timestamp=timestamp)
            log.info('End of MAM query, no items retrieved')
            return

        complete = fin.getAttr('complete')
        if complete != 'true':
            self.request_archive_interval(event_id, start_date, end_date, last)
        else:
            log.info('Interval query finished')
            app.logger.set_archive_timestamp(
                jid, oldest_mam_timestamp=timestamp)
            app.nec.push_incoming_event(ev.ArchivingIntervalFinished(
                None, event_id=event_id, stanza=stanza))

    def _received_count(self, conn, stanza, query_id, event_id):
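        """Forward the message count of a count-only (max_=0) query."""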
        try:
            _, set_ = self.parse_iq(stanza)
        except InvalidMamIQ:
            return

        self.mam_query_ids.remove(query_id)

        count = set_.getTagData('count')
        log.info('Message count received: %s', count)
        app.nec.push_incoming_event(ev.ArchivingCountReceived(
            None, event_id=event_id, count=count))

    def _nec_mam_decrypted_message_received(self, obj):
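        """Store a decrypted MAM message, falling back to a text-based
        duplicate search when no trustworthy stanza-id is available.
        """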
        if obj.conn.name != self.name:
            return

        namespace = self.archiving_namespace
        blacklisted = False
        if obj.groupchat:
            namespace = muc_caps_cache.get_mam_namespace(obj.room_jid)
            blacklisted = obj.room_jid in helpers.get_mam_blacklist()

        if namespace != nbxmpp.NS_MAM_2 or blacklisted:
            # Fallback duplicate search without stanza-id
            duplicate = app.logger.search_for_duplicate(
                self.name, obj.with_, obj.timestamp, obj.msgtxt)
            if duplicate:
                # Don't propagate the event further
                return True

        app.logger.insert_into_logs(self.name,
                                    obj.with_,
                                    obj.timestamp,
                                    obj.kind,
                                    unread=False,
                                    message=obj.msgtxt,
                                    contact_name=obj.nick,
                                    additional_data=obj.additional_data,
                                    stanza_id=obj.unique_id)

    def get_query_id(self):
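        """Allocate a stanza id for a new MAM query and track it."""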
        query_id = self.connection.getAnID()
        self.mam_query_ids.append(query_id)
        return query_id

    def request_archive_on_signin(self):
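        """Catch up with our own archive: continue after the last known
        mam id, or fetch the last week on first start.
        """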
        own_jid = self.get_own_jid().getStripped()
        archive = app.logger.get_archive_timestamp(own_jid)

        # Migration of last_mam_id from config to DB
        if archive is not None:
            mam_id = archive.last_mam_id
        else:
            mam_id = app.config.get_per('accounts', self.name, 'last_mam_id')

        start_date = None
        query_id = self.get_query_id()
        if mam_id:
            log.info('MAM query after: %s', mam_id)
            query = self.get_archive_query(query_id, after=mam_id)
        else:
            # First start: request the last week
            start_date = datetime.utcnow() - timedelta(days=7)
            log.info('First start: query archive start: %s', start_date)
            query = self.get_archive_query(query_id, start=start_date)
        self._send_archive_query(query, query_id, start_date)

    def request_archive_on_muc_join(self, jid):
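        """Catch up with a groupchat archive, limited to one day of
        history on the first join.
        """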
        archive = app.logger.get_archive_timestamp(
            jid, type_=JIDConstant.ROOM_TYPE)
        query_id = self.get_query_id()
        start_date = None
        if archive is not None:
            log.info('Query groupchat MAM archive %s after %s',
                     jid, archive.last_mam_id)
            query = self.get_archive_query(
                query_id, jid=jid, after=archive.last_mam_id)
        else:
            # First join: don't request the whole history.
            # Depending on what a MUC stores, there could be thousands
            # of messages in just one day.
            start_date = datetime.utcnow() - timedelta(days=1)
            log.info('First join: query archive %s from: %s', jid, start_date)
            query = self.get_archive_query(query_id, jid=jid, start=start_date)
        self._send_archive_query(query, query_id, start_date, groupchat=True)

    def request_archive_count(self, event_id, start_date, end_date):
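        """Ask the server how many messages the given interval holds,
        without retrieving them (max_=0).
        """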
        query_id = self.get_query_id()
        query = self.get_archive_query(
            query_id, start=start_date, end=end_date, max_=0)
        self.connection.SendAndCallForResponse(
            query, self._received_count, {'query_id': query_id,
                                          'event_id': event_id})

    def request_archive_interval(self, event_id, start_date,
                                 end_date, after=None):
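        """Fetch one page (max_=30) of the given interval; paging
        continues from _interval_result_finished.
        """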
        query_id = self.get_query_id()
        query = self.get_archive_query(query_id, start=start_date,
                                       end=end_date, after=after, max_=30)
        app.nec.push_incoming_event(ev.ArchivingQueryID(
            None, event_id=event_id, query_id=query_id))
        self.connection.SendAndCallForResponse(
            query, self._interval_result_finished, {'query_id': query_id,
                                                    'start_date': start_date,
                                                    'end_date': end_date,
                                                    'event_id': event_id})

    def _send_archive_query(self, query, query_id, start_date=None,
                            groupchat=False):
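        """Send a catch-up query; _result_finished handles paging."""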
        self.connection.SendAndCallForResponse(
            query, self._result_finished, {'query_id': query_id,
                                           'start_date': start_date,
                                           'groupchat': groupchat})

    def get_archive_query(self, query_id, jid=None, start=None, end=None,
                          with_=None, after=None, max_=30):
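        """Build a XEP-0313 query IQ: a submit data form with the
        optional start/end/with filters plus an RSM 'set' for paging.
        """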
        # MUC archive query?
        namespace = muc_caps_cache.get_mam_namespace(jid)
        if namespace is None:
            # Query to our own archive
            namespace = self.archiving_namespace

        iq = nbxmpp.Iq('set', to=jid)
        query = iq.addChild('query', namespace=namespace)
        form = query.addChild(node=nbxmpp.DataForm(typ='submit'))
        field = nbxmpp.DataField(typ='hidden',
                                 name='FORM_TYPE',
                                 value=namespace)
        form.addChild(node=field)
        if start:
            field = nbxmpp.DataField(typ='text-single',
                                     name='start',
                                     value=start.strftime('%Y-%m-%dT%H:%M:%SZ'))
            form.addChild(node=field)
        if end:
            field = nbxmpp.DataField(typ='text-single',
                                     name='end',
                                     value=end.strftime('%Y-%m-%dT%H:%M:%SZ'))
            form.addChild(node=field)
        if with_:
            field = nbxmpp.DataField(typ='jid-single', name='with', value=with_)
            form.addChild(node=field)

        set_ = query.setTag('set', namespace=nbxmpp.NS_RSM)
        set_.setTagData('max', max_)
        if after:
            set_.setTagData('after', after)
        query.setAttr('queryid', query_id)
        return iq

    def request_archive_preferences(self):
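        """Request the current archiving preferences ('prefs' IQ)."""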
        if not app.account_is_connected(self.name):
            return
        iq = nbxmpp.Iq(typ='get')
        id_ = self.connection.getAnID()
        iq.setID(id_)
        iq.addChild(name='prefs', namespace=self.archiving_namespace)
        self.connection.send(iq)

    def set_archive_preferences(self, items, default):
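        """Publish per-JID 'always'/'never' archiving rules and the
        default preference; the IQ id is remembered so the answer can
        be recognized in the preferences-changed handler.
        """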
        if not app.account_is_connected(self.name):
            return
        iq = nbxmpp.Iq(typ='set')
        id_ = self.connection.getAnID()
        self.iq_answer.append(id_)
        iq.setID(id_)
        prefs = iq.addChild(name='prefs',
                            namespace=self.archiving_namespace,
                            attrs={'default': default})
        always = prefs.addChild(name='always')
        never = prefs.addChild(name='never')
        for item in items:
            jid, preference = item
            if preference == 'always':
                always.addChild(name='jid').setData(jid)
            else:
                never.addChild(name='jid').setData(jid)
        self.connection.send(iq)

    def _ArchiveCB(self, con, iq_obj):
        app.nec.push_incoming_event(ev.ArchivingReceivedEvent(None, conn=self,
            stanza=iq_obj))
        raise nbxmpp.NodeProcessed


class InvalidMamIQ(Exception):
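    """Raised by parse_iq when a MAM result IQ is missing or malformed."""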