# -*- coding:utf-8 -*-
## src/common/caps_cache.py
##
## Copyright (C) 2007 Tomasz Melcer <liori AT exroot.org>
##                    Travis Shirk <travis AT pobox.com>
## Copyright (C) 2007-2014 Yann Leboulanger <asterix AT lagaule.org>
## Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
##                    Jonathan Schleifer <js-gajim AT webkeks.org>
## Copyright (C) 2008-2009 Stephan Erb <steve-e AT h3c.de>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##

"""
|
|
|
|
Module containing all XEP-115 (Entity Capabilities) related classes
|
2009-10-25 21:57:20 +01:00
|
|
|
|
|
|
|
Basic Idea:
|
|
|
|
CapsCache caches features to hash relationships. The cache is queried
|
2009-11-26 11:21:43 +01:00
|
|
|
through ClientCaps objects which are hold by contact instances.
|
|
|
|
"""

import base64
import hashlib
from collections import namedtuple

import logging
log = logging.getLogger('gajim.c.caps_cache')

import nbxmpp
from nbxmpp import (NS_XHTML_IM, NS_ESESSION, NS_CHATSTATES,
    NS_JINGLE_ICE_UDP, NS_JINGLE_RTP_AUDIO, NS_JINGLE_RTP_VIDEO,
    NS_JINGLE_FILE_TRANSFER_5)

# Features for which we cannot safely assume that the other side supports them
FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_ESESSION,
    NS_JINGLE_ICE_UDP, NS_JINGLE_RTP_AUDIO, NS_JINGLE_RTP_VIDEO,
    NS_JINGLE_FILE_TRANSFER_5]

from gajim.common import app

# Query entry status codes
NEW = 0
QUERIED = 1
CACHED = 2  # got the answer
FAKED = 3   # allow NullClientCaps to behave as if it has a cached item

################################################################################
### Public API of this module
################################################################################

capscache = None
muc_caps_cache = None

def initialize(logger):
    """
    Initialize this module
    """
    global capscache
    global muc_caps_cache
    capscache = CapsCache(logger)
    muc_caps_cache = MucCapsCache()
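
# Typical startup flow (illustrative sketch; `logger` stands for Gajim's
# common logger object, which provides the *_caps_* methods used by
# CapsCache below):
#
#     initialize(logger)
#     capscache.initialize_from_db()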

def client_supports(client_caps, requested_feature):
    lookup_item = client_caps.get_cache_lookup_strategy()
    cache_item = lookup_item(capscache)

    supported_features = cache_item.features
    if requested_feature in supported_features:
        return True
    elif not supported_features and cache_item.status in (NEW, QUERIED, FAKED):
        # assume the feature is supported if we don't yet know what the
        # client is capable of
        return requested_feature not in FEATURE_BLACKLIST
    else:
        return False
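
# Example usage (illustrative sketch; `contact` stands for a hypothetical
# roster entry that keeps a ClientCaps instance, as Gajim's contact
# objects do):
#
#     if client_supports(contact.client_caps, NS_CHATSTATES):
#         pass  # safe to send chat state notifications to this resource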

def create_suitable_client_caps(node, caps_hash, hash_method, fjid=None):
    """
    Create and return a suitable ClientCaps object for the given node,
    caps_hash, hash_method combination
    """
    if not node or not caps_hash:
        if fjid:
            client_caps = NoClientCaps(fjid)
        else:
            # improper caps, ignore client capabilities.
            client_caps = NullClientCaps()
    elif not hash_method:
        client_caps = OldClientCaps(caps_hash, node)
    else:
        client_caps = ClientCaps(caps_hash, node, hash_method)
    return client_caps
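
# Dispatch at a glance (illustrative values; the hash and JID are made up):
#
#     create_suitable_client_caps('http://gajim.org', 'deadbeef', 'sha-1')
#     # -> ClientCaps (current, hashed XEP-0115 caps)
#     create_suitable_client_caps('http://gajim.org', '0.16', None)
#     # -> OldClientCaps (pre-v1.5 caps; the 'hash' is really a version string)
#     create_suitable_client_caps(None, None, None, 'user@example.org/res')
#     # -> NoClientCaps (client advertised no caps at all, keyed by full JID)
#     create_suitable_client_caps(None, None, None)
#     # -> NullClientCaps (improper caps, shared singleton)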

def compute_caps_hash(identities, features, dataforms=None, hash_method='sha-1'):
    """
    Compute the caps hash according to XEP-0115, v1.5

    dataforms are nbxmpp.DataForm objects, because common.dataforms does not
    allow several values without a field of type list-multi
    """
    if dataforms is None:
        dataforms = []

    def sort_identities_func(i1, i2):
        cat1 = i1['category']
        cat2 = i2['category']
        if cat1 < cat2:
            return -1
        if cat1 > cat2:
            return 1
        type1 = i1.get('type', '')
        type2 = i2.get('type', '')
        if type1 < type2:
            return -1
        if type1 > type2:
            return 1
        lang1 = i1.get('xml:lang', '')
        lang2 = i2.get('xml:lang', '')
        if lang1 < lang2:
            return -1
        if lang1 > lang2:
            return 1
        return 0

    def sort_dataforms_func(d1, d2):
        f1 = d1.getField('FORM_TYPE')
        f2 = d2.getField('FORM_TYPE')
        if f1 and f2 and (f1.getValue() < f2.getValue()):
            return -1
        return 1

    S = ''
    from functools import cmp_to_key
    identities.sort(key=cmp_to_key(sort_identities_func))
    for i in identities:
        c = i['category']
        type_ = i.get('type', '')
        lang = i.get('xml:lang', '')
        name = i.get('name', '')
        S += '%s/%s/%s/%s<' % (c, type_, lang, name)
    features.sort()
    for f in features:
        S += '%s<' % f
    dataforms.sort(key=cmp_to_key(sort_dataforms_func))
    for dataform in dataforms:
        # fields indexed by var
        fields = {}
        for f in dataform.getChildren():
            fields[f.getVar()] = f
        form_type = fields.get('FORM_TYPE')
        if form_type:
            S += form_type.getValue() + '<'
            del fields['FORM_TYPE']
        for var in sorted(fields.keys()):
            S += '%s<' % var
            values = sorted(fields[var].getValues())
            for value in values:
                S += '%s<' % value

    if hash_method == 'sha-1':
        hash_ = hashlib.sha1(S.encode('utf-8'))
    elif hash_method == 'md5':
        hash_ = hashlib.md5(S.encode('utf-8'))
    else:
        return ''
    return base64.b64encode(hash_.digest()).decode('utf-8')
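
# Sanity check against the simple generation example in XEP-0115 §5.2
# (Exodus 0.9.1 with four features; the expected ver string below is the
# one given in the XEP):
#
#     identities = [{'category': 'client', 'type': 'pc',
#                    'name': 'Exodus 0.9.1'}]
#     features = ['http://jabber.org/protocol/caps',
#                 'http://jabber.org/protocol/disco#info',
#                 'http://jabber.org/protocol/disco#items',
#                 'http://jabber.org/protocol/muc']
#     compute_caps_hash(identities, features)
#     # -> 'QgayPKawpkPSDYmwT/WM94uAlu0='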


################################################################################
### Internal classes of this module
################################################################################

class AbstractClientCaps(object):
    """
    Base class representing a client and its capabilities as advertised by a
    caps tag in a presence
    """
    def __init__(self, caps_hash, node):
        self._hash = caps_hash
        self._node = node
        self._hash_method = None

    def get_discover_strategy(self):
        return self._discover

    def _discover(self, connection, jid):
        """
        To be implemented by subclasses
        """
        raise NotImplementedError

    def get_cache_lookup_strategy(self):
        return self._lookup_in_cache

    def _lookup_in_cache(self, caps_cache):
        """
        To be implemented by subclasses
        """
        raise NotImplementedError

    def get_hash_validation_strategy(self):
        return self._is_hash_valid

    def _is_hash_valid(self, identities, features, dataforms):
        """
        To be implemented by subclasses
        """
        raise NotImplementedError


class ClientCaps(AbstractClientCaps):
    """
    The current XEP-0115 implementation
    """
    def __init__(self, caps_hash, node, hash_method):
        AbstractClientCaps.__init__(self, caps_hash, node)
        assert hash_method != 'old'
        self._hash_method = hash_method

    def _lookup_in_cache(self, caps_cache):
        return caps_cache[(self._hash_method, self._hash)]

    def _discover(self, connection, jid):
        connection.discoverInfo(jid, '%s#%s' % (self._node, self._hash))

    def _is_hash_valid(self, identities, features, dataforms):
        computed_hash = compute_caps_hash(identities, features,
            dataforms=dataforms, hash_method=self._hash_method)
        return computed_hash == self._hash


class OldClientCaps(AbstractClientCaps):
    """
    Old XEP-0115 implementation. Kept around for backward compatibility
    """
    def __init__(self, caps_hash, node):
        AbstractClientCaps.__init__(self, caps_hash, node)
        self._hash_method = 'old'

    def _lookup_in_cache(self, caps_cache):
        return caps_cache[('old', self._node + '#' + self._hash)]

    def _discover(self, connection, jid):
        connection.discoverInfo(jid)

    def _is_hash_valid(self, identities, features, dataforms):
        return True


class NoClientCaps(AbstractClientCaps):
    """
    For clients that don't support XEP-0115
    """
    def __init__(self, fjid):
        AbstractClientCaps.__init__(self, fjid, fjid)
        self._hash_method = 'no'

    def _lookup_in_cache(self, caps_cache):
        return caps_cache[('no', self._node)]

    def _discover(self, connection, jid):
        connection.discoverInfo(jid)

    def _is_hash_valid(self, identities, features, dataforms):
        return True


class NullClientCaps(AbstractClientCaps):
    """
    This is a null object to streamline caps handling if a client has not
    advertised any caps or has advertised them in an improper way

    Assumes (almost) everything is supported.
    """
    _instance = None

    def __new__(cls, *args, **kwargs):
        """
        Make it a singleton
        """
        if not cls._instance:
            cls._instance = super(NullClientCaps, cls).__new__(
                cls, *args, **kwargs)
        return cls._instance

    def __init__(self):
        AbstractClientCaps.__init__(self, None, None)
        self._hash_method = 'dummy'

    def _lookup_in_cache(self, caps_cache):
        # look up something which does not exist to get a new CacheItem created
        cache_item = caps_cache[('dummy', '')]
        # Mark the item as cached so that protocol/caps.py does not update it
        cache_item.status = FAKED
        return cache_item

    def _discover(self, connection, jid):
        pass

    def _is_hash_valid(self, identities, features, dataforms):
        return False
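
# Being a singleton, every contact with missing or broken caps shares one
# NullClientCaps instance and therefore one FAKED cache item:
#
#     NullClientCaps() is NullClientCaps()  # -> True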


class CapsCache(object):
    """
    This object keeps the mapping between caps data and the real disco
    features they represent, and provides a simple way to query that info
    """
    def __init__(self, logger=None):
        # our containers:
        # __cache is a dictionary mapping a (hash method, hash) pair
        # to a CacheItem object
        # __CacheItem is a class that stores data about a particular
        # client (hash method/hash pair)
        self.__cache = {}

        class CacheItem(object):
            # __names is a string cache; we will see plenty of identical
            # long strings, so we intern them here to share a single object
            __names = {}

            def __init__(self, hash_method, hash_, logger):
                # cached into db
                self.hash_method = hash_method
                self.hash = hash_
                self._features = []
                self._identities = []
                self._logger = logger

                self.status = NEW
                self._recently_seen = False

            def _get_features(self):
                return self._features

            def _set_features(self, value):
                self._features = []
                for feature in value:
                    self._features.append(self.__names.setdefault(feature, feature))

            features = property(_get_features, _set_features)

            def _get_identities(self):
                list_ = []
                for i in self._identities:
                    # transform it back into a dict
                    d = dict()
                    d['category'] = i[0]
                    if i[1]:
                        d['type'] = i[1]
                    if i[2]:
                        d['xml:lang'] = i[2]
                    if i[3]:
                        d['name'] = i[3]
                    list_.append(d)
                return list_

            def _set_identities(self, value):
                self._identities = []
                for identity in value:
                    # dicts are not hashable, so transform them into tuples
                    t = (identity['category'], identity.get('type'),
                        identity.get('xml:lang'), identity.get('name'))
                    self._identities.append(self.__names.setdefault(t, t))

            identities = property(_get_identities, _set_identities)

            def set_and_store(self, identities, features):
                self.identities = identities
                self.features = features
                if self.hash_method != 'no':
                    self._logger.add_caps_entry(self.hash_method, self.hash,
                        identities, features)
                self.status = CACHED

            def update_last_seen(self):
                if not self._recently_seen:
                    self._recently_seen = True
                    if self.hash_method != 'no':
                        self._logger.update_caps_time(self.hash_method,
                            self.hash)

            def is_valid(self):
                """
                Returns True if identities and features for this cache item
                are known
                """
                return self.status in (CACHED, FAKED)

        self.__CacheItem = CacheItem
        self.logger = logger

    def initialize_from_db(self):
        self._remove_outdated_caps()
        for hash_method, hash_, identities, features in \
                self.logger.iter_caps_data():
            x = self[(hash_method, hash_)]
            x.identities = identities
            x.features = features
            x.status = CACHED

    def _remove_outdated_caps(self):
        """
        Remove outdated values from the db
        """
        self.logger.clean_caps_table()

    def __getitem__(self, caps):
        if caps in self.__cache:
            return self.__cache[caps]

        hash_method, hash_ = caps

        x = self.__CacheItem(hash_method, hash_, self.logger)
        self.__cache[(hash_method, hash_)] = x
        return x
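
    # Indexing the cache never raises: a (hash method, hash) pair not seen
    # before lazily creates a NEW CacheItem that a later disco#info reply
    # can fill in, e.g. (illustrative):
    #
    #     item = capscache[('sha-1', 'QgayPKawpkPSDYmwT/WM94uAlu0=')]
    #     item.status  # -> NEW, until protocol/caps.py stores the answer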

    def query_client_of_jid_if_unknown(self, connection, jid, client_caps):
        """
        Start a disco query to determine caps (node, ver, exts). Won't query
        if the data is already in cache
        """
        lookup_cache_item = client_caps.get_cache_lookup_strategy()
        q = lookup_cache_item(self)

        if q.status == NEW:
            # do query for bare node+hash pair
            # this will create the proper object
            q.status = QUERIED
            discover = client_caps.get_discover_strategy()
            discover(connection, jid)
        else:
            q.update_last_seen()

    def forget_caps(self, client_caps):
        hash_method = client_caps._hash_method
        hash = client_caps._hash
        key = (hash_method, hash)
        if key in self.__cache:
            del self.__cache[key]


class MucCapsCache:

    DiscoInfo = namedtuple('DiscoInfo', ['identities', 'features', 'data'])

    def __init__(self):
        self.cache = {}

    def append(self, stanza):
        jid = stanza.getFrom()
        identities, features, data = [], [], []
        query_childs = stanza.getQueryChildren()
        if not query_childs:
            app.log('gajim.muc').warning('%s returned empty disco info', jid)
            return

        for child in query_childs:
            if child.getName() == 'identity':
                attr = {}
                for key in child.getAttrs().keys():
                    attr[key] = child.getAttr(key)
                identities.append(attr)
            elif child.getName() == 'feature':
                features.append(child.getAttr('var'))
            elif child.getName() == 'x':
                if child.getNamespace() == nbxmpp.NS_DATA:
                    data.append(nbxmpp.DataForm(node=child))

        self.cache[jid] = self.DiscoInfo(identities, features, data)

    def is_cached(self, jid):
        return jid in self.cache

    def supports(self, jid, feature):
        if jid in self.cache:
            if feature in self.cache[jid].features:
                return True
        return False

    def has_mam(self, jid):
        try:
            if nbxmpp.NS_MAM_2 in self.cache[jid].features:
                return True
            if nbxmpp.NS_MAM_1 in self.cache[jid].features:
                return True
            return False
        except (KeyError, AttributeError):
            return False
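
# Example usage (illustrative sketch; `iq` stands for a disco#info result
# stanza received from a MUC service, and the cache key is whatever
# stanza.getFrom() returned when the result was appended):
#
#     muc_caps_cache.append(iq)
#     if muc_caps_cache.has_mam(iq.getFrom()):
#         pass  # the room archives messages via XEP-0313 (MAM)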
|