2008-08-15 19:31:51 +02:00
|
|
|
# -*- coding:utf-8 -*-
|
2008-08-15 05:20:23 +02:00
|
|
|
## src/common/caps.py
|
2007-06-27 02:51:12 +02:00
|
|
|
##
|
2008-08-15 05:20:23 +02:00
|
|
|
## Copyright (C) 2007 Tomasz Melcer <liori AT exroot.org>
|
|
|
|
## Travis Shirk <travis AT pobox.com>
|
|
|
|
## Copyright (C) 2007-2008 Yann Leboulanger <asterix AT lagaule.org>
|
|
|
|
## Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
|
|
|
|
## Jonathan Schleifer <js-gajim AT webkeks.org>
|
2009-10-25 21:57:20 +01:00
|
|
|
## Copyright (C) 2008-2009 Stephan Erb <steve-e AT h3c.de>
|
2007-06-27 02:51:12 +02:00
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## This file is part of Gajim.
|
|
|
|
##
|
|
|
|
## Gajim is free software; you can redistribute it and/or modify
|
2007-06-27 02:51:12 +02:00
|
|
|
## it under the terms of the GNU General Public License as published
|
2007-10-22 13:13:13 +02:00
|
|
|
## by the Free Software Foundation; version 3 only.
|
2007-06-27 02:51:12 +02:00
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## Gajim is distributed in the hope that it will be useful,
|
2007-06-27 02:51:12 +02:00
|
|
|
## but WITHOUT ANY WARRANTY; without even the implied warranty of
|
2008-08-15 05:20:23 +02:00
|
|
|
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
2007-06-27 02:51:12 +02:00
|
|
|
## GNU General Public License for more details.
|
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## You should have received a copy of the GNU General Public License
|
2008-08-15 05:20:23 +02:00
|
|
|
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
|
2007-10-22 13:13:13 +02:00
|
|
|
##
|
2007-06-27 02:51:12 +02:00
|
|
|
|
2009-10-25 21:57:20 +01:00
|
|
|
'''
|
|
|
|
Module containing all XEP-115 (Entity Capabilities) related classes
|
|
|
|
|
|
|
|
Basic Idea:
|
|
|
|
CapsCache caches features to hash relationships. The cache is queried
|
|
|
|
through EntityCapabilities objects which are held by contact instances.
|
|
|
|
|
|
|
|
EntityCapabilities represent the client of contacts. It is set on the receive
|
|
|
|
of a presence. The respective jid is then queried with a disco if the advertised
|
|
|
|
client/hash is unknown.
|
|
|
|
'''
|
|
|
|
|
2007-07-09 23:24:47 +02:00
|
|
|
import gajim
|
2008-04-21 00:58:47 +02:00
|
|
|
import helpers
|
2007-06-27 02:51:12 +02:00
|
|
|
|
2009-10-25 21:57:20 +01:00
|
|
|
|
|
|
|
class AbstractEntityCapabilities(object):
	'''
	Base class representing a client and its capabilities as advertised by
	a caps tag in a presence
	'''

	def __init__(self, caps_cache, caps_hash, node):
		# caps_cache: CapsCache instance used to resolve hashes to features
		self._caps_cache = caps_cache
		# caps_hash: the advertised caps hash (the 'ver' attribute)
		self._hash = caps_hash
		# node: the client's node URI (the 'node' attribute)
		self._node = node

	def query_client_of_jid_if_unknown(self, connection, jid):
		'''
		Asynchronously query the given jid for its (node, ver, exts) caps data
		using a disco query.

		Query will only be sent if the data is not already cached.
		'''
		q = self._lookup_in_cache()
		if q and q.query_status == q.NOT_QUERIED:
			q.query_status = q.QUERIED
			# BUGFIX: the disco query is issued by this capabilities object,
			# not by the cache item q (which has no _discover method).
			self._discover(connection, jid)

	def _discover(self, connection, jid):
		''' To be implemented by subclasses '''
		raise NotImplementedError()

	def _lookup_in_cache(self):
		''' To be implemented by subclasses '''
		raise NotImplementedError()
|
|
|
|
|
|
|
|
|
|
|
|
class EntityCapabilities(AbstractEntityCapabilities):
	''' The current XEP-115 implementation '''

	def __init__(self, caps_cache, caps_hash, node, hash_method):
		AbstractEntityCapabilities.__init__(self, caps_cache, caps_hash, node)
		# 'old' pre-hash caps must go through OldEntityCapabilities instead
		assert hash_method != 'old'
		self._hash_method = hash_method

	def _lookup_in_cache(self):
		# Current-style caps are keyed on the (hash method, hash) pair
		key = (self._hash_method, self._hash)
		return self._caps_cache[key]

	def _discover(self, connection, jid):
		# Disco against the node#ver pair as mandated by XEP-0115
		disco_node = '%s#%s' % (self._node, self._hash)
		connection.discoverInfo(jid, disco_node)
|
|
|
|
|
|
|
|
|
|
|
|
class OldEntityCapabilities(AbstractEntityCapabilities):
	''' Old XEP-115 implementation. Kept around for backward compatibility. '''

	def __init__(self, caps_cache, caps_hash, node):
		AbstractEntityCapabilities.__init__(self, caps_cache, caps_hash, node)

	def _lookup_in_cache(self):
		# BUGFIX: must match the base-class interface (no extra argument) --
		# callers invoke _lookup_in_cache() without parameters, so the cache
		# has to come from self._caps_cache.
		# Old-style caps are keyed on the full "node#hash" string.
		return self._caps_cache[('old', self._node + '#' + self._hash)]

	def _discover(self, connection, jid):
		# Old-style caps carry no verifiable node#ver pair; do a plain disco.
		connection.discoverInfo(jid)
|
|
|
|
|
|
|
|
|
|
|
|
class NullEntityCapabilities(AbstractEntityCapabilities):
	'''
	Null object to streamline caps handling if a client has not advertised
	any caps or has advertised them in an improper way.

	Assumes everything is supported.
	'''

	def _lookup_in_cache(self):
		# Nothing is ever cached for the null object, so the caller's
		# "if q and ..." guard skips any disco query.
		return None
|
|
|
|
|
|
|
|
|
2007-06-27 02:51:12 +02:00
|
|
|
class CapsCache(object):
	''' This object keeps the mapping between caps data and real disco
	features they represent, and provides simple way to query that info.

	It is application-wide, that is there's one object for all
	connections.

	Goals:
	 * handle storing/retrieving info from database
	 * cache info in memory
	 * expose simple interface

	Properties:
	 * one object for all connections (move to logger.py?)
	 * store info efficiently (a set() of urls -- we can assume there won't be
	   too much of these, ensure that (X,Y,Z1) and (X,Y,Z2) has different
	   features.
	'''

	def __init__(self, logger=None):
		''' Create a cache for entity capabilities. '''
		# our containers:
		# __cache is a dictionary mapping: pair of hash method and hash maps
		# to CapsCacheItem object
		# __CacheItem is a class that stores data about particular
		# client (hash method/hash pair)
		self.__cache = {}

		class CacheItem(object):
			# __names is a string cache; every string long enough is given
			# another object, and we will have plenty of identical long
			# strings. therefore we can cache them
			__names = {}

			def __init__(self, hash_method, hash_, logger):
				# cached into db
				self.hash_method = hash_method
				self.hash = hash_
				self._features = []
				self._identities = []
				self._logger = logger

				# not cached into db:
				# have we sent the query?
				# 0 == not queried
				# 1 == queried
				# 2 == got the answer
				self.queried = 0

			def _get_features(self):
				return self._features

			def _set_features(self, value):
				# intern features through __names so identical strings
				# share one object across cache items
				self._features = []
				for feature in value:
					self._features.append(self.__names.setdefault(feature, feature))

			features = property(_get_features, _set_features)

			def _get_identities(self):
				list_ = []
				for i in self._identities:
					# transforms it back in a dict
					d = dict()
					d['category'] = i[0]
					if i[1]:
						d['type'] = i[1]
					if i[2]:
						d['xml:lang'] = i[2]
					if i[3]:
						d['name'] = i[3]
					list_.append(d)
				return list_

			def _set_identities(self, value):
				self._identities = []
				for identity in value:
					# dict are not hashable, so transform it into a tuple
					t = (identity['category'], identity.get('type'),
						identity.get('xml:lang'), identity.get('name'))
					self._identities.append(self.__names.setdefault(t, t))

			identities = property(_get_identities, _set_identities)

			def update(self, identities, features):
				''' Fill this item from a disco result and persist it. '''
				self.identities = identities
				self.features = features
				self._logger.add_caps_entry(self.hash_method, self.hash,
					identities, features)

		self.__CacheItem = CacheItem

		# start logging data from the net
		# BUGFIX: self.logger must be assigned BEFORE the prepopulation loop
		# below -- __getitem__ passes self.logger to newly created CacheItem
		# objects, so a non-empty gajim.connections would otherwise raise
		# AttributeError here.
		self.logger = logger

		# prepopulate data which we are sure of; note: we do not log these info
		for account in gajim.connections:
			gajimcaps = self[('sha-1', gajim.caps_hash[account])]
			gajimcaps.identities = [gajim.gajim_identity]
			gajimcaps.features = gajim.gajim_common_features + \
				gajim.gajim_optional_features[account]

	def initialize_from_db(self):
		''' Fill the in-memory cache with all caps entries stored by the
		logger. Entries loaded this way are marked as fully answered. '''
		# get data from logger...
		if self.logger is not None:
			for hash_method, hash_, identities, features in \
			self.logger.iter_caps_data():
				x = self[(hash_method, hash_)]
				x.identities = identities
				x.features = features
				x.queried = 2

	def __getitem__(self, caps):
		''' Return the CacheItem for a (hash method, hash) pair, creating
		an empty, not-yet-queried item on first access. '''
		if caps in self.__cache:
			return self.__cache[caps]

		hash_method, hash_ = caps

		x = self.__CacheItem(hash_method, hash_, self.logger)
		self.__cache[(hash_method, hash_)] = x
		return x

	def preload(self, con, jid, node, hash_method, hash_):
		''' Preload data about (node, ver, exts) caps using disco
		query to jid using proper connection. Don't query if
		the data is already in cache. '''
		if hash_method == 'old':
			# old-style caps are keyed on the full node#hash string
			q = self[(hash_method, node + '#' + hash_)]
		else:
			q = self[(hash_method, hash_)]

		if q.queried == 0:
			# do query for bare node+hash pair
			# this will create proper object
			q.queried = 1
			if hash_method == 'old':
				con.discoverInfo(jid)
			else:
				con.discoverInfo(jid, '%s#%s' % (node, hash_))

	def is_supported(self, contact, feature):
		''' Return True if contact advertises support for feature, or if
		no feature list is cached for it at all (see FIXME below). '''
		if not contact:
			return False

		# Unfortunately, if all resources are offline, the contact
		# includes the last resource that was online. Check for its
		# show, so we can be sure it's existant. Otherwise, we still
		# return caps for a contact that has no resources left.
		if contact.show == 'offline':
			return False

		# FIXME: We assume everything is supported if we got no caps.
		# This is the "Asterix way", after 0.12 release, I will
		# likely implement a fallback to disco (could be disabled
		# for mobile users who pay for traffic)
		if contact.caps_hash_method == 'old':
			features = self[(contact.caps_hash_method, contact.caps_node + '#' + \
				contact.caps_hash)].features
		else:
			features = self[(contact.caps_hash_method, contact.caps_hash)].features
		if feature in features or features == []:
			return True

		return False
|
2007-06-28 00:32:35 +02:00
|
|
|
|
2007-07-09 23:24:47 +02:00
|
|
|
# Application-wide singleton cache, shared by every Connection instance.
gajim.capscache = CapsCache(gajim.logger)
|
2007-06-27 02:51:12 +02:00
|
|
|
|
|
|
|
class ConnectionCaps(object):
	''' This class highly depends on that it is a part of Connection class. '''

	def _capsPresenceCB(self, con, presence):
		''' Handle incoming presence stanzas... This is a callback
		for xmpp registered in connection_handlers.py'''

		# we will put these into proper Contact object and ask
		# for disco... so that disco will learn how to interpret
		# these caps
		pm_ctrl = None
		try:
			jid = helpers.get_full_jid_from_iq(presence)
		except Exception:
			# Bad jid; nothing useful can be done with this presence
			return
		contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
		if contact is None:
			room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
			contact = gajim.contacts.get_gc_contact(
				self.name, room_jid, nick)
			pm_ctrl = gajim.interface.msg_win_mgr.get_control(jid, self.name)
			if contact is None:
				# TODO: a way to put contact not-in-roster
				# into Contacts
				return

		# get the caps element
		caps = presence.getTag('c')
		if not caps:
			contact.caps_node = None
			contact.caps_hash = None
			contact.caps_hash_method = None
			return

		hash_method, node, hash_ = caps['hash'], caps['node'], caps['ver']

		if hash_method is None and node and hash_:
			# Old XEP-115 implementation
			hash_method = 'old'

		if hash_method is None or node is None or hash_ is None:
			# improper caps in stanza, ignoring
			contact.caps_node = None
			contact.caps_hash = None
			# BUGFIX: the attribute is named caps_hash_method everywhere else
			# (was: contact.hash_method), so the stale value would otherwise
			# survive and be read by CapsCache.is_supported.
			contact.caps_hash_method = None
			return

		# start disco query...
		gajim.capscache.preload(self, jid, node, hash_method, hash_)

		# overwriting old data
		contact.caps_node = node
		contact.caps_hash_method = hash_method
		contact.caps_hash = hash_
		if pm_ctrl:
			pm_ctrl.update_contact()

	def _capsDiscoCB(self, jid, node, identities, features, dataforms):
		''' Callback for the disco#info result of a caps query: verify the
		advertised hash (new-style caps only) and fill the cache entry. '''
		contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
		if not contact:
			room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
			contact = gajim.contacts.get_gc_contact(self.name, room_jid, nick)
			if contact is None:
				return
		if not contact.caps_node:
			return # we didn't ask for that?
		if contact.caps_hash_method != 'old':
			computed_hash = helpers.compute_caps_hash(identities, features,
				dataforms=dataforms, hash_method=contact.caps_hash_method)
			if computed_hash != contact.caps_hash:
				# wrong hash, forget it
				contact.caps_node = ''
				contact.caps_hash_method = ''
				contact.caps_hash = ''
				return
			# if we don't have this info already...
			caps = gajim.capscache[(contact.caps_hash_method, contact.caps_hash)]
		else:
			# if we don't have this info already...
			caps = gajim.capscache[(contact.caps_hash_method, contact.caps_node + \
				'#' + contact.caps_hash)]
		if caps.queried == 2:
			return

		caps.update(identities, features)
|
2008-07-29 21:49:31 +02:00
|
|
|
|
2008-08-05 00:06:54 +02:00
|
|
|
# vim: se ts=3:
|