# -*- coding:utf-8 -*-
## src/common/caps.py
##
## Copyright (C) 2007 Tomasz Melcer <liori AT exroot.org>
##                    Travis Shirk <travis AT pobox.com>
## Copyright (C) 2007-2008 Yann Leboulanger <asterix AT lagaule.org>
## Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
##                    Jonathan Schleifer <js-gajim AT webkeks.org>
##                    Stephan Erb <steve-e AT h3c.de>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim.  If not, see <http://www.gnu.org/licenses/>.
##

from itertools import *

import xmpp.features_nb
import gajim
import helpers

class CapsCache(object):
	''' This object keeps the mapping between caps data and the real disco
	features they represent, and provides a simple way to query that info.
	It is application-wide, that is, there is one object for all
	connections.

	Goals:
	 * handle storing/retrieving info from the database
	 * cache info in memory
	 * expose a simple interface

	Properties:
	 * one object for all connections (move to logger.py?)
	 * store info efficiently (a set() of urls -- we can assume there won't be
	   too many of these; ensure that (X,Y,Z1) and (X,Y,Z2) have different
	   features)

	Connections with other objects: (TODO)

	Interface:

	# object creation
	>>> cc = CapsCache(logger_object)

	>>> caps = ('sha-1', '66/0NaeaBKkwk85efJTGmU47vXI=')
	>>> muc = 'http://jabber.org/protocol/muc'
	>>> chatstates = 'http://jabber.org/protocol/chatstates'

	# setting data
	>>> cc[caps].identities = [{'category': 'client', 'type': 'pc'}]
	>>> cc[caps].features = [muc]

	# retrieving data
	>>> muc in cc[caps].features
	True
	>>> chatstates in cc[caps].features
	False
	>>> cc[caps].identities
	[{'category': 'client', 'type': 'pc'}]
	>>> x = cc[caps] # more efficient if making several queries for one set of caps
	>>> muc in x.features
	True
	'''
	def __init__(self, logger=None):
		''' Create a cache for entity capabilities. '''
		# our containers:
		# __cache is a dictionary mapping a (hash method, hash) pair
		#   to a CacheItem object
		# __CacheItem is a class that stores data about a particular
		#   client (hash method/hash pair)
		self.__cache = {}

		class CacheItem(object):
			''' TODO: logging data into db '''
			# __names is a string cache; we will handle plenty of identical
			#   long strings (feature URIs), so we store each distinct string
			#   once and reuse that object for every later occurrence
			# TODO: maybe put all known xmpp namespace strings here
			#   (strings given in xmpppy)?
			__names = {}

			def __init__(ciself, hash_method, hash_):
				# cached into db
				ciself.hash_method = hash_method
				ciself.hash = hash_
				ciself._features = []
				ciself._identities = []

				# not cached into db:
				# have we sent the query?
				# 0 == not queried
				# 1 == queried
				# 2 == got the answer
				ciself.queried = 0

			def _get_features(ciself):
				return ciself._features

			def _set_features(ciself, value):
				ciself._features = []
				for feature in value:
					ciself._features.append(ciself.__names.setdefault(feature,
						feature))
			features = property(_get_features, _set_features)

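			# Illustrative note (not from the original code): the string cache
			# above relies on dict.setdefault returning the value already
			# stored under a key, so equal strings collapse to one shared
			# object. A minimal sketch of the idea:
			#   >>> names = {}
			#   >>> a = names.setdefault('http://jabber.org/protocol/muc',
			#   ...     'http://jabber.org/protocol/muc')
			#   >>> b = names.setdefault('http://jabber.org/protocol/muc',
			#   ...     'http://jabber.org/' + 'protocol/muc')
			#   >>> a is b
			#   True
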
			def _get_identities(ciself):
				list_ = []
				for i in ciself._identities:
					# transform the tuple back into a dict
					d = dict()
					d['category'] = i[0]
					if i[1]:
						d['type'] = i[1]
					if i[2]:
						d['xml:lang'] = i[2]
					if i[3]:
						d['name'] = i[3]
					list_.append(d)
				return list_

			def _set_identities(ciself, value):
				ciself._identities = []
				for identity in value:
					# dicts are not hashable, so transform each one into a tuple
					t = (identity['category'], identity.get('type'),
						identity.get('xml:lang'), identity.get('name'))
					ciself._identities.append(ciself.__names.setdefault(t, t))
			identities = property(_get_identities, _set_identities)

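			# Hedged example of the round trip implemented above (values are
			# illustrative only): a disco identity dict such as
			#   {'category': 'client', 'type': 'pc'}
			# is stored internally as the tuple
			#   ('client', 'pc', None, None)
			# and _get_identities rebuilds the dict, dropping the None fields.
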
			def update(ciself, identities, features):
				# NOTE: self refers to the CapsCache object, not to CacheItem
				ciself.identities = identities
				ciself.features = features
				self.logger.add_caps_entry(ciself.hash_method, ciself.hash,
					identities, features)

		self.__CacheItem = CacheItem

		# prepopulate data which we are sure of; note: we do not log this info
		for account in gajim.connections:
			gajimcaps = self[('sha-1', gajim.caps_hash[account])]
			gajimcaps.identities = [gajim.gajim_identity]
			gajimcaps.features = gajim.gajim_common_features + \
				gajim.gajim_optional_features[account]

		# start logging data from the net
		self.logger = logger

	def load_from_db(self):
		# get data from logger...
		if self.logger is not None:
			for hash_method, hash, identities, features in \
			self.logger.iter_caps_data():
				x = self[(hash_method, hash)]
				x.identities = identities
				x.features = features
				x.queried = 2

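	# Hedged illustration (not from the original file): each row yielded by
	# logger.iter_caps_data() unpacks as (hash_method, hash, identities,
	# features), e.g.
	#   ('sha-1', '66/0NaeaBKkwk85efJTGmU47vXI=',
	#    [{'category': 'client', 'type': 'pc'}],
	#    ['http://jabber.org/protocol/muc'])
	# the concrete values above are examples, not stored data.
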
	def __getitem__(self, caps):
		if caps in self.__cache:
			return self.__cache[caps]

		hash_method, hash = caps

		x = self.__CacheItem(hash_method, hash)
		self.__cache[(hash_method, hash)] = x
		return x

	def preload(self, con, jid, node, hash_method, hash_):
		''' Preload data about (node, ver) caps using a disco
		query to jid over the given connection. Don't query if
		the data is already in the cache. '''
		if hash_method == 'old':
			q = self[(hash_method, node + '#' + hash_)]
		else:
			q = self[(hash_method, hash_)]

		if q.queried == 0:
			# do query for the bare node+hash pair
			# this will create the proper object
			q.queried = 1
			if hash_method == 'old':
				con.discoverInfo(jid)
			else:
				con.discoverInfo(jid, '%s#%s' % (node, hash_))

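	# Hedged example (values are illustrative, not taken from this file): for
	# XEP-0115 hashed caps, the disco#info query above targets the 'node#ver'
	# string, e.g.
	#   con.discoverInfo(jid, 'http://gajim.org#66/0NaeaBKkwk85efJTGmU47vXI=')
	# while pre-hash ('old') caps fall back to a plain disco#info query on the
	# bare jid.
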
	def is_supported(self, contact, feature):
		if not contact:
			return False

		# Unfortunately, if all resources are offline, the contact
		# still carries the last resource that was online. Check its
		# show, so we can be sure the resource really exists. Otherwise
		# we would still return caps for a contact that has no
		# resources left.
		if contact.show == 'offline':
			return False

		# FIXME: We assume everything is supported if we got no caps.
		#        This is the "Asterix way"; after the 0.12 release I will
		#        likely implement a fallback to disco (could be disabled
		#        for mobile users who pay for traffic).
		if contact.caps_hash_method == 'old':
			features = self[(contact.caps_hash_method, contact.caps_node + '#' + \
				contact.caps_hash)].features
		else:
			features = self[(contact.caps_hash_method, contact.caps_hash)].features
		if feature in features or features == []:
			return True

		return False

gajim.capscache = CapsCache(gajim.logger)

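# Hedged usage sketch (illustrative, not part of the original file): other
# parts of Gajim can check a contact's capabilities through this shared
# cache, e.g.
#   if gajim.capscache.is_supported(contact,
#   'http://jabber.org/protocol/chatstates'):
#       send_chatstate(contact)
# where send_chatstate stands for a hypothetical caller-side helper.
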
class ConnectionCaps(object):
	''' This class highly depends on being mixed into the Connection class. '''
	def _capsPresenceCB(self, con, presence):
		''' Handle incoming presence stanzas... This is an xmpp callback
		registered in connection_handlers.py '''

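		# For orientation, a hedged example of the XEP-0115 caps element this
		# callback looks for in presence (attribute values are illustrative):
		#   <c xmlns='http://jabber.org/protocol/caps'
		#      hash='sha-1' node='http://gajim.org'
		#      ver='66/0NaeaBKkwk85efJTGmU47vXI='/>
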
		# we will put these into the proper Contact object and ask
		# for disco... so that disco will learn how to interpret
		# these caps
		pm_ctrl = None
		jid = helpers.get_full_jid_from_iq(presence)
		contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
		if contact is None:
			room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
			contact = gajim.contacts.get_gc_contact(
				self.name, room_jid, nick)
			pm_ctrl = gajim.interface.msg_win_mgr.get_control(jid, self.name)
			if contact is None:
				# TODO: a way to put a not-in-roster contact
				# into Contacts
				return

		# get the caps element
		caps = presence.getTag('c')
		if not caps:
			contact.caps_node = None
			contact.caps_hash = None
			contact.caps_hash_method = None
			return

		hash_method, node, hash = caps['hash'], caps['node'], caps['ver']

		if hash_method is None and node and hash:
			# old XEP-0115 implementation
			hash_method = 'old'

		if hash_method is None or node is None or hash is None:
			# improper caps in stanza, ignoring
			contact.caps_node = None
			contact.caps_hash = None
			contact.caps_hash_method = None
			return

		# start disco query...
		gajim.capscache.preload(self, jid, node, hash_method, hash)

		# overwriting old data
		contact.caps_node = node
		contact.caps_hash_method = hash_method
		contact.caps_hash = hash
		if pm_ctrl:
			pm_ctrl.update_contact()

	def _capsDiscoCB(self, jid, node, identities, features, dataforms):
		contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
		if not contact:
			room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
			contact = gajim.contacts.get_gc_contact(self.name, room_jid, nick)
			if contact is None:
				return
		if not contact.caps_node:
			return # we didn't ask for that?
		if contact.caps_hash_method != 'old' and not node.startswith(
		contact.caps_node + '#'):
			return
		if contact.caps_hash_method != 'old':
			node, hash = node.split('#', 1)
			computed_hash = helpers.compute_caps_hash(identities, features,
				dataforms=dataforms, hash_method=contact.caps_hash_method)
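			# Rough, hedged sketch of what the XEP-0115 verification above
			# amounts to (data forms omitted); helpers.compute_caps_hash is
			# the authoritative implementation:
			#   S = ''
			#   for each identity, sorted:  S += 'category/type/lang/name<'
			#   for each feature, sorted:   S += feature + '<'
			#   ver = base64(sha1(S))
			# If the advertised 'ver' does not match, the caps are discarded.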
			if computed_hash != hash:
				# wrong hash, forget it
				contact.caps_node = ''
				contact.caps_hash_method = ''
				contact.caps_hash = ''
				return
			# if we don't have this info already...
			caps = gajim.capscache[(contact.caps_hash_method, contact.caps_hash)]
		else:
			# if we don't have this info already...
			caps = gajim.capscache[(contact.caps_hash_method, contact.caps_node + \
				'#' + contact.caps_hash)]
		if caps.queried == 2:
			return

		caps.update(identities, features)

# vim: se ts=3: