## logger.py
##
## Copyright (C) 2005-2006 Nikos Kouremenos <kourem@gmail.com>
## Copyright (C) 2005-2006 Yann Leboulanger <asterix@lagaule.org>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##

'''This module provides access to the on-disk database of logs.'''

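# Typical use, as a rough sketch only (it assumes the database file was already
# created by src/common/check_paths.py; the JID and account name below are
# hypothetical):
#
#   logger = Logger()
#   logger.write('chat_msg_recv', 'user@example.org/Home', message='hello')
#   rows = logger.get_conversation_for_date('user@example.org', 2008, 4, 16,
#       'myaccount')
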
import os
import sys
import time
import datetime
from gzip import GzipFile
from cStringIO import StringIO

import exceptions
import gajim

try:
	import sqlite3 as sqlite # python 2.5
except ImportError:
	try:
		from pysqlite2 import dbapi2 as sqlite
	except ImportError:
		raise exceptions.PysqliteNotAvailable

import configpaths
LOG_DB_PATH = configpaths.gajimpaths['LOG_DB']
LOG_DB_FOLDER, LOG_DB_FILE = os.path.split(LOG_DB_PATH)

class Constants:
	def __init__(self):
		(
			self.JID_NORMAL_TYPE,
			self.JID_ROOM_TYPE
		) = range(2)

		(
			self.KIND_STATUS,
			self.KIND_GCSTATUS,
			self.KIND_GC_MSG,
			self.KIND_SINGLE_MSG_RECV,
			self.KIND_CHAT_MSG_RECV,
			self.KIND_SINGLE_MSG_SENT,
			self.KIND_CHAT_MSG_SENT,
			self.KIND_ERROR
		) = range(8)

		(
			self.SHOW_ONLINE,
			self.SHOW_CHAT,
			self.SHOW_AWAY,
			self.SHOW_XA,
			self.SHOW_DND,
			self.SHOW_OFFLINE
		) = range(6)

		(
			self.TYPE_AIM,
			self.TYPE_GG,
			self.TYPE_HTTP_WS,
			self.TYPE_ICQ,
			self.TYPE_MSN,
			self.TYPE_QQ,
			self.TYPE_SMS,
			self.TYPE_SMTP,
			self.TYPE_TLEN,
			self.TYPE_YAHOO,
			self.TYPE_NEWMAIL,
			self.TYPE_RSS,
			self.TYPE_WEATHER,
		) = range(13)

constants = Constants()

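# These integer codes are what actually gets stored in the database columns
# (jids.type, logs.kind, logs.show, transports_cache.type). For example, given
# the tuple unpacking above, constants.JID_ROOM_TYPE == 1 and
# constants.KIND_CHAT_MSG_RECV == 4, so a received chat message is stored with
# kind = 4.
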
class Logger:
	def __init__(self):
		self.jids_already_in = [] # holds jids that we already have in DB
		self.con = None

		if not os.path.exists(LOG_DB_PATH):
			# this can happen only the first time (the time we create the db)
			# db is not created here but in src/common/check_paths.py
			return
		self.init_vars()

	def close_db(self):
		if self.con:
			self.con.close()
		self.con = None
		self.cur = None

	def open_db(self):
		self.close_db()

		# FIXME: sqlite3_open wants UTF8 strings. So a path with
		# non-ascii chars doesn't work. See #2812 and
		# http://lists.initd.org/pipermail/pysqlite/2005-August/000134.html
		back = os.getcwd()
		os.chdir(LOG_DB_FOLDER)

		# if locked, wait up to 20 sec to unlock
		# before raising (hopefully this is enough)
		self.con = sqlite.connect(LOG_DB_FILE, timeout = 20.0,
			isolation_level = 'IMMEDIATE')
		os.chdir(back)
		self.cur = self.con.cursor()
		self.set_synchronous(False)

	def set_synchronous(self, sync):
		try:
			if sync:
				self.cur.execute("PRAGMA synchronous = NORMAL")
			else:
				self.cur.execute("PRAGMA synchronous = OFF")
		except sqlite.Error, e:
			gajim.log.debug("Failed to set_synchronous(%s): %s" % (sync, str(e)))

	def init_vars(self):
		self.open_db()
		self.get_jids_already_in_db()

	def simple_commit(self, sql_to_commit):
		'''helper to commit'''
		self.cur.execute(sql_to_commit)
		try:
			self.con.commit()
		except sqlite.OperationalError, e:
			print >> sys.stderr, str(e)

	def get_jids_already_in_db(self):
		try:
			self.cur.execute('SELECT jid FROM jids')
			rows = self.cur.fetchall() # list of tuples: [(u'aaa@bbb',), (u'cc@dd',)]
		except sqlite.DatabaseError:
			raise exceptions.DatabaseMalformed
		self.jids_already_in = []
		for row in rows:
			# row[0] is the first item of row (the only result here, the jid)
			if row[0] == '':
				# malformed jid, ignore line
				pass
			else:
				self.jids_already_in.append(row[0])

	def get_jids_in_db(self):
		return self.jids_already_in

	def jid_is_from_pm(self, jid):
		'''if jid is gajim@conf/nkour it's likely a pm one. How do we know that
		gajim@conf is not a normal contact and nkour is not its resource?
		We ask whether gajim@conf is already in the jids table (with room type).
		This fails if the user disables logging for the room and only enables it
		for pm (highly unlikely), and if we fail we do not descend into chaos
		(the user will see the first pm as if it were a message in the room's
		public chat) and after that all is okay'''

		if jid.find('/') > -1:
			possible_room_jid, possible_nick = jid.split('/', 1)
			return self.jid_is_room_jid(possible_room_jid)
		else:
			# it's not a full jid, so it's not a pm one
			return False

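	# Illustrative example (the JIDs are hypothetical): if
	# 'gajim@conference.example.org' is stored in the jids table with
	# JID_ROOM_TYPE, then
	#   jid_is_from_pm('gajim@conference.example.org/nkour') -> True
	#   jid_is_from_pm('user@example.org/Home') -> False (the bare JID is not a room)
	#   jid_is_from_pm('user@example.org') -> False (no resource at all)
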
	def jid_is_room_jid(self, jid):
		self.cur.execute('SELECT jid_id FROM jids WHERE jid=? AND type=?',
			(jid, constants.JID_ROOM_TYPE))
		row = self.cur.fetchone()
		if row is None:
			return False
		else:
			return True

	def get_jid_id(self, jid, typestr = None):
		'''jids table has jid and jid_id
		logs table has log_line_id, jid_id, contact_name, time, kind, show, message
		so to query logs we need the jid_id that matches our jid in the jids table
		this method wants jid and returns the jid_id for later sql-ing on logs
		typestr can be 'ROOM' or anything else depending on the type of JID
		and only needs to be specified when the JID is new in DB
		'''
		if jid.find('/') != -1: # if it has a /
			jid_is_from_pm = self.jid_is_from_pm(jid)
			if not jid_is_from_pm: # it's a normal jid with resource
				jid = jid.split('/', 1)[0] # remove the resource
		if jid in self.jids_already_in: # we already have jids in DB
			self.cur.execute('SELECT jid_id FROM jids WHERE jid=?', [jid])
			row = self.cur.fetchone()
			if row:
				return row[0]
		# oh! a new jid :), we add it now
		if typestr == 'ROOM':
			typ = constants.JID_ROOM_TYPE
		else:
			typ = constants.JID_NORMAL_TYPE
		try:
			self.cur.execute('INSERT INTO jids (jid, type) VALUES (?, ?)', (jid,
				typ))
		except sqlite.IntegrityError, e:
			# Jid already in DB, maybe added by another instance. re-read DB
			self.get_jids_already_in_db()
			return self.get_jid_id(jid, typestr)
		try:
			self.con.commit()
		except sqlite.OperationalError, e:
			print >> sys.stderr, str(e)
		jid_id = self.cur.lastrowid
		self.jids_already_in.append(jid)
		return jid_id

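	# Illustrative sketch (hypothetical JIDs and ids): calling
	#   get_jid_id('room@conference.example.org/nick')
	# keeps the /nick part when the bare JID is a known room (pm case), while
	#   get_jid_id('user@example.org/Home')
	# strips the resource and looks up or inserts the bare 'user@example.org'.
	# The returned jid_id is what the other queries below use in their WHERE clauses.
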
	def convert_human_values_to_db_api_values(self, kind, show):
		'''converts from string style to constant ints for db'''
		if kind == 'status':
			kind_col = constants.KIND_STATUS
		elif kind == 'gcstatus':
			kind_col = constants.KIND_GCSTATUS
		elif kind == 'gc_msg':
			kind_col = constants.KIND_GC_MSG
		elif kind == 'single_msg_recv':
			kind_col = constants.KIND_SINGLE_MSG_RECV
		elif kind == 'single_msg_sent':
			kind_col = constants.KIND_SINGLE_MSG_SENT
		elif kind == 'chat_msg_recv':
			kind_col = constants.KIND_CHAT_MSG_RECV
		elif kind == 'chat_msg_sent':
			kind_col = constants.KIND_CHAT_MSG_SENT
		elif kind == 'error':
			kind_col = constants.KIND_ERROR

		if show == 'online':
			show_col = constants.SHOW_ONLINE
		elif show == 'chat':
			show_col = constants.SHOW_CHAT
		elif show == 'away':
			show_col = constants.SHOW_AWAY
		elif show == 'xa':
			show_col = constants.SHOW_XA
		elif show == 'dnd':
			show_col = constants.SHOW_DND
		elif show == 'offline':
			show_col = constants.SHOW_OFFLINE
		elif show is None:
			show_col = None
		else: # invisible in GC when someone goes invisible
			# it's an RFC violation .... but we should not crash
			show_col = 'UNKNOWN'

		return kind_col, show_col

	def convert_human_transport_type_to_db_api_values(self, type_):
		'''converts from string style to constant ints for db'''
		if type_ == 'aim':
			return constants.TYPE_AIM
		if type_ == 'gadu-gadu':
			return constants.TYPE_GG
		if type_ == 'http-ws':
			return constants.TYPE_HTTP_WS
		if type_ == 'icq':
			return constants.TYPE_ICQ
		if type_ == 'msn':
			return constants.TYPE_MSN
		if type_ == 'qq':
			return constants.TYPE_QQ
		if type_ == 'sms':
			return constants.TYPE_SMS
		if type_ == 'smtp':
			return constants.TYPE_SMTP
		if type_ in ('tlen', 'x-tlen'):
			return constants.TYPE_TLEN
		if type_ == 'yahoo':
			return constants.TYPE_YAHOO
		if type_ == 'newmail':
			return constants.TYPE_NEWMAIL
		if type_ == 'rss':
			return constants.TYPE_RSS
		if type_ == 'weather':
			return constants.TYPE_WEATHER
		return None

	def convert_api_values_to_human_transport_type(self, type_id):
		'''converts from constant ints for db to string style'''
		if type_id == constants.TYPE_AIM:
			return 'aim'
		if type_id == constants.TYPE_GG:
			return 'gadu-gadu'
		if type_id == constants.TYPE_HTTP_WS:
			return 'http-ws'
		if type_id == constants.TYPE_ICQ:
			return 'icq'
		if type_id == constants.TYPE_MSN:
			return 'msn'
		if type_id == constants.TYPE_QQ:
			return 'qq'
		if type_id == constants.TYPE_SMS:
			return 'sms'
		if type_id == constants.TYPE_SMTP:
			return 'smtp'
		if type_id == constants.TYPE_TLEN:
			return 'tlen'
		if type_id == constants.TYPE_YAHOO:
			return 'yahoo'
		if type_id == constants.TYPE_NEWMAIL:
			return 'newmail'
		if type_id == constants.TYPE_RSS:
			return 'rss'
		if type_id == constants.TYPE_WEATHER:
			return 'weather'

	def commit_to_db(self, values, write_unread = False):
		sql = 'INSERT INTO logs (jid_id, contact_name, time, kind, show, message, subject) VALUES (?, ?, ?, ?, ?, ?, ?)'
		try:
			self.cur.execute(sql, values)
		except sqlite.DatabaseError:
			raise exceptions.DatabaseMalformed
		except sqlite.OperationalError, e:
			raise exceptions.PysqliteOperationalError(str(e))
		message_id = None
		try:
			self.con.commit()
			if write_unread:
				message_id = self.cur.lastrowid
		except sqlite.OperationalError, e:
			print >> sys.stderr, str(e)
		if message_id:
			self.insert_unread_events(message_id, values[0])
		return message_id

	def insert_unread_events(self, message_id, jid_id):
		'''add unread message with id: message_id'''
		sql = 'INSERT INTO unread_messages VALUES (%d, %d)' % (message_id, jid_id)
		self.simple_commit(sql)

	def set_read_messages(self, message_ids):
		'''mark all messages with ids in message_ids as read'''
		ids = ','.join([str(i) for i in message_ids])
		sql = 'DELETE FROM unread_messages WHERE message_id IN (%s)' % ids
		self.simple_commit(sql)

	def get_unread_msgs(self):
		'''get all unread messages'''
		all_messages = []
		results = []
		try:
			self.cur.execute(
				'SELECT message_id from unread_messages')
			results = self.cur.fetchall()
		except Exception:
			pass
		for message in results:
			msg_id = message[0]
			# here we get info for that message, and the related jid from the
			# jids table. Do NOT change the order of SELECTed things, unless you
			# also change the function(s) that call this function
			self.cur.execute('''
				SELECT logs.log_line_id, logs.message, logs.time, logs.subject,
				jids.jid
				FROM logs, jids
				WHERE logs.log_line_id = %d AND logs.jid_id = jids.jid_id
				''' % msg_id
				)
			results = self.cur.fetchall()
			if len(results) == 0:
				# Log line is no longer in the logs table; remove it from
				# unread_messages
				self.set_read_messages([msg_id])
				continue
			all_messages.append(results[0])
		return all_messages

	def write(self, kind, jid, message = None, show = None, tim = None,
		subject = None):
		'''write a row (status, gcstatus, message etc) to logs database
		kind can be status, gcstatus, gc_msg (those three are only received),
		single_msg_recv, chat_msg_recv, chat_msg_sent, single_msg_sent
		we cannot know if it is a pm or a normal chat message, we try to guess,
		see jid_is_from_pm()

		we analyze jid and store it as follows:
		jids.jid text column will hold JID if TC-related, room_jid if GC-related,
		ROOM_JID/nick if pm-related.'''

		if self.jids_already_in == []: # only happens if we just created the db
			self.open_db()

		contact_name_col = None # holds nickname for kinds gcstatus, gc_msg
		# message holds the message unless kind is status or gcstatus,
		# then it holds the status message
		message_col = message
		subject_col = subject
		if tim:
			time_col = int(float(time.mktime(tim)))
		else:
			time_col = int(float(time.time()))

		kind_col, show_col = self.convert_human_values_to_db_api_values(kind,
			show)

		write_unread = False

		# now we may need to take extra care with some values in columns
		if kind == 'status': # we store (not None) time, jid, show, msg
			# status for roster items
			jid_id = self.get_jid_id(jid)
			if show is None: # show is None (xmpp), but we say 'online'
				show_col = constants.SHOW_ONLINE

		elif kind == 'gcstatus':
			# status in ROOM (for pm status see status)
			if show is None: # show is None (xmpp), but we say 'online'
				show_col = constants.SHOW_ONLINE
			jid, nick = jid.split('/', 1)
			jid_id = self.get_jid_id(jid, 'ROOM') # re-get jid_id for the new jid
			contact_name_col = nick

		elif kind == 'gc_msg':
			if jid.find('/') != -1: # if it has a /
				jid, nick = jid.split('/', 1)
			else:
				# it's a server message, e.g. an error message
				# when a user tries to ban someone but is not allowed to
				nick = None
			jid_id = self.get_jid_id(jid, 'ROOM') # re-get jid_id for the new jid
			contact_name_col = nick
		else:
			jid_id = self.get_jid_id(jid)
			if kind == 'chat_msg_recv':
				if not self.jid_is_from_pm(jid):
					# Save in unread table only if it's not a pm
					write_unread = True

		if show_col == 'UNKNOWN': # unknown show, do not log
			return

		values = (jid_id, contact_name_col, time_col, kind_col, show_col,
			message_col, subject_col)
		return self.commit_to_db(values, write_unread)

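	# Illustrative calls of write() (the JIDs are hypothetical):
	#   write('chat_msg_recv', 'user@example.org/Home', message='hi')
	#     -> also recorded in unread_messages, since it is not a pm
	#   write('gcstatus', 'room@conference.example.org/nick', show='away',
	#       message='brb')
	#     -> stores the nick in contact_name and the status text in message
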
	def get_last_conversation_lines(self, jid, restore_how_many_rows,
		pending_how_many, timeout, account):
		'''accepts how many rows to restore, when to time them out (in minutes,
		marking them as too old), and the number of messages that are in the queue
		and already logged but pending to be viewed;
		returns a list of tuples containing time, kind, message,
		or an empty list if nothing was found to meet our demands'''
		jid_id = self.get_jid_id(jid)
		where_sql = self._build_contact_where(account, jid)

		now = int(float(time.time()))
		timed_out = now - (timeout * 60) # before that they are too old
		# so if we ask for the last 5 lines and we have 2 pending we get
		# rows 3 to 7 (we skip the last 2 lines but still return the 5 asked for)
		try:
			self.cur.execute('''
				SELECT time, kind, message FROM logs
				WHERE (%s) AND kind IN (%d, %d, %d, %d, %d) AND time > %d
				ORDER BY time DESC LIMIT %d OFFSET %d
				''' % (where_sql, constants.KIND_SINGLE_MSG_RECV,
				constants.KIND_CHAT_MSG_RECV, constants.KIND_SINGLE_MSG_SENT,
				constants.KIND_CHAT_MSG_SENT, constants.KIND_ERROR,
				timed_out, restore_how_many_rows, pending_how_many)
				)

			results = self.cur.fetchall()
		except sqlite.DatabaseError:
			raise exceptions.DatabaseMalformed
		results.reverse()
		return results

	def get_unix_time_from_date(self, year, month, day):
		# year (e.g. 2005), month (e.g. 11), day (e.g. 25)
		# returns time in seconds since epoch for the second that starts that date
		# gimme unixtime from year month day:
		d = datetime.date(year, month, day)
		local_time = d.timetuple() # time tuple (compat with time.localtime())
		# we have time since epoch baby :)
		start_of_day = int(time.mktime(local_time))
		return start_of_day

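	# Worked example (local time assumed): get_unix_time_from_date(2005, 11, 25)
	# is the epoch second of local midnight on 2005-11-25, the same value
	# time.mktime() gives for that date's timetuple; adding 86400 - 1 yields the
	# last second of that day, as done in get_conversation_for_date() below.
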
	def get_conversation_for_date(self, jid, year, month, day, account):
		'''returns contact_name, time, kind, show, message
		for each row, as a list of tuples;
		returns an empty list if we found nothing to meet our demands'''
		jid_id = self.get_jid_id(jid)
		where_sql = self._build_contact_where(account, jid)

		start_of_day = self.get_unix_time_from_date(year, month, day)
		seconds_in_a_day = 86400 # 60 * 60 * 24
		last_second_of_day = start_of_day + seconds_in_a_day - 1

		self.cur.execute('''
			SELECT contact_name, time, kind, show, message FROM logs
			WHERE (%s)
			AND time BETWEEN %d AND %d
			ORDER BY time
			''' % (where_sql, start_of_day, last_second_of_day))

		results = self.cur.fetchall()
		return results

	def get_search_results_for_query(self, jid, query, account):
		'''returns contact_name, time, kind, show, message
		for each row, as a list of tuples;
		returns an empty list if we found nothing to meet our demands'''
		jid_id = self.get_jid_id(jid)

		if False: #query.startswith('SELECT '): # it's an SQL query (FIXME)
			try:
				self.cur.execute(query)
			except sqlite.OperationalError, e:
				results = [('', '', '', '', str(e))]
				return results

		else: # user just typed something, we search in the message column
			where_sql = self._build_contact_where(account, jid)
			like_sql = '%' + query.replace("'", "''") + '%'
			self.cur.execute('''
				SELECT contact_name, time, kind, show, message, subject FROM logs
				WHERE (%s) AND message LIKE '%s'
				ORDER BY time
				''' % (where_sql, like_sql))

		results = self.cur.fetchall()
		return results

	def get_days_with_logs(self, jid, year, month, max_day, account):
		'''returns the list of days that have logs (not status messages)'''
		jid_id = self.get_jid_id(jid)
		days_with_logs = []
		where_sql = self._build_contact_where(account, jid)

		# First select all dates of the month with the logs we want
		start_of_month = self.get_unix_time_from_date(year, month, 1)
		seconds_in_a_day = 86400 # 60 * 60 * 24
		last_second_of_month = start_of_month + (seconds_in_a_day * max_day) - 1

		self.cur.execute('''
			SELECT time FROM logs
			WHERE (%s)
			AND time BETWEEN %d AND %d
			AND kind NOT IN (%d, %d)
			ORDER BY time
			''' % (where_sql, start_of_month, last_second_of_month,
			constants.KIND_STATUS, constants.KIND_GCSTATUS))
		result = self.cur.fetchall()

		# Copy all interesting times into a temporary table
		self.cur.execute('CREATE TEMPORARY TABLE temp_table(time INTEGER)')
		for line in result:
			self.cur.execute('''
				INSERT INTO temp_table (time) VALUES (%d)
				''' % (line[0]))

		# then search in this small temp table for each day
		for day in xrange(1, max_day + 1): # count from 1 to 28 or to 30 or to 31
			start_of_day = self.get_unix_time_from_date(year, month, day)
			last_second_of_day = start_of_day + seconds_in_a_day - 1

			# just ask for one row to see if we have something for this date
			self.cur.execute('''
				SELECT time FROM temp_table
				WHERE time BETWEEN %d AND %d
				LIMIT 1
				''' % (start_of_day, last_second_of_day))
			result = self.cur.fetchone()
			if result:
				days_with_logs[0:0] = [day]

		# Delete temporary table
		self.cur.execute('DROP TABLE temp_table')
		result = self.cur.fetchone()
		return days_with_logs

	def get_last_date_that_has_logs(self, jid, account = None, is_room = False):
		'''returns the last time (in seconds since EPOCH) for which
		we had logs (excluding statuses)'''
		where_sql = ''
		if not is_room:
			where_sql = self._build_contact_where(account, jid)
		else:
			jid_id = self.get_jid_id(jid, 'ROOM')
			where_sql = 'jid_id = %s' % jid_id
		self.cur.execute('''
			SELECT MAX(time) FROM logs
			WHERE (%s)
			AND kind NOT IN (%d, %d)
			''' % (where_sql, constants.KIND_STATUS, constants.KIND_GCSTATUS))

		results = self.cur.fetchone()
		if results is not None:
			result = results[0]
		else:
			result = None
		return result

	def get_room_last_message_time(self, jid):
		'''quickly returns the last time (in seconds since EPOCH) for which
		we had logs for that room, from the rooms_last_message_time table'''
		jid_id = self.get_jid_id(jid, 'ROOM')
		where_sql = 'jid_id = %s' % jid_id
		self.cur.execute('''
			SELECT time FROM rooms_last_message_time
			WHERE (%s)
			''' % (where_sql))

		results = self.cur.fetchone()
		if results is not None:
			result = results[0]
		else:
			result = None
		return result

	def set_room_last_message_time(self, jid, time):
		'''set the last time (in seconds since EPOCH) for which
		we had logs for that room in the rooms_last_message_time table'''
		jid_id = self.get_jid_id(jid, 'ROOM')
		# jid_id is unique in this table, create or update:
		sql = 'REPLACE INTO rooms_last_message_time VALUES (%d, %d)' % \
			(jid_id, time)
		self.simple_commit(sql)

	def _build_contact_where(self, account, jid):
		'''build the where clause for a jid, including metacontacts
		jid(s) if any'''
		where_sql = ''
		# will return empty list if jid is not associated with
		# any metacontacts
		family = gajim.contacts.get_metacontacts_family(account, jid)
		if family:
			for user in family:
				jid_id = self.get_jid_id(user['jid'])
				where_sql += 'jid_id = %s' % jid_id
				if user != family[-1]:
					where_sql += ' OR '
		else: # if jid was not associated with metacontacts
			jid_id = self.get_jid_id(jid)
			where_sql = 'jid_id = %s' % jid_id
		return where_sql

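	# Illustrative result (the jid_ids are hypothetical): for a metacontact
	# family of two JIDs mapped to jid_ids 3 and 7, _build_contact_where()
	# returns 'jid_id = 3 OR jid_id = 7'; for a plain contact it returns e.g.
	# 'jid_id = 3'. The callers above wrap this fragment in parentheses inside
	# their WHERE clauses.
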
	def save_transport_type(self, jid, type_):
		'''save the type of the transport in DB'''
		type_id = self.convert_human_transport_type_to_db_api_values(type_)
		if type_id is None:
			# unknown type
			return
		self.cur.execute(
			'SELECT type from transports_cache WHERE transport = "%s"' % jid)
		results = self.cur.fetchall()
		if results:
			result = results[0][0]
			if result == type_id:
				return
			sql = 'UPDATE transports_cache SET type = %d WHERE transport = "%s"' %\
				(type_id, jid)
			self.simple_commit(sql)
			return
		sql = 'INSERT INTO transports_cache VALUES ("%s", %d)' % (jid, type_id)
		self.simple_commit(sql)

	def get_transports_type(self):
		'''return the type of each transport in the DB'''
		self.cur.execute(
			'SELECT * from transports_cache')
		results = self.cur.fetchall()
		if not results:
			return {}
		answer = {}
		for result in results:
			answer[result[0]] = self.convert_api_values_to_human_transport_type(
				result[1])
		return answer

	# A longer note here:
	# The database contains a blob field. Pysqlite seems to need special care
	# for such fields.
	# When storing, we need to convert the string into a buffer object (1).
	# When retrieving, we need to convert it back to a string to decompress it. (2)
	# GzipFile needs a file-like object; StringIO emulates a file for plain strings.
	def iter_caps_data(self):
		'''Iterate over caps cache data stored in the database.
		The iterator values are tuples of (node, ver, ext, identities, features):
		identities being a set of (category, type, name) tuples and
		features a set of feature namespaces.'''

		# get data from table
		# the data field contains a binary object (gzipped data); this is a hack
		# to get that data without trying to convert it to unicode
		#tmp, self.con.text_factory = self.con.text_factory, str
		try:
			self.cur.execute('''SELECT node, ver, ext, data FROM caps_cache;''')
		except sqlite.OperationalError:
			# might happen when there's no caps_cache table yet
			# -- there's no data to read anyway then
			#self.con.text_factory = tmp
			return
		#self.con.text_factory = tmp

		for node, ver, ext, data in self.cur:
			# for each row: unpack the data field
			# (format: (category, type, name, category, type, name, ...,
			# 'FEAT', feature1, feature2, ...) joined with '\0')
			# NOTE: if there's a need to do more gzip, put that into a function
			data = GzipFile(fileobj=StringIO(str(data))).read().split('\0')
			i = 0
			identities = set()
			features = set()
			while i < (len(data) - 2) and data[i] != 'FEAT':
				category = data[i]
				type = data[i+1]
				name = data[i+2]
				identities.add((category, type, name))
				i += 3
			i += 1
			while i < len(data):
				features.add(data[i])
				i += 1
			if not ext: ext = None # to make '' a None

			# yield the row
			yield node, ver, ext, identities, features

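	# Illustrative layout of the gzipped 'data' blob (the values are
	# hypothetical):
	#   'client\0pc\0Gajim\0FEAT\0http://jabber.org/protocol/muc\0jabber:iq:version'
	# i.e. identity triples first, then the literal 'FEAT' marker, then the
	# feature namespaces, all joined with '\0' and gzip-compressed (see
	# add_caps_entry() below).
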
	def add_caps_entry(self, node, ver, ext, identities, features):
		data = []
		for identity in identities:
			# there is no FEAT category
			if identity[0] == 'FEAT':
				return
			if len(identity) < 3 or not identity[2]:
				# identity without a name
				data.extend((identity[0], identity[1], ''))
			else:
				data.extend(identity)
		data.append('FEAT')
		data.extend(features)
		data = '\0'.join(data)
		# if there's a need to do more gzip, put that into a function
		string = StringIO()
		gzip = GzipFile(fileobj=string, mode='w')
		gzip.write(data)
		gzip.close()
		data = string.getvalue()
		self.cur.execute('''
			INSERT INTO caps_cache ( node, ver, ext, data )
			VALUES (?, ?, ?, ?);
			''', (node, ver, ext, buffer(data))) # (1) -- note above
		try:
			self.con.commit()
		except sqlite.OperationalError, e:
			print >> sys.stderr, str(e)