diff --git a/src/common/caps.py b/src/common/caps.py
index bb52bdcf4f77ccfc899148c3f9ec803ce8985e6e..a4c3ffb29101dcede8ab9cf90e8a4858cedc78c8 100644
--- a/src/common/caps.py
+++ b/src/common/caps.py
@@ -40,6 +40,10 @@ from common.xmpp import NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION, NS_CHATSTATES
 # Features where we cannot safely assume that the other side supports them
 FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION]
 
+# Query entry status codes
+NEW = 0      # not yet queried
+QUERIED = 1  # disco query sent, answer not yet received
+CACHED = 2   # got the answer
 
 ################################################################################
 ### Public API of this module
@@ -58,7 +62,7 @@ def client_supports(client_caps, requested_feature):
 	supported_features = cache_item.features
 	if requested_feature in supported_features:
 		return True
-	elif supported_features == [] and cache_item.queried in (0, 1):
+	elif supported_features == [] and cache_item.status in (NEW, QUERIED):
 		# assume the feature is supported if we don't yet know what the
 		# client is capable of
 		return requested_feature not in FEATURE_BLACKLIST
@@ -168,8 +172,8 @@ class AbstractClientCaps(object):
 	def _is_hash_valid(self, identities, features, dataforms):
 		''' To be implemented by subclassess '''
 		raise NotImplementedError()		
-	
-	
+
+
 class ClientCaps(AbstractClientCaps):
 	''' The current XEP-115 implementation '''
 	
@@ -188,7 +192,7 @@ class ClientCaps(AbstractClientCaps):
 		computed_hash = compute_caps_hash(identities, features,
 				dataforms=dataforms, hash_method=self._hash_method)
 		return computed_hash == self._hash	
-	
+
 	
 class OldClientCaps(AbstractClientCaps):
 	''' Old XEP-115 implementation. Kept around for backward compatibility. '''
@@ -204,7 +208,7 @@ class OldClientCaps(AbstractClientCaps):
 		
 	def _is_hash_valid(self, identities, features, dataforms):
 		return True	
-		
+
 		
 class NullClientCaps(AbstractClientCaps):
 	'''
@@ -220,7 +224,7 @@ class NullClientCaps(AbstractClientCaps):
 	def _lookup_in_cache(self, caps_cache):
 		# lookup something which does not exist to get a new CacheItem created
 		cache_item = caps_cache[('dummy', '')]
-		assert cache_item.queried == 0
+		assert cache_item.status != CACHED
 		return cache_item
 	
 	def _discover(self, connection, jid):
@@ -248,7 +252,7 @@ class CapsCache(object):
 			#   another object, and we will have plenty of identical long
 			#   strings. therefore we can cache them
 			__names = {}
-			
+
 			def __init__(self, hash_method, hash_, logger):
 				# cached into db
 				self.hash_method = hash_method
@@ -257,12 +261,8 @@ class CapsCache(object):
 				self._identities = []
 				self._logger = logger
 
-				# not cached into db:
-				# have we sent the query?
-				# 0 == not queried
-				# 1 == queried
-				# 2 == got the answer
-				self.queried = 0
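+				# not cached in the db: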
+				self.status = NEW
+				self._recently_seen = False
 
 			def _get_features(self):
 				return self._features
@@ -304,19 +304,28 @@ class CapsCache(object):
 				self.features = features
 				self._logger.add_caps_entry(self.hash_method, self.hash,
 					identities, features)
+				self.status = CACHED
+
+			def update_last_seen(self):
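+				# hit the db at most once per cache item per session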
+				if not self._recently_seen:
+					self._recently_seen = True
+					self._logger.update_caps_time(self.hash_method, self.hash)
 
 		self.__CacheItem = CacheItem
 		self.logger = logger
 
 	def initialize_from_db(self):
-		# get data from logger...
-		if self.logger is not None:
-			for hash_method, hash_, identities, features in \
-			self.logger.iter_caps_data():
-				x = self[(hash_method, hash_)]
-				x.identities = identities
-				x.features = features
-				x.queried = 2
+		self._remove_outdated_caps()
+		for hash_method, hash_, identities, features in \
+		self.logger.iter_caps_data():
+			x = self[(hash_method, hash_)]
+			x.identities = identities
+			x.features = features
+			x.status = CACHED
+
+	def _remove_outdated_caps(self):
+		'''Remove outdated entries from the db'''
+		self.logger.clean_caps_table()
 
 	def __getitem__(self, caps):
 		if caps in self.__cache:
@@ -336,13 +345,14 @@ class CapsCache(object):
 		lookup_cache_item = client_caps.get_cache_lookup_strategy()
 		q = lookup_cache_item(self)	
 		
-		if q.queried == 0:
+		if q.status == NEW:
 			# do query for bare node+hash pair
 			# this will create proper object
-			q.queried = 1
+			q.status = QUERIED
 			discover = client_caps.get_discover_strategy()
 			discover(connection, jid)
-
+		else:
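+			# already queried or cached; just refresh the last-seen timestamp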
+			q.update_last_seen()
 
 ################################################################################
 ### Caps network coding
@@ -391,7 +401,7 @@ class ConnectionCaps(object):
 				client_caps = OldClientCaps(caps_hash, node)
 			else:
 				client_caps = ClientCaps(caps_hash, node, hash_method)
-		
+
 		capscache.query_client_of_jid_if_unknown(self, jid, client_caps)
 		contact.client_caps = client_caps
 
@@ -409,7 +419,7 @@ class ConnectionCaps(object):
 		lookup = contact.client_caps.get_cache_lookup_strategy()
 		cache_item = lookup(capscache)	
 					
-		if cache_item.queried == 2:
+		if cache_item.status == CACHED:
 			return
 		else:
 			validate = contact.client_caps.get_hash_validation_strategy()
diff --git a/src/common/check_paths.py b/src/common/check_paths.py
index 0a9dcb83e5ba5825dc8fe82ae4fc91d957d569f7..c4864fc7813147bdd8b14e49b6615e3ec8b06ba2 100644
--- a/src/common/check_paths.py
+++ b/src/common/check_paths.py
@@ -91,7 +91,8 @@ def create_log_db():
 		CREATE TABLE caps_cache (
 			hash_method TEXT,
 			hash TEXT,
-			data BLOB);
+			data BLOB,
+			last_seen INTEGER);
 
 		CREATE TABLE rooms_last_message_time(
 			jid_id INTEGER PRIMARY KEY UNIQUE,
diff --git a/src/common/defs.py b/src/common/defs.py
index 2886838a1d4fb7f9ef079676afab711bac03e4f8..6640c3b32d26b63834c49b817a36803d37b4a475 100644
--- a/src/common/defs.py
+++ b/src/common/defs.py
@@ -27,7 +27,7 @@ docdir = '../'
 datadir = '../'
 localedir = '../po'
 
-version = '0.12.5.8-dev'
+version = '0.13.0.1-dev'
 
 import sys, os.path
 for base in ('.', 'common'):
diff --git a/src/common/logger.py b/src/common/logger.py
index d4af560f15f5bcf098c7fd17f7f4b20827d6e777..c962827437b9e1bc0ecc542aa6c5858f99b37427 100644
--- a/src/common/logger.py
+++ b/src/common/logger.py
@@ -838,14 +838,27 @@ class Logger:
 		gzip.close()
 		data = string.getvalue()
 		self.cur.execute('''
-			INSERT INTO caps_cache ( hash_method, hash, data )
-			VALUES (?, ?, ?);
-			''', (hash_method, hash_, buffer(data))) # (1) -- note above
+			INSERT INTO caps_cache ( hash_method, hash, data, last_seen )
+			VALUES (?, ?, ?, ?);
+			''', (hash_method, hash_, buffer(data), int(time.time())))
+		# (1) -- note above
 		try:
 			self.con.commit()
 		except sqlite.OperationalError, e:
 			print >> sys.stderr, str(e)
 
+	def update_caps_time(self, method, hash_):
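+		'''Update the last_seen timestamp of the given caps_cache entry'''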
+		sql = '''UPDATE caps_cache SET last_seen = %d
+			WHERE hash_method = "%s" and hash = "%s"''' % \
+			(int(time.time()), method, hash_)
+		self.simple_commit(sql)
+
+	def clean_caps_table(self):
+		'''Remove caps entries that have not been seen for three months'''
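+		# cutoff is roughly three months (3 * 30 days) ago, in seconds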
+		sql = '''DELETE FROM caps_cache WHERE last_seen < %d''' % \
+			int(time.time() - 3*30*24*3600)
+		self.simple_commit(sql)
+
 	def replace_roster(self, account_name, roster_version, roster):
 		''' Replace current roster in DB by a new one.
 		accout_name is the name of the account to change
diff --git a/src/common/optparser.py b/src/common/optparser.py
index 783e332a575fe0ea6c851a2b2cfa142054403116..f9f738d5eda41c8808805b8db82fe9a36bb6344a 100644
--- a/src/common/optparser.py
+++ b/src/common/optparser.py
@@ -29,6 +29,7 @@
 import os
 import locale
 import re
+from time import time
 from common import gajim
 from common import helpers
 from common import caps
@@ -218,6 +219,8 @@ class OptionsParser:
 			self.update_config_to_01257()
 		if old < [0, 12, 5, 8] and new >= [0, 12, 5, 8]:
 			self.update_config_to_01258()
+		if old < [0, 13, 0, 1] and new >= [0, 13, 0, 1]:
+			self.update_config_to_01301()
 
 		gajim.logger.init_vars()
 		gajim.config.set('version', new_version)
@@ -817,4 +820,23 @@ class OptionsParser:
 			'proxy.jabber.ru', 'proxy.jabbim.cz'])
 		gajim.config.set('version', '0.12.5.8')
 
+	def update_config_to_01301(self):
+		back = os.getcwd()
+		os.chdir(logger.LOG_DB_FOLDER)
+		con = sqlite.connect(logger.LOG_DB_FILE)
+		os.chdir(back)
+		cur = con.cursor()
+		try:
+			cur.executescript(
+				'''
+				ALTER TABLE caps_cache
+				ADD last_seen INTEGER default %d;
+				''' % int(time())
+			)
+			con.commit()
+		except sqlite.OperationalError:
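+			# most likely the last_seen column already exists; safe to ignore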
+			pass
+		con.close()
+		gajim.config.set('version', '0.13.0.1')
+
 # vim: se ts=3:
diff --git a/test/unit/test_caps.py b/test/unit/test_caps.py
index 160996b6a506289304ba4d8404e7efe3f375f2ee..a143f225c19eeb80a912a263210169a8fafb1120 100644
--- a/test/unit/test_caps.py
+++ b/test/unit/test_caps.py
@@ -66,9 +66,9 @@ class TestCapsCache(CommonCapsTest):
 		
 	def test_initialize_from_db(self):
 		''' Read cached dummy data from the db '''
-		self.assertEqual(self.cc[self.client_caps].queried, 0)
+		self.assertEqual(self.cc[self.client_caps].status, caps.NEW)
 		self.cc.initialize_from_db()
-		self.assertEqual(self.cc[self.client_caps].queried, 2)
+		self.assertEqual(self.cc[self.client_caps].status, caps.CACHED)
 
 	def test_preload_triggering_query(self):
 		''' Make sure that preload issues a disco '''