# Copyright (C) 2013 Ipsilon project Contributors, for license see COPYING

import cherrypy
import datetime
from ipsilon.util.log import Log
from sqlalchemy import create_engine
from sqlalchemy import MetaData, Table, Column, Text
from sqlalchemy.pool import QueuePool, SingletonThreadPool
from sqlalchemy.schema import (PrimaryKeyConstraint, Index, AddConstraint,
                               CreateIndex)
from sqlalchemy.sql import select, and_
import ConfigParser
import os
import uuid
import logging


CURRENT_SCHEMA_VERSION = 2
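# The two table definitions below describe simple three-column key/value
# layouts: an OPTIONS_TABLE row holds (name, option, value), e.g.
# ('admin', 'language', 'en'), and a UNIQUE_DATA_TABLE row holds
# (uuid, name, value), with the uuid grouping all rows that make up one
# logical entry (the example values are purely illustrative).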
OPTIONS_TABLE = {'columns': ['name', 'option', 'value'],
                 'primary_key': ('name', 'option'),
                 'indexes': [('name',)]
                 }
UNIQUE_DATA_TABLE = {'columns': ['uuid', 'name', 'value'],
                     'primary_key': ('uuid', 'name'),
                     'indexes': [('uuid',)]
                     }


class DatabaseError(Exception):
    pass


class BaseStore(Log):
    # Some helper functions used for upgrades
    def add_constraint(self, table):
        raise NotImplementedError()

    def add_index(self, index):
        raise NotImplementedError()


class SqlStore(BaseStore):
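    """SQL-backed store.

    Instances are cached per connection string by get_connection(), so every
    consumer of the same database shares a single engine and connection pool.
    """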
    __instances = {}

    @classmethod
    def get_connection(cls, name):
        if name not in cls.__instances:
            if cherrypy.config.get('db.conn.log', False):
                logging.debug('SqlStore new: %s', name)
            cls.__instances[name] = SqlStore(name)
        return cls.__instances[name]

    def __init__(self, name):
        self.db_conn_log = cherrypy.config.get('db.conn.log', False)
        self.debug('SqlStore init: %s' % name)
        self.name = name
        engine_name = name
        if '://' not in engine_name:
            engine_name = 'sqlite:///' + engine_name
        # This pool size is per configured database. The minimum needed,
        #  determined by binary search, is 23. We're using 25 so we have a
        #  bit more headroom, and the overflow should make sure things don't
        #  break when we suddenly need more.
        pool_args = {'poolclass': QueuePool,
                     'pool_size': 25,
                     'max_overflow': 50}
        if engine_name.startswith('sqlite://'):
            # It's not possible to share connections for SQLite between
            #  threads, so let's use the SingletonThreadPool for them
            pool_args = {'poolclass': SingletonThreadPool}
        self._dbengine = create_engine(engine_name, **pool_args)
        self.is_readonly = False

    def add_constraint(self, constraint):
        if self._dbengine.dialect.name != 'sqlite':
            # It is impossible to add constraints to a pre-existing table for
            #  SQLite
            # source: http://www.sqlite.org/omitted.html
            create_constraint = AddConstraint(constraint, bind=self._dbengine)
            create_constraint.execute()

    def add_index(self, index):
        add_index = CreateIndex(index, bind=self._dbengine)
        add_index.execute()

    def debug(self, fact):
        if self.db_conn_log:
            super(SqlStore, self).debug(fact)

    def engine(self):
        return self._dbengine

    def connection(self):
        self.debug('SqlStore connect: %s' % self.name)
        conn = self._dbengine.connect()

        def cleanup_connection():
            self.debug('SqlStore cleanup: %s' % self.name)
            conn.close()
        cherrypy.request.hooks.attach('on_end_request', cleanup_connection)
        return conn


class SqlQuery(Log):
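    """Runs statements against a single table of a SqlStore.

    A SQLAlchemy Table object is built from table_def (columns, primary key
    and indexes); the table itself is only created once create() is called.
    With trans=True all statements run in one transaction which the caller
    must commit() or rollback().
    """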

    def __init__(self, db_obj, table, table_def, trans=True):
        self._db = db_obj
        self._con = self._db.connection()
        self._trans = self._con.begin() if trans else None
        self._table = self._get_table(table, table_def)

    def _get_table(self, name, table_def):
        if isinstance(table_def, list):
            table_def = {'columns': table_def,
                         'indexes': [],
                         'primary_key': None}
        table_creation = []
        for col_name in table_def['columns']:
            table_creation.append(Column(col_name, Text()))
        if table_def['primary_key']:
            table_creation.append(PrimaryKeyConstraint(
                *table_def['primary_key']))
        for index in table_def['indexes']:
            idx_name = 'idx_%s_%s' % (name, '_'.join(index))
            table_creation.append(Index(idx_name, *index))
        table = Table(name, MetaData(self._db.engine()), *table_creation)
        return table

    def _where(self, kvfilter):
        where = None
        if kvfilter is not None:
            for k in kvfilter:
                w = self._table.columns[k] == kvfilter[k]
                if where is None:
                    where = w
                else:
                    where = where & w
        return where

    def _columns(self, columns=None):
        cols = None
        if columns is not None:
            cols = []
            for c in columns:
                cols.append(self._table.columns[c])
        else:
            cols = self._table.columns
        return cols

    def rollback(self):
        self._trans.rollback()

    def commit(self):
        self._trans.commit()

    def create(self):
        self._table.create(checkfirst=True)

    def drop(self):
        self._table.drop(checkfirst=True)

    def select(self, kvfilter=None, columns=None):
        return self._con.execute(select(self._columns(columns),
                                        self._where(kvfilter)))

    def insert(self, values):
        self._con.execute(self._table.insert(values))

    def update(self, values, kvfilter):
        self._con.execute(self._table.update(self._where(kvfilter), values))

    def delete(self, kvfilter):
        self._con.execute(self._table.delete(self._where(kvfilter)))


class FileStore(BaseStore):
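    """Read-only store backed by an INI-style configuration file.

    get_config() re-reads the file whenever its modification time changes.
    """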

    def __init__(self, name):
        self._filename = name
        self.is_readonly = True
        self._timestamp = None
        self._config = None

    def get_config(self):
        try:
            stat = os.stat(self._filename)
        except OSError, e:
            self.error("Unable to check config file %s: [%s]" % (
                self._filename, e))
            self._config = None
            raise
        timestamp = stat.st_mtime
        if self._config is None or timestamp > self._timestamp:
            self._config = ConfigParser.RawConfigParser()
            self._config.optionxform = str
            self._config.read(self._filename)
            # Remember the mtime so an unchanged file is not re-parsed
            self._timestamp = timestamp
        return self._config

    def add_constraint(self, table):
        raise NotImplementedError()

    def add_index(self, index):
        raise NotImplementedError()


class FileQuery(Log):
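    """Read-only query interface over one section of a FileStore.

    A two-column table maps directly to "option = value" entries; with three
    columns the first two are stored as a single option named
    "<first> <second>".
    """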

    def __init__(self, fstore, table, table_def, trans=True):
        # We don't need indexes in a FileQuery, so drop that info
        if isinstance(table_def, dict):
            columns = table_def['columns']
        else:
            columns = table_def
        self._fstore = fstore
        self._config = fstore.get_config()
        self._section = table
        if len(columns) > 3 or columns[-1] != 'value':
            raise ValueError('Unsupported configuration format')
        self._columns = columns

    def rollback(self):
        return

    def commit(self):
        return

    def create(self):
        raise NotImplementedError

    def drop(self):
        raise NotImplementedError

    def select(self, kvfilter=None, columns=None):
        # kvfilter defaults to None but is indexed below, so normalize it
        if kvfilter is None:
            kvfilter = {}
        if self._section not in self._config.sections():
            return []

        opts = self._config.options(self._section)

        prefix = None
        prefix_ = ''
        if self._columns[0] in kvfilter:
            prefix = kvfilter[self._columns[0]]
            prefix_ = prefix + ' '

        name = None
        if len(self._columns) == 3 and self._columns[1] in kvfilter:
            name = kvfilter[self._columns[1]]

        value = None
        if self._columns[-1] in kvfilter:
            value = kvfilter[self._columns[-1]]

        res = []
        for o in opts:
            if len(self._columns) == 3:
                # 3 cols
                if prefix and not o.startswith(prefix_):
                    continue

                col1, col2 = o.split(' ', 1)
                if name and col2 != name:
                    continue

                col3 = self._config.get(self._section, o)
                if value and col3 != value:
                    continue

                r = [col1, col2, col3]
            else:
                # 2 cols
                if prefix and o != prefix:
                    continue
                r = [o, self._config.get(self._section, o)]

            if columns:
                s = []
                for c in columns:
                    s.append(r[self._columns.index(c)])
                res.append(s)
            else:
                res.append(r)

        self.debug('SELECT(%s, %s, %s) -> %s' % (self._section,
                                                 repr(kvfilter),
                                                 repr(columns),
                                                 repr(res)))
        return res

    def insert(self, values):
        raise NotImplementedError

    def update(self, values, kvfilter):
        raise NotImplementedError

    def delete(self, kvfilter):
        raise NotImplementedError


class Store(Log):
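    """Base class for the datastores.

    Picks the backend (FileStore for configfile:// names, SqlStore for
    database URLs), checks the schema version, and provides the generic
    option and unique-data helpers used by the concrete stores below.
    """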
    _is_upgrade = False

    def __init__(self, config_name=None, database_url=None):
        if config_name is None and database_url is None:
            raise ValueError('config_name or database_url must be provided')
        if config_name:
            if config_name not in cherrypy.config:
                raise NameError('Unknown database %s' % config_name)
            name = cherrypy.config[config_name]
        else:
            name = database_url
        if name.startswith('configfile://'):
            _, filename = name.split('://')
            self._db = FileStore(filename)
            self._query = FileQuery
        else:
            self._db = SqlStore.get_connection(name)
            self._query = SqlQuery

        if not self._is_upgrade:
            self._check_database()

    def _code_schema_version(self):
        # This function makes it possible for separate plugins to have
        #  different schema versions. We default to the global schema
        #  version.
        return CURRENT_SCHEMA_VERSION

    def _get_schema_version(self):
        # We are storing multiple versions: one per class
        # That way, we can support plugins with differing schema versions from
        #  the main codebase, and even in the same database.
        q = self._query(self._db, 'dbinfo', OPTIONS_TABLE, trans=False)
        q.create()
        cls_name = self.__class__.__name__
        current_version = self.load_options('dbinfo').get('%s_schema'
                                                          % cls_name, {})
        if 'version' in current_version:
            return int(current_version['version'])
        else:
            # Also try the old key name.
            # "scheme" was a typo, but we need to retain that now for compat
            fallback_version = self.load_options('dbinfo').get('scheme',
                                                               {})
            if 'version' in fallback_version:
                # Explanation for this is in def upgrade_database(self)
                return -1
            else:
                return None

    def _check_database(self):
        if self.is_readonly:
            # If the database is readonly, we cannot do anything to the
            #  schema. Let's just return, and assume people checked the
            #  upgrade notes
            return

        current_version = self._get_schema_version()
        if current_version is None:
            self.error('Database initialization required! ' +
                       'Please run ipsilon-upgrade-database')
            raise DatabaseError('Database initialization required for %s' %
                                self.__class__.__name__)
        if current_version != self._code_schema_version():
            self.error('Database upgrade required! ' +
                       'Please run ipsilon-upgrade-database')
            raise DatabaseError('Database upgrade required for %s' %
                                self.__class__.__name__)

    def _store_new_schema_version(self, new_version):
        cls_name = self.__class__.__name__
        self.save_options('dbinfo', '%s_schema' % cls_name,
                          {'version': new_version})

    def _initialize_schema(self):
        raise NotImplementedError()

    def _upgrade_schema(self, old_version):
        # Datastores need to figure out what to do with bigger old_versions
        #  themselves.
        # They might implement downgrading if that's feasible, or just throw
        #  NotImplementedError
        # Should return the new schema version
        raise NotImplementedError()

    def upgrade_database(self):
        # Do whatever is needed to get schema to current version
        old_schema_version = self._get_schema_version()
        if old_schema_version is None:
            # Just initialize a new schema
            self._initialize_schema()
            self._store_new_schema_version(self._code_schema_version())
        elif old_schema_version == -1:
            # This is a special case from 1.0: we only created tables the
            # first time they were actually used, but the upgrade code
            # assumes that the tables exist. So let's fix this.
            self._initialize_schema()
            # The old version was schema version 1
            self._store_new_schema_version(1)
            self.upgrade_database()
        elif old_schema_version != self._code_schema_version():
            # Upgrade from old_schema_version to code_schema_version
            self.debug('Upgrading from schema version %i' % old_schema_version)
            new_version = self._upgrade_schema(old_schema_version)
            if not new_version:
                error = ('Schema upgrade error: %s did not provide a '
                         'new schema version number!'
                         % self.__class__.__name__)
                self.error(error)
                raise Exception(error)
            self._store_new_schema_version(new_version)
            # Check if we are now up-to-date
            self.upgrade_database()

    @property
    def is_readonly(self):
        return self._db.is_readonly

    def _row_to_dict_tree(self, data, row):
        name = row[0]
        if len(row) > 2:
            if name not in data:
                data[name] = dict()
            d2 = data[name]
            self._row_to_dict_tree(d2, row[1:])
        else:
            value = row[1]
            if name in data:
                if isinstance(data[name], list):
                    data[name].append(value)
                else:
                    v = data[name]
                    data[name] = [v, value]
            else:
                data[name] = value

    def _rows_to_dict_tree(self, rows):
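        """Collapse result rows into a nested dictionary.

        For example (values purely illustrative), the rows
        [('u1', 'user', 'admin'), ('u1', 'origin', 'x')] become
        {'u1': {'user': 'admin', 'origin': 'x'}}; repeated keys collect
        their values into a list.
        """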
        data = dict()
        for r in rows:
            self._row_to_dict_tree(data, r)
        return data

    def _load_data(self, table, columns, kvfilter=None):
        rows = []
        try:
            q = self._query(self._db, table, columns, trans=False)
            rows = q.select(kvfilter)
        except Exception, e:  # pylint: disable=broad-except
            self.error("Failed to load data for table %s: [%s]" % (table, e))
        return self._rows_to_dict_tree(rows)

    def load_config(self):
        table = 'config'
        columns = ['name', 'value']
        return self._load_data(table, columns)

    def load_options(self, table, name=None):
        kvfilter = dict()
        if name:
            kvfilter['name'] = name
        options = self._load_data(table, OPTIONS_TABLE, kvfilter)
        if name and name in options:
            return options[name]
        return options

    def save_options(self, table, name, options):
        curvals = dict()
        q = None
        try:
            q = self._query(self._db, table, OPTIONS_TABLE)
            rows = q.select({'name': name}, ['option', 'value'])
            for row in rows:
                curvals[row[0]] = row[1]

            for opt in options:
                if opt in curvals:
                    q.update({'value': options[opt]},
                             {'name': name, 'option': opt})
                else:
                    q.insert((name, opt, options[opt]))

            q.commit()
        except Exception, e:  # pylint: disable=broad-except
            if q:
                q.rollback()
            self.error("Failed to save options: [%s]" % e)
            raise

    def delete_options(self, table, name, options=None):
        kvfilter = {'name': name}
        q = None
        try:
            q = self._query(self._db, table, OPTIONS_TABLE)
            if options is None:
                q.delete(kvfilter)
            else:
                for opt in options:
                    kvfilter['option'] = opt
                    q.delete(kvfilter)
            q.commit()
        except Exception, e:  # pylint: disable=broad-except
            if q:
                q.rollback()
            self.error("Failed to delete from %s: [%s]" % (table, e))
            raise

    def new_unique_data(self, table, data):
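        """Store data (a dict of name/value pairs) under a freshly generated
        uuid4 identifier and return that identifier."""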
        newid = str(uuid.uuid4())
        q = None
        try:
            q = self._query(self._db, table, UNIQUE_DATA_TABLE)
            for name in data:
                q.insert((newid, name, data[name]))
            q.commit()
        except Exception, e:  # pylint: disable=broad-except
            if q:
                q.rollback()
            self.error("Failed to store %s data: [%s]" % (table, e))
            raise
        return newid

    def get_unique_data(self, table, uuidval=None, name=None, value=None):
        kvfilter = dict()
        if uuidval:
            kvfilter['uuid'] = uuidval
        if name:
            kvfilter['name'] = name
        if value:
            kvfilter['value'] = value
        return self._load_data(table, UNIQUE_DATA_TABLE, kvfilter)

    def save_unique_data(self, table, data):
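        """Insert or update the rows for each uuid in data.

        data maps uuids to dicts of name/value pairs; setting a value to
        None deletes that row instead.
        """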
        q = None
        try:
            q = self._query(self._db, table, UNIQUE_DATA_TABLE)
            for uid in data:
                curvals = dict()
                rows = q.select({'uuid': uid}, ['name', 'value'])
                for r in rows:
                    curvals[r[0]] = r[1]

                datum = data[uid]
                for name in datum:
                    if name in curvals:
                        if datum[name] is None:
                            q.delete({'uuid': uid, 'name': name})
                        else:
                            q.update({'value': datum[name]},
                                     {'uuid': uid, 'name': name})
                    else:
                        if datum[name] is not None:
                            q.insert((uid, name, datum[name]))

            q.commit()
        except Exception, e:  # pylint: disable=broad-except
            if q:
                q.rollback()
            self.error("Failed to store data in %s: [%s]" % (table, e))
            raise

    def del_unique_data(self, table, uuidval):
        kvfilter = {'uuid': uuidval}
        try:
            q = self._query(self._db, table, UNIQUE_DATA_TABLE, trans=False)
            q.delete(kvfilter)
        except Exception, e:  # pylint: disable=broad-except
            self.error("Failed to delete data from %s: [%s]" % (table, e))

    def _reset_data(self, table):
        q = None
        try:
            q = self._query(self._db, table, UNIQUE_DATA_TABLE)
            q.drop()
            q.create()
            q.commit()
        except Exception, e:  # pylint: disable=broad-except
            if q:
                q.rollback()
            self.error("Failed to erase all data from %s: [%s]" % (table, e))


class AdminStore(Store):
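    """Store backed by the database configured as 'admin.config.db'."""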

    def __init__(self):
        super(AdminStore, self).__init__('admin.config.db')

    def get_data(self, plugin, idval=None, name=None, value=None):
        return self.get_unique_data(plugin+"_data", idval, name, value)

    def save_data(self, plugin, data):
        return self.save_unique_data(plugin+"_data", data)

    def new_datum(self, plugin, datum):
        table = plugin+"_data"
        return self.new_unique_data(table, datum)

    def del_datum(self, plugin, idval):
        table = plugin+"_data"
        return self.del_unique_data(table, idval)

    def wipe_data(self, plugin):
        table = plugin+"_data"
        self._reset_data(table)

    def _initialize_schema(self):
        for table in ['config',
                      'info_config',
                      'login_config',
                      'provider_config']:
            q = self._query(self._db, table, OPTIONS_TABLE, trans=False)
            q.create()

    def _upgrade_schema(self, old_version):
        if old_version == 1:
            # In schema version 2, we added indexes and primary keys
            for table in ['config',
                          'info_config',
                          'login_config',
                          'provider_config']:
                # pylint: disable=protected-access
                table = self._query(self._db, table, OPTIONS_TABLE,
                                    trans=False)._table
                self._db.add_constraint(table.primary_key)
                for index in table.indexes:
                    self._db.add_index(index)
            return 2
        else:
            raise NotImplementedError()

    def create_plugin_data_table(self, plugin_name):
        if not self.is_readonly:
            table = plugin_name+'_data'
            q = self._query(self._db, table, UNIQUE_DATA_TABLE,
                            trans=False)
            q.create()


class UserStore(Store):
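    """Store for per-user preferences and per-plugin user data, backed by
    the database configured as 'user.prefs.db'."""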

    def __init__(self, path=None):
        super(UserStore, self).__init__('user.prefs.db')

    def save_user_preferences(self, user, options):
        self.save_options('users', user, options)

    def load_user_preferences(self, user):
        return self.load_options('users', user)

    def save_plugin_data(self, plugin, user, options):
        self.save_options(plugin+"_data", user, options)

    def load_plugin_data(self, plugin, user):
        return self.load_options(plugin+"_data", user)

    def _initialize_schema(self):
        q = self._query(self._db, 'users', OPTIONS_TABLE, trans=False)
        q.create()

    def _upgrade_schema(self, old_version):
        if old_version == 1:
            # In schema version 2, we added indexes and primary keys
            # pylint: disable=protected-access
            table = self._query(self._db, 'users', OPTIONS_TABLE,
                                trans=False)._table
            self._db.add_constraint(table.primary_key)
            for index in table.indexes:
                self._db.add_index(index)
            return 2
        else:
            raise NotImplementedError()


class TranStore(Store):
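    """Store for in-progress transaction data, backed by the database
    configured as 'transactions.db'."""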

    def __init__(self, path=None):
        super(TranStore, self).__init__('transactions.db')

    def _initialize_schema(self):
        q = self._query(self._db, 'transactions', UNIQUE_DATA_TABLE,
                        trans=False)
        q.create()

    def _upgrade_schema(self, old_version):
        if old_version == 1:
            # In schema version 2, we added indexes and primary keys
            # pylint: disable=protected-access
            table = self._query(self._db, 'transactions', UNIQUE_DATA_TABLE,
                                trans=False)._table
            self._db.add_constraint(table.primary_key)
            for index in table.indexes:
                self._db.add_index(index)
            return 2
        else:
            raise NotImplementedError()


class SAML2SessionStore(Store):
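    """Store for active SAML 2 sessions, backed by the database given in
    database_url and keyed by a generated uuid."""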

    def __init__(self, database_url):
        super(SAML2SessionStore, self).__init__(database_url=database_url)
        self.table = 'saml2_sessions'
        # pylint: disable=protected-access
        table = SqlQuery(self._db, self.table, UNIQUE_DATA_TABLE)._table
        table.create(checkfirst=True)

    def _get_unique_id_from_column(self, name, value):
        """
        A query on a single column returns only that column, not the full
        entry. Use this method to get the uuidval, which can then be used
        to fetch the entire entry.

        Returns None or the uuid of the first value found.
        """
        data = self.get_unique_data(self.table, name=name, value=value)
        count = len(data)
        if count == 0:
            return None
        elif count != 1:
            raise ValueError("Multiple entries returned")
        return data.keys()[0]

    def remove_expired_sessions(self):
        # pylint: disable=protected-access
        table = SqlQuery(self._db, self.table, UNIQUE_DATA_TABLE)._table
        sel = select([table.columns.uuid]). \
            where(and_(table.c.name == 'expiration_time',
                       table.c.value <= datetime.datetime.now()))
        # pylint: disable=no-value-for-parameter
        d = table.delete().where(table.c.uuid.in_(sel))
        d.execute()

    def get_data(self, idval=None, name=None, value=None):
        return self.get_unique_data(self.table, idval, name, value)

    def new_session(self, datum):
        if 'supported_logout_mechs' in datum:
            datum['supported_logout_mechs'] = ','.join(
                datum['supported_logout_mechs']
            )
        return self.new_unique_data(self.table, datum)

    def get_session(self, session_id=None, request_id=None):
        if session_id:
            uuidval = self._get_unique_id_from_column('session_id', session_id)
        elif request_id:
            uuidval = self._get_unique_id_from_column('request_id', request_id)
        else:
            raise ValueError("Unable to find session")
        if not uuidval:
            return None, None
        data = self.get_unique_data(self.table, uuidval=uuidval)
        return uuidval, data[uuidval]

    def get_user_sessions(self, user):
        """
        Return a list of all sessions for a given user.
        """
        rows = self.get_unique_data(self.table, name='user', value=user)

        # We have a list of sessions for this user, now get the details
        logged_in = []
        for r in rows:
            data = self.get_unique_data(self.table, uuidval=r)
            data[r]['supported_logout_mechs'] = data[r].get(
                'supported_logout_mechs', '').split(',')
            logged_in.append(data)

        return logged_in

    def update_session(self, datum):
        self.save_unique_data(self.table, datum)

    def remove_session(self, uuidval):
        self.del_unique_data(self.table, uuidval)

    def wipe_data(self):
        self._reset_data(self.table)

    def _initialize_schema(self):
        q = self._query(self._db, self.table, UNIQUE_DATA_TABLE,
                        trans=False)
        q.create()

    def _upgrade_schema(self, old_version):
        if old_version == 1:
            # In schema version 2, we added indexes and primary keys
            # pylint: disable=protected-access
            table = self._query(self._db, self.table, UNIQUE_DATA_TABLE,
                                trans=False)._table
            self._db.add_constraint(table.primary_key)
            for index in table.indexes:
                self._db.add_index(index)
            return 2
        else:
            raise NotImplementedError()