"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite
- MySQL
- Postgres
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it does not exist)
>>> db = DAL(('mysql://a:b@localhost/x', 'sqlite://storage.sqlite'), folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person', Field('name', 'string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name == 'James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name, groupby=person.name, limitby=(0, 100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()
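
### commit the transaction or discard pending changes (illustrative; both
### methods exist on the DAL object)
>>> db.commit()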

Supported field types:
id string text boolean integer double decimal password upload blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'sqlite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:none@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)

For more info:
help(DAL)
help(Field)
"""


__all__ = ['DAL', 'Field']
MAXCHARLENGTH = 512
INFINITY = 32768

import re
import sys
import locale
import os
import types
import cPickle
import datetime
import threading
import time
import cStringIO
import csv
import copy
import socket
import logging
import copy_reg
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob

CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)


try:
    from utils import web2py_uuid
except ImportError:
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())  # fallback outside web2py (elided line, restored; assumed)

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import validators
    have_validators = True
except ImportError:
    have_validators = False
logger = logging.getLogger("web2py.dal")
DEFAULT = lambda: 0

sql_locker = threading.RLock()
thread = threading.local()

regex_dbname = re.compile('^(\w+)(\:\w+)*')
table_field = re.compile('^[\w_]+\.[\w_]+$')
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
regex_cleanup_fn = re.compile('[\'"\s;]+')
string_unpack = re.compile('(?<!\|)\|(?!\|)')
regex_python_keywords = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
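
# Illustrative matches (assumed, for orientation):
#   regex_dbname.match('mysql://a:b@localhost/x').group(1)  -> 'mysql'
#   table_field.match('person.name') matches; table_field.match('name') does not
#   string_unpack splits a bar-encoded list on a single '|' but not on an
#   escaped '||'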


drivers = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel
    drivers.append('google')
except ImportError:
    pass

if not 'google' in drivers:

    try:
        from pysqlite2 import dbapi2 as sqlite3
        drivers.append('pysqlite2')
    except ImportError:
        try:
            from sqlite3 import dbapi2 as sqlite3
            drivers.append('SQLite3')
        except ImportError:
            logger.debug('no sqlite3 or pysqlite2.dbapi2 driver')

    try:
        import contrib.pymysql as pymysql
        drivers.append('pymysql')
    except ImportError:
        logger.debug('no pymysql driver')

    try:
        import psycopg2
        drivers.append('PostgreSQL')
    except ImportError:
        logger.debug('no psycopg2 driver')

    try:
        import cx_Oracle
        drivers.append('Oracle')
    except ImportError:
        logger.debug('no cx_Oracle driver')

    try:
        import pyodbc
        drivers.append('MSSQL/DB2')
    except ImportError:
        logger.debug('no MSSQL/DB2 driver')

    try:
        import kinterbasdb
        drivers.append('Interbase')
    except ImportError:
        logger.debug('no kinterbasdb driver')

    try:
        import firebirdsql
        drivers.append('Firebird')
    except ImportError:
        logger.debug('no Firebird driver')

    try:
        import informixdb
        drivers.append('Informix')
        logger.warning('Informix support is experimental')
    except ImportError:
        logger.debug('no informixdb driver')

    try:
        import sapdb
        drivers.append('SAPDB')
        logger.warning('SAPDB support is experimental')
    except ImportError:
        logger.debug('no sapdb driver')

    try:
        import cubriddb
        drivers.append('Cubrid')
        logger.warning('Cubrid support is experimental')
    except ImportError:
        logger.debug('no cubriddb driver')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        from org.sqlite import JDBC
        drivers.append('zxJDBC')
        logger.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        logger.debug('no zxJDBC driver')
        is_jdbc = False

    try:
        import ingresdbi
        drivers.append('Ingres')
    except ImportError:
        logger.debug('no Ingres driver')

    try:
        import couchdb
        drivers.append('CouchDB')
    except ImportError:
        logger.debug('no couchdb driver')

    try:
        import pymongo
        drivers.append('mongoDB')
    except:
        logger.debug('no mongoDB driver')


if 'google' in drivers:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):  # class statement restored (elided in the excerpt)
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(**kwargs)  # dropped stray self argument
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def make_value_from_datastore(self, value):  # signature assumed from the GAE Property API
            if value:
                return decimal.Decimal(value).quantize(self.round)
            else:
                return None

        def validate(self, value):  # signature assumed from the GAE Property API
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string." % self.name)


class ConnectionPool(object):  # class statement restored (elided in the excerpt)

    pools = {}

    @staticmethod
    def set_folder(folder):  # def and one-line body elided in the excerpt; restored (assumed)
        thread.folder = folder

    @staticmethod
    def close_all_instances(action):  # signature assumed
        """ to close cleanly databases in a multithreaded environment """
        if not hasattr(thread, 'instances'):
            return
        while thread.instances:
            instance = thread.instances.pop()
            getattr(instance, action)()
            # recycle the raw connection into the pool when there is room
            really = True
            if instance.pool_size:
                sql_locker.acquire()
                pool = ConnectionPool.pools[instance.uri]
                if len(pool) < instance.pool_size:
                    pool.append(instance.connection)
                    really = False
                sql_locker.release()
            if really:
                getattr(instance, 'close')()
        return
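
    # Illustrative flow (assumed): an adapter created with pool_size > 0, e.g.
    #   db = DAL('postgres://user:pass@localhost/mydb', pool_size=5)
    # registers itself in thread.instances; at the end of a request,
    # close_all_instances('commit') commits each instance and parks its raw
    # connection in ConnectionPool.pools[uri] (up to pool_size entries), so
    # the next request reuses it instead of opening a new connection.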

    def find_or_make_work_folder(self):  # def line restored; name taken from the call sites below
        """ this actually does not make the folder. it has to be there """
        if hasattr(thread, 'folder'):
            self.folder = thread.folder
        else:
            self.folder = thread.folder = ''

        # making the folder is disabled: it must already exist
        if False and self.folder and not os.path.exists(self.folder):
            os.mkdir(self.folder)

    # pool_connection() is elided in this excerpt


class BaseAdapter(ConnectionPool):  # class statement restored (elided in the excerpt)

    maxcharlength = INFINITY
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def file_exists(self, filename):  # def line restored (elided in the excerpt)
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return os.path.exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        pass  # body elided in this excerpt; presumably locks via portalocker when available

    # file_close() and file_delete() are likewise elided

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self, tablename):  # def line restored (assumed)
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):  # def line restored (assumed); note it reuses the sequence naming
        return '%s_sequence' % tablename

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        for field in table:
            sortable += 1
            k = field.name
            if isinstance(field.type, SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                referenced = field.type[10:].strip()
                constraint_name = self.constraint_name(tablename, field.name)
                if hasattr(table, '_primarykey'):
                    rtablename, rfieldname = referenced.split('.')
                    rtable = table._db[rtablename]
                    rfield = rtable[rfieldname]
                    # the referenced field must be a primary key or unique
                    if rfieldname in rtable._primarykey or rfield.unique:
                        ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)
                        # multicolumn primary keys become a table-level FK
                        if not rfield.unique and len(rtable._primarykey) > 1:
                            if rtablename not in TFK:
                                TFK[rtablename] = {}
                            TFK[rtablename][rfieldname] = field.name
                        else:
                            ftype = ftype + \
                                self.types['reference FK'] % dict(
                                    constraint_name=constraint_name,
                                    table_name=tablename,
                                    field_name=field.name,
                                    foreign_key='%s (%s)' % (rtablename, rfieldname),
                                    on_delete_action=field.ondelete)
                else:
                    id_fieldname = referenced in table._db and table._db[referenced]._id.name or 'id'
                    ftype = self.types[field.type[:9]] \
                        % dict(table_name=tablename,
                               field_name=field.name,
                               constraint_name=constraint_name,
                               foreign_key=referenced + ('(%s)' % id_fieldname),
                               on_delete_action=field.ondelete)
            elif field.type.startswith('list:reference'):
                ftype = self.types[field.type[:14]]
            elif field.type.startswith('decimal'):
                precision, scale = map(int, field.type[8:-1].split(','))
                ftype = self.types[field.type[:7]] % \
                    dict(precision=precision, scale=scale)
            elif not field.type in self.types:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self.types[field.type] \
                    % dict(length=field.length)
            if not field.type.startswith('id') and not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'

            # add to the list of fields
            sql_fields[field.name] = dict(sortable=sortable,
                                          type=str(field.type),
                                          sql=ftype)

            if isinstance(field.default, (str, int, float)):
                # sql_fields (used to trigger migrations) and sql_fields_aux
                # (used for CREATE TABLE) differ here so that a changed
                # default value alone does not trigger a migration
                not_null = self.NOT_NULL(field.default, field.type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field.name] = dict(sql=ftype)

            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = table._db[rtablename]._primarykey
            fkeys = [rfields[k] for k in pkeys]
            fields = fields + ',\n    ' + \
                self.types['reference TFK'] % \
                dict(table_name=tablename,
                     field_name=', '.join(fkeys),
                     foreign_table=rtablename,
                     foreign_key=', '.join(pkeys),
                     on_delete_action=field.ondelete)

        if hasattr(table, '_primarykey'):
            query = '''CREATE TABLE %s(\n    %s,\n    %s) %s''' % \
                (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///'):
            path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')].decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = os.path.join(dbpath, migrate)
        else:
            table._dbt = os.path.join(dbpath, '%s_%s.table' \
                % (table._db._uri_hash, tablename))
        if table._dbt:
            table._loggername = os.path.join(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query, table)
                table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % table._dbt
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
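
    # Illustrative migration flow (assumed):
    #   db.define_table('thing', Field('name'))
    # on first run writes CREATE TABLE and pickles sql_fields into
    # <uri_hash>_thing.table; re-running with an extra Field('qty', 'integer')
    # unpickles the old metadata, detects the difference, and hands off to
    # migrate_table() below to emit the matching ALTER TABLE statements.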

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        tablename = table._tablename
        def fix(item):
            k, v = item
            if not isinstance(v, dict):
                v = dict(type='unknown', sql=v)
            return k.lower(), v
        # make sure all field names are lower case to avoid conflicts
        sql_fields = dict(map(fix, sql_fields.items()))
        sql_fields_old = dict(map(fix, sql_fields_old.items()))
        sql_fields_aux = dict(map(fix, sql_fields_aux.items()))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        if self.dbengine == 'mssql':
            new_add = '; ALTER TABLE %s ADD ' % tablename
        else:
            new_add = ', ADD '

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                query = ['ALTER TABLE %s ADD %s %s;' % \
                         (tablename, key,
                          sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine == 'sqlite':
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not isinstance(table[key].type, SQLCustomType) \
                    and not (table[key].type.startswith('reference') and \
                        sql_fields[key]['sql'].startswith('INT,') and \
                        sql_fields_old[key]['sql'].startswith('INT NOT NULL,')):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                table._db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # caveat: mysql, oracle and firebird do not allow
                        # multiple ALTER TABLE in one transaction, so partial
                        # results must be committed
                        if table._db._adapter.commit_on_alter_table:
                            table._db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            cPickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql', 'oracle', 'firebird')):
            table._db.commit()
            tfile = self.file_open(table._dbt, 'w')
            cPickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)

    def EXTRACT(self, first, what):  # def line restored (assumed from the body)
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def NOT_NULL(self, default, field_type):  # def line restored (assumed from the body)
        return 'NOT NULL DEFAULT %s' % self.represent(default, field_type)

    def SUBSTRING(self, field, parameters):  # def line restored (assumed from the body)
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):  # def line restored (assumed from the body)
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        table._db.commit()
        del table._db[table._tablename]
        del table._db.tables[table._db.tables.index(table._tablename)]
        table._db._update_referenced_by(table._tablename)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')

    def insert(self, table, fields):
        query = self._insert(table, fields)
        try:
            self.execute(query)
        except Exception, e:
            if isinstance(e, self.integrity_error_class()):
                return None
            raise e
        if hasattr(table, '_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                         if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id, int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    # NOTE: several one-line operator bodies below are elided in this excerpt;
    # they are restored from the obvious pattern confirmed by the adapter
    # overrides further down (assumed).

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):  # def line restored (assumed)
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        return '(%s IN (%s))' % (self.expand(first),
                                 ','.join(self.expand(item, first.type) for item in second))

    def LIKE(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first), self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first), self.expand(second + '%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first), self.expand('%' + second, 'string'))

    def CONTAINS(self, first, second):  # def line restored (assumed)
        if first.type in ('string', 'text'):
            key = '%' + str(second).replace('%', '%%') + '%'
        elif first.type.startswith('list:'):
            key = '%|' + str(second).replace('|', '||').replace('%', '%%') + '|%'
        return '(%s LIKE %s)' % (self.expand(first), self.expand(key, 'string'))
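
    # Illustrative (assumed) expansion: for a 'list:string' field 'tags',
    # CONTAINS(tags, 'ab') builds a pattern over the bar-encoded storage
    # ('|a|ab|c|'), e.g. (person.tags LIKE '%|ab|%'), so only whole list
    # items match, never substrings of an item.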

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first), self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first), self.expand(second, first.type))

    def LT(self, first, second=None):
        return '(%s < %s)' % (self.expand(first), self.expand(second, first.type))

    def LE(self, first, second=None):
        return '(%s <= %s)' % (self.expand(first), self.expand(second, first.type))

    def GT(self, first, second=None):
        return '(%s > %s)' % (self.expand(first), self.expand(second, first.type))

    def GE(self, first, second=None):
        return '(%s >= %s)' % (self.expand(first), self.expand(second, first.type))

    def ADD(self, first, second):
        return '(%s + %s)' % (self.expand(first), self.expand(second, first.type))

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first), self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first), self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first), self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first), self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return str(expression)
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            else:
                return expression.op()
        elif field_type:
            return self.represent(expression, field_type)
        elif isinstance(expression, (list, tuple)):
            return ','.join([self.represent(item, field_type) for item in expression])
        else:
            return str(expression)

    def alias(self, table, alias):
        """
        given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = table
        return other
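
    # Illustrative use (assumed), normally reached via Table.with_alias:
    #   father = db.person.with_alias('father')
    #   rows = db(db.person.id > 0).select(
    #       db.person.name, father.name,
    #       left=father.on(father.id == db.person.father_id))
    # the aliased copy renders as 'person AS father' in the generated SQL.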

    def _truncate(self, table, mode=''):  # def line restored (assumed)
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode=''):  # def line restored (assumed)
        # when there is no migration log, write into a do-nothing dummy
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        else:
            class Logfile(object):
                def write(self, value):
                    pass
                def close(self):
                    pass
            logfile = Logfile()

        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                logfile.write(query + '\n')
                self.execute(query)
            table._db.commit()
            logfile.write('success!\n')
        finally:
            logfile.close()

    def _update(self, tablename, query, fields):
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name, self.expand(value, field.type)) \
                          for (field, value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        self.execute(sql)
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)
        ## special code to simulate CASCADE on sqlite:
        ## collect the ids that are about to be deleted
        db = self.db
        table = db[tablename]
        if self.dbengine == 'sqlite' and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]

        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None

        ## then delete the referencing rows with ondelete=='CASCADE'
        if self.dbengine == 'sqlite' and counter:
            for tablename, fieldname in table._referenced_by:
                f = db[tablename][fieldname]
                if f.type == 'reference ' + table._tablename and f.ondelete == 'CASCADE':
                    db(db[tablename][fieldname].belongs(deleted)).delete()

        return counter

    def get_table(self, query):  # def line restored (elided in the excerpt)
        tablenames = self.tables(query)
        if len(tablenames) == 1:
            return tablenames[0]
        elif len(tablenames) < 1:
            raise RuntimeError, "No table selected"
        else:
            raise RuntimeError, "Too many tables selected"

    def _select(self, query, fields, attributes):
        for key in set(attributes.keys()) - set(('orderby', 'groupby', 'limitby',
                                                 'required', 'cache', 'left',
                                                 'distinct', 'having', 'join')):
            raise SyntaxError, 'invalid select attribute: %s' % key

        new_fields = []
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        fields = new_fields
        tablenames = self.tables(query)
        query = self.filter_tenant(query, tablenames)
        if not fields:
            for table in tablenames:
                for field in self.db[table]:
                    fields.append(field)
        else:
            for field in fields:
                if isinstance(field, basestring) and table_field.match(field):
                    tn, fn = field.split('.')
                    field = self.db[tn][fn]
                for tablename in self.tables(field):
                    if not tablename in tablenames:
                        tablenames.append(tablename)
        if len(tablenames) < 1:
            raise SyntaxError, 'Set: no tables selected'
        sql_f = ', '.join(map(self.expand, fields))
        self._colnames = [c.strip() for c in sql_f.split(', ')]
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_o = ''
        sql_s = ''
        left = attributes.get('left', False)
        inner_join = attributes.get('join', False)
        distinct = attributes.get('distinct', False)
        groupby = attributes.get('groupby', False)
        orderby = attributes.get('orderby', False)
        having = attributes.get('having', False)
        limitby = attributes.get('limitby', False)
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join if not isinstance(t, Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            ijoinont = [t.first._tablename for t in ijoinon]
            iexcluded = [t for t in tablenames if not t in ijoint + ijoinont]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]
            # merge the tables referenced by the left-join ON clauses
            tables_to_merge = {}
            [tables_to_merge.update(dict.fromkeys(self.tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames if not t in important_tablenames]
        if inner_join and not left:
            sql_t = ', '.join(iexcluded)
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
        elif not inner_join and left:
            sql_t = ', '.join([t for t in excluded + tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        elif inner_join and left:
            sql_t = ','.join([t for t in excluded + tables_to_merge.keys() if t in iexcluded])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        else:
            sql_t = ', '.join(tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if limitby:
            if not orderby and tablenames:
                sql_o += ' ORDER BY %s' % ', '.join(
                    ['%s.%s' % (t, x) for t in tablenames for x in (
                        (hasattr(self.db[t], '_primarykey') and self.db[t]._primarykey)
                        or [self.db[t]._id.name])])
        return self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
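
    # Illustrative output (assumed): with limitby=(10, 30) the suffix becomes
    #   ' LIMIT 20 OFFSET 10'
    # i.e. limitby is a (start, stop) slice over result rows, not (offset, count).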

    def select(self, query, fields, attributes):
        """
        Always returns a Rows object, even if it may be empty
        """
        def response(sql):
            self.execute(sql)
            return self.cursor.fetchall()
        sql = self._select(query, fields, attributes)
        if attributes.get('cache', None):
            (cache_model, time_expire) = attributes['cache']
            del attributes['cache']
            key = self.uri + '/' + sql
            key = (len(key) <= 200) and key or hashlib.md5(key).hexdigest()
            rows = cache_model(key, lambda: response(sql), time_expire)
        else:
            rows = response(sql)
        if isinstance(rows, tuple):
            rows = list(rows)
        limitby = attributes.get('limitby', None) or (0,)
        rows = self.rowslice(rows, limitby[0], None)
        return self.parse(rows, self._colnames)
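
    # Illustrative use of the 'cache' attribute from application code (assumed):
    #   rows = db(db.person.id > 0).select(cache=(cache.ram, 3600))
    # caches the raw driver rows for an hour, keyed on URI + generated SQL
    # (hashed when longer than 200 characters).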

    def _count(self, query, distinct=None):
        tablenames = self.tables(query)
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(tablenames)
        if distinct:
            if isinstance(distinct, (list, tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s' % (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s' % (sql_t, sql_w)

    def count(self, query, distinct=None):
        self.execute(self._count(query, distinct))
        return self.cursor.fetchone()[0]

    # tables() and the distributed-transaction helpers are elided in this excerpt

    def commit(self):  # def line restored (assumed)
        return self.connection.commit()

    def rollback(self):  # elided; restored to match commit/close (assumed)
        return self.connection.rollback()

    def close(self):  # def line restored (assumed)
        return self.connection.close()

    def constraint_name(self, table, fieldname):  # def line restored (assumed)
        return '%s_%s__constraint' % (table, fieldname)

    def create_sequence_and_triggers(self, query, table, **args):  # elided; restored (assumed)
        self.execute(query)

    def log_execute(self, *a, **b):  # def line restored (assumed)
        self.db._lastsql = a[0]
        t0 = time.time()
        ret = self.cursor.execute(*a, **b)
        self.db._timings.append((a[0], time.time() - t0))
        return ret

    def execute(self, *a, **b):  # elided; restored as a thin wrapper (assumed)
        return self.log_execute(*a, **b)

    def represent(self, obj, fieldtype):  # def line restored (assumed)
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            return str(obj)
        if fieldtype.startswith('list:'):
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        if isinstance(obj, (list, tuple)):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj, fieldtype)
        if r != None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in ['F', '0']:
                return "'T'"
            else:
                return "'F'"
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(int(obj))
        if fieldtype.startswith('decimal'):
            return str(obj)
        elif fieldtype.startswith('reference'):
            if fieldtype.find('.') > 0:
                return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(int(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', ' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + ' 00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        if not isinstance(obj, str):
            obj = str(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return "'%s'" % obj.replace("'", "''")
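
    # Illustrative outputs (assumed):
    #   represent(None, 'string')                    -> NULL
    #   represent("O'Brien", 'string')               -> 'O''Brien'
    #   represent(datetime.date(2011, 1, 1), 'date') -> '2011-01-01'
    #   represent([1, 2, 3], 'list:integer')         -> '|1|2|3|'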

    def represent_exceptions(self, obj, fieldtype):  # def line and default body restored (assumed)
        return None

    def lastrowid(self, table):  # def line and default body restored (assumed)
        return None

    def rowslice(self, rows, minimum=0, maximum=None):
        """ by default this function does nothing, overload when db does not do slicing """
        return rows

    def parse(self, rows, colnames, blob_decode=True):
        db = self.db
        virtualtables = []
        new_rows = []
        for (i, row) in enumerate(rows):
            new_row = Row()
            for j, colname in enumerate(colnames):
                value = row[j]
                if not table_field.match(colnames[j]):
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colnames[j]] = value
                    select_as_parser = re.compile("\s+AS\s+(\S+)")
                    new_column_name = select_as_parser.search(colnames[j])
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row, column_name[0], value)
                    continue
                (tablename, fieldname) = colname.split('.')
                table = db[tablename]
                field = table[fieldname]
                field_type = field.type
                if field.type != 'blob' and isinstance(value, str):
                    try:
                        value = value.decode(db._db_codec)
                    except Exception:
                        pass
                if isinstance(value, unicode):
                    value = value.encode('utf-8')
                if not tablename in new_row:
                    colset = new_row[tablename] = Row()
                    virtualtables.append(tablename)
                else:
                    colset = new_row[tablename]

                if isinstance(field_type, SQLCustomType):
                    colset[fieldname] = field_type.decoder(value)
                elif not isinstance(field_type, str) or value is None:
                    colset[fieldname] = value
                elif isinstance(field_type, str) and \
                        field_type.startswith('reference'):
                    referee = field_type[10:].strip()
                    if not '.' in referee:
                        colset[fieldname] = rid = Reference(value)
                        (rid._table, rid._record) = (db[referee], None)
                    else:
                        colset[fieldname] = value
                elif field_type == 'boolean':
                    if value == True or str(value)[:1].lower() == 't':
                        colset[fieldname] = True
                    else:
                        colset[fieldname] = False
                elif field_type == 'date' \
                        and (not isinstance(value, datetime.date) \
                                 or isinstance(value, datetime.datetime)):
                    (y, m, d) = map(int, str(value)[:10].strip().split('-'))
                    colset[fieldname] = datetime.date(y, m, d)
                elif field_type == 'time' \
                        and not isinstance(value, datetime.time):
                    time_items = map(int, str(value)[:8].strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.time(h, mi, s)
                elif field_type == 'datetime' \
                        and not isinstance(value, datetime.datetime):
                    (y, m, d) = map(int, str(value)[:10].strip().split('-'))
                    time_items = map(int, str(value)[11:19].strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.datetime(y, m, d, h, mi, s)
                elif field_type == 'blob' and blob_decode:
                    colset[fieldname] = base64.b64decode(str(value))
                elif field_type.startswith('decimal'):
                    decimals = int(field_type[8:-1].split(',')[-1])
                    if self.dbengine == 'sqlite':
                        value = ('%.' + str(decimals) + 'f') % value
                    if not isinstance(value, decimal.Decimal):
                        value = decimal.Decimal(str(value))
                    colset[fieldname] = value
                elif field_type.startswith('list:integer'):
                    if not self.dbengine == 'google:datastore':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:reference'):
                    if not self.dbengine == 'google:datastore':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:string'):
                    if not self.dbengine == 'google:datastore':
                        colset[fieldname] = bar_decode_string(value)
                    else:
                        colset[fieldname] = value
                else:
                    colset[fieldname] = value
                if field_type == 'id':
                    id = colset[field.name]
                    colset.update_record = lambda _=(colset, table, id), **a: update_record(_, a)
                    colset.delete_record = lambda t=table, i=id: t._db(t._id == i).delete()
                    for (referee_table, referee_name) in \
                            table._referenced_by:
                        s = db[referee_table][referee_name]
                        if not referee_table in colset:
                            # for backward compatibility
                            colset[referee_table] = Set(db, s == id)
                    colset['id'] = id
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
        for tablename in virtualtables:
            for item in db[tablename].virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{tablename: item})
                except KeyError:
                    # to avoid breaking virtualfields on partial select
                    pass
        return rowsobj

    def filter_tenant(self, query, tablenames):  # def line restored; name taken from the call site above
        fieldname = self.db._request_tenant
        for tablename in tablenames:
            table = self.db[tablename]
            if fieldname in table:
                default = table[fieldname].default
                if default != None:
                    query = query & (table[fieldname] == default)
        return query


class SQLiteAdapter(BaseAdapter):  # class statement restored (elided in the excerpt)

    def EXTRACT(self, field, what):  # def line restored (assumed)
        return "web2py_extract('%s',%s)" % (what, self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):  # def line restored (assumed from the create_function call below)
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            (i, j) = table[lookup]
            return int(s[i:j])
        except:
            return None
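
    # Illustrative behavior (assumed):
    #   SQLiteAdapter.web2py_extract('year', '2011-03-15 10:30:00')   -> 2011
    #   SQLiteAdapter.web2py_extract('minute', '2011-03-15 10:30:00') -> 30
    # it is registered below as a SQL function named web2py_extract, since
    # SQLite has no native EXTRACT.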

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'), dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        def connect(dbpath=dbpath, driver_args=driver_args):
            return sqlite3.Connection(dbpath, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)

    def _truncate(self, table, mode=''):  # def line restored (assumed)
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]

    def lastrowid(self, table):  # elided; restored (assumed)
        return self.cursor.lastrowid


class JDBCSQLiteAdapter(SQLiteAdapter):  # class statement restored (elided in the excerpt)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'), dbpath)
        def connect(dbpath=dbpath, driver_args=driver_args):
            return zxJDBC.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:' + dbpath), **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)


class MySQLAdapter(BaseAdapter):  # class statement restored (elided in the excerpt)

    driver = globals().get('pymysql', None)
    maxcharlength = 255
    commit_on_alter_table = True
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        }

    def SUBSTRING(self, field, parameters):  # def line restored (assumed)
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self, table, mode):
        # temporarily disables FK checks, otherwise mysql refuses the drop
        return ['SET FOREIGN_KEY_CHECKS=0;', 'DROP TABLE %s;' % table, 'SET FOREIGN_KEY_CHECKS=1;']

    def concat_add(self, table):  # method name assumed; the def line is elided in the excerpt
        return '; ALTER TABLE %s ADD ' % table

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        driver_args.update(dict(db=db,
                                user=credential_decoder(user),
                                passwd=credential_decoder(password),
                                host=host,
                                port=port,
                                charset=charset))
        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self, table):  # def line restored (assumed)
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])


class PostgreSQLAdapter(BaseAdapter):

    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def sequence_name(self, table):
        return '%s_id_Seq' % table

    def prepare(self, key):
        self.execute("PREPARE TRANSACTION '%s';" % key)

    def commit_prepared(self, key):
        self.execute("COMMIT PREPARED '%s';" % key)

    def rollback_prepared(self, key):
        self.execute("ROLLBACK PREPARED '%s';" % key)

    def create_sequence_and_triggers(self, query, table, **args):
        # PostgreSQL implements id columns via SERIAL, so the CREATE TABLE
        # query is self-contained and no extra sequence/trigger is needed
        self.execute(query)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                % (db, user, host, port, password)
        def connect(msg=msg, driver_args=driver_args):
            return psycopg2.connect(msg, **driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.cursor = self.connection.cursor()
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.execute("SET standard_conforming_strings=on;")

    def lastrowid(self, table):
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

    def LIKE(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first), self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first), self.expand(second + '%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first), self.expand('%' + second, 'string'))

    def CONTAINS(self, first, second):
        if first.type in ('string', 'text'):
            key = '%' + str(second).replace('%', '%%') + '%'
        elif first.type.startswith('list:'):
            key = '%|' + str(second).replace('|', '||').replace('%', '%%') + '|%'
        return '(%s ILIKE %s)' % (self.expand(first), self.expand(key, 'string'))
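
    # Unlike the base adapter, these emit ILIKE, so like/startswith/endswith/
    # contains are case-insensitive on PostgreSQL; illustrative:
    #   db(db.person.name.startswith('j')).select()   # also matches 'James'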


class JDBCPostgreSQLAdapter(PostgreSQLAdapter):

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connect(msg=msg, driver_args=driver_args):
            return zxJDBC.connect(*msg, **driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.cursor = self.connection.cursor()
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")


class OracleAdapter(BaseAdapter):  # class statement restored (elided in the excerpt)

    commit_on_alter_table = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def sequence_name(self, tablename):  # def line restored (assumed)
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):  # def line restored (assumed)
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):  # def line restored (assumed)
        return 'LEFT OUTER JOIN'

    def RANDOM(self):  # def line restored (assumed)
        return 'dbms_random.value'

    def NOT_NULL(self, default, field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default, field_type)

    def _drop(self, table, mode):
        pass  # body elided in this excerpt

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
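
    # Oracle (pre-12c) has no LIMIT/OFFSET, so pagination is emulated by
    # nesting the query under ROWNUM. Illustrative shape for limitby=(10, 30):
    #   SELECT ... FROM (SELECT w_tmp.*, ROWNUM w_row FROM (...) w_tmp
    #                    WHERE ROWNUM <= 30) ... WHERE w_row > 10;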

    def represent_exceptions(self, obj, fieldtype):  # def line restored (assumed; overrides the base hook)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', ' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + ' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        if not 'threaded' in driver_args:
            driver_args['threaded'] = True
        def connect(uri=uri, driver_args=driver_args):
            return cx_Oracle.connect(uri, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

    def execute(self, command):  # def line restored (assumed)
        # rewrite inline :CLOB(...) literals into positional bind arguments
        args = []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        return self.log_execute(command[:-1], args)


class MSSQLAdapter(BaseAdapter):  # class statement restored (elided in the excerpt)

    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def EXTRACT(self, field, what):  # def line restored (assumed)
        return "DATEPART(%s,%s)" % (what, self.expand(field))

    def LEFT_JOIN(self):  # def line restored (assumed)
        return 'LEFT OUTER JOIN'

    def SUBSTRING(self, field, parameters):  # def line restored (assumed)
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):  # def line restored (assumed)
        return 'PRIMARY KEY CLUSTERED (%s)' % key

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
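
    # T-SQL (before OFFSET/FETCH existed) cannot skip rows in the query, so
    # only 'TOP lmax' goes into the SQL; the lmin offset is applied
    # client-side by rowslice() below. Illustrative: limitby=(10, 30)
    # -> 'SELECT TOP 30 ...' then rows[10:].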

    def represent_exceptions(self, obj, fieldtype):  # def line restored (assumed; overrides the base hook)
        if fieldtype == 'boolean':
            if obj and not str(obj)[0].upper() == 'F':
                return '1'
            else:
                return '0'
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}, fake_connect=False):
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()

        uri = uri.split('://')[1]
        if '@' not in uri:
            try:
                m = re.compile('^(?P<dsn>.+)$').match(uri)
                if not m:
                    raise SyntaxError, \
                        'Parsing uri string(%s) has no result' % self.uri
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError, 'DSN required'
            except SyntaxError, e:
                logger.error('NdGpatch error')
                raise e
            cnxn = 'DSN=%s' % dsn
        else:
            m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(uri)
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in DAL: %s" % uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '1433'
            # parse the optional name=value pairs after the '?' into the
            # ODBC connection string, keeping a default DRIVER
            argsdict = {'DRIVER': '{SQL Server}'}
            urlargs = m.group('urlargs') or ''
            argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
            for argmatch in argpattern.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connect(cnxn=cnxn, driver_args=driver_args):
            return pyodbc.connect(cnxn, **driver_args)
        if not fake_connect:
            self.pool_connection(connect)
            self.cursor = self.connection.cursor()

    def lastrowid(self, table):  # def line restored (assumed)
        self.execute('SELECT SCOPE_IDENTITY();')
        return int(self.cursor.fetchone()[0])

    def rowslice(self, rows, minimum=0, maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]

class MSSQL2Adapter(MSSQLAdapter):
    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        }

    def represent(self, obj, fieldtype):
        value = BaseAdapter.represent(self, obj, fieldtype)
        if fieldtype in ('string','text') and value[:1]=="'":
            value = 'N'+value
        return value
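    # Illustrative sketch (assumption, not from the original source): the N
    # prefix marks a Unicode literal in T-SQL, so for an NVARCHAR column
    #     adapter.represent('ciao', 'string')  ->  "N'ciao'"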

    def execute(self,a):
        return self.log_execute(a.decode('utf8'))


class FireBirdAdapter(BaseAdapter):

    commit_on_alter_table = False
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        }

    def sequence_name(self,tablename):
        return 'genid_%s' % tablename

    def trigger_name(self,tablename):
        return 'trg_id_%s' % tablename

    def RANDOM(self):
        return 'RAND()'

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self,table,mode):
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode),
                'DROP GENERATOR %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
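    # Illustrative note (assumption, not from the original source): Firebird
    # paginates with FIRST/SKIP directly after SELECT, so limitby=(20,30)
    # produces roughly
    #     SELECT FIRST 10 SKIP 20 ... ;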

    def _truncate(self,table,mode=''):
        return ['DELETE FROM %s;' % table._tablename,
                'SET GENERATOR %s TO 0;' % table._sequence_name]

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL: %s" % uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        port = int(m.group('port') or 3050)
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        charset = m.group('charset') or 'UTF8'
        driver_args.update(dict(dsn='%s/%s:%s' % (host,port,db),
                                user = credential_decoder(user),
                                password = credential_decoder(password),
                                charset = charset))
        def connect(driver_args=driver_args, adapter_args=adapter_args):
            if adapter_args.has_key('driver_name'):
                if adapter_args['driver_name'] == 'kinterbasdb':
                    conn = kinterbasdb.connect(**driver_args)
                elif adapter_args['driver_name'] == 'firebirdsql':
                    conn = firebirdsql.connect(**driver_args)
            else:
                conn = kinterbasdb.connect(**driver_args)
            return conn
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()



class FireBirdEmbeddedAdapter(FireBirdAdapter):

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        pathdb = m.group('path')
        if not pathdb:
            raise SyntaxError, 'Path required'
        charset = m.group('charset')
        if not charset:
            charset = 'UTF8'
        host = ''
        driver_args.update(dict(host=host,
                                database=pathdb,
                                user=credential_decoder(user),
                                password=credential_decoder(password),
                                charset=charset))

        def connect(driver_args=driver_args, adapter_args=adapter_args):
            if adapter_args.has_key('driver_name'):
                if adapter_args['driver_name'] == 'kinterbasdb':
                    conn = kinterbasdb.connect(**driver_args)
                elif adapter_args['driver_name'] == 'firebirdsql':
                    conn = firebirdsql.connect(**driver_args)
            else:
                conn = kinterbasdb.connect(**driver_args)
            return conn
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()



class DB2Adapter(BaseAdapter):

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'RAND()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
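    # Illustrative note (assumption, not from the original source): DB2
    # appends the fetch clause after the ORDER BY part, so limitby=(0,10)
    # produces roughly
    #     SELECT ... ORDER BY ... FETCH FIRST 10 ROWS ONLY;
    # the lower bound is applied client-side by rowslice() below.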

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return "BLOB('%s')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+'-00.00.00'
            return "'%s'" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "db2"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        cnxn = uri.split('://', 1)[1]
        def connect(cnxn=cnxn,driver_args=driver_args):
            return pyodbc.connect(cnxn,**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()

    def execute(self,command):
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self,table):
        self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
        return int(self.cursor.fetchone()[0])

    def rowslice(self,rows,minimum=0,maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]

INGRES_SEQNAME='ii***lineitemsequence'


class IngresAdapter(BaseAdapter):

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'RANDOM()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            if fetch_amt:
                sql_s += ' FIRST %d ' % (fetch_amt, )
            if lmin:
                sql_o += ' OFFSET %d' % (lmin, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
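    # Illustrative note (assumption, not from the original source): Ingres
    # combines FIRST n with OFFSET, so limitby=(20,30) produces roughly
    #     SELECT FIRST 10 ... OFFSET 20;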

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "ingres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        connstr = self.uri.split(':', 1)[1]
        connstr = connstr.lstrip()
        while connstr.startswith('/'):
            connstr = connstr[1:]
        database_name = connstr
        vnode = '(local)'
        servertype = 'ingres'
        trace = (0, None)
        driver_args.update(dict(database=database_name,
                                vnode=vnode,
                                servertype=servertype,
                                trace=trace))
        def connect(driver_args=driver_args):
            return ingresdbi.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()

    def create_sequence_and_triggers(self, query, table, **args):
        if hasattr(table,'_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table._primarykey]))
            self.execute(modify_tbl_sql)
        else:
            tmp_seqname='%s_iisq' % table._tablename
            query=query.replace(INGRES_SEQNAME, tmp_seqname)
            self.execute('create sequence %s' % tmp_seqname)
            self.execute(query)
            self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))


    def lastrowid(self,table):
        tmp_seqname='%s_iisq' % table
        self.execute('select current value for %s' % tmp_seqname)
        return int(self.cursor.fetchone()[0])

    def integrity_error_class(self):
        return ingresdbi.IntegrityError


class IngresUnicodeAdapter(IngresAdapter):
    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NCLOB',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'NCLOB',
        'list:string': 'NCLOB',
        'list:reference': 'NCLOB',
        }


class SAPDBAdapter(BaseAdapter):

    support_distributed_transaction = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONG',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONG',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'FIXED(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT PRIMARY KEY',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONG',
        'list:string': 'LONG',
        'list:reference': 'LONG',
        }

    def sequence_name(self,table):
        return '%s_id_Seq' % table

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
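    # Illustrative note (assumption, not from the original source): SAP DB
    # has no LIMIT/OFFSET, so the query is wrapped in a ROWNO subselect;
    # limitby=(20,30) produces a statement shaped like
    #     SELECT f FROM (SELECT w_tmp.*, ROWNO w_row
    #                    FROM (SELECT f FROM t) w_tmp
    #                    WHERE ROWNO=30) t WHERE w_row > 20;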

    def create_sequence_and_triggers(self, query, table, **args):
        self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
                         % (table._tablename, table._id.name, table._sequence_name))
        self.execute(query)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sapdb"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        def connect(user=user,password=password,database=db,host=host,driver_args=driver_args):
            return sapdb.Connection(user,password,database,host,**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()

    def lastrowid(self,table):
        self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
        return int(self.cursor.fetchone()[0])


class CubridAdapter(MySQLAdapter):

    driver = globals().get('cubriddb',None)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "cubrid"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '30000')
        charset = m.group('charset') or 'utf8'
        user = credential_decoder(user)
        passwd = credential_decoder(password)
        def connect(host=host,port=port,db=db,user=user,passwd=passwd,driver_args=driver_args):
            return self.driver.connect(host,port,db,user,passwd,**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")


class DatabaseStoredFile:

    web2py_filesystem = False

    def __init__(self,db,filename,mode):
        if db._adapter.dbengine != 'mysql':
            raise RuntimeError, "only MySQL can store metadata .table files in database for now"
        self.db = db
        self.filename = filename
        self.mode = mode
        if not self.web2py_filesystem:
            self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
            DatabaseStoredFile.web2py_filesystem = True
        self.p = 0
        self.data = ''
        if mode in ('r','rw','a'):
            query = "SELECT content FROM web2py_filesystem WHERE path='%s'" % filename
            rows = self.db.executesql(query)
            if rows:
                self.data = rows[0][0]
            elif os.path.exists(filename):
                datafile = open(filename, 'r')
                try:
                    self.data = datafile.read()
                finally:
                    datafile.close()
            elif mode in ('r','rw'):
                raise RuntimeError, "File %s does not exist" % filename

    def read(self, bytes):
        data = self.data[self.p:self.p+bytes]
        self.p += len(data)
        return data

    def readline(self):
        i = self.data.find('\n',self.p)+1
        if i>0:
            data, self.p = self.data[self.p:i], i
        else:
            data, self.p = self.data[self.p:], len(self.data)
        return data

    def write(self,data):
        self.data += data

    def close(self):
        self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
        query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" % \
            (self.filename, self.data.replace("'","''"))
        self.db.executesql(query)
        self.db.commit()
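    # Illustrative usage sketch (assumption, not from the original source;
    # the filename below is hypothetical): on hosts without a writable
    # filesystem the migration metadata lives in the web2py_filesystem table:
    #     f = DatabaseStoredFile(db, 'storage_person.table', 'w')
    #     f.write(cPickle.dumps(sql_fields))
    #     f.close()   # persisted via INSERT INTO web2py_filesystem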

    @staticmethod
    def exists(db,filename):
        if os.path.exists(filename):
            return True
        query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
        if db.executesql(query):
            return True
        return False


class UseDatabaseStoredFile:

    def file_exists(self, filename):
        return DatabaseStoredFile.exists(self.db,filename)

    def file_open(self, filename, mode='rb', lock=True):
        return DatabaseStoredFile(self.db,filename,mode)

    def file_close(self, fileobj, unlock=True):
        fileobj.close()

    def file_delete(self,filename):
        query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
        self.db.executesql(query)
        self.db.commit()


class GoogleSQLAdapter(UseDatabaseStoredFile, MySQLAdapter):

    def __init__(self, db, uri='google:sql://realm:domain/database', pool_size=0,
                 folder=None, db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 credential_decoder = lambda x:x, driver_args={},
                 adapter_args={}):

        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.folder = folder or '$HOME/'+thread.folder.split('/applications/',1)[1]

        m = re.compile('^(?P<instance>.*)/(?P<db>.*)$').match(self.uri[len('google:sql://'):])
        if not m:
            raise SyntaxError, "Invalid URI string in SQLDB: %s" % self.uri
        instance = credential_decoder(m.group('instance'))
        db = credential_decoder(m.group('db'))
        driver_args['instance'] = instance
        if not migrate:
            driver_args['database'] = db
        def connect(driver_args=driver_args):
            return rdbms.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        if migrate:
            self.execute('CREATE DATABASE IF NOT EXISTS %s' % db)
            self.execute('USE %s' % db)
        self.execute("SET FOREIGN_KEY_CHECKS=1;")
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")


class NoSQLAdapter(BaseAdapter):

    @staticmethod
    def to_unicode(obj):
        if isinstance(obj, str):
            return obj.decode('utf8')
        elif not isinstance(obj, unicode):
            return unicode(obj)
        return obj

    def represent(self, obj, fieldtype):
        if isinstance(obj,CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            raise SyntaxError, "non supported on GAE"
        if self.dbengine == 'google:datastore':
            if isinstance(fieldtype, gae.Property):
                return obj
        if fieldtype.startswith('list:'):
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        if obj == '' and not fieldtype[:2] in ['st','te','pa','up']:
            return None
        if obj is not None:
            if isinstance(obj, list) and not fieldtype.startswith('list'):
                obj = [self.represent(o, fieldtype) for o in obj]
            elif fieldtype in ('integer','id'):
                obj = long(obj)
            elif fieldtype == 'double':
                obj = float(obj)
            elif fieldtype.startswith('reference'):
                if isinstance(obj, (Row, Reference)):
                    obj = obj['id']
                obj = long(obj)
            elif fieldtype == 'boolean':
                if obj and not str(obj)[0].upper() == 'F':
                    obj = True
                else:
                    obj = False
            elif fieldtype == 'date':
                if not isinstance(obj, datetime.date):
                    (y, m, d) = map(int,str(obj).strip().split('-'))
                    obj = datetime.date(y, m, d)
                elif isinstance(obj,datetime.datetime):
                    (y, m, d) = (obj.year, obj.month, obj.day)
                    obj = datetime.date(y, m, d)
            elif fieldtype == 'time':
                if not isinstance(obj, datetime.time):
                    time_items = map(int,str(obj).strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    obj = datetime.time(h, mi, s)
            elif fieldtype == 'datetime':
                if not isinstance(obj, datetime.datetime):
                    (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
                    time_items = map(int,str(obj)[11:].strip().split(':')[:3])
                    while len(time_items)<3:
                        time_items.append(0)
                    (h, mi, s) = time_items
                    obj = datetime.datetime(y, m, d, h, mi, s)
            elif fieldtype == 'blob':
                pass
            elif fieldtype.startswith('list:string'):
                return map(self.to_unicode,obj)
            elif fieldtype.startswith('list:'):
                return map(int,obj)
            else:
                obj = self.to_unicode(obj)
        return obj
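    # Illustrative conversions (assumption, not from the original source):
    #     represent('7', 'integer')        ->  7L
    #     represent('T', 'boolean')        ->  True
    #     represent('2011-12-31', 'date')  ->  datetime.date(2011, 12, 31)
    #     represent(['a', 'b'], 'list:string')  ->  [u'a', u'b']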

    def _insert(self,table,fields):
        return 'insert %s in %s' % (fields, table)

    def _count(self,query,distinct=None):
        return 'count %s' % repr(query)

    def _select(self,query,fields,attributes):
        return 'select %s where %s' % (repr(fields), repr(query))

    def _delete(self,tablename, query):
        return 'delete %s where %s' % (repr(tablename),repr(query))

    def _update(self,tablename,query,fields):
        return 'update %s (%s) where %s' % (repr(tablename),
                                            repr(fields),repr(query))

    def commit(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def rollback(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def close(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def OR(self,first,second): raise SyntaxError, "Not supported"
    def AND(self,first,second): raise SyntaxError, "Not supported"
    def AS(self,first,second): raise SyntaxError, "Not supported"
    def ON(self,first,second): raise SyntaxError, "Not supported"
    def STARTSWITH(self,first,second=None): raise SyntaxError, "Not supported"
    def ENDSWITH(self,first,second=None): raise SyntaxError, "Not supported"
    def ADD(self,first,second): raise SyntaxError, "Not supported"
    def SUB(self,first,second): raise SyntaxError, "Not supported"
    def MUL(self,first,second): raise SyntaxError, "Not supported"
    def DIV(self,first,second): raise SyntaxError, "Not supported"
    def LOWER(self,first): raise SyntaxError, "Not supported"
    def UPPER(self,first): raise SyntaxError, "Not supported"
    def EXTRACT(self,first,what): raise SyntaxError, "Not supported"
    def AGGREGATE(self,first,what): raise SyntaxError, "Not supported"
    def LEFT_JOIN(self): raise SyntaxError, "Not supported"
    def RANDOM(self): raise SyntaxError, "Not supported"
    def SUBSTRING(self,field,parameters): raise SyntaxError, "Not supported"
    def PRIMARY_KEY(self,key): raise SyntaxError, "Not supported"
    def LIKE(self,first,second): raise SyntaxError, "Not supported"
    def drop(self,table,mode): raise SyntaxError, "Not supported"
    def alias(self,table,alias): raise SyntaxError, "Not supported"
    def migrate_table(self,*a,**b): raise SyntaxError, "Not supported"
    def distributed_transaction_begin(self,key): raise SyntaxError, "Not supported"
    def prepare(self,key): raise SyntaxError, "Not supported"
    def commit_prepared(self,key): raise SyntaxError, "Not supported"
    def rollback_prepared(self,key): raise SyntaxError, "Not supported"
    def concat_add(self,table): raise SyntaxError, "Not supported"
    def constraint_name(self, table, fieldname): raise SyntaxError, "Not supported"
    def create_sequence_and_triggers(self, query, table, **args): pass
    def log_execute(self,*a,**b): raise SyntaxError, "Not supported"
    def execute(self,*a,**b): raise SyntaxError, "Not supported"
    def represent_exceptions(self, obj, fieldtype): raise SyntaxError, "Not supported"
    def lastrowid(self,table): raise SyntaxError, "Not supported"
    def integrity_error_class(self): raise SyntaxError, "Not supported"
    def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError, "Not supported"


class GAEF(object):
    def __init__(self,name,op,value,apply):
        self.name = name=='id' and '__key__' or name
        self.op = op
        self.value = value
        self.apply = apply
    def __repr__(self):
        return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))

class GoogleDatastoreAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {}

    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj, unlock=True): pass

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.types.update({
                'boolean': gae.BooleanProperty,
                'string': (lambda: gae.StringProperty(multiline=True)),
                'text': gae.TextProperty,
                'password': gae.StringProperty,
                'blob': gae.BlobProperty,
                'upload': gae.StringProperty,
                'integer': gae.IntegerProperty,
                'double': gae.FloatProperty,
                'decimal': GAEDecimalProperty,
                'date': gae.DateProperty,
                'time': gae.TimeProperty,
                'datetime': gae.DateTimeProperty,
                'id': None,
                'reference': gae.IntegerProperty,
                'list:string': (lambda: gae.StringListProperty(default=None)),
                'list:integer': (lambda: gae.ListProperty(int,default=None)),
                'list:reference': (lambda: gae.ListProperty(int,default=None)),
                })
        self.db = db
        self.uri = uri
        self.dbengine = 'google:datastore'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = 0
        match = re.compile('.*://(?P<namespace>.+)').match(uri)
        if match:
            namespace_manager.set_namespace(match.group('namespace'))

    def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
        myfields = {}
        for k in table.fields:
            if isinstance(polymodel,Table) and k in polymodel.fields():
                continue
            field = table[k]
            attr = {}
            if isinstance(field.type, SQLCustomType):
                ftype = self.types[field.type.native or field.type.type](**attr)
            elif isinstance(field.type, gae.Property):
                ftype = field.type
            elif field.type.startswith('id'):
                continue
            elif field.type.startswith('decimal'):
                precision, scale = field.type[7:].strip('()').split(',')
                precision = int(precision)
                scale = int(scale)
                ftype = GAEDecimalProperty(precision, scale, **attr)
            elif field.type.startswith('reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field.type[10:].strip()
                ftype = self.types[field.type[:9]](table._db[referenced])
            elif field.type.startswith('list:reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field.type[15:].strip()
                ftype = self.types[field.type[:14]](**attr)
            elif field.type.startswith('list:'):
                ftype = self.types[field.type](**attr)
            elif not field.type in self.types\
                 or not self.types[field.type]:
                raise SyntaxError, 'Field: unknown field type: %s' % field.type
            else:
                ftype = self.types[field.type](**attr)
            myfields[field.name] = ftype
        if not polymodel:
            table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
        elif polymodel==True:
            table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
        elif isinstance(polymodel,Table):
            table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
        else:
            raise SyntaxError, "polymodel must be None, True, a table or a tablename"
        return None

    def expand(self,expression,field_type=None):
        if isinstance(expression,Field):
            if expression.type in ('text','blob'):
                raise SyntaxError, 'AppEngine does not index by: %s' % expression.type
            return expression.name
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            else:
                return expression.op()
        elif field_type:
            return self.represent(expression,field_type)
        elif isinstance(expression,(list,tuple)):
            return ','.join([self.represent(item,field_type) for item in expression])
        else:
            return str(expression)

    def AND(self,first,second):
        a = self.expand(first)
        b = self.expand(second)
        if b[0].name=='__key__' and a[0].name!='__key__':
            return b+a
        return a+b

    def EQ(self,first,second=None):
        if isinstance(second, Key):
            return [GAEF(first.name,'=',second,lambda a,b:a==b)]
        return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]

    def NE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]

    def LT(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'<',second,lambda a,b:a<b)]

    def LE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]

    def GT(self,first,second=None):
        if first.type != 'id' or second==0 or second == '0':
            return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'>',second,lambda a,b:a>b)]

    def GE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]

    def COMMA(self,first,second):
        return self.expand(first)+self.expand(second)

    def BELONGS(self,first,second=None):
        if not isinstance(second,(list, tuple)):
            raise SyntaxError, "Not supported"
        if first.type != 'id':
            return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
        else:
            second = [Key.from_path(first._tablename, i) for i in second]
            return [GAEF(first.name,'in',second,lambda a,b:a in b)]

    def CONTAINS(self,first,second):
        if not first.type.startswith('list:'):
            raise SyntaxError, "Not supported"
        return [GAEF(first.name,'=',self.expand(second,'string'),lambda a,b:b in a)]

    def NOT(self,first):
        nops = { self.EQ: self.NE,
                 self.NE: self.EQ,
                 self.LT: self.GE,
                 self.GT: self.LE,
                 self.LE: self.GT,
                 self.GE: self.LT}
        if not isinstance(first,Query):
            raise SyntaxError, "Not supported"
        nop = nops.get(first.op,None)
        if not nop:
            raise SyntaxError, "Not supported %s" % first.op.__name__
        first.op = nop
        return self.expand(first)
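    # Illustrative note (assumption, not from the original source): the
    # datastore has no native NOT, so negation inverts the comparison of
    # the wrapped query, e.g. ~(db.person.id > 3) is evaluated as
    # db.person.id <= 3.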

    def truncate(self,table,mode):
        self.db(table._id > 0).delete()

    def select_raw(self,query,fields=[],attributes={}):
        new_fields = []
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        fields = new_fields
        if query:
            tablename = self.get_table(query)
        elif fields:
            tablename = fields[0].tablename
            query = fields[0].table._id>0
        else:
            raise SyntaxError, "Unable to determine a tablename"
        query = self.filter_tenant(query,[tablename])
        tableobj = self.db[tablename]._tableobj
        items = tableobj.all()
        filters = self.expand(query)
        for filter in filters:
            if filter.name=='__key__' and filter.op=='>' and filter.value==0:
                continue
            elif filter.name=='__key__' and filter.op=='=':
                if filter.value==0:
                    items = []
                elif isinstance(filter.value, Key):
                    item = tableobj.get(filter.value)
                    items = (item and [item]) or []
                else:
                    item = tableobj.get_by_id(filter.value)
                    items = (item and [item]) or []
            elif isinstance(items,list):
                items = [i for i in items if filter.apply(getattr(i,filter.name),
                                                          filter.value)]
            else:
                if filter.name=='__key__': items.order('__key__')
                items = items.filter('%s %s' % (filter.name,filter.op),filter.value)
        if not isinstance(items,list):
            if attributes.get('left', None):
                raise SyntaxError, 'Set: no left join in appengine'
            if attributes.get('groupby', None):
                raise SyntaxError, 'Set: no groupby in appengine'
            orderby = attributes.get('orderby', False)
            if orderby:
                if isinstance(orderby, (list, tuple)):
                    orderby = xorify(orderby)
                if isinstance(orderby,Expression):
                    orderby = self.expand(orderby)
                orders = orderby.split(', ')
                for order in orders:
                    order = {'-id':'-__key__','id':'__key__'}.get(order,order)
                    items = items.order(order)
            if attributes.get('limitby', None):
                (lmin, lmax) = attributes['limitby']
                (limit, offset) = (lmax - lmin, lmin)
                items = items.fetch(limit, offset=offset)
        fields = self.db[tablename].fields
        return (items, tablename, fields)

    def select(self,query,fields,attributes):
        (items, tablename, fields) = self.select_raw(query,fields,attributes)
        rows = [
            [t=='id' and int(item.key().id()) or getattr(item, t) for t in fields]
            for item in items]
        colnames = ['%s.%s' % (tablename, t) for t in fields]
        return self.parse(rows, colnames, False)


    def count(self,query,distinct=None):
        if distinct:
            raise RuntimeError, "COUNT DISTINCT not supported"
        (items, tablename, fields) = self.select_raw(query)
        try:
            return len(items)
        except TypeError:
            return items.count(limit=None)

    def delete(self,tablename, query):
        """
        This function was changed on 2010-05-04 because according to
        http://code.google.com/p/googleappengine/issues/detail?id=3119
        GAE no longer supports deleting more than 1000 records.
        """
        (items, tablename, fields) = self.select_raw(query)
        if not isinstance(items,list):
            counter = items.count(limit=None)
            leftitems = items.fetch(1000)
            while len(leftitems):
                gae.delete(leftitems)
                leftitems = items.fetch(1000)
        else:
            counter = len(items)
            gae.delete(items)
        return counter

    def update(self,tablename,query,update_fields):
        (items, tablename, fields) = self.select_raw(query)
        counter = 0
        for item in items:
            for field, value in update_fields:
                setattr(item, field.name, self.represent(value,field.type))
            item.put()
            counter += 1
        logger.info(str(counter))
        return counter

    def insert(self,table,fields):
        dfields = dict((f.name,self.represent(v,f.type)) for f,v in fields)
        tmp = table._tableobj(**dfields)
        tmp.put()
        rid = Reference(tmp.key().id())
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self,table,items):
        parsed_items = []
        for item in items:
            dfields = dict((f.name,self.represent(v,f.type)) for f,v in item)
            parsed_items.append(table._tableobj(**dfields))
        gae.put(parsed_items)
        return True


def uuid2int(uuidv):
    return uuid.UUID(uuidv).int


def int2uuid(n):
    return str(uuid.UUID(int=n))
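# Illustrative round trip (assumption, not from the original source):
#     uuid2int('00000000-0000-0000-0000-000000000019')  ->  25
#     int2uuid(25)  ->  '00000000-0000-0000-0000-000000000019'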


class CouchDBAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {
        'boolean': bool,
        'string': str,
        'text': str,
        'password': str,
        'blob': str,
        'upload': str,
        'integer': long,
        'double': float,
        'date': datetime.date,
        'time': datetime.time,
        'datetime': datetime.datetime,
        'id': long,
        'reference': long,
        'list:string': list,
        'list:integer': list,
        'list:reference': list,
        }

    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj, unlock=True): pass

    def expand(self,expression,field_type=None):
        if isinstance(expression,Field):
            if expression.type=='id':
                return "%s._id" % expression.tablename
        return BaseAdapter.expand(self,expression,field_type)

    def AND(self,first,second):
        return '(%s && %s)' % (self.expand(first),self.expand(second))

    def OR(self,first,second):
        return '(%s || %s)' % (self.expand(first),self.expand(second))

    def EQ(self,first,second):
        if second is None:
            return '(%s == null)' % self.expand(first)
        return '(%s == %s)' % (self.expand(first),self.expand(second,first.type))

    def NE(self,first,second):
        if second is None:
            return '(%s != null)' % self.expand(first)
        return '(%s != %s)' % (self.expand(first),self.expand(second,first.type))

    def COMMA(self,first,second):
        return '%s + %s' % (self.expand(first),self.expand(second))

    def represent(self, obj, fieldtype):
        value = NoSQLAdapter.represent(self, obj, fieldtype)
        if fieldtype=='id':
            return repr(str(int(value)))
        return repr(not isinstance(value,unicode) and value or value.encode('utf8'))

    def __init__(self,db,uri='couchdb://127.0.0.1:5984',
                 pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.uri = uri
        self.dbengine = 'couchdb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = pool_size

        url = 'http://'+uri[10:]
        def connect(url=url,driver_args=driver_args):
            return couchdb.Server(url,**driver_args)
        self.pool_connection(connect)

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        if migrate:
            try:
                self.connection.create(table._tablename)
            except:
                pass

    def insert(self,table,fields):
        id = uuid2int(web2py_uuid())
        ctable = self.connection[table._tablename]
        values = dict((k.name,NoSQLAdapter.represent(self,v,k.type)) for k,v in fields)
        values['_id'] = str(id)
        ctable.save(values)
        return id

    def _select(self,query,fields,attributes):
        if not isinstance(query,Query):
            raise SyntaxError, "Not Supported"
        for key in set(attributes.keys())-set(('orderby','groupby','limitby',
                                               'required','cache','left',
                                               'distinct','having')):
            raise SyntaxError, 'invalid select attribute: %s' % key
        new_fields = []
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        def uid(fd):
            return fd=='id' and '_id' or fd
        def get(row,fd):
            return fd=='id' and int(row['_id']) or row.get(fd,None)
        fields = new_fields
        tablename = self.get_table(query)
        fieldnames = [f.name for f in (fields or self.db[tablename])]
        colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
        fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
        fn = "function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);}" %\
            dict(t=tablename,
                 query=self.expand(query),
                 order='%s._id' % tablename,
                 fields=fields)
        return fn, colnames
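    # Illustrative sketch (assumption, not from the original source): for a
    # table 'person' with a 'name' field, db.person.name=='James' compiles
    # to a temporary-view map function roughly like
    #     function(person){
    #         if((person.name == 'James'))
    #             emit(person._id, [person._id, person.name]);
    #     }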

    def select(self,query,fields,attributes):
        if not isinstance(query,Query):
            raise SyntaxError, "Not Supported"
        fn, colnames = self._select(query,fields,attributes)
        tablename = colnames[0].split('.')[0]
        ctable = self.connection[tablename]
        rows = [cols['value'] for cols in ctable.query(fn)]
        return self.parse(rows, colnames, False)

    def delete(self,tablename,query):
        if not isinstance(query,Query):
            raise SyntaxError, "Not Supported"
        if query.first.type=='id' and query.op==self.EQ:
            id = query.second
            tablename = query.first.tablename
            assert(tablename == query.first.tablename)
            ctable = self.connection[tablename]
            try:
                del ctable[str(id)]
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            for row in rows:
                del ctable[str(row.id)]
            return len(rows)

    def update(self,tablename,query,fields):
        if not isinstance(query,Query):
            raise SyntaxError, "Not Supported"
        if query.first.type=='id' and query.op==self.EQ:
            id = query.second
            tablename = query.first.tablename
            ctable = self.connection[tablename]
            try:
                doc = ctable[str(id)]
                for key,value in fields:
                    doc[key.name] = NoSQLAdapter.represent(self,value,self.db[tablename][key.name].type)
                ctable.save(doc)
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            table = self.db[tablename]
            for row in rows:
                doc = ctable[str(row.id)]
                for key,value in fields:
                    doc[key.name] = NoSQLAdapter.represent(self,value,table[key.name].type)
                ctable.save(doc)
            return len(rows)

    def count(self,query,distinct=None):
        if distinct:
            raise RuntimeError, "COUNT DISTINCT not supported"
        if not isinstance(query,Query):
            raise SyntaxError, "Not Supported"
        tablename = self.get_table(query)
        rows = self.select(query,[self.db[tablename]._id],{})
        return len(rows)


def cleanup(text):
    """
    validates that the given text is clean: only contains [0-9a-zA-Z_]
    """
    if re.compile('[^0-9a-zA-Z_]').findall(text):
        raise SyntaxError, \
            'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
            % text
    return text


class MongoDBAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {
        'boolean': bool,
        'string': str,
        'text': str,
        'password': str,
        'blob': str,
        'upload': str,
        'integer': long,
        'double': float,
        'date': datetime.date,
        'time': datetime.time,
        'datetime': datetime.datetime,
        'id': long,
        'reference': long,
        'list:string': list,
        'list:integer': list,
        'list:reference': list,
        }

    def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
                 pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.uri = uri
        self.dbengine = 'mongodb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = pool_size

        m = re.compile('^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self.uri[10:])
        if not m:
            raise SyntaxError, "Invalid URI string in DAL: %s" % self.uri
        host = m.group('host')
        if not host:
            raise SyntaxError, 'mongodb: host name required'
        dbname = m.group('db')
        if not dbname:
            raise SyntaxError, 'mongodb: db name required'
        port = int(m.group('port') or 27017)
        driver_args.update(dict(host=host,port=port))
        def connect(dbname=dbname,driver_args=driver_args):
            return pymongo.Connection(**driver_args)[dbname]
        self.pool_connection(connect)

    def insert(self,table,fields):
        ctable = self.connection[table._tablename]
        values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
        return ctable.insert(values)

    def count(self,query):
        raise RuntimeError, "Not implemented"

    def select(self,query,fields,attributes):
        raise RuntimeError, "Not implemented"

    def delete(self,tablename, query):
        raise RuntimeError, "Not implemented"

    def update(self,tablename,query,fields):
        raise RuntimeError, "Not implemented"


ADAPTERS = {
    'sqlite': SQLiteAdapter,
    'sqlite:memory': SQLiteAdapter,
    'mysql': MySQLAdapter,
    'postgres': PostgreSQLAdapter,
    'oracle': OracleAdapter,
    'mssql': MSSQLAdapter,
    'mssql2': MSSQL2Adapter,
    'db2': DB2Adapter,
    'informix': InformixAdapter,
    'firebird': FireBirdAdapter,
    'firebird_embedded': FireBirdAdapter,
    'ingres': IngresAdapter,
    'ingresu': IngresUnicodeAdapter,
    'sapdb': SAPDBAdapter,
    'cubrid': CubridAdapter,
    'jdbc:sqlite': JDBCSQLiteAdapter,
    'jdbc:sqlite:memory': JDBCSQLiteAdapter,
    'jdbc:postgres': JDBCPostgreSQLAdapter,
    'gae': GoogleDatastoreAdapter,
    'google:datastore': GoogleDatastoreAdapter,
    'google:sql': GoogleSQLAdapter,
    'couchdb': CouchDBAdapter,
    'mongodb': MongoDBAdapter,
}
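# Illustrative note (assumption, not from the original source): the URI
# scheme (the part before '://') selects the adapter class from this
# mapping, e.g.
#     db = DAL('mssql2://user:password@host/db')
#     # db._adapter is then an MSSQL2Adapter instance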


def sqlhtml_validators(field):
    """
    Field type validation, using web2py's validators mechanism.

    makes sure the content of a field is in line with the declared
    fieldtype
    """
    if not have_validators:
        return []
    field_type, field_length = field.type, field.length
    if isinstance(field_type, SQLCustomType):
        if hasattr(field_type, 'validator'):
            return field_type.validator
        else:
            field_type = field_type.type
    elif not isinstance(field_type,str):
        return []
    requires = []
    def ff(r,id):
        row = r(id)
        if not row:
            return id
        elif hasattr(r, '_format') and isinstance(r._format,str):
            return r._format % row
        elif hasattr(r, '_format') and callable(r._format):
            return r._format(row)
        else:
            return id
    if field_type == 'string':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'text':
        requires.append(validators.IS_LENGTH(2 ** 16))
    elif field_type == 'password':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'double':
        requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
    elif field_type == 'integer':
        requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
    elif field_type.startswith('decimal'):
        requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
    elif field_type == 'date':
        requires.append(validators.IS_DATE())
    elif field_type == 'time':
        requires.append(validators.IS_TIME())
    elif field_type == 'datetime':
        requires.append(validators.IS_DATETIME())
    elif field.db and field_type.startswith('reference') and \
            field_type.find('.') < 0 and \
            field_type[10:] in field.db.tables:
        referenced = field.db[field_type[10:]]
        def repr_ref(id, r=referenced, f=ff): return f(r, id)
        field.represent = field.represent or repr_ref
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field.db,referenced._id,
                                           referenced._format)
            if field.unique:
                requires._and = validators.IS_NOT_IN_DB(field.db,field)
            if field.tablename == field_type[10:]:
                return validators.IS_EMPTY_OR(requires)
            return requires
    elif field.db and field_type.startswith('list:reference') and \
            field_type.find('.') < 0 and \
            field_type[15:] in field.db.tables:
        referenced = field.db[field_type[15:]]
        def list_ref_repr(ids, r=referenced, f=ff):
            if not ids:
                return None
            refs = r._db(r._id.belongs(ids)).select(r._id)
            return (refs and ', '.join(str(f(r,ref.id)) for ref in refs) or '')
        field.represent = field.represent or list_ref_repr
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field.db,referenced._id,
                                           referenced._format,multiple=True)
        else:
            requires = validators.IS_IN_DB(field.db,referenced._id,
                                           multiple=True)
        if field.unique:
            requires._and = validators.IS_NOT_IN_DB(field.db,field)
        return requires
    elif field_type.startswith('list:'):
        def repr_list(values): return ', '.join(str(v) for v in (values or []))
        field.represent = field.represent or repr_list
    if field.unique:
        requires.insert(0,validators.IS_NOT_IN_DB(field.db,field))
    sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
    if field.notnull and not field_type[:2] in sff:
        requires.insert(0, validators.IS_NOT_EMPTY())
    elif not field.notnull and field_type[:2] in sff and requires:
        requires[-1] = validators.IS_EMPTY_OR(requires[-1])
    return requires
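# Illustrative results (assumption, not from the original source):
#     sqlhtml_validators(Field('name', 'string', length=64))
#     ->  [IS_LENGTH(64)]
# while a 'reference person' field on a db whose person table defines a
# _format gets an IS_IN_DB(db, person.id, format) validator instead.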


def bar_escape(item):
    return str(item).replace('|', '||')


def bar_encode(items):
    return '|%s|' % '|'.join(bar_escape(item) for item in items)


def bar_decode_integer(value):
    return [int(x) for x in value.split('|') if x.strip()]


def bar_decode_string(value):
    return [x.replace('||', '|') for x in
            string_unpack.split(value[1:-1]) if x.strip()]

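# Illustrative round trip (assumption, not from the original source): list:
# fields are stored as '|'-separated strings with literal bars doubled:
#     bar_escape('a|b')              ->  'a||b'
#     bar_encode([1, 2, 3])          ->  '|1|2|3|'
#     bar_decode_integer('|1|2|3|')  ->  [1, 2, 3]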


class Row(dict):

    """
    a dictionary that lets you do d['a'] as well as d.a
    this is only used to store a Row
    """

    def __getitem__(self, key):
        key = str(key)
        if key in self.get('_extra',{}):
            return self._extra[key]
        return dict.__getitem__(self, key)

    def __call__(self,key):
        return self.__getitem__(key)

    def __setitem__(self, key, value):
        dict.__setitem__(self, str(key), value)

    def __getattr__(self, key):
        return self[key]

    def __setattr__(self, key, value):
        self[key] = value

    def __repr__(self):
        return '<Row ' + dict.__repr__(self) + '>'

    def __int__(self):
        return dict.__getitem__(self,'id')

    def __eq__(self,other):
        try:
            return self.as_dict() == other.as_dict()
        except AttributeError:
            return False

    def __ne__(self,other):
        return not (self == other)

    def __copy__(self):
        return Row(dict(self))

    def as_dict(self,datetime_to_str=False):
        SERIALIZABLE_TYPES = (str,unicode,int,long,float,bool,list)
        d = dict(self)
        for k in copy.copy(d.keys()):
            v = d[k]
            if d[k] is None:
                continue
            elif isinstance(v,Row):
                d[k] = v.as_dict()
            elif isinstance(v,Reference):
                d[k] = int(v)
            elif isinstance(v,decimal.Decimal):
                d[k] = float(v)
            elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
                if datetime_to_str:
                    d[k] = v.isoformat().replace('T',' ')[:19]
            elif not isinstance(v,SERIALIZABLE_TYPES):
                del d[k]
        return d
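    # Illustrative example (assumption, not from the original source):
    #     r = Row(id=1, name='James', born=datetime.date(1990, 1, 1))
    #     r.as_dict(datetime_to_str=True)
    #     ->  {'id': 1, 'name': 'James', 'born': '1990-01-01'}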


def Row_unpickler(data):
    return Row(cPickle.loads(data))


def Row_pickler(data):
    return Row_unpickler, (cPickle.dumps(data.as_dict(datetime_to_str=False)),)


copy_reg.pickle(Row, Row_pickler, Row_unpickler)


class SQLCallableList(list):
    def __call__(self):
        return copy.copy(self)


class DAL(dict):

    """
    an instance of this class represents a database connection

    Example::

       db = DAL('sqlite://test.db')
       db.define_table('tablename', Field('fieldname1'),
                                    Field('fieldname2'))
    """

    @staticmethod
    def set_folder(folder):
        """
        # ## this allows gluon to set a folder for this thread
        # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
        """
        BaseAdapter.set_folder(folder)

    @staticmethod
    def distributed_transaction_begin(*instances):
        if not instances:
            return
        thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread())
        keys = ['%s.%i' % (thread_key, i) for (i, db) in enumerate(instances)]
        for (i, db) in enumerate(instances):
            if not db._adapter.support_distributed_transaction:
                raise SyntaxError, \
                    'distributed transaction not supported by %s' % db._dbname
        for (i, db) in enumerate(instances):
            db._adapter.distributed_transaction_begin(keys[i])

    @staticmethod
    def distributed_transaction_commit(*instances):
        if not instances:
            return
        thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread())
        keys = ['%s.%i' % (thread_key, i) for (i, db) in enumerate(instances)]
        for (i, db) in enumerate(instances):
            if not db._adapter.support_distributed_transaction:
                raise SyntaxError, \
                    'distributed transaction not supported by %s' % db._dbname
        try:
            for (i, db) in enumerate(instances):
                db._adapter.prepare(keys[i])
        except:
            for (i, db) in enumerate(instances):
                db._adapter.rollback_prepared(keys[i])
            raise RuntimeError, 'failure to commit distributed transaction'
        else:
            for (i, db) in enumerate(instances):
                db._adapter.commit_prepared(keys[i])
        return

    def __init__(self, uri='sqlite://dummy.db', pool_size=0, folder=None,
                 db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 migrate_enabled=True, fake_migrate_all=False,
                 decode_credentials=False, driver_args=None,
                 adapter_args={}, attempts=5, auto_import=False):
        """
        Creates a new Database Abstraction Layer instance.

        Keyword arguments:

        :uri: string that contains information for connecting to a database.
            (default: 'sqlite://dummy.db')
        :pool_size: how many open connections to keep in the connection pool
        :folder: where the .table migration metadata files are stored
            (set automatically within web2py; set explicitly otherwise)
        :db_codec: string encoding of the database (default: 'UTF-8')
        :check_reserved: list of adapters to check tablenames and column names
            against sql reserved keywords. (Default None)

        * 'common' List of sql keywords that are common to all database types
          such as "SELECT, INSERT". (recommended)
        * 'all' Checks against all known SQL keywords. (not recommended)
        * '<adaptername>' Checks against the specific adapter's list of
          keywords. (recommended)
        * '<adaptername>_nonreserved' Checks against the specific adapter's
          list of nonreserved keywords. (if available)

        :migrate (defaults to True) sets default migrate behavior for all tables
        :fake_migrate (defaults to False) sets default fake_migrate behavior for all tables
        :migrate_enabled (defaults to True). If set to False disables ALL migrations
        :fake_migrate_all (defaults to False). If set to True fake migrates ALL tables
        :attempts (defaults to 5). Number of times to attempt connecting
        """
        if not decode_credentials:
            credential_decoder = lambda cred: cred
        else:
            credential_decoder = lambda cred: urllib.unquote(cred)
        if folder:
            self.set_folder(folder)
        self._uri = uri
        self._pool_size = pool_size
        self._db_codec = db_codec
        self._lastsql = ''
        self._timings = []
        self._pending_references = {}
        self._request_tenant = 'request_tenant'
        self._common_fields = []
        if not str(attempts).isdigit() or attempts < 0:
            attempts = 5
        if uri:
            uris = isinstance(uri,(list,tuple)) and uri or [uri]
            error = ''
            connected = False
            for k in range(attempts):
                for uri in uris:
                    try:
                        if is_jdbc and not uri.startswith('jdbc:'):
                            uri = 'jdbc:'+uri
                        self._dbname = regex_dbname.match(uri).group()
                        if not self._dbname in ADAPTERS:
                            raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
                        args = (self,uri,pool_size,folder,db_codec,credential_decoder,driver_args or {}, adapter_args)
                        self._adapter = ADAPTERS[self._dbname](*args)
                        connected = True
                        break
                    except SyntaxError:
                        raise
                    except Exception, error:
                        pass
                if connected:
                    break
                else:
                    time.sleep(1)
            if not connected:
                raise RuntimeError, "Failure to connect, tried %d times:\n%s" % (attempts, error)
        else:
            args = (self,'None',0,folder,db_codec)
            self._adapter = BaseAdapter(*args)
            migrate = fake_migrate = False
        adapter = self._adapter
        self._uri_hash = hashlib.md5(adapter.uri).hexdigest()
        self.tables = SQLCallableList()
        self.check_reserved = check_reserved
        if self.check_reserved:
            from reserved_sql_keywords import ADAPTERS as RSK
            self.RSK = RSK
        self._migrate = migrate
        self._fake_migrate = fake_migrate
        self._migrate_enabled = migrate_enabled
        self._fake_migrate_all = fake_migrate_all
        if auto_import:
            self.import_table_definitions(adapter.folder)

    def import_table_definitions(self, path, migrate=False, fake_migrate=False):
        pattern = os.path.join(path,self._uri_hash+'_*.table')
        for filename in glob.glob(pattern):
            tfile = self._adapter.file_open(filename, 'r')
            try:
                sql_fields = cPickle.load(tfile)
                name = filename[len(pattern)-7:-6]
                mf = [(value['sortable'],Field(key,type=value['type'])) \
                      for key, value in sql_fields.items()]
                mf.sort(lambda a,b: cmp(a[0],b[0]))
                self.define_table(name,*[item[1] for item in mf],
                                  **dict(migrate=migrate,fake_migrate=fake_migrate))
            finally:
                self._adapter.file_close(tfile)

    def check_reserved_keyword(self, name):
        """
        Validates ``name`` against SQL keywords.
        Uses self.check_reserved, which is a list of
        backends to check against, e.g.
        self.check_reserved
        ['common', 'postgres', 'mysql']
        self.check_reserved
        ['all']
        """
        for backend in self.check_reserved:
            if name.upper() in self.RSK[backend]:
                raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())

    def __contains__(self, tablename):
        return self.has_key(tablename)

    def parse_as_rest(self,patterns,args,vars,query=None,nested_select=True):
        """
        EXAMPLE:

        db.define_table('person',Field('name'),Field('info'))
        db.define_table('pet',Field('person',db.person),Field('name'),Field('info'))

        @request.restful()
        def index():
            def GET(*kargs,**kvars):
                patterns = [
                    "/persons[person]",
                    "/{person.name.startswith}",
                    "/{person.name}/:field",
                    "/{person.name}/pets[pet.person]",
                    "/{person.name}/pet[pet.person]/{pet.name}",
                    "/{person.name}/pet[pet.person]/{pet.name}/:field"
                    ]
                parser = db.parse_as_rest(patterns,kargs,kvars)
                if parser.status == 200:
                    return dict(content=parser.response)
                else:
                    raise HTTP(parser.status,parser.error)
            def POST(table_name,**kvars):
                if table_name == 'person':
                    return db.person.validate_and_insert(**kvars)
                elif table_name == 'pet':
                    return db.pet.validate_and_insert(**kvars)
                else:
                    raise HTTP(400)
            return locals()
        """

        db = self
        re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
        re2 = re.compile('^.+\[.+\]$')

        def auto_table(table,base='',depth=0):
            patterns = []
            for field in db[table].fields:
                if base:
                    tag = '%s/%s' % (base,field.replace('_','-'))
                else:
                    tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
                f = db[table][field]
                if not f.readable: continue
                if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
                    tag += '/{%s.%s}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('boolean'):
                    tag += '/{%s.%s}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('double') or f.type.startswith('integer'):
                    tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('list:'):
                    tag += '/{%s.%s.contains}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type in ('date','datetime'):
                    tag += '/{%s.%s.year}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.month}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.day}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                if f.type in ('datetime','time'):
                    tag += '/{%s.%s.hour}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.minute}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.second}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                if depth>0:
                    for rtable,rfield in db[table]._referenced_by:
                        tag += '/%s[%s.%s]' % (rtable,rtable,rfield)
                        patterns.append(tag)
                        patterns += auto_table(rtable,base=tag,depth=depth-1)
            return patterns
4048 if patterns=='auto':
4049 patterns=[]
4050 for table in db.tables:
4051 if not table.startswith('auth_'):
4052 patterns += auto_table(table,base='',depth=1)
4053 else:
4054 i = 0
4055 while i<len(patterns):
4056 pattern = patterns[i]
4057 tokens = pattern.split('/')
4058 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
4059 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],'/'.join(tokens[:-1]))
4060 patterns = patterns[:i]+new_patterns+patterns[i+1:]
4061 i += len(new_patterns)
4062 else:
4063 i += 1
4064 if '/'.join(args) == 'patterns':
4065 return Row({'status':200,'pattern':'list',
4066 'error':None,'response':patterns})
4067 for pattern in patterns:
4068 otable=table=None
4069 dbset=db(query)
4070 i=0
4071 tags = pattern[1:].split('/')
4072
4073 if len(tags)!=len(args):
4074 continue
4075 for tag in tags:
4076
4077 if re1.match(tag):
4078
4079 tokens = tag[1:-1].split('.')
4080 table, field = tokens[0], tokens[1]
4081 if not otable or table == otable:
4082 if len(tokens)==2 or tokens[2]=='eq':
4083 query = db[table][field]==args[i]
4084 elif tokens[2]=='ne':
4085 query = db[table][field]!=args[i]
4086 elif tokens[2]=='lt':
4087 query = db[table][field]<args[i]
4088 elif tokens[2]=='gt':
4089 query = db[table][field]>args[i]
4090 elif tokens[2]=='ge':
4091 query = db[table][field]>=args[i]
4092 elif tokens[2]=='le':
4093 query = db[table][field]<=args[i]
4094 elif tokens[2]=='year':
4095 query = db[table][field].year()==args[i]
4096 elif tokens[2]=='month':
4097 query = db[table][field].month()==args[i]
4098 elif tokens[2]=='day':
4099 query = db[table][field].day()==args[i]
4100 elif tokens[2]=='hour':
4101 query = db[table][field].hour()==args[i]
4102 elif tokens[2]=='minute':
4103 query = db[table][field].minutes()==args[i]
4104 elif tokens[2]=='second':
4105 query = db[table][field].seconds()==args[i]
4106 elif tokens[2]=='startswith':
4107 query = db[table][field].startswith(args[i])
4108 elif tokens[2]=='contains':
4109 query = db[table][field].contains(args[i])
4110 else:
4111 raise RuntimeError, "invalid pattern: %s" % pattern
4112 if len(tokens)==4 and tokens[3]=='not':
4113 query = ~query
4114 elif len(tokens)>=4:
4115 raise RuntimeError, "invalid pattern: %s" % pattern
4116 dbset=dbset(query)
4117 else:
4118 raise RuntimeError, "missing relation in pattern: %s" % pattern
4119 elif otable and re2.match(tag) and args[i]==tag[:tag.find('[')]:
4120
4121 ref = tag[tag.find('[')+1:-1]
4122 if '.' in ref:
4123 table,field = ref.split('.')
4124
4125 if nested_select:
4126 try:
4127 dbset=db(db[table][field].belongs(dbset._select(db[otable]._id)))
4128 except ValueError:
4129 return Row({'status':400,'pattern':pattern,
4130 'error':'invalid path','response':None})
4131 else:
4132 items = [item.id for item in dbset.select(db[otable]._id)]
4133 dbset=db(db[table][field].belongs(items))
4134 else:
4135 dbset=dbset(db[ref])
4136 elif tag==':field' and table:
4137
4138 field = args[i]
4139 if not field in db[table]: break
4140 try:
4141 item = dbset.select(db[table][field],limitby=(0,1)).first()
4142 except ValueError:
4143 return Row({'status':400,'pattern':pattern,
4144 'error':'invalid path','response':None})
4145 if not item:
4146 return Row({'status':404,'pattern':pattern,
4147 'error':'record not found','response':None})
4148 else:
4149 return Row({'status':200,'response':item[field],
4150 'pattern':pattern})
4151 elif tag != args[i]:
4152 break
4153 otable = table
4154 i += 1
4155 if i==len(tags) and table:
4156 otable,ofield = vars.get('order','%s.%s' % (table,field)).split('.',1)
4157 try:
4158 if otable[:1]=='~': orderby = ~db[otable[1:]][ofield]
4159 else: orderby = db[otable][ofield]
4160 except KeyError:
4161 return Row({'status':400,'error':'invalid orderby','response':None})
4162 fields = [field for field in db[table] if field.readable]
4163 count = dbset.count()
4164 try:
4165 limits = (int(vars.get('min',0)),int(vars.get('max',1000)))
4166 if limits[0]<0 or limits[1]<limits[0]: raise ValueError
4167 except ValueError:
4168 return Row({'status':400,'error':'invalid limits','response':None})
4169 if count > limits[1]-limits[0]:
4170 return Row({'status':400,'error':'too many records','response':None})
4171 try:
4172 response = dbset.select(limitby=limits,orderby=orderby,*fields)
4173 except ValueError:
4174 return Row({'status':400,'pattern':pattern,
4175 'error':'invalid path','response':None})
4176 return Row({'status':200,'response':response,'pattern':pattern})
4177 return Row({'status':400,'error':'no matching pattern','response':None})
4178
4179
4180 - def define_table(
4181 self,
4182 tablename,
4183 *fields,
4184 **args
4185 ):
4186
4187 for key in args:
4188 if key not in [
4189 'migrate',
4190 'primarykey',
4191 'fake_migrate',
4192 'format',
4193 'trigger_name',
4194 'sequence_name',
4195 'polymodel']:
4196 raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
4197 migrate = self._migrate_enabled and args.get('migrate',self._migrate)
4198 fake_migrate = self._fake_migrate_all or args.get('fake_migrate',self._fake_migrate)
4199 format = args.get('format',None)
4200 trigger_name = args.get('trigger_name', None)
4201 sequence_name = args.get('sequence_name', None)
4202 primarykey=args.get('primarykey',None)
4203 polymodel=args.get('polymodel',None)
4204 if not isinstance(tablename,str):
4205 raise SyntaxError, "missing table name"
4206 tablename = cleanup(tablename)
4207 lowertablename = tablename.lower()
4208
4209 if tablename.startswith('_') or hasattr(self,lowertablename) or \
4210 regex_python_keywords.match(tablename):
4211 raise SyntaxError, 'invalid table name: %s' % tablename
4212 elif lowertablename in self.tables:
4213 raise SyntaxError, 'table already defined: %s' % tablename
4214 elif self.check_reserved:
4215 self.check_reserved_keyword(tablename)
4216
4217 if self._common_fields:
4218 fields = [f for f in fields] + [f for f in self._common_fields]
4219
4220 t = self[tablename] = Table(self, tablename, *fields,
4221 **dict(primarykey=primarykey,
4222 trigger_name=trigger_name,
4223 sequence_name=sequence_name))
4224
4225 if self._uri in (None,'None'):
4226 return t
4227
4228 t._create_references()
4229
4230 if migrate or self._adapter.dbengine=='google:datastore':
4231 try:
4232 sql_locker.acquire()
4233 self._adapter.create_table(t,migrate=migrate,
4234 fake_migrate=fake_migrate,
4235 polymodel=polymodel)
4236 finally:
4237 sql_locker.release()
4238 else:
4239 t._dbt = None
4240 self.tables.append(tablename)
4241 t._format = format
4242 return t
4243
4244 - def __iter__(self):
4245 for tablename in self.tables:
4246 yield self[tablename]
4247
4250
4253
4256
4257 - def __setattr__(self, key, value):
4258 if key[:1]!='_' and key in self:
4259 raise SyntaxError, \
4260 'Object %s exists and cannot be redefined' % key
4261 self[key] = value
4262
4263 - def __repr__(self):
4264 return '<DAL ' + dict.__repr__(self) + '>'
4265
4266 - def __call__(self, query=None):
4267 if isinstance(query,Table):
4268 query = query._id>0
4269 elif isinstance(query,Field):
4270 query = query!=None
4271 return Set(self, query)
4272
4275
4278
4279 - def executesql(self, query, placeholders=None, as_dict=False):
4280 """
4281 placeholders is optional and will always be None when using DAL
4282 if using raw SQL with placeholders, placeholders may be
4283 a sequence of values to be substituted in
4284 or, *if supported by the DB driver*, a dictionary with keys
4285 matching named placeholders in your SQL.
4286
4287 Added 2009-12-05 "as_dict" optional argument. Will always be
4288 None when using DAL. If using raw SQL can be set to True
4289 and the results cursor returned by the DB driver will be
4290 converted to a sequence of dictionaries keyed with the db
4291 field names. Tested with SQLite but should work with any database
4292 since the cursor.description used to get field names is part of the
4293 Python dbi 2.0 specs. Results returned with as_dict = True are
4294 the same as those returned when applying .to_list() to a DAL query.
4295
4296 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
4297
4298 --bmeredyk
4299 """
4300 if placeholders:
4301 self._adapter.execute(query, placeholders)
4302 else:
4303 self._adapter.execute(query)
4304 if as_dict:
4305 if not hasattr(self._adapter.cursor,'description'):
4306 raise RuntimeError, "database does not support executesql(...,as_dict=True)"
4307
4308
4309
4310 columns = self._adapter.cursor.description
4311
4312 fields = [f[0] for f in columns]
4313
4314 data = self._adapter.cursor.fetchall()
4315
4316
4317 return [dict(zip(fields,row)) for row in data]
4318
4319 try:
4320 return self._adapter.cursor.fetchall()
4321 except:
4322 return None
4323
4324 - def _update_referenced_by(self, other):
4325 for tablename in self.tables:
4326 by = self[tablename]._referenced_by
4327 by[:] = [item for item in by if not item[0] == other]
4328
4335
4336 - def import_from_csv_file(self, ifile, id_map={}, null='<NULL>',
4337 unique='uuid', *args, **kwargs):
4338 for line in ifile:
4339 line = line.strip()
4340 if not line:
4341 continue
4342 elif line == 'END':
4343 return
4344 elif not line.startswith('TABLE ') or not line[6:] in self.tables:
4345 raise SyntaxError, 'invalid file format'
4346 else:
4347 tablename = line[6:]
4348 self[tablename].import_from_csv_file(ifile, id_map, null,
4349 unique, *args, **kwargs)
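# A sketch of the multi-table file layout this method expects (as written
# by the matching export; table and field names are illustrative):
#   TABLE person
#   person.id,person.name
#   1,Max
#
#   TABLE pet
#   pet.id,pet.name,pet.person
#   1,Snoopy,1
#   END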
4350
4351
4353 """
4354 Helper class providing a comma-separated string having all the field names
4355 (prefixed by table name and '.')
4356
4357 normally only called from within gluon.sql
4358 """
4359
4362
4363 - def __str__(self):
4364 return ', '.join([str(field) for field in self.table])
4365
4366
4367 -class Reference(int):
4368
4369 - def __allocate(self):
4370 if not self._record:
4371 self._record = self._table[int(self)]
4372 if not self._record:
4373 raise RuntimeError, "Using a recursive select but encountered a broken reference: %s %d"%(self._table, int(self))
4374
4375 - def __getattr__(self, key):
4376 if key == 'id':
4377 return int(self)
4378 self.__allocate()
4379 return self._record.get(key, None)
4380
4387
4388 - def __getitem__(self, key):
4389 if key == 'id':
4390 return int(self)
4391 self.__allocate()
4392 return self._record.get(key, None)
4393
4394 - def __setitem__(self, key, value):
4395 self.__allocate()
4396 self._record[key] = value
4397
4398
4399 - def Reference_unpickler(data):
4400 return marshal.loads(data)
4401
4402 - def Reference_pickler(data):
4403 try:
4404 marshal_dump = marshal.dumps(int(data))
4405 except AttributeError:
4406 marshal_dump = 'i%s' % struct.pack('<i', int(data))
4407 return (Reference_unpickler, (marshal_dump,))
4408
4409 copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
4410
4411
4412 -class Table(dict):
4413
4414 """
4415 an instance of this class represents a database table
4416
4417 Example::
4418
4419 db = DAL(...)
4420 db.define_table('users', Field('name'))
4421 db.users.insert(name='me') # print db.users._insert(...) to see SQL
4422 db.users.drop()
4423 """
4424
4425 - def __init__(
4426 self,
4427 db,
4428 tablename,
4429 *fields,
4430 **args
4431 ):
4432 """
4433 Initializes the table and performs checking on the provided fields.
4434
4435 Each table will automatically have an 'id' field (unless a custom primarykey is given).
4436
4437 If a field is of type Table, the fields (excluding 'id') from that table
4438 will be used instead.
4439
4440 :raises SyntaxError: when a supplied field is of incorrect type.
4441 """
4442 self._tablename = tablename
4443 self._sequence_name = args.get('sequence_name',None) or \
4444 db and db._adapter.sequence_name(tablename)
4445 self._trigger_name = args.get('trigger_name',None) or \
4446 db and db._adapter.trigger_name(tablename)
4447
4448 primarykey = args.get('primarykey', None)
4449 fieldnames,newfields=set(),[]
4450 if primarykey and not isinstance(primarykey,list):
4451 raise SyntaxError, "primarykey must be a list of fields from table '%s'" \
4452 % tablename
4453 elif primarykey:
4454 self._primarykey = primarykey
4455 elif not [f for f in fields if hasattr(f,'type') and f.type=='id']:
4456 field = Field('id', 'id')
4457 newfields.append(field)
4458 fieldnames.add('id')
4459 self._id = field
4460 for field in fields:
4461 if not isinstance(field, (Field, Table)):
4462 raise SyntaxError, \
4463 'define_table argument is not a Field or Table: %s' % field
4464 elif isinstance(field, Field) and not field.name in fieldnames:
4465 if hasattr(field, '_db'):
4466 field = copy.copy(field)
4467 newfields.append(field)
4468 fieldnames.add(field.name)
4469 if field.type=='id':
4470 self._id = field
4471 elif isinstance(field, Table):
4472 table = field
4473 for field in table:
4474 if not field.name in fieldnames and not field.type=='id':
4475 newfields.append(copy.copy(field))
4476 fieldnames.add(field.name)
4477 else:
4478
4479 pass
4480 fields = newfields
4481 self._db = db
4483 self.fields = SQLCallableList()
4484 self.virtualfields = []
4485 fields = list(fields)
4486
4487 if db and self._db._adapter.uploads_in_blob==True:
4488 for field in fields:
4489 if isinstance(field, Field) and field.type == 'upload'\
4490 and field.uploadfield is True:
4491 tmp = field.uploadfield = '%s_blob' % field.name
4492 fields.append(self._db.Field(tmp, 'blob', default=''))
4493
4494 lower_fieldnames = set()
4495 for field in fields:
4496 if db and db.check_reserved:
4497 db.check_reserved_keyword(field.name)
4498
4499 if field.name.lower() in lower_fieldnames:
4500 raise SyntaxError, "duplicate field %s in table %s" % (field.name, tablename)
4501 else:
4502 lower_fieldnames.add(field.name.lower())
4503
4504 self.fields.append(field.name)
4505 self[field.name] = field
4506 if field.type == 'id':
4507 self['id'] = field
4508 field.tablename = field._tablename = tablename
4509 field.table = field._table = self
4510 field.db = field._db = self._db
4511 field.length = min(field.length,self._db and self._db._adapter.maxcharlength or INFINITY)
4512 if field.requires == DEFAULT:
4513 field.requires = sqlhtml_validators(field)
4514 self.ALL = SQLALL(self)
4515
4516 if hasattr(self,'_primarykey'):
4517 for k in self._primarykey:
4518 if k not in self.fields:
4519 raise SyntaxError, \
4520 "primarykey must be a list of fields from table '%s " % tablename
4521 else:
4522 self[k].notnull = True
4523
4524 - def _validate(self, **vars):
4525 errors = Row()
4526 for key,value in vars.items():
4527 value,error = self[key].validate(value)
4528 if error:
4529 errors[key] = error
4530 return errors
4531
4532 - def _create_references(self):
4533 pr = self._db._pending_references
4534 self._referenced_by = []
4535 for fieldname in self.fields:
4536 field=self[fieldname]
4537 if isinstance(field.type,str) and field.type[:10] == 'reference ':
4538 ref = field.type[10:].strip()
4539 if not ref.split():
4540 raise SyntaxError, 'Table: reference to nothing: %s' %ref
4541 refs = ref.split('.')
4542 rtablename = refs[0]
4543 if not rtablename in self._db:
4544 pr[rtablename] = pr.get(rtablename,[]) + [field]
4545 continue
4546 rtable = self._db[rtablename]
4547 if len(refs)==2:
4548 rfieldname = refs[1]
4549 if not hasattr(rtable,'_primarykey'):
4550 raise SyntaxError,\
4551 'keyed tables can only reference other keyed tables (for now)'
4552 if rfieldname not in rtable.fields:
4553 raise SyntaxError,\
4554 "invalid field '%s' for referenced table '%s' in table '%s'" \
4555 % (rfieldname, rtablename, self._tablename)
4556 rtable._referenced_by.append((self._tablename, field.name))
4557 for referee in pr.get(self._tablename,[]):
4558 self._referenced_by.append((referee._tablename,referee.name))
4559
4560 - def _filter_fields(self, record, id=False):
4561 return dict([(k, v) for (k, v) in record.items() if k
4562 in self.fields and (self[k].type!='id' or id)])
4563
4565 """ for keyed table only """
4566 query = None
4567 for k,v in key.iteritems():
4568 if k in self._primarykey:
4569 if query:
4570 query = query & (self[k] == v)
4571 else:
4572 query = (self[k] == v)
4573 else:
4574 raise SyntaxError, \
4575 'Field %s is not part of the primary key of %s' % \
4576 (k,self._tablename)
4577 return query
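# A minimal sketch for a keyed table (assumes a table defined with
# primarykey=['pka','pkb']):
#   query = db.mytable._build_query({'pka': 1, 'pkb': 2})
#   # equivalent to (db.mytable.pka==1) & (db.mytable.pkb==2)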
4578
4579 - def __getitem__(self, key):
4580 if not key:
4581 return None
4582 elif isinstance(key, dict):
4583 """ for keyed table """
4584 query = self._build_query(key)
4585 rows = self._db(query).select()
4586 if rows:
4587 return rows[0]
4588 return None
4589 elif str(key).isdigit():
4590 return self._db(self._id == key).select(limitby=(0,1)).first()
4591 elif key:
4592 return dict.__getitem__(self, str(key))
4593
4594 - def __call__(self, key=DEFAULT, **kwargs):
4595 if key!=DEFAULT:
4596 if isinstance(key, Query):
4597 record = self._db(key).select(limitby=(0,1)).first()
4598 elif not str(key).isdigit():
4599 record = None
4600 else:
4601 record = self._db(self._id == key).select(limitby=(0,1)).first()
4602 if record:
4603 for k,v in kwargs.items():
4604 if record[k]!=v: return None
4605 return record
4606 elif kwargs:
4607 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.items()])
4608 return self._db(query).select(limitby=(0,1)).first()
4609 else:
4610 return None
4611
4612 - def __setitem__(self, key, value):
4613 if isinstance(key, dict) and isinstance(value, dict):
4614 """ option for keyed table """
4615 if set(key.keys()) == set(self._primarykey):
4616 value = self._filter_fields(value)
4617 kv = {}
4618 kv.update(value)
4619 kv.update(key)
4620 if not self.insert(**kv):
4621 query = self._build_query(key)
4622 self._db(query).update(**self._filter_fields(value))
4623 else:
4624 raise SyntaxError,\
4625 'key must have all fields from primary key: %s'%\
4626 (self._primarykey)
4627 elif str(key).isdigit():
4628 if key == 0:
4629 self.insert(**self._filter_fields(value))
4630 elif not self._db(self._id == key)\
4631 .update(**self._filter_fields(value)):
4632 raise SyntaxError, 'No such record: %s' % key
4633 else:
4634 if isinstance(key, dict):
4635 raise SyntaxError,\
4636 'value must be a dictionary: %s' % value
4637 dict.__setitem__(self, str(key), value)
4638
4639 - def __delitem__(self, key):
4640 if isinstance(key, dict):
4641 query = self._build_query(key)
4642 if not self._db(query).delete():
4643 raise SyntaxError, 'No such record: %s' % key
4644 elif not str(key).isdigit() or not self._db(self._id == key).delete():
4645 raise SyntaxError, 'No such record: %s' % key
4646
4649
4650 - def __setattr__(self, key, value):
4651 if key in self:
4652 raise SyntaxError, 'Object exists and cannot be redefined: %s' % key
4653 self[key] = value
4654
4655 - def __iter__(self):
4656 for fieldname in self.fields:
4657 yield self[fieldname]
4658
4659 - def __repr__(self):
4660 return '<Table ' + dict.__repr__(self) + '>'
4661
4662 - def __str__(self):
4663 if self.get('_ot', None):
4664 return '%s AS %s' % (self._ot, self._tablename)
4665 return self._tablename
4666
4667 - def _drop(self, mode = ''):
4668 return self._db._adapter._drop(self, mode)
4669
4670 - def drop(self, mode = ''):
4671 return self._db._adapter.drop(self,mode)
4672
4673 - def _listify(self,fields,update=False):
4674 new_fields = []
4675 new_fields_names = []
4676 for name in fields:
4677 if not name in self.fields:
4678 if name != 'id':
4679 raise SyntaxError, 'Field %s does not belong to the table' % name
4680 else:
4681 new_fields.append((self[name],fields[name]))
4682 new_fields_names.append(name)
4683 for ofield in self:
4684 if not ofield.name in new_fields_names:
4685 if not update and ofield.default!=None:
4686 new_fields.append((ofield,ofield.default))
4687 elif update and ofield.update!=None:
4688 new_fields.append((ofield,ofield.update))
4689 for ofield in self:
4690 if not ofield.name in new_fields_names and ofield.compute:
4691 try:
4692 new_fields.append((ofield,ofield.compute(Row(fields))))
4693 except KeyError:
4694 pass
4695 if not update and ofield.required and not ofield.name in new_fields_names:
4696 raise SyntaxError,'Table: missing required field: %s' % ofield.name
4697 return new_fields
4698
4701
4704
4705 - def validate_and_insert(self, **fields):
4706 response = Row()
4707 response.errors = self._validate(**fields)
4708 if not response.errors:
4709 response.id = self.insert(**fields)
4710 else:
4711 response.id = None
4712 return response
4713
4725
4732
4733 - def _truncate(self, mode = ''):
4734 return self._db._adapter._truncate(self, mode)
4735
4736 - def truncate(self, mode = ''):
4737 return self._db._adapter.truncate(self, mode)
4738
4739 - def import_from_csv_file(
4740 self,
4741 csvfile,
4742 id_map=None,
4743 null='<NULL>',
4744 unique='uuid',
4745 *args, **kwargs
4746 ):
4747 """
4748 import records from csv file. Column headers must have same names as
4749 table fields. field 'id' is ignored. If column names read 'table.file'
4750 the 'table.' prefix is ignored.
4751 'unique' argument is a field which must be unique
4752 (typically a uuid field)
4753 """
4754
4755 delimiter = kwargs.get('delimiter', ',')
4756 quotechar = kwargs.get('quotechar', '"')
4757 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
4758
4759 reader = csv.reader(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=quoting)
4760 colnames = None
4761 if isinstance(id_map, dict):
4762 if not self._tablename in id_map:
4763 id_map[self._tablename] = {}
4764 id_map_self = id_map[self._tablename]
4765
4766 def fix(field, value, id_map):
4767 if value == null:
4768 value = None
4769 elif field.type=='double':
4770 if not value.strip():
4771 value = None
4772 else:
4773 value = float(value)
4774 elif field.type=='integer':
4775 if not value.strip():
4776 value = None
4777 else:
4778 value = int(value)
4779 elif field.type.startswith('list:string'):
4780 value = bar_decode_string(value)
4781 elif field.type.startswith('list:reference'):
4782 ref_table = field.type[10:].strip()
4783 value = [id_map[ref_table][int(v)] \
4784 for v in bar_decode_string(value)]
4785 elif field.type.startswith('list:'):
4786 value = bar_decode_integer(value)
4787 elif id_map and field.type.startswith('reference'):
4788 try:
4789 value = id_map[field.type[9:].strip()][value]
4790 except KeyError:
4791 pass
4792 return (field.name, value)
4793
4794 def is_id(colname):
4795 if colname in self:
4796 return self[colname].type == 'id'
4797 else:
4798 return False
4799
4800 for line in reader:
4801 if not line:
4802 break
4803 if not colnames:
4804 colnames = [x.split('.',1)[-1] for x in line][:len(line)]
4805 cols, cid = [], []
4806 for i,colname in enumerate(colnames):
4807 if is_id(colname):
4808 cid = i
4809 else:
4810 cols.append(i)
4811 if colname == unique:
4812 unique_idx = i
4813 else:
4814 items = [fix(self[colnames[i]], line[i], id_map) \
4815 for i in cols if colnames[i] in self.fields]
4816
4817
4818 if not unique or unique not in colnames:
4819 new_id = self.insert(**dict(items))
4820 else:
4821 unique_value = line[unique_idx]
4822 query = self._db[self][unique] == unique_value
4823 record = self._db(query).select().first()
4824 if record:
4825 record.update_record(**dict(items))
4826 new_id = record[self._id.name]
4827 else:
4828 new_id = self.insert(**dict(items))
4829 if id_map and cid != []:
4830 id_map_self[line[cid]] = new_id
4831
4834
4835 - def on(self, query):
4836 return Expression(self._db,self._db._adapter.ON,self,query)
4837
4838
4839
4840 -class Expression(object):
4841
4842 - def __init__(
4843 self,
4844 db,
4845 op,
4846 first=None,
4847 second=None,
4848 type=None,
4849 ):
4850
4851 self.db = db
4852 self.op = op
4853 self.first = first
4854 self.second = second
4855
4856 if not type and first and hasattr(first,'type'):
4857 self.type = first.type
4858 else:
4859 self.type = type
4860
4863
4866
4869
4872
4873 - def lower(self):
4874 return Expression(self.db, self.db._adapter.LOWER, self, None, self.type)
4875
4876 - def upper(self):
4877 return Expression(self.db, self.db._adapter.UPPER, self, None, self.type)
4878
4881
4884
4887
4890
4893
4896
4899
4900 - def __getslice__(self, start, stop):
4901 if start < 0:
4902 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
4903 else:
4904 pos0 = start + 1
4905
4906 if stop < 0:
4907 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
4908 elif stop == sys.maxint:
4909 length = self.len()
4910 else:
4911 length = '(%s - %s)' % (stop + 1, pos0)
4912 return Expression(self.db,self.db._adapter.SUBSTRING,
4913 self, (pos0, length), self.type)
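# A usage sketch: slicing an Expression builds a SQL substring, so
# db.person.name[0:2] expands to something like SUBSTR(person.name,1,2);
# the exact function name depends on the adapter.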
4914
4915 - def __getitem__(self, i):
4916 return self[i:i + 1]
4917
4918 - def __str__(self):
4919 return self.db._adapter.expand(self,self.type)
4920
4921 - def __or__(self, other): # joins expressions, e.g. for orderby
4922 return Expression(self.db,self.db._adapter.COMMA,self,other,self.type)
4923
4924 - def __invert__(self):
4925 if self.op == self.db._adapter.INVERT:
4926 return self.first
4927 return Expression(self.db,self.db._adapter.INVERT,self,type=self.type)
4928
4929 - def __add__(self, other):
4930 return Expression(self.db,self.db._adapter.ADD,self,other,self.type)
4931
4932 - def __sub__(self, other):
4933 if self.type == 'integer':
4934 result_type = 'integer'
4935 elif self.type in ['date','time','datetime','double']:
4936 result_type = 'double'
4937 else:
4938 raise SyntaxError, "subtraction operation not supported for type"
4939 return Expression(self.db,self.db._adapter.SUB,self,other,
4940 result_type)
4941 - def __mul__(self, other):
4942 return Expression(self.db,self.db._adapter.MUL,self,other,self.type)
4943
4944 - def __div__(self, other):
4945 return Expression(self.db,self.db._adapter.DIV,self,other,self.type)
4946
4947 - def __mod__(self, other):
4948 return Expression(self.db,self.db._adapter.MOD,self,other,self.type)
4949
4950 - def __eq__(self, value):
4951 return Query(self.db, self.db._adapter.EQ, self, value)
4952
4953 - def __ne__(self, value):
4954 return Query(self.db, self.db._adapter.NE, self, value)
4955
4956 - def __lt__(self, value):
4957 return Query(self.db, self.db._adapter.LT, self, value)
4958
4959 - def __le__(self, value):
4960 return Query(self.db, self.db._adapter.LE, self, value)
4961
4962 - def __gt__(self, value):
4963 return Query(self.db, self.db._adapter.GT, self, value)
4964
4965 - def __ge__(self, value):
4966 return Query(self.db, self.db._adapter.GE, self, value)
4967
4968 - def like(self, value):
4969 return Query(self.db, self.db._adapter.LIKE, self, value)
4970
4971 - def belongs(self, value):
4972 return Query(self.db, self.db._adapter.BELONGS, self, value)
4973
4974 - def startswith(self, value):
4975 if not self.type in ('string', 'text'):
4976 raise SyntaxError, "startswith used with incompatible field type"
4977 return Query(self.db, self.db._adapter.STARTSWITH, self, value)
4978
4979 - def endswith(self, value):
4980 if not self.type in ('string', 'text'):
4981 raise SyntaxError, "endswith used with incompatible field type"
4982 return Query(self.db, self.db._adapter.ENDSWITH, self, value)
4983
4984 - def contains(self, value):
4985 if not self.type in ('string', 'text') and not self.type.startswith('list:'):
4986 raise SyntaxError, "contains used with incompatible field type"
4987 return Query(self.db, self.db._adapter.CONTAINS, self, value)
4988
4991
4992
4993
4994
4996 """
4997 allows defining of custom SQL types
4998
4999 Example::
5000
5001 decimal = SQLCustomType(
5002 type ='double',
5003 native ='integer',
5004 encoder =(lambda x: int(float(x) * 100)),
5005 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
5006 )
5007
5008 db.define_table(
5009 'example',
5010 Field('value', type=decimal)
5011 )
5012
5013 :param type: the web2py type (default = 'string')
5014 :param native: the backend type
5015 :param encoder: how to encode the value to store it in the backend
5016 :param decoder: how to decode the value retrieved from the backend
5017 :param validator: what validators to use ( default = None, will use the
5018 default validator for type)
5019 """
5020
5021 - def __init__(
5022 self,
5023 type='string',
5024 native=None,
5025 encoder=None,
5026 decoder=None,
5027 validator=None,
5028 _class=None,
5029 ):
5030
5031 self.type = type
5032 self.native = native
5033 self.encoder = encoder or (lambda x: x)
5034 self.decoder = decoder or (lambda x: x)
5035 self.validator = validator
5036 self._class = _class or type
5037
5040
5043
5046
5049
5050
5051 -class Field(Expression):
5052
5053 """
5054 an instance of this class represents a database field
5055
5056 example::
5057
5058 a = Field(name, 'string', length=32, default=None, required=False,
5059 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
5060 notnull=False, unique=False,
5061 widget=None, label=None, comment=None,
5062 uploadfield=True, # True means store on disk,
5063 # 'a_field_name' means store in this field in db
5064 # False means file content will be discarded.
5065 writable=True, readable=True, update=None, authorize=None,
5066 autodelete=False, represent=None, uploadfolder=None,
5067 uploadseparate=False # upload to separate directories by uuid_keys
5068 # first 2 character and tablename.fieldname
5069 # False - old behavior
5070 # True - put uploaded file in
5071 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
5072 # directory)
5073
5074 to be used as argument of DAL.define_table
5075
5076 allowed field types:
5077 string, boolean, integer, double, text, blob,
5078 date, time, datetime, upload, password
5079
5080 string fields default to a length of Adapter.maxcharlength (512, or 255 for MySQL)
5081 fields should have a default or they will be required in SQLFORMs
5082 the requires argument is used to validate the field input in SQLFORMs
5083
5084 """
5085
5086 - def __init__(
5087 self,
5088 fieldname,
5089 type='string',
5090 length=None,
5091 default=DEFAULT,
5092 required=False,
5093 requires=DEFAULT,
5094 ondelete='CASCADE',
5095 notnull=False,
5096 unique=False,
5097 uploadfield=True,
5098 widget=None,
5099 label=None,
5100 comment=None,
5101 writable=True,
5102 readable=True,
5103 update=None,
5104 authorize=None,
5105 autodelete=False,
5106 represent=None,
5107 uploadfolder=None,
5108 uploadseparate=False,
5109 compute=None,
5110 custom_store=None,
5111 custom_retrieve=None,
5112 ):
5113 self.db = None
5114 self.op = None
5115 self.first = None
5116 self.second = None
5117 if not isinstance(fieldname,str):
5118 raise SyntaxError, "missing field name"
5119 if fieldname.startswith(':'):
5120 fieldname,readable,writable=fieldname[1:],False,False
5121 elif fieldname.startswith('.'):
5122 fieldname,readable,writable=fieldname[1:],False,False
5123 if '=' in fieldname:
5124 fieldname,default = fieldname.split('=',1)
5125 self.name = fieldname = cleanup(fieldname)
5126 if hasattr(Table,fieldname) or fieldname[0] == '_' or \
5127 regex_python_keywords.match(fieldname):
5128 raise SyntaxError, 'Field: invalid field name: %s' % fieldname
5129 if isinstance(type, Table):
5130 type = 'reference ' + type._tablename
5131 self.type = type
5132 self.length = (length is None) and MAXCHARLENGTH or length
5133 if default==DEFAULT:
5134 self.default = update or None
5135 else:
5136 self.default = default
5137 self.required = required
5138 self.ondelete = ondelete.upper()
5139 self.notnull = notnull
5140 self.unique = unique
5141 self.uploadfield = uploadfield
5142 self.uploadfolder = uploadfolder
5143 self.uploadseparate = uploadseparate
5144 self.widget = widget
5145 self.label = label or ' '.join(item.capitalize() for item in fieldname.split('_'))
5146 self.comment = comment
5147 self.writable = writable
5148 self.readable = readable
5149 self.update = update
5150 self.authorize = authorize
5151 self.autodelete = autodelete
5152 if not represent and type in ('list:integer','list:string'):
5153 represent=lambda x: ', '.join(str(y) for y in x or [])
5154 self.represent = represent
5155 self.compute = compute
5156 self.isattachment = True
5157 self.custom_store = custom_store
5158 self.custom_retrieve = custom_retrieve
5162 if requires is None:
5163 self.requires = []
5164 else:
5165 self.requires = requires
5166
5167 - def store(self, file, filename=None, path=None):
5168 if self.custom_store:
5169 return self.custom_store(file,filename,path)
5170 if not filename:
5171 filename = file.name
5172 filename = os.path.basename(filename.replace('/', os.sep)\
5173 .replace('\\', os.sep))
5174 m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
5175 extension = m and m.group('e') or 'txt'
5176 uuid_key = web2py_uuid().replace('-', '')[-16:]
5177 encoded_filename = base64.b16encode(filename).lower()
5178 newfilename = '%s.%s.%s.%s' % \
5179 (self._tablename, self.name, uuid_key, encoded_filename)
5180 newfilename = newfilename[:200] + '.' + extension
5181 if isinstance(self.uploadfield,Field):
5182 blob_uploadfield_name = self.uploadfield.uploadfield
5183 keys={self.uploadfield.name: newfilename,
5184 blob_uploadfield_name: file.read()}
5185 self.uploadfield.table.insert(**keys)
5186 elif self.uploadfield == True:
5187 if path:
5188 pass
5189 elif self.uploadfolder:
5190 path = self.uploadfolder
5191 elif self.db._adapter.folder:
5192 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
5193 else:
5194 raise RuntimeError, "you must specify a Field(...,uploadfolder=...)"
5195 if self.uploadseparate:
5196 path = os.path.join(path,"%s.%s" % (self._tablename, self.name),uuid_key[:2])
5197 if not os.path.exists(path):
5198 os.makedirs(path)
5199 pathfilename = os.path.join(path, newfilename)
5200 dest_file = open(pathfilename, 'wb')
5201 try:
5202 shutil.copyfileobj(file, dest_file)
5203 finally:
5204 dest_file.close()
5205 return newfilename
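# A sketch of the generated name (values illustrative): for
# Field('image','upload') on table 'person', store() returns e.g.
#   person.image.96380f9e7cd8a927.696d67312e706e67.png
# i.e. table.field.uuid_key.b16encode(original_name).extension, so
# retrieve() can recover the original file name from the b16 segment.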
5206
5207 - def retrieve(self, name, path=None):
5208 if self.custom_retrieve:
5209 return self.custom_retrieve(name, path)
5210 import http
5211 if self.authorize or isinstance(self.uploadfield, str):
5212 row = self.db(self == name).select().first()
5213 if not row:
5214 raise http.HTTP(404)
5215 if self.authorize and not self.authorize(row):
5216 raise http.HTTP(403)
5217 try:
5218 m = regex_content.match(name)
5219 if not m or not self.isattachment:
5220 raise TypeError, 'Can\'t retrieve %s' % name
5221 filename = base64.b16decode(m.group('name'), True)
5222 filename = regex_cleanup_fn.sub('_', filename)
5223 except (TypeError, AttributeError):
5224 filename = name
5225 if isinstance(self.uploadfield, str):
5226 return (filename, cStringIO.StringIO(row[self.uploadfield] or ''))
5227 elif isinstance(self.uploadfield,Field):
5228 blob_uploadfield_name = self.uploadfield.uploadfield
5229 query = self.uploadfield == name
5230 data = self.uploadfield.table(query)[blob_uploadfield_name]
5231 return (filename, cStringIO.StringIO(data))
5232 else:
5233
5234 if path:
5235 pass
5236 elif self.uploadfolder:
5237 path = self.uploadfolder
5238 else:
5239 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
5240 if self.uploadseparate:
5241 t = m.group('table')
5242 f = m.group('field')
5243 u = m.group('uuidkey')
5244 path = os.path.join(path,"%s.%s" % (t,f),u[:2])
5245 return (filename, open(os.path.join(path, name), 'rb'))
5246
5261
5273
5276
5279
5280 - def __str__(self):
5281 try:
5282 return '%s.%s' % (self.tablename, self.name)
5283 except:
5284 return '<no table>.%s' % self.name
5285
5286
5287 -class Query(object):
5288
5289 """
5290 a query object necessary to define a set.
5291 it can be stored or can be passed to DAL.__call__() to obtain a Set
5292
5293 Example::
5294
5295 query = db.users.name=='Max'
5296 set = db(query)
5297 records = set.select()
5298
5299 """
5300
5301 - def __init__(
5302 self,
5303 db,
5304 op,
5305 first=None,
5306 second=None,
5307 ):
5308 self.db = db
5309 self.op = op
5310 self.first = first
5311 self.second = second
5312
5313 - def __str__(self):
5314 return self.db._adapter.expand(self)
5315
5316 - def __and__(self, other):
5317 return Query(self.db,self.db._adapter.AND,self,other)
5318
5319 - def __or__(self, other):
5320 return Query(self.db,self.db._adapter.OR,self,other)
5321
5322 - def __invert__(self):
5323 if self.op==self.db._adapter.NOT:
5324 return self.first
5325 return Query(self.db,self.db._adapter.NOT,self)
5326
5327
5328 regex_quotes = re.compile("'[^']*'")
5329
5330
5331 - def xorify(orderby):
5332 if not orderby:
5333 return None
5334 orderby2 = orderby[0]
5335 for item in orderby[1:]:
5336 orderby2 = orderby2 | item
5337 return orderby2
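# A usage sketch: xorify([db.person.name, ~db.person.birth]) folds the
# list with | into a single orderby expression, equivalent to writing
# db.person.name | ~db.person.birth directly.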
5338
5339
5340 -class Set(object):
5341
5342 """
5343 a Set represents a set of records in the database,
5344 the records are identified by the query=Query(...) object.
5345 normally the Set is generated by DAL.__call__(Query(...))
5346
5347 given a set, for example
5348 set = db(db.users.name=='Max')
5349 you can:
5350 set.update(name='Massimo')
5351 set.delete() # all elements in the set
5352 set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
5353 and take subsets:
5354 subset = set(db.users.id<5)
5355 """
5356
5357 - def __init__(self, db, query):
5358 self.db = db
5359 self._db = db
5360 self.query = query
5361
5362 - def __call__(self, query):
5363 if isinstance(query,Table):
5364 query = query._id>0
5365 elif isinstance(query,Field):
5366 query = query!=None
5367 if self.query:
5368 return Set(self.db, self.query & query)
5369 else:
5370 return Set(self.db, query)
5371
5372 - def _count(self,distinct=None):
5373 return self.db._adapter._count(self.query,distinct)
5374
5375 - def _select(self, *fields, **attributes):
5376 return self.db._adapter._select(self.query,fields,attributes)
5377
5378 - def _delete(self):
5379 tablename=self.db._adapter.get_table(self.query)
5380 return self.db._adapter._delete(tablename,self.query)
5381
5382 - def _update(self, **update_fields):
5383 tablename = self.db._adapter.get_table(self.query)
5384 fields = self.db[tablename]._listify(update_fields,update=True)
5385 return self.db._adapter._update(tablename,self.query,fields)
5386
5387 - def isempty(self):
5388 return not self.select(limitby=(0,1))
5389
5390 - def count(self,distinct=None):
5391 return self.db._adapter.count(self.query,distinct)
5392
5393 - def select(self, *fields, **attributes):
5394 return self.db._adapter.select(self.query,fields,attributes)
5395
5400
5401 - def update(self, **update_fields):
5402 tablename = self.db._adapter.get_table(self.query)
5403 fields = self.db[tablename]._listify(update_fields,update=True)
5404 if not fields:
5405 raise SyntaxError, "No fields to update"
5406 self.delete_uploaded_files(update_fields)
5407 return self.db._adapter.update(tablename,self.query,fields)
5408
5409 - def validate_and_update(self, **update_fields):
5410 tablename = self.db._adapter.get_table(self.query)
5411 response = Row()
5412 response.errors = self.db[tablename]._validate(**update_fields)
5413 fields = self.db[tablename]._listify(update_fields,update=True)
5414 if not fields:
5415 raise SyntaxError, "No fields to update"
5416 self.delete_uploaded_files(update_fields)
5417 if not response.errors:
5418 response.updated = self.db._adapter.update(tablename,self.query,fields)
5419 else:
5420 response.updated = None
5421 return response
5422
5423 - def delete_uploaded_files(self, upload_fields=None):
5424 table = self.db[self.db._adapter.tables(self.query)[0]]
5425
5426 if upload_fields:
5427 fields = upload_fields.keys()
5428 else:
5429 fields = table.fields
5430 fields = [f for f in fields if table[f].type == 'upload'
5431 and table[f].uploadfield == True
5432 and table[f].autodelete]
5433 if not fields:
5434 return
5435 for record in self.select(*[table[f] for f in fields]):
5436 for fieldname in fields:
5437 field = table[fieldname]
5438 oldname = record.get(fieldname, None)
5439 if not oldname:
5440 continue
5441 if upload_fields and oldname == upload_fields[fieldname]:
5442 continue
5443 uploadfolder = field.uploadfolder
5444 if not uploadfolder:
5445 uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
5446 if field.uploadseparate:
5447 items = oldname.split('.')
5448 uploadfolder = os.path.join(uploadfolder,
5449 "%s.%s" % (items[0], items[1]),
5450 items[2][:2])
5451 oldpath = os.path.join(uploadfolder, oldname)
5452 if os.path.exists(oldpath):
5453 os.unlink(oldpath)
5454
5455
5456 - def update_record(pack, a=None):
5457 (colset, table, id) = pack
5458 b = a or dict(colset)
5459 c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
5460 table._db(table._id==id).update(**c)
5461 for (k, v) in c.items():
5462 colset[k] = v
5463
5464
5465 -class Rows(object):
5466
5467 """
5468 A wrapper for the return value of a select. It basically represents a table.
5469 It has an iterator and each row is represented as a dictionary.
5470 """
5471
5472
5473
5474 - def __init__(
5475 self,
5476 db=None,
5477 records=[],
5478 colnames=[],
5479 compact=True,
5480 rawrows=None
5481 ):
5482 self.db = db
5483 self.records = records
5484 self.colnames = colnames
5485 self.compact = compact
5486 self.response = rawrows
5487
5488 - def setvirtualfields(self, **keyed_virtualfields):
5489 if not keyed_virtualfields:
5490 return self
5491 for row in self.records:
5492 for (tablename,virtualfields) in keyed_virtualfields.items():
5493 attributes = dir(virtualfields)
5494 virtualfields.__dict__.update(row)
5495 if not tablename in row:
5496 box = row[tablename] = Row()
5497 else:
5498 box = row[tablename]
5499 for attribute in attributes:
5500 if attribute[0] != '_':
5501 method = getattr(virtualfields,attribute)
5502 if hasattr(method,'im_func') and method.im_func.func_code.co_argcount:
5503 box[attribute]=method()
5504 return self
5505
5506 - def __and__(self, other):
5507 if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects'
5508 records = self.records+other.records
5509 return Rows(self.db,records,self.colnames)
5510
5511 - def __or__(self, other):
5512 if self.colnames!=other.colnames: raise Exception, 'Cannot | incompatible Rows objects'
5513 records = self.records
5514 records += [record for record in other.records \
5515 if not record in records]
5516 return Rows(self.db,records,self.colnames)
5517
5518 - def __nonzero__(self):
5519 if len(self.records):
5520 return 1
5521 return 0
5522
5523 - def __len__(self):
5524 return len(self.records)
5525
5526 - def __getslice__(self, a, b):
5527 return Rows(self.db,self.records[a:b],self.colnames)
5528
5529 - def __getitem__(self, i):
5530 row = self.records[i]
5531 keys = row.keys()
5532 if self.compact and len(keys) == 1 and keys[0] != '_extra':
5533 return row[keys[0]]
5534 return row
5535
5536 - def __iter__(self):
5537 """
5538 iterator over records
5539 """
5540
5541 for i in xrange(len(self)):
5542 yield self[i]
5543
5545 """
5546 serializes the table into a csv file
5547 """
5548
5549 s = cStringIO.StringIO()
5550 self.export_to_csv_file(s)
5551 return s.getvalue()
5552
5553 - def first(self):
5554 if not self.records:
5555 return None
5556 return self[0]
5557
5558 - def last(self):
5559 if not self.records:
5560 return None
5561 return self[-1]
5562
5564 """
5565 returns a new Rows object, a subset of the original object,
5566 filtered by the function f
5567 """
5568 if not self.records:
5569 return Rows(self.db, [], self.colnames)
5570 records = []
5571 for i in range(0,len(self)):
5572 row = self[i]
5573 if f(row):
5574 records.append(self.records[i])
5575 return Rows(self.db, records, self.colnames)
5576
5578 """
5579 removes elements from the calling Rows object, filtered by the function f,
5580 and returns a new Rows object containing the removed elements
5581 """
5582 if not self.records:
5583 return Rows(self.db, [], self.colnames)
5584 removed = []
5585 i=0
5586 while i<len(self):
5587 row = self[i]
5588 if f(row):
5589 removed.append(self.records[i])
5590 del self.records[i]
5591 else:
5592 i += 1
5593 return Rows(self.db, removed, self.colnames)
5594
5595 - def sort(self, f, reverse=False):
5596 """
5597 returns a list of sorted elements (not sorted in place)
5598 """
5599 return Rows(self.db,sorted(self,key=f,reverse=reverse),self.colnames)
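# A combined usage sketch (assumes a 'person' table with a 'name' field):
#   rows = db(db.person).select()
#   named = rows.find(lambda row: row.name != '')     # filtered copy
#   blank = rows.exclude(lambda row: row.name == '')  # removed + returned
#   ordered = rows.sort(lambda row: row.name)         # new sorted Rows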
5600
5601 - def as_list(self,
5602 compact=True,
5603 storage_to_dict=True,
5604 datetime_to_str=True):
5605 """
5606 returns the data as a list or dictionary.
5607 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5608 :param datetime_to_str: convert datetime fields as strings (default True)
5609 """
5610 (oc, self.compact) = (self.compact, compact)
5611 if storage_to_dict:
5612 items = [item.as_dict(datetime_to_str) for item in self]
5613 else:
5614 items = [item for item in self]
5615 self.compact = oc # restore the original compact setting
5616 return items
5617
5618
5619 - def as_dict(self,
5620 key='id',
5621 compact=True,
5622 storage_to_dict=True,
5623 datetime_to_str=True):
5624 """
5625 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)
5626
5627 :param key: the name of the field to be used as dict key, normally the id
5628 :param compact: whether to use the compact row representation (default True)
5629 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5630 :param datetime_to_str: convert datetime fields as strings (default True)
5631 """
5632 rows = self.as_list(compact, storage_to_dict, datetime_to_str)
5633 if isinstance(key,str) and key.count('.')==1:
5634 (table, field) = key.split('.')
5635 return dict([(r[table][field],r) for r in rows])
5636 elif isinstance(key,str):
5637 return dict([(r[key],r) for r in rows])
5638 else:
5639 return dict([(key(r),r) for r in rows])
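# A usage sketch: with records like {'id': 1, 'name': 'Max'}, as_dict()
# returns {1: {'id': 1, 'name': 'Max'}}; key='name' keys the result by
# name, and key='person.id' addresses joined (non-compact) rows.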
5640
5642 """
5643 export data to csv, the first line contains the column names
5644
5645 :param ofile: where the csv must be exported to
5646 :param null: how null values must be represented (default '<NULL>')
5647 :param delimiter: delimiter to separate values (default ',')
5648 :param quotechar: character to use to quote string values (default '"')
5649 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
5650 :param represent: use the fields .represent value (default False)
5651 :param colnames: list of column names to use (default self.colnames)
5652 This will only work when exporting Rows objects;
5653 do not use it with db.export_to_csv_file()
5654 """
5655 delimiter = kwargs.get('delimiter', ',')
5656 quotechar = kwargs.get('quotechar', '"')
5657 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
5658 represent = kwargs.get('represent', False)
5659 writer = csv.writer(ofile, delimiter=delimiter,
5660 quotechar=quotechar, quoting=quoting)
5661 colnames = kwargs.get('colnames', self.colnames)
5662
5663 writer.writerow(colnames)
5664
5665 def none_exception(value):
5666 """
5667 returns a cleaned up value that can be used for csv export:
5668 - unicode text is encoded as such
5669 - None values are replaced with the given representation (default <NULL>)
5670 """
5671 if value is None:
5672 return null
5673 elif isinstance(value, unicode):
5674 return value.encode('utf8')
5675 elif isinstance(value,Reference):
5676 return int(value)
5677 elif hasattr(value, 'isoformat'):
5678 return value.isoformat()[:19].replace('T', ' ')
5679 elif isinstance(value, (list,tuple)):
5680 return bar_encode(value)
5681 return value
5682
5683 for record in self:
5684 row = []
5685 for col in colnames:
5686 if not table_field.match(col):
5687 row.append(record._extra[col])
5688 else:
5689 (t, f) = col.split('.')
5690 field = self.db[t][f]
5691 if isinstance(record.get(t, None), (Row,dict)):
5692 value = record[t][f]
5693 else:
5694 value = record[f]
5695 if represent and field.represent:
5696 value = field.represent(value)
5697 row.append(none_exception(value))
5698 writer.writerow(row)
5699
5701 """
5702 serializes the table using sqlhtml.SQLTABLE (if present)
5703 """
5704
5705 import sqlhtml
5706 return sqlhtml.SQLTABLE(self).xml()
5707
5708 - def json(self, mode='object', default=None):
5709 """
5710 serializes the table to a JSON list of objects
5711 """
5712 mode = mode.lower()
5713 if not mode in ['object', 'array']:
5714 raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode
5715
5716 def inner_loop(record, col):
5717 (t, f) = col.split('.')
5718 res = None
5719 if not table_field.match(col):
5720 res = record._extra[col]
5721 else:
5722 if isinstance(record.get(t, None), Row):
5723 res = record[t][f]
5724 else:
5725 res = record[f]
5726 if mode == 'object':
5727 return (f, res)
5728 else:
5729 return res
5730
5731 if mode == 'object':
5732 items = [dict([inner_loop(record, col) for col in
5733 self.colnames]) for record in self]
5734 else:
5735 items = [[inner_loop(record, col) for col in self.colnames]
5736 for record in self]
5737 if have_serializers:
5738 return serializers.json(items,default=default or serializers.custom_json)
5739 else:
5740 import simplejson
5741 return simplejson.dumps(items)
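# A usage sketch: db(db.person).select().json() returns e.g.
#   [{"id": 1, "name": "Max"}]
# while mode='array' drops the keys and returns e.g. [[1, "Max"]].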
5742
5743 - def Rows_unpickler(data):
5744 return cPickle.loads(data)
5745
5746 - def Rows_pickler(data):
5747 return Rows_unpickler, \
5748 (cPickle.dumps(data.as_list(storage_to_dict=True,
5749 datetime_to_str=False)),)
5750
5751 copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
5752
5753
5754
5755
5756
5757
5759 """
5760
5761 >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
5762 >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
5763 >>> tmp = db.define_table('users',\
5764 Field('stringf', 'string', length=32, required=True),\
5765 Field('booleanf', 'boolean', default=False),\
5766 Field('passwordf', 'password', notnull=True),\
5767 Field('uploadf', 'upload'),\
5768 Field('blobf', 'blob'),\
5769 Field('integerf', 'integer', unique=True),\
5770 Field('doublef', 'double', unique=True,notnull=True),\
5771 Field('datef', 'date', default=datetime.date.today()),\
5772 Field('timef', 'time'),\
5773 Field('datetimef', 'datetime'),\
5774 migrate='test_user.table')
5775
5776 Insert a field
5777
5778 >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
5779 uploadf=None, integerf=5, doublef=3.14,\
5780 datef=datetime.date(2001, 1, 1),\
5781 timef=datetime.time(12, 30, 15),\
5782 datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
5783 1
5784
5785 Drop the table
5786
5787 >>> db.users.drop()
5788
5789 Examples of insert, select, update, delete
5790
5791 >>> tmp = db.define_table('person',\
5792 Field('name'),\
5793 Field('birth','date'),\
5794 migrate='test_person.table')
5795 >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
5796 >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')
5797
5798 commented len(db().select(db.person.ALL))
5799 commented 2
5800
5801 >>> me = db(db.person.id==person_id).select()[0] # test select
5802 >>> me.name
5803 'Massimo'
5804 >>> db(db.person.name=='Massimo').update(name='massimo') # test update
5805 1
5806 >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
5807 1
5808
5809 Update a single record
5810
5811 >>> me.update_record(name=\"Max\")
5812 >>> me.name
5813 'Max'
5814
5815 Examples of complex search conditions
5816
5817 >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
5818 1
5819 >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
5820 1
5821 >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
5822 1
5823 >>> me = db(db.person.id==person_id).select(db.person.name)[0]
5824 >>> me.name
5825 'Max'
5826
5827 Examples of search conditions using extract from date/datetime/time
5828
5829 >>> len(db(db.person.birth.month()==12).select())
5830 1
5831 >>> len(db(db.person.birth.year()>1900).select())
5832 1
5833
5834 Example of usage of NULL
5835
5836 >>> len(db(db.person.birth==None).select()) ### test NULL
5837 0
5838 >>> len(db(db.person.birth!=None).select()) ### test NULL
5839 1
5840
5841 Examples of search conditions using lower, upper, and like
5842
5843 >>> len(db(db.person.name.upper()=='MAX').select())
5844 1
5845 >>> len(db(db.person.name.like('%ax')).select())
5846 1
5847 >>> len(db(db.person.name.upper().like('%AX')).select())
5848 1
5849 >>> len(db(~db.person.name.upper().like('%AX')).select())
5850 0
5851
5852 orderby, groupby and limitby
5853
5854 >>> people = db().select(db.person.name, orderby=db.person.name)
5855 >>> order = db.person.name|~db.person.birth
5856 >>> people = db().select(db.person.name, orderby=order)
5857
5858 >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
5859
5860 >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))
5861
5862 Example of a one-to-many relation
5863
5864 >>> tmp = db.define_table('dog',\
5865 Field('name'),\
5866 Field('birth','date'),\
5867 Field('owner',db.person),\
5868 migrate='test_dog.table')
5869 >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
5870 1
5871
5872 A simple JOIN
5873
5874 >>> len(db(db.dog.owner==db.person.id).select())
5875 1
5876
5877 >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
5878 1
5879
5880 Drop tables
5881
5882 >>> db.dog.drop()
5883 >>> db.person.drop()
5884
5885 Example of a many-to-many relation and Set
5886
5887 >>> tmp = db.define_table('author', Field('name'),\
5888 migrate='test_author.table')
5889 >>> tmp = db.define_table('paper', Field('title'),\
5890 migrate='test_paper.table')
5891 >>> tmp = db.define_table('authorship',\
5892 Field('author_id', db.author),\
5893 Field('paper_id', db.paper),\
5894 migrate='test_authorship.table')
5895 >>> aid = db.author.insert(name='Massimo')
5896 >>> pid = db.paper.insert(title='QCD')
5897 >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)
5898
5899 Define a Set
5900
5901 >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
5902 >>> rows = authored_papers.select(db.author.name, db.paper.title)
5903 >>> for row in rows: print row.author.name, row.paper.title
5904 Massimo QCD
5905
5906 Example of search condition using belongs
5907
5908 >>> set = (1, 2, 3)
5909 >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
5910 >>> print rows[0].title
5911 QCD
5912
5913 Example of search condition using nested select
5914
5915 >>> nested_select = db()._select(db.authorship.paper_id)
5916 >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
5917 >>> print rows[0].title
5918 QCD
5919
5920 Example of expressions
5921
5922 >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
5923 >>> db(mynumber.id>0).delete()
5924 0
5925 >>> for i in range(10): tmp = mynumber.insert(x=i)
5926 >>> db(mynumber.id>0).select(mynumber.x.sum())[0](mynumber.x.sum())
5927 45
5928
5929 >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
5930 5
5931
5932 Output in csv
5933
5934 >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
5935 author.name,paper.title\r
5936 Massimo,QCD
5937
5938 Delete all leftover tables
5939
5940 >>> DAL.distributed_transaction_commit(db)
5941
5942 >>> db.mynumber.drop()
5943 >>> db.authorship.drop()
5944 >>> db.author.drop()
5945 >>> db.paper.drop()
5946 """
5947
5948
5949
5950
5951 SQLField = Field
5952 SQLTable = Table
5953 SQLXorable = Expression
5954 SQLQuery = Query
5955 SQLSet = Set
5956 SQLRows = Rows
5957 SQLStorage = Row
5958 SQLDB = DAL
5959 GQLDB = DAL
5960 DAL.Field = Field
5961 DAL.Table = Table
5962
5963
5964
5965
5966
5967 if __name__ == '__main__':
5968 import doctest
5969 doctest.testmod()
5970