

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite
- MySQL
- Postgres
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('mysql://a:b@localhost/x', 'sqlite://storage.sqlite'), folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person', Field('name', 'string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name == 'James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name, groupby=person.name, limitby=(0, 100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'sqlite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental

For more info:
help(DAL)
help(Field)
"""

__all__ = ['DAL', 'Field']

MAXCHARLENGTH = 2**15
DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
TIMINGSSIZE = 100

import re
import sys
import locale
import os
import types
import cPickle
import datetime
import threading
import time
import cStringIO
import csv
import cgi
import copy
import socket
import logging
import copy_reg
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback

CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

try:
    from utils import web2py_uuid
except ImportError:
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import validators
    have_validators = True
except ImportError:
    have_validators = False

logger = logging.getLogger("web2py.dal")
DEFAULT = lambda:0

sql_locker = threading.RLock()
thread = threading.local()

regex_type = re.compile('^([\w\_\:]+)')
regex_dbname = re.compile('^(\w+)(\:\w+)*')
regex_table_field = re.compile('^([\w_]+)\.([\w_]+)$')
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
regex_cleanup_fn = re.compile('[\'"\s;]+')
string_unpack = re.compile('(?<!\|)\|(?!\|)')
regex_python_keywords = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
regex_select_as_parser = re.compile("\s+AS\s+(\S+)")


drivers = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel
    drivers.append('google')
except ImportError:
    pass

if not 'google' in drivers:

    try:
        from pysqlite2 import dbapi2 as sqlite3
        drivers.append('pysqlite2')
    except ImportError:
        try:
            from sqlite3 import dbapi2 as sqlite3
            drivers.append('SQLite3')
        except ImportError:
            logger.debug('no sqlite3 or pysqlite2.dbapi2 driver')

    try:
        import contrib.pymysql as pymysql
        drivers.append('pymysql')
    except ImportError:
        logger.debug('no pymysql driver')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        drivers.append('psycopg2')
    except ImportError:
        logger.debug('no psycopg2 driver')

    try:

        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        drivers.append('pg8000')
    except ImportError:
        logger.debug('no pg8000 driver')

    try:
        import cx_Oracle
        drivers.append('Oracle')
    except ImportError:
        logger.debug('no cx_Oracle driver')

    try:
        import pyodbc
        drivers.append('MSSQL/DB2/Teradata')
    except ImportError:
        logger.debug('no MSSQL/DB2/Teradata driver')

    try:
        import kinterbasdb
        drivers.append('Interbase')
    except ImportError:
        logger.debug('no kinterbasdb driver')

    try:
        import firebirdsql
        drivers.append('Firebird')
    except ImportError:
        logger.debug('no Firebird driver')

    try:
        import informixdb
        drivers.append('Informix')
        logger.warning('Informix support is experimental')
    except ImportError:
        logger.debug('no informixdb driver')

    try:
        import sapdb
        drivers.append('SAPDB')
        logger.warning('SAPDB support is experimental')
    except ImportError:
        logger.debug('no sapdb driver')

    try:
        import cubriddb
        drivers.append('Cubrid')
        logger.warning('Cubrid support is experimental')
    except ImportError:
        logger.debug('no cubriddb driver')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql

        from org.sqlite import JDBC
        drivers.append('zxJDBC')
        logger.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        logger.debug('no zxJDBC driver')
        is_jdbc = False

    try:
        import ingresdbi
        drivers.append('Ingres')
    except ImportError:
        logger.debug('no Ingres driver')


try:
    import couchdb
    drivers.append('CouchDB')
except ImportError:
    logger.debug('no couchdb driver')

try:
    import pymongo
    drivers.append('mongoDB')
except:
    logger.debug('no mongoDB driver')

try:
    import imaplib
    drivers.append('IMAP')
except:
    logger.debug('could not import imaplib')
PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural

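# A few illustrative calls (comments only, just to make the rule table above
# concrete; each tuple is: match test, substitution pattern, replacement,
# tried in order):
#
#     pluralize('child')   # -> 'children'
#     pluralize('box')     # -> 'boxes'   (matches '[sxz]$')
#     pluralize('query')   # -> 'queries' (matches '(qu|[^aeiou])y$')
#     pluralize('person')  # -> 'persons' (falls through to the '$' -> 's' rule)
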
if 'google' in drivers:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(**kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string." % self.name)

class ConnectionPool(object):

    pools = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        thread.folder = folder

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        if not hasattr(thread, 'instances'):
            return
        while thread.instances:
            instance = thread.instances.pop()
            if action:
                getattr(instance, action)()
            # recycle the connection into the pool if it is not full,
            # otherwise really close it
            really = True
            if instance.pool_size:
                sql_locker.acquire()
                pool = ConnectionPool.pools[instance.uri]
                if len(pool) < instance.pool_size:
                    pool.append(instance.connection)
                    really = False
                sql_locker.release()
            if really:
                getattr(instance, 'close')()
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder. it has to be there """
        if hasattr(thread, 'folder'):
            self.folder = thread.folder
        else:
            self.folder = thread.folder = ''

        if False and self.folder and not os.path.exists(self.folder):
            os.mkdir(self.folder)
    def pool_connection(self, f, cursor=True):
        """
        this function defines: self.connection and self.cursor (iff cursor is True)
        if self.pool_size>0 it will try pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            while True:
                sql_locker.acquire()
                if not uri in ConnectionPool.pools:
                    ConnectionPool.pools[uri] = []
                if ConnectionPool.pools[uri]:
                    self.connection = ConnectionPool.pools[uri].pop()
                    sql_locker.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    sql_locker.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        if not hasattr(thread, 'instances'):
            thread.instances = []
        thread.instances.append(self)
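
    # Illustrative sketch (not part of the API): the concrete adapters below
    # hand pool_connection() a zero-argument factory closed over their DB-API
    # driver, e.g. (somedriver and driver_args are placeholders here):
    #
    #     def connect(driver_args=driver_args):
    #         return somedriver.connect(**driver_args)
    #     self.pool_connection(connect)
    #
    # so a fresh connection is only created when none can be recycled from
    # ConnectionPool.pools[self.uri].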


class BaseAdapter(ConnectionPool):

    driver = None
    maxcharlength = MAXCHARLENGTH
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }
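
    # The %(...)s placeholders above are filled in per field by create_table;
    # e.g. Field('name', 'string') with the default length of 512 renders,
    # under this base mapping, as 'CHAR(512)'. Dialect adapters below override
    # this dictionary rather than the rendering logic.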

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")



    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return os.path.exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename, mode)
        else:
            fileobj = open(filename, mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()


    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_sequence' % tablename

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        for field in table:
            sortable += 1
            k = field.name
            if isinstance(field.type, SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                referenced = field.type[10:].strip()
                constraint_name = self.constraint_name(tablename, field.name)
                if hasattr(table, '_primarykey'):
                    rtablename, rfieldname = referenced.split('.')
                    rtable = table._db[rtablename]
                    rfield = rtable[rfieldname]
                    # must be a reference to a primary key field or a unique field
                    if rfieldname in rtable._primarykey or rfield.unique:
                        ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)
                        # multicolumn primary key reference?
                        if not rfield.unique and len(rtable._primarykey) > 1:
                            # then it has to be a table level FK
                            if rtablename not in TFK:
                                TFK[rtablename] = {}
                            TFK[rtablename][rfieldname] = field.name
                        else:
                            ftype = ftype + \
                                self.types['reference FK'] % dict(\
                                constraint_name=constraint_name,
                                table_name=tablename,
                                field_name=field.name,
                                foreign_key='%s (%s)' % (rtablename, rfieldname),
                                on_delete_action=field.ondelete)
                else:
                    # make a guess here for circular references
                    id_fieldname = referenced in table._db and table._db[referenced]._id.name or 'id'
                    ftype = self.types[field.type[:9]]\
                        % dict(table_name=tablename,
                               field_name=field.name,
                               constraint_name=constraint_name,
                               foreign_key=referenced + ('(%s)' % id_fieldname),
                               on_delete_action=field.ondelete)
            elif field.type.startswith('list:reference'):
                ftype = self.types[field.type[:14]]
            elif field.type.startswith('decimal'):
                precision, scale = map(int, field.type[8:-1].split(','))
                ftype = self.types[field.type[:7]] % \
                    dict(precision=precision, scale=scale)
            elif not field.type in self.types:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self.types[field.type]\
                    % dict(length=field.length)
            if not field.type.startswith('id') and not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'

            # add to list of fields
            sql_fields[field.name] = dict(sortable=sortable,
                                          type=str(field.type),
                                          sql=ftype)

            if isinstance(field.default, (str, int, float)):
                # sql_fields and sql_fields_aux differ for default values:
                # sql_fields is used to trigger migrations, sql_fields_aux
                # is used to create tables, so that a change of default value
                # alone does not trigger a migration
                not_null = self.NOT_NULL(field.default, field.type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field.name] = dict(sql=ftype)
            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = table._db[rtablename]._primarykey
            fkeys = [rfields[k] for k in pkeys]
            fields = fields + ',\n    ' + \
                self.types['reference TFK'] % \
                dict(table_name=tablename,
                     field_name=', '.join(fkeys),
                     foreign_table=rtablename,
                     foreign_key=', '.join(pkeys),
                     on_delete_action=field.ondelete)

        if hasattr(table, '_primarykey'):
            query = '''CREATE TABLE %s(\n    %s,\n    %s) %s''' % \
                (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///'):
            path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')].decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = os.path.join(dbpath, migrate)
        else:
            table._dbt = os.path.join(dbpath, '%s_%s.table' \
                                          % (table._db._uri_hash, tablename))
        if table._dbt:
            table._loggername = os.path.join(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query, table)
                table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % table._dbt
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
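
    # For example (illustrative, under the base `types` mapping above),
    # db.define_table('person', Field('name', 'string')) would produce:
    #
    #     CREATE TABLE person(
    #         id INTEGER PRIMARY KEY AUTOINCREMENT,
    #         name CHAR(512)
    #     );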

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        tablename = table._tablename
        def fix(item):
            k, v = item
            if not isinstance(v, dict):
                v = dict(type='unknown', sql=v)
            return k.lower(), v
        # make sure all field names are lower case to avoid conflicts
        sql_fields = dict(map(fix, sql_fields.items()))
        sql_fields_old = dict(map(fix, sql_fields_old.items()))
        sql_fields_aux = dict(map(fix, sql_fields_aux.items()))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        if self.dbengine == 'mssql':
            new_add = '; ALTER TABLE %s ADD ' % tablename
        else:
            new_add = ', ADD '

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                query = ['ALTER TABLE %s ADD %s %s;' % \
                         (tablename, key,
                          sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine == 'sqlite':
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not isinstance(table[key].type, SQLCustomType) \
                    and not (table[key].type.startswith('reference') and \
                        sql_fields[key]['sql'].startswith('INT,') and \
                        sql_fields_old[key]['sql'].startswith('INT NOT NULL,')):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                table._db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # mysql, oracle and firebird do not allow multiple
                        # ALTER TABLE in one transaction, so we must commit
                        # partial transactions and update table._dbt here
                        if table._db._adapter.commit_on_alter_table:
                            table._db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            cPickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql','oracle','firebird')):
            table._db.commit()
            tfile = self.file_open(table._dbt, 'w')
            cPickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)



    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))




    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default, field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)


    def RAW(self, first):
        return first


    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        table._db.commit()
        del table._db[table._tablename]
        del table._db.tables[table._db.tables.index(table._tablename)]
        table._db._update_referenced_by(table._tablename)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')


    def insert(self, table, fields):
        query = self._insert(table, fields)
        try:
            self.execute(query)
        except Exception, e:
            if isinstance(e, self.integrity_error_class()):
                return None
            raise e
        if hasattr(table, '_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id, int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table, item) for item in items]

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif second == [] or second == ():
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)

    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case in-sensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first), self.expand(second, 'string'))



    def CONTAINS(self, first, second):
        if first.type in ('string', 'text'):
            key = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
        return '(%s LIKE %s)' % (self.expand(first), self.expand(key, 'string'))
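
    # e.g. (illustrative) CONTAINS(tags, 'a%b') on a 'list:string' field builds
    # the pattern '%|a%%b|%': the bars anchor one item of the bar-delimited
    # serialization used by list: types, and '%' / '|' in the needle are
    # escaped by doubling.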

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first), self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first), self.expand(second, first.type))

    def LT(self, first, second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s < None" % first
        return '(%s < %s)' % (self.expand(first), self.expand(second, first.type))

    def LE(self, first, second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s <= None" % first
        return '(%s <= %s)' % (self.expand(first), self.expand(second, first.type))

    def GT(self, first, second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s > None" % first
        return '(%s > %s)' % (self.expand(first), self.expand(second, first.type))

    def GE(self, first, second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s >= None" % first
        return '(%s >= %s)' % (self.expand(first), self.expand(second, first.type))

    def ADD(self, first, second):
        return '(%s + %s)' % (self.expand(first), self.expand(second, first.type))

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first), self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first), self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first), self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first), self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return str(expression)
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            elif not isinstance(expression.op, str):
                return expression.op()
            else:
                return '(%s)' % expression.op
        elif field_type:
            return str(self.represent(expression, field_type))
        elif isinstance(expression, (list, tuple)):
            return ','.join(self.represent(item, field_type) for item in expression)
        else:
            return str(expression)
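
    # e.g. (illustrative) the docstring query
    #     (person.name == 'James') & (person.name.startswith('J'))
    # expands recursively through AND/EQ/STARTSWITH to something like:
    #     ((person.name = 'James') AND (person.name LIKE 'J%'))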

    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode=''):
        # log the queries to the table's sql.log if there is one
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        else:
            class Logfile(object):
                def write(self, value):
                    pass
                def close(self):
                    pass
            logfile = Logfile()

        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                logfile.write(query + '\n')
                self.execute(query)
            table._db.commit()
            logfile.write('success!\n')
        finally:
            logfile.close()

    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name, self.expand(value, field.type)) \
                              for (field, value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        self.execute(sql)
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)
        ### special code to handle CASCADE in SQLite
        db = self.db
        table = db[tablename]
        if self.dbengine == 'sqlite' and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]
        ### end special code
        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None
        ### special code to handle CASCADE in SQLite
        if self.dbengine == 'sqlite' and counter:
            for tablename, fieldname in table._referenced_by:
                f = db[tablename][fieldname]
                if f.type == 'reference '+table._tablename and f.ondelete == 'CASCADE':
                    db(db[tablename][fieldname].belongs(deleted)).delete()
        ### end special code
        return counter

    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames) == 1:
            return tablenames[0]
        elif len(tablenames) < 1:
            raise RuntimeError, "No table selected"
        else:
            raise RuntimeError, "Too many tables selected"

    def expand_all(self, fields, tablenames):
        new_fields = []
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        # if no fields specified take them all from the requested tables
        if not new_fields:
            for table in tablenames:
                for field in self.db[table]:
                    new_fields.append(field)
        return new_fields

    def _select(self, query, fields, attributes):
        for key in set(attributes.keys())-set(('orderby', 'groupby', 'limitby',
                                               'required', 'cache', 'left',
                                               'distinct', 'having', 'join',
                                               'for_update')):
            raise SyntaxError, 'invalid select attribute: %s' % key

        tablenames = self.tables(query)
        for field in fields:
            if isinstance(field, basestring) and regex_table_field.match(field):
                tn, fn = field.split('.')
                field = self.db[tn][fn]
            for tablename in self.tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)

        if use_common_filters(query):
            query = self.common_filter(query, tablenames)

        if len(tablenames) < 1:
            raise SyntaxError, 'Set: no tables selected'
        sql_f = ', '.join(map(self.expand, fields))
        self._colnames = [c.strip() for c in sql_f.split(', ')]
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_o = ''
        sql_s = ''
        left = attributes.get('left', False)
        inner_join = attributes.get('join', False)
        distinct = attributes.get('distinct', False)
        groupby = attributes.get('groupby', False)
        orderby = attributes.get('orderby', False)
        having = attributes.get('having', False)
        limitby = attributes.get('limitby', False)
        for_update = attributes.get('for_update', False)
        if self.can_select_for_update is False and for_update is True:
            raise SyntaxError, 'invalid select attribute: for_update'
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join if not isinstance(t, Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            itables_to_merge = {}
            [itables_to_merge.update(dict.fromkeys(self.tables(t))) for t in ijoinon]
            ijoinont = [t.first._tablename for t in ijoinon]
            [itables_to_merge.pop(t) for t in ijoinont if t in itables_to_merge]
            iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
            iexcluded = [t for t in tablenames if not t in iimportant_tablenames]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]

            tables_to_merge = {}
            [tables_to_merge.update(dict.fromkeys(self.tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames if not t in important_tablenames]
        def alias(t):
            return str(self.db[t])
        if inner_join and not left:
            sql_t = ', '.join([alias(t) for t in iexcluded + itables_to_merge.keys()])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
        elif not inner_join and left:
            sql_t = ', '.join([alias(t) for t in excluded + tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        elif inner_join and left:
            all_tables_in_query = set(important_tablenames + \
                                      iimportant_tablenames + \
                                      tablenames)
            tables_in_joinon = set(joinont + ijoinont)
            tables_not_in_joinon = all_tables_in_query.difference(tables_in_joinon)
            sql_t = ','.join([alias(t) for t in tables_not_in_joinon])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        else:
            sql_t = ', '.join(alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if limitby:
            if not orderby and tablenames:
                sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in ((hasattr(self.db[t], '_primarykey') and self.db[t]._primarykey) or [self.db[t]._id.name])])

        sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
        if for_update and self.can_select_for_update is True:
            sql = sql.rstrip(';') + ' FOR UPDATE;'
        return sql

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
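
    # e.g. limitby=(10, 30) appends ' LIMIT 20 OFFSET 10' here; dialects
    # without LIMIT/OFFSET support override this (see OracleAdapter below).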

    def select(self, query, fields, attributes):
        """
        Always returns a Rows object, possibly empty.
        """
        def response(sql):
            self.execute(sql)
            return self.cursor.fetchall()
        sql = self._select(query, fields, attributes)
        if attributes.get('cache', None):
            (cache_model, time_expire) = attributes['cache']
            del attributes['cache']
            key = self.uri + '/' + sql
            key = (len(key)<=200) and key or hashlib.md5(key).hexdigest()
            rows = cache_model(key, lambda: response(sql), time_expire)
        else:
            rows = response(sql)
        if isinstance(rows, tuple):
            rows = list(rows)
        limitby = attributes.get('limitby', None) or (0,)
        rows = self.rowslice(rows, limitby[0], None)
        return self.parse(rows, fields, self._colnames)

    def _count(self, query, distinct=None):
        tablenames = self.tables(query)
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, tablenames)
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(tablenames)
        if distinct:
            if isinstance(distinct, (list, tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s;' % (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)

    def count(self, query, distinct=None):
        self.execute(self._count(query, distinct))
        return self.cursor.fetchone()[0]


    def commit(self):
        return self.connection.commit()

    def close(self):
        return self.connection.close()





    def constraint_name(self, table, fieldname):
        return '%s_%s__constraint' % (table, fieldname)


    def represent(self, obj, fieldtype):
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            return str(obj)
        if fieldtype.startswith('list:'):
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [int(obj)]
            elif fieldtype.startswith('list:string'):
                obj = [str(item) for item in obj]
            else:
                obj = [int(item) for item in obj]
        if isinstance(obj, (list, tuple)):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj, fieldtype)
        if not r is None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in ['F', '0']:
                return "'T'"
            else:
                return "'F'"
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(int(obj))
        if fieldtype.startswith('decimal'):
            return str(obj)
        elif fieldtype.startswith('reference'):
            if fieldtype.find('.') > 0:
                return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(int(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', ' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        if not isinstance(obj, str):
            obj = str(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return self.adapt(obj)
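
    # A few illustrative conversions, following the branches above:
    #     represent(True, 'boolean')   -> "'T'"
    #     represent(3, 'id')           -> '3'
    #     represent(None, 'string')    -> 'NULL'
    #     represent("it's", 'string')  -> "'it''s'"   (quoted via self.adapt)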




    def rowslice(self, rows, minimum=0, maximum=None):
        """ By default this function does nothing; overload when db does not do slicing. """
        return rows

    def parse_value(self, value, field_type, blob_decode=True):
        if field_type != 'blob' and isinstance(value, str):
            try:
                value = value.decode(self.db._db_codec)
            except Exception:
                pass
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        elif isinstance(field_type, SQLCustomType):
            value = field_type.decoder(value)
        if not isinstance(field_type, str) or value is None:
            return value
        elif field_type in ('string', 'text', 'password', 'upload'):
            return value
        elif field_type == 'blob' and not blob_decode:
            return value
        else:
            key = regex_type.match(field_type).group(0)
            return self.parsemap[key](value, field_type)

    def parse_reference(self, value, field_type):
        referee = field_type[10:].strip()
        if not '.' in referee:
            value = Reference(value)
            value._table, value._record = self.db[referee], None
        return value

    def parse_boolean(self, value, field_type):
        return value == True or str(value)[:1].lower() == 't'


    def parse_time(self, value, field_type):
        if not isinstance(value, datetime.time):
            time_items = map(int, str(value)[:8].strip().split(':')[:3])
            if len(time_items) == 3:
                (h, mi, s) = time_items
            else:
                (h, mi, s) = time_items + [0]
            value = datetime.time(h, mi, s)
        return value

    def parse_datetime(self, value, field_type):
        if not isinstance(value, datetime.datetime):
            (y, m, d) = map(int, str(value)[:10].strip().split('-'))
            time_items = map(int, str(value)[11:19].strip().split(':')[:3])
            if len(time_items) == 3:
                (h, mi, s) = time_items
            else:
                (h, mi, s) = time_items + [0]
            value = datetime.datetime(y, m, d, h, mi, s)
        return value

    def parse_blob(self, value, field_type):
        return base64.b64decode(str(value))

    def parse_decimal(self, value, field_type):
        decimals = int(field_type[8:-1].split(',')[-1])
        if self.dbengine == 'sqlite':
            value = ('%.' + str(decimals) + 'f') % value
        if not isinstance(value, decimal.Decimal):
            value = decimal.Decimal(str(value))
        return value




    def parse_id(self, value, field_type):
        return int(value)



    def build_parsemap(self):
        self.parsemap = {
            'id':self.parse_id,
            'integer':self.parse_integer,
            'double':self.parse_double,
            'reference':self.parse_reference,
            'boolean':self.parse_boolean,
            'date':self.parse_date,
            'time':self.parse_time,
            'datetime':self.parse_datetime,
            'blob':self.parse_blob,
            'decimal':self.parse_decimal,
            'list:integer':self.parse_list_integers,
            'list:reference':self.parse_list_references,
            'list:string':self.parse_list_strings,
            }

    def parse(self, rows, fields, colnames, blob_decode=True):
        self.build_parsemap()
        db = self.db
        virtualtables = []
        new_rows = []
        for (i, row) in enumerate(rows):
            new_row = Row()
            for j, colname in enumerate(colnames):
                value = row[j]
                if not regex_table_field.match(colnames[j]):
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colnames[j]] = \
                        self.parse_value(value, fields[j].type, blob_decode)
                    new_column_name = \
                        regex_select_as_parser.search(colnames[j])
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row, column_name[0], value)
                else:
                    (tablename, fieldname) = colname.split('.')
                    table = db[tablename]
                    field = table[fieldname]
                    if not tablename in new_row:
                        colset = new_row[tablename] = Row()
                        if tablename not in virtualtables:
                            virtualtables.append(tablename)
                    else:
                        colset = new_row[tablename]
                    colset[fieldname] = value = \
                        self.parse_value(value, field.type, blob_decode)

                    if field.type == 'id':
                        id = value
                        colset.update_record = lambda _ = (colset, table, id), **a: update_record(_, a)
                        colset.delete_record = lambda t = table, i = id: t._db(t._id==i).delete()
                        for (referee_table, referee_name) in table._referenced_by:
                            s = db[referee_table][referee_name]
                            referee_link = db._referee_name and \
                                db._referee_name % dict(table=referee_table, field=referee_name)
                            if referee_link and not referee_link in colset:
                                colset[referee_link] = Set(db, s == id)
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)

        for tablename in virtualtables:
            ### new style virtual fields
            table = db[tablename]
            fields_virtual = [(f,v) for (f,v) in table.items() if isinstance(v, FieldVirtual)]
            fields_lazy = [(f,v) for (f,v) in table.items() if isinstance(v, FieldLazy)]
            if fields_virtual or fields_lazy:
                for row in rowsobj.records:
                    box = row[tablename]
                    for f, v in fields_virtual:
                        box[f] = v.f(row)
                    for f, v in fields_lazy:
                        box[f] = (v.handler or VirtualCommand)(v.f, row)

            ### old style virtual fields
            for item in table.virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{tablename:item})
                except KeyError:
                    # to avoid breaking virtualfields when partial select
                    pass
        return rowsobj

    def common_filter(self, query, tablenames):
        tenant_fieldname = self.db._request_tenant

        for tablename in tablenames:
            table = self.db[tablename]

            # deal with user provided filters
            if table._common_filter != None:
                query = query & table._common_filter(query)

            # deal with multi_tenant filters
            if tenant_fieldname in table:
                default = table[tenant_fieldname].default
                if not default is None:
                    newquery = table[tenant_fieldname] == default
                    if query is None:
                        query = newquery
                    else:
                        query = query & newquery
        return query



class SQLiteAdapter(BaseAdapter):

    driver = globals().get('sqlite3', None)
    can_select_for_update = None

    def EXTRACT(self, field, what):
        return "web2py_extract('%s',%s)" % (what, self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            (i, j) = table[lookup]
            return int(s[i:j])
        except:
            return None
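
    # e.g. web2py_extract('month', '2011-12-25 10:20:30') -> 12: the lookup
    # just slices the ISO string (here chars 5:7) and returns None when the
    # slice is not an integer or the lookup key is unknown.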

    @staticmethod
    def web2py_regexp(expression, item):
        return re.compile(expression).search(item) is not None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'), dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connect(dbpath=dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.pool_connection(connect)
        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)
        self.connection.create_function("REGEXP", 2,
                                        SQLiteAdapter.web2py_regexp)

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]


    def REGEXP(self, first, second):
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second, 'string'))

    def _select(self, query, fields, attributes):
        """
        Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
        Note that the entire database, rather than one record, is locked
        (it will be locked eventually anyway by the following UPDATE).
        """
        sql = super(SQLiteAdapter, self)._select(query, fields, attributes)
        if attributes.get('for_update', False):
            sql = 'BEGIN IMMEDIATE TRANSACTION; ' + sql
        return sql


class JDBCSQLiteAdapter(SQLiteAdapter):

    driver = globals().get('zxJDBC', None)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'), dbpath)
        def connect(dbpath=dbpath, driver_args=driver_args):
            return self.driver.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:'+dbpath), **driver_args)
        self.pool_connection(connect)

        self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)



class MySQLAdapter(BaseAdapter):

    driver = globals().get('pymysql', None)
    maxcharlength = 255
    commit_on_alter_table = True
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        }


    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self, table, mode):
        # breaks db integrity but without this mysql does not drop the table
        return ['SET FOREIGN_KEY_CHECKS=0;', 'DROP TABLE %s;' % table, 'SET FOREIGN_KEY_CHECKS=1;']




    def concat_add(self, table):
        return '; ALTER TABLE %s ADD ' % table

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        driver_args.update(dict(db=db,
                                user=credential_decoder(user),
                                passwd=credential_decoder(password),
                                host=host,
                                port=port,
                                charset=charset))
        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self, table):
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])

1894 -class PostgreSQLAdapter(BaseAdapter):
1895
1896 driver = None
1897 drivers = {'psycopg2': globals().get('psycopg2', None),
1898 'pg8000': globals().get('pg8000', None), }
1899
1900 support_distributed_transaction = True
1901 types = {
1902 'boolean': 'CHAR(1)',
1903 'string': 'VARCHAR(%(length)s)',
1904 'text': 'TEXT',
1905 'password': 'VARCHAR(%(length)s)',
1906 'blob': 'BYTEA',
1907 'upload': 'VARCHAR(%(length)s)',
1908 'integer': 'INTEGER',
1909 'double': 'FLOAT8',
1910 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1911 'date': 'DATE',
1912 'time': 'TIME',
1913 'datetime': 'TIMESTAMP',
1914 'id': 'SERIAL PRIMARY KEY',
1915 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1916 'list:integer': 'TEXT',
1917 'list:string': 'TEXT',
1918 'list:reference': 'TEXT',
1919 }
1920
1921 - def adapt(self,obj):
1922 return psycopg2_adapt(obj).getquoted()
1923
1924 - def sequence_name(self,table):
1925 return '%s_id_Seq' % table
1926
1929
1930 - def ADD(self, first, second):
1931 t = first.type
1932 if t in ('text','string','password','upload','blob'):
1933 return '(%s || %s)' % (self.expand(first), self.expand(second, t))
1934 else:
1935 return '(%s + %s)' % (self.expand(first), self.expand(second, t))
1936
1939
1940 - def prepare(self,key):
1941 self.execute("PREPARE TRANSACTION '%s';" % key)
1942
1943 - def commit_prepared(self,key):
1944 self.execute("COMMIT PREPARED '%s';" % key)
1945
1946 - def rollback_prepared(self,key):
1947 self.execute("ROLLBACK PREPARED '%s';" % key)
1948
1949 - def create_sequence_and_triggers(self, query, table, **args):
1950
1951
1952
1953
1954 self.execute(query)
1955
1956 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
1957 credential_decoder=lambda x:x, driver_args={},
1958 adapter_args={}):
1959 if not self.drivers.get('psycopg2') and not self.drivers.get('pg8000'):
1960 raise RuntimeError, "Unable to import any drivers (psycopg2 or pg8000)"
1961 self.db = db
1962 self.dbengine = "postgres"
1963 self.uri = uri
1964 self.pool_size = pool_size
1965 self.folder = folder
1966 self.db_codec = db_codec
1967 self.find_or_make_work_folder()
1968 library, uri = uri.split('://')[:2]
1969 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
1970 if not m:
1971 raise SyntaxError, "Invalid URI string in DAL"
1972 user = credential_decoder(m.group('user'))
1973 if not user:
1974 raise SyntaxError, 'User required'
1975 password = credential_decoder(m.group('password'))
1976 if not password:
1977 password = ''
1978 host = m.group('host')
1979 if not host:
1980 raise SyntaxError, 'Host name required'
1981 db = m.group('db')
1982 if not db:
1983 raise SyntaxError, 'Database name required'
1984 port = m.group('port') or '5432'
1985 sslmode = m.group('sslmode')
1986 if sslmode:
1987 msg = ("dbname='%s' user='%s' host='%s' "
1988 "port=%s password='%s' sslmode='%s'") \
1989 % (db, user, host, port, password, sslmode)
1990 else:
1991 msg = ("dbname='%s' user='%s' host='%s' "
1992 "port=%s password='%s'") \
1993 % (db, user, host, port, password)
1994
1995 if library == "postgres":
1996 if self.drivers.get('psycopg2'):
1997 self.driver = self.drivers['psycopg2']
1998 elif self.drivers.get('pg8000'):
1999 self.driver = drivers.get('pg8000')
2000 elif library == "postgres:psycopg2":
2001 self.driver = self.drivers.get('psycopg2')
2002 elif library == "postgres:pg8000":
2003 self.driver = self.drivers.get('pg8000')
2004 if not self.driver:
2005 raise RuntimeError, "%s is not available" % library
2006
2007 self.__version__ = "%s %s" % (self.driver.__name__, self.driver.__version__)
2008 def connect(msg=msg,driver_args=driver_args):
2009 return self.driver.connect(msg,**driver_args)
2010 self.pool_connection(connect)
2011 self.connection.set_client_encoding('UTF8')
2012 self.execute("SET standard_conforming_strings=on;")

    def lastrowid(self,table):
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

    def LIKE(self,first,second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second,'string'))

    def ILIKE(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand(second,'string'))

    def REGEXP(self,first,second):
        return '(%s ~ %s)' % (self.expand(first),
                              self.expand(second,'string'))

    def STARTSWITH(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand(second+'%','string'))

    def ENDSWITH(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand('%'+second,'string'))

    def CONTAINS(self,first,second):
        if first.type in ('string','text'):
            key = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
        return '(%s ILIKE %s)' % (self.expand(first),self.expand(key,'string'))
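
    # A minimal usage sketch of CONTAINS on a list: field (the table and
    # field names are hypothetical). list: values are stored pipe-delimited,
    # e.g. '|a|b|', so contains('b') searches for the pattern '%|b|%':
    #   db.define_table('doc', Field('tags', 'list:string'))
    #   db(db.doc.tags.contains('b')).select()   # -> tags ILIKE '%|b|%'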

class JDBCPostgreSQLAdapter(PostgreSQLAdapter):

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connect(msg=msg,driver_args=driver_args):
            return self.driver.connect(*msg,**driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")


class OracleAdapter(BaseAdapter):

    driver = globals().get('cx_Oracle',None)

    commit_on_alter_table = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'dbms_random.value'

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def _drop(self,table,mode):
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table._tablename, mode),
                'DROP SEQUENCE %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
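
    # Sketch of the SQL emitted above for limitby=(10,20) on a hypothetical
    # table t (Oracle versions without LIMIT/OFFSET need this ROWNUM nesting):
    #   SELECT ... FROM (SELECT w_tmp.*, ROWNUM w_row FROM
    #     (SELECT ... FROM t) w_tmp WHERE ROWNUM<=20) t WHERE w_row > 10;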

    def constraint_name(self, tablename, fieldname):
        constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
        if len(constraint_name)>30:
            constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
        return constraint_name

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        if not 'threaded' in driver_args:
            driver_args['threaded'] = True
        def connect(uri=uri,driver_args=driver_args):
            return self.driver.connect(uri,**driver_args)
        self.pool_connection(connect)
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

    def execute(self, command, args=None):
        args = args or []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        if command[-1:] == ';':
            command = command[:-1]
        return self.log_execute(command, args)
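
    # Example of the rewrite execute() performs (the values are illustrative):
    #   INSERT INTO t(x) VALUES (:CLOB('abc'));
    # becomes
    #   INSERT INTO t(x) VALUES (:1)      with args = ['abc']
    # so long CLOB literals travel as bind variables rather than inline SQL.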

    def create_sequence_and_triggers(self, query, table, **args):
        tablename = table._tablename
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE;' % sequence_name)
        self.execute('CREATE OR REPLACE TRIGGER %s BEFORE INSERT ON %s FOR EACH ROW BEGIN SELECT %s.nextval INTO :NEW.id FROM DUAL; END;\n' % (trigger_name, tablename, sequence_name))

    def lastrowid(self,table):
        sequence_name = table._sequence_name
        self.execute('SELECT %s.currval FROM dual;' % sequence_name)
        return int(self.cursor.fetchone()[0])


class MSSQLAdapter(BaseAdapter):

    driver = globals().get('pyodbc',None)

    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def EXTRACT(self,field,what):
        return "DATEPART(%s,%s)" % (what, self.expand(field))

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'NEWID()'

    def ALLOW_NULL(self):
        return ' NULL'

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self,key):
        return 'PRIMARY KEY CLUSTERED (%s)' % key

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
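
    # Note: this emulates limitby with TOP only, so just the upper bound
    # reaches the server; the lower bound is applied client-side by
    # rowslice() further down, which slices the fetched rows.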

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'boolean':
            if obj and not str(obj)[0].upper() == 'F':
                return '1'
            else:
                return '0'
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}, fake_connect=False):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()

        uri = uri.split('://')[1]
        if '@' not in uri:
            try:
                m = re.compile('^(?P<dsn>.+)$').match(uri)
                if not m:
                    raise SyntaxError, \
                        'Parsing uri string(%s) has no result' % self.uri
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError, 'DSN required'
            except SyntaxError, e:
                logger.error('NdGpatch error')
                raise e

            cnxn = dsn
        else:
            m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(uri)
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in DAL: %s" % uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '1433'

            # parse the optional url name-value arg pairs after the '?'
            # (in the form of arg1=value1&arg2=value2&...); the default
            # DRIVER key is uppercase since some drivers insist on it
            argsdict = { 'DRIVER':'{SQL Server}' }
            urlargs = m.group('urlargs') or ''
            argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
            for argmatch in argpattern.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connect(cnxn=cnxn,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        if not fake_connect:
            self.pool_connection(connect)

    def lastrowid(self,table):
        self.execute('SELECT SCOPE_IDENTITY();')
        return int(self.cursor.fetchone()[0])

    def integrity_error_class(self):
        return pyodbc.IntegrityError

    def rowslice(self,rows,minimum=0,maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]


class MSSQL2Adapter(MSSQLAdapter):
    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        }

    def represent(self, obj, fieldtype):
        value = BaseAdapter.represent(self, obj, fieldtype)
        if (fieldtype == 'string' or fieldtype == 'text') and value[:1]=="'":
            value = 'N'+value
        return value
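
    # The N prefix marks the literal as NVARCHAR so SQL Server keeps it as
    # unicode, e.g. N'café' survives where a plain 'café' literal may not.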

    def execute(self,a):
        return self.log_execute(a.decode('utf8'))


class FireBirdAdapter(BaseAdapter):

    driver = globals().get('pyodbc',None)

    commit_on_alter_table = False
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        }

    def sequence_name(self,tablename):
        return 'genid_%s' % tablename

    def trigger_name(self,tablename):
        return 'trg_id_%s' % tablename

    def RANDOM(self):
        return 'RAND()'

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self,table,mode):
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table._tablename, mode),
                'DROP GENERATOR %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self,table,mode = ''):
        return ['DELETE FROM %s;' % table._tablename,
                'SET GENERATOR %s TO 0;' % table._sequence_name]

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if 'driver_name' in adapter_args:
            if adapter_args['driver_name'] == 'kinterbasdb':
                self.driver = kinterbasdb
            elif adapter_args['driver_name'] == 'firebirdsql':
                self.driver = firebirdsql
        else:
            self.driver = kinterbasdb

        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL: %s" % uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        port = int(m.group('port') or 3050)
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        charset = m.group('charset') or 'UTF8'
        driver_args.update(dict(dsn='%s/%s:%s' % (host,port,db),
                                user = credential_decoder(user),
                                password = credential_decoder(password),
                                charset = charset))

        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)

    def create_sequence_and_triggers(self, query, table, **args):
        tablename = table._tablename
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('create generator %s;' % sequence_name)
        self.execute('set generator %s to 0;' % sequence_name)
        self.execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null or new.id = 0) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;' % (trigger_name, tablename, sequence_name))

    def lastrowid(self,table):
        sequence_name = table._sequence_name
        self.execute('SELECT gen_id(%s, 0) FROM rdb$database' % sequence_name)
        return int(self.cursor.fetchone()[0])


class FireBirdEmbeddedAdapter(FireBirdAdapter):

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if 'driver_name' in adapter_args:
            if adapter_args['driver_name'] == 'kinterbasdb':
                self.driver = kinterbasdb
            elif adapter_args['driver_name'] == 'firebirdsql':
                self.driver = firebirdsql
        else:
            self.driver = kinterbasdb

        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        pathdb = m.group('path')
        if not pathdb:
            raise SyntaxError, 'Path required'
        charset = m.group('charset')
        if not charset:
            charset = 'UTF8'
        host = ''
        driver_args.update(dict(host=host,
                                database=pathdb,
                                user=credential_decoder(user),
                                password=credential_decoder(password),
                                charset=charset))

        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)


class DB2Adapter(BaseAdapter):

    driver = globals().get('pyodbc',None)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'RAND()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return "BLOB('%s')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+'-00.00.00'
            return "'%s'" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "db2"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        cnxn = uri.split('://', 1)[1]
        def connect(cnxn=cnxn,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.pool_connection(connect)

    def execute(self,command):
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self,table):
        self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
        return int(self.cursor.fetchone()[0])

    def rowslice(self,rows,minimum=0,maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]


class TeradataAdapter(DB2Adapter):

    driver = globals().get('pyodbc',None)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER GENERATED ALWAYS AS IDENTITY',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        for field in table:
            sortable += 1
            k = field.name
            if isinstance(field.type,SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                referenced = field.type[10:].strip()
                constraint_name = self.constraint_name(tablename, field.name)
                if hasattr(table,'_primarykey'):
                    rtablename,rfieldname = referenced.split('.')
                    rtable = table._db[rtablename]
                    rfield = rtable[rfieldname]
                    # must be a reference to a primary key or unique field
                    if rfieldname in rtable._primarykey or rfield.unique:
                        ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)
                        # if it references part of a multi-column primary key
                        if not rfield.unique and len(rtable._primarykey)>1:
                            # then it has to be a table-level FK
                            if rtablename not in TFK:
                                TFK[rtablename] = {}
                            TFK[rtablename][rfieldname] = field.name
                        else:
                            ftype = ftype + \
                                self.types['reference FK'] % dict(
                                constraint_name=constraint_name,
                                table_name=tablename,
                                field_name=field.name,
                                foreign_key='%s (%s)'%(rtablename, rfieldname),
                                on_delete_action=field.ondelete)
                else:
                    # make a guess here for circular references
                    id_fieldname = referenced in table._db and table._db[referenced]._id.name or 'id'
                    ftype = self.types[field.type[:9]]\
                        % dict(table_name=tablename,
                               field_name=field.name,
                               constraint_name=constraint_name,
                               foreign_key=referenced + ('(%s)' % id_fieldname),
                               on_delete_action=field.ondelete)
            elif field.type.startswith('list:reference'):
                ftype = self.types[field.type[:14]]
            elif field.type.startswith('decimal'):
                precision, scale = map(int,field.type[8:-1].split(','))
                ftype = self.types[field.type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif not field.type in self.types:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self.types[field.type]\
                    % dict(length=field.length)
            if not field.type.startswith('id') and not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'

            # add to the list of fields
            sql_fields[field.name] = dict(sortable=sortable,
                                          type=str(field.type),
                                          sql=ftype)

            if isinstance(field.default,(str,int,float)):
                # caveat: sql_fields and sql_fields_aux differ for
                # default values: sql_fields is used to trigger
                # migrations and sql_fields_aux is used to create
                # tables; the distinction exists so that a change in
                # a default value alone does not trigger a migration
                not_null = self.NOT_NULL(field.default, field.type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field.name] = dict(sql=ftype)
            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = table._db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                self.types['reference TFK'] % \
                dict(table_name=tablename,
                     field_name=', '.join(fkeys),
                     foreign_table=rtablename,
                     foreign_key=', '.join(pkeys),
                     on_delete_action=field.ondelete)

        if hasattr(table,'_primarykey'):
            query = '''CREATE TABLE %s(\n    %s,\n    %s) %s''' % \
                (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///'):
            path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')].decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = os.path.join(dbpath, migrate)
        else:
            table._dbt = os.path.join(dbpath, '%s_%s.table' \
                                          % (table._db._uri_hash, tablename))
        if table._dbt:
            table._loggername = os.path.join(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % table._dbt
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
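
    # Migration bookkeeping sketch: the pickled dict written to the .table
    # file maps each field name to dict(sortable=..., type=..., sql=...);
    # on the next define_table it is reloaded, compared with the freshly
    # computed sql_fields, and any difference triggers migrate_table().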

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "teradata"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        cnxn = uri.split('://', 1)[1]
        def connect(cnxn=cnxn,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.pool_connection(connect)

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

INGRES_SEQNAME = 'ii***lineitemsequence' # NOTE: deliberately not a valid
                                         # database object name; it is
                                         # replaced with a per-table name in
                                         # create_sequence_and_triggers below


class IngresAdapter(BaseAdapter):

    driver = globals().get('ingresdbi',None)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'RANDOM()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            if fetch_amt:
                sql_s += ' FIRST %d ' % (fetch_amt, )
            if lmin:
                # OFFSET is only emitted when a lower bound is requested
                sql_o += ' OFFSET %d' % (lmin, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "ingres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        connstr = self.uri.split(':', 1)[1]
        # simple URI processing: strip leading slashes from the dbname
        connstr = connstr.lstrip()
        while connstr.startswith('/'):
            connstr = connstr[1:]
        database_name = connstr
        vnode = '(local)'
        servertype = 'ingres'
        trace = (0, None)
        driver_args.update(dict(database=database_name,
                                vnode=vnode,
                                servertype=servertype,
                                trace=trace))
        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)

    def create_sequence_and_triggers(self, query, table, **args):
        # post-create step: modify the table to btree for performance and,
        # when there is no user-defined primary key, wire the id column to
        # a per-table sequence
        if hasattr(table,'_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table._primarykey]))
            self.execute(modify_tbl_sql)
        else:
            tmp_seqname = '%s_iisq' % table._tablename
            query = query.replace(INGRES_SEQNAME, tmp_seqname)
            self.execute('create sequence %s' % tmp_seqname)
            self.execute(query)
            self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))

    def lastrowid(self,table):
        tmp_seqname = '%s_iisq' % table
        self.execute('select current value for %s' % tmp_seqname)
        return int(self.cursor.fetchone()[0])


class IngresUnicodeAdapter(IngresAdapter):

    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NCLOB',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'NCLOB',
        'list:string': 'NCLOB',
        'list:reference': 'NCLOB',
        }


class SAPDBAdapter(BaseAdapter):

    driver = globals().get('sapdb',None)
    support_distributed_transaction = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONG',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONG',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'FIXED(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT PRIMARY KEY',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONG',
        'list:string': 'LONG',
        'list:reference': 'LONG',
        }

    def sequence_name(self,table):
        return '%s_id_Seq' % table

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def create_sequence_and_triggers(self, query, table, **args):
        # the id column is wired to a sequence-backed default
        self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
                         % (table._tablename, table._id.name, table._sequence_name))
        self.execute(query)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "sapdb"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        def connect(user=user, password=password, database=db,
                    host=host, driver_args=driver_args):
            return self.driver.Connection(user, password, database,
                                          host, **driver_args)
        self.pool_connection(connect)
    def lastrowid(self,table):
        self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
        return int(self.cursor.fetchone()[0])


class CubridAdapter(MySQLAdapter):

    driver = globals().get('cubriddb', None)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        if not self.driver:
            raise RuntimeError, "Unable to import driver"
        self.db = db
        self.dbengine = "cubrid"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '30000')
        charset = m.group('charset') or 'utf8'
        user = credential_decoder(user)
        passwd = credential_decoder(password)
        def connect(host=host,port=port,db=db,
                    user=user,passwd=passwd,driver_args=driver_args):
            return self.driver.connect(host,port,db,user,passwd,**driver_args)
        self.pool_connection(connect)
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")


class DatabaseStoredFile:

    web2py_filesystem = False

    def escape(self,obj):
        return self.db._adapter.escape(obj)

    def __init__(self,db,filename,mode):
        if db._adapter.dbengine != 'mysql':
            raise RuntimeError, "only MySQL can store metadata .table files in database for now"
        self.db = db
        self.filename = filename
        self.mode = mode
        if not self.web2py_filesystem:
            self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
            DatabaseStoredFile.web2py_filesystem = True
        self.p = 0
        self.data = ''
        if mode in ('r','rw','a'):
            query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
                % filename
            rows = self.db.executesql(query)
            if rows:
                self.data = rows[0][0]
            elif os.path.exists(filename):
                datafile = open(filename, 'r')
                try:
                    self.data = datafile.read()
                finally:
                    datafile.close()
            elif mode in ('r','rw'):
                raise RuntimeError, "File %s does not exist" % filename

    def read(self, bytes):
        data = self.data[self.p:self.p+bytes]
        self.p += len(data)
        return data

    def readline(self):
        i = self.data.find('\n',self.p)+1
        if i>0:
            data, self.p = self.data[self.p:i], i
        else:
            data, self.p = self.data[self.p:], len(self.data)
        return data

    def write(self,data):
        self.data += data

    def close(self):
        self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" \
                               % self.filename)
        query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" \
            % (self.filename, self.data.replace("'","''"))
        self.db.executesql(query)
        self.db.commit()

    @staticmethod
    def exists(db,filename):
        if os.path.exists(filename):
            return True
        query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
        if db.executesql(query):
            return True
        return False
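
    # A minimal usage sketch (assuming a MySQL-backed DAL instance db and a
    # hypothetical path): file contents live in the web2py_filesystem table,
    # which is what lets adapters without a writable filesystem (e.g. the
    # Google SQL adapter below) persist their .table migration metadata.
    #   f = DatabaseStoredFile(db, 'databases/sql.log', 'a')
    #   f.write('timestamp ...\n')
    #   f.close()    # DELETE + INSERT of the full content, then commit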


class UseDatabaseStoredFile:

    def file_exists(self, filename):
        return DatabaseStoredFile.exists(self.db,filename)

    def file_open(self, filename, mode='rb', lock=True):
        return DatabaseStoredFile(self.db,filename,mode)

    def file_close(self, fileobj, unlock=True):
        fileobj.close()

    def file_delete(self,filename):
        query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
        self.db.executesql(query)
        self.db.commit()


class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):

    def __init__(self, db, uri='google:sql://realm:domain/database',
                 pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):

        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.db_codec = db_codec
        self.folder = folder or '$HOME/'+thread.folder.split('/applications/',1)[1]

        m = re.compile('^(?P<instance>.*)/(?P<db>.*)$').match(self.uri[len('google:sql://'):])
        if not m:
            raise SyntaxError, "Invalid URI string in SQLDB: %s" % self.uri
        instance = credential_decoder(m.group('instance'))
        db = credential_decoder(m.group('db'))
        driver_args['instance'] = instance
        createdb = adapter_args.get('createdb',True)
        if not createdb:
            driver_args['database'] = db
        def connect(driver_args=driver_args):
            return rdbms.connect(**driver_args)
        self.pool_connection(connect)
        if createdb:
            self.execute('CREATE DATABASE IF NOT EXISTS %s' % db)
            self.execute('USE %s' % db)
        self.execute("SET FOREIGN_KEY_CHECKS=1;")
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")


class NoSQLAdapter(BaseAdapter):
    can_select_for_update = False

    @staticmethod
    def to_unicode(obj):
        if isinstance(obj, str):
            return obj.decode('utf8')
        elif not isinstance(obj, unicode):
            return unicode(obj)
        return obj

    def represent(self, obj, fieldtype):
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            raise SyntaxError, "not supported on GAE"
        if self.dbengine == 'google:datastore':
            if isinstance(fieldtype, gae.Property):
                return obj
        is_string = isinstance(fieldtype,str)
        is_list = is_string and fieldtype.startswith('list:')
        if is_list:
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        if obj == '' and not \
                (is_string and fieldtype[:2] in ['st','te','pa','up']):
            return None
        if not obj is None:
            if isinstance(obj, list) and not is_list:
                obj = [self.represent(o, fieldtype) for o in obj]
            elif fieldtype in ('integer','id'):
                obj = long(obj)
            elif fieldtype == 'double':
                obj = float(obj)
            elif is_string and fieldtype.startswith('reference'):
                if isinstance(obj, (Row, Reference)):
                    obj = obj['id']
                obj = long(obj)
            elif fieldtype == 'boolean':
                if obj and not str(obj)[0].upper() == 'F':
                    obj = True
                else:
                    obj = False
            elif fieldtype == 'date':
                if not isinstance(obj, datetime.date):
                    (y, m, d) = map(int,str(obj).strip().split('-'))
                    obj = datetime.date(y, m, d)
                elif isinstance(obj,datetime.datetime):
                    (y, m, d) = (obj.year, obj.month, obj.day)
                    obj = datetime.date(y, m, d)
            elif fieldtype == 'time':
                if not isinstance(obj, datetime.time):
                    time_items = map(int,str(obj).strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    obj = datetime.time(h, mi, s)
            elif fieldtype == 'datetime':
                if not isinstance(obj, datetime.datetime):
                    (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
                    time_items = map(int,str(obj)[11:].strip().split(':')[:3])
                    while len(time_items)<3:
                        time_items.append(0)
                    (h, mi, s) = time_items
                    obj = datetime.datetime(y, m, d, h, mi, s)
            elif fieldtype == 'blob':
                pass
            elif is_string and fieldtype.startswith('list:string'):
                return map(self.to_unicode,obj)
            elif is_list:
                return map(int,obj)
            else:
                obj = self.to_unicode(obj)
        return obj
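
    # Illustrative conversions performed by represent() above:
    #   represent('7', 'integer')        -> 7L
    #   represent('2011-12-31', 'date')  -> datetime.date(2011, 12, 31)
    #   represent('x', 'list:string')    -> [u'x']   (scalars are listified)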

    def _insert(self,table,fields):
        return 'insert %s in %s' % (fields, table)

    def _count(self,query,distinct=None):
        return 'count %s' % repr(query)

    def _select(self,query,fields,attributes):
        return 'select %s where %s' % (repr(fields), repr(query))

    def _delete(self,tablename, query):
        return 'delete %s where %s' % (repr(tablename),repr(query))

    def _update(self,tablename,query,fields):
        return 'update %s (%s) where %s' % (repr(tablename),
                                            repr(fields),repr(query))

    def commit(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def rollback(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def close(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    # these functions should never be called on a NoSQL adapter
    def OR(self,first,second): raise SyntaxError, "Not supported"
    def AND(self,first,second): raise SyntaxError, "Not supported"
    def AS(self,first,second): raise SyntaxError, "Not supported"
    def ON(self,first,second): raise SyntaxError, "Not supported"
    def STARTSWITH(self,first,second=None): raise SyntaxError, "Not supported"
    def ENDSWITH(self,first,second=None): raise SyntaxError, "Not supported"
    def ADD(self,first,second): raise SyntaxError, "Not supported"
    def SUB(self,first,second): raise SyntaxError, "Not supported"
    def MUL(self,first,second): raise SyntaxError, "Not supported"
    def DIV(self,first,second): raise SyntaxError, "Not supported"
    def LOWER(self,first): raise SyntaxError, "Not supported"
    def UPPER(self,first): raise SyntaxError, "Not supported"
    def EXTRACT(self,first,what): raise SyntaxError, "Not supported"
    def AGGREGATE(self,first,what): raise SyntaxError, "Not supported"
    def LEFT_JOIN(self): raise SyntaxError, "Not supported"
    def RANDOM(self): raise SyntaxError, "Not supported"
    def SUBSTRING(self,field,parameters): raise SyntaxError, "Not supported"
    def PRIMARY_KEY(self,key): raise SyntaxError, "Not supported"
    def ILIKE(self,first,second): raise SyntaxError, "Not supported"
    def drop(self,table,mode): raise SyntaxError, "Not supported"
    def alias(self,table,alias): raise SyntaxError, "Not supported"
    def migrate_table(self,*a,**b): raise SyntaxError, "Not supported"
    def distributed_transaction_begin(self,key): raise SyntaxError, "Not supported"
    def prepare(self,key): raise SyntaxError, "Not supported"
    def commit_prepared(self,key): raise SyntaxError, "Not supported"
    def rollback_prepared(self,key): raise SyntaxError, "Not supported"
    def concat_add(self,table): raise SyntaxError, "Not supported"
    def constraint_name(self, table, fieldname): raise SyntaxError, "Not supported"
    def log_execute(self,*a,**b): raise SyntaxError, "Not supported"
    def execute(self,*a,**b): raise SyntaxError, "Not supported"
    def lastrowid(self,table): raise SyntaxError, "Not supported"
    def integrity_error_class(self): raise SyntaxError, "Not supported"
    def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError, "Not supported"


class GAEF(object):
    def __init__(self,name,op,value,apply):
        self.name = name=='id' and '__key__' or name
        self.op = op
        self.value = value
        self.apply = apply
    def __repr__(self):
        return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
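
# GAEF is a small filter record (field name, operator, value, plus a
# python fallback callable). The datastore adapter's expand() turns a
# Query into a list of GAEFs, e.g. (person.id > 0) roughly becomes
# GAEF('__key__', '>', 0, lambda a,b: a>b).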

class GoogleDatastoreAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {}

    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj, unlock=True): pass

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.types.update({
                'boolean': gae.BooleanProperty,
                'string': (lambda: gae.StringProperty(multiline=True)),
                'text': gae.TextProperty,
                'password': gae.StringProperty,
                'blob': gae.BlobProperty,
                'upload': gae.StringProperty,
                'integer': gae.IntegerProperty,
                'double': gae.FloatProperty,
                'decimal': GAEDecimalProperty,
                'date': gae.DateProperty,
                'time': gae.TimeProperty,
                'datetime': gae.DateTimeProperty,
                'id': None,
                'reference': gae.IntegerProperty,
                'list:string': (lambda: gae.StringListProperty(default=None)),
                'list:integer': (lambda: gae.ListProperty(int,default=None)),
                'list:reference': (lambda: gae.ListProperty(int,default=None)),
                })
        self.db = db
        self.uri = uri
        self.dbengine = 'google:datastore'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = 0
        match = re.compile('.*://(?P<namespace>.+)').match(uri)
        if match:
            namespace_manager.set_namespace(match.group('namespace'))

    def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
        myfields = {}
        for k in table.fields:
            if isinstance(polymodel,Table) and k in polymodel.fields():
                continue
            field = table[k]
            attr = {}
            if isinstance(field.type, SQLCustomType):
                ftype = self.types[field.type.native or field.type.type](**attr)
            elif isinstance(field.type, gae.Property):
                ftype = field.type
            elif field.type.startswith('id'):
                continue
            elif field.type.startswith('decimal'):
                precision, scale = field.type[7:].strip('()').split(',')
                precision = int(precision)
                scale = int(scale)
                ftype = GAEDecimalProperty(precision, scale, **attr)
            elif field.type.startswith('reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field.type[10:].strip()
                ftype = self.types[field.type[:9]](referenced)
            elif field.type.startswith('list:reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field.type[15:].strip()
                ftype = self.types[field.type[:14]](**attr)
            elif field.type.startswith('list:'):
                ftype = self.types[field.type](**attr)
            elif not field.type in self.types\
                 or not self.types[field.type]:
                raise SyntaxError, 'Field: unknown field type: %s' % field.type
            else:
                ftype = self.types[field.type](**attr)
            myfields[field.name] = ftype
        if not polymodel:
            table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
        elif polymodel==True:
            table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
        elif isinstance(polymodel,Table):
            table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
        else:
            raise SyntaxError, "polymodel must be None, True, a table or a tablename"
        return None

    def expand(self,expression,field_type=None):
        if isinstance(expression,Field):
            if expression.type in ('text','blob'):
                raise SyntaxError, 'AppEngine does not index by: %s' % expression.type
            return expression.name
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            else:
                return expression.op()
        elif field_type:
            return self.represent(expression,field_type)
        elif isinstance(expression,(list,tuple)):
            return ','.join([self.represent(item,field_type) for item in expression])
        else:
            return str(expression)


    def AND(self,first,second):
        a = self.expand(first)
        b = self.expand(second)
        if b[0].name=='__key__' and a[0].name!='__key__':
            # __key__ filters must precede the other filters
            return b+a
        return a+b

    def EQ(self,first,second=None):
        if isinstance(second, Key):
            return [GAEF(first.name,'=',second,lambda a,b:a==b)]
        return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]

    def NE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]

    def LT(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'<',second,lambda a,b:a<b)]

    def LE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]

    def GT(self,first,second=None):
        if first.type != 'id' or second==0 or second == '0':
            return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'>',second,lambda a,b:a>b)]

    def GE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]

    def COMMA(self,first,second):
        return first+second

    def BELONGS(self,first,second=None):
        if not isinstance(second,(list, tuple)):
            raise SyntaxError, "Not supported"
        if first.type != 'id':
            return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
        else:
            second = [Key.from_path(first._tablename, i) for i in second]
            return [GAEF(first.name,'in',second,lambda a,b:a in b)]

    def CONTAINS(self,first,second):
        if not first.type.startswith('list:'):
            raise SyntaxError, "Not supported"
        return [GAEF(first.name,'=',self.expand(second,first.type[5:]),lambda a,b:b in a)]

    def NOT(self,first):
        nops = { self.EQ: self.NE,
                 self.NE: self.EQ,
                 self.LT: self.GE,
                 self.GT: self.LE,
                 self.LE: self.GT,
                 self.GE: self.LT}
        if not isinstance(first,Query):
            raise SyntaxError, "Not supported"
        nop = nops.get(first.op,None)
        if not nop:
            raise SyntaxError, "Not supported %s" % first.op.__name__
        first.op = nop
        return self.expand(first)
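
    # Example: NOT(person.id > 3) is rewritten above to (person.id <= 3),
    # since the datastore has no native NOT; only these invertible
    # comparisons can be negated, anything else raises SyntaxError.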

    def truncate(self,table,mode):
        self.db(table._id > 0).delete()

3684
3685 - def select_raw(self,query,fields=None,attributes=None):
3686 fields = fields or []
3687 attributes = attributes or {}
3688 new_fields = []
3689 for item in fields:
3690 if isinstance(item,SQLALL):
3691 new_fields += item.table
3692 else:
3693 new_fields.append(item)
3694 fields = new_fields
3695 if query:
3696 tablename = self.get_table(query)
3697 elif fields:
3698 tablename = fields[0].tablename
3699 query = fields[0].table._id>0
3700 else:
3701 raise SyntaxError, "Unable to determine a tablename"
3702
3703 if query:
3704 if use_common_filters(query):
3705 query = self.common_filter(query,[tablename])
3706
3707 tableobj = self.db[tablename]._tableobj
3708 items = tableobj.all()
3709 filters = self.expand(query)
3710 for filter in filters:
3711 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
3712 continue
3713 elif filter.name=='__key__' and filter.op=='=':
3714 if filter.value==0:
3715 items = []
3716 elif isinstance(filter.value, Key):
3717 item = tableobj.get(filter.value)
3718 items = (item and [item]) or []
3719 else:
3720 item = tableobj.get_by_id(filter.value)
3721 items = (item and [item]) or []
3722 elif isinstance(items,list):
3723 items = [i for i in items if filter.apply(getattr(item,filter.name),
3724 filter.value)]
3725 else:
3726 if filter.name=='__key__': items.order('__key__')
3727 items = items.filter('%s %s' % (filter.name,filter.op),filter.value)
3728 if not isinstance(items,list):
3729 if attributes.get('left', None):
3730 raise SyntaxError, 'Set: no left join in appengine'
3731 if attributes.get('groupby', None):
3732 raise SyntaxError, 'Set: no groupby in appengine'
3733 orderby = attributes.get('orderby', False)
3734 if orderby:
3735
3736 if isinstance(orderby, (list, tuple)):
3737 orderby = xorify(orderby)
3738 if isinstance(orderby,Expression):
3739 orderby = self.expand(orderby)
3740 orders = orderby.split(', ')
3741 for order in orders:
3742 order={'-id':'-__key__','id':'__key__'}.get(order,order)
3743 items = items.order(order)
3744 if attributes.get('limitby', None):
3745 (lmin, lmax) = attributes['limitby']
3746 (limit, offset) = (lmax - lmin, lmin)
3747 items = items.fetch(limit, offset=offset)
3748 fields = self.db[tablename].fields
3749 return (items, tablename, fields)
3750
    def select(self,query,fields,attributes):
        (items, tablename, fields) = self.select_raw(query,fields,attributes)
        rows = [
            [t=='id' and (int(item.key().id()) if item.key().id() else
                          item.key().name()) or getattr(item, t) for t in fields]
            for item in items]
        colnames = ['%s.%s' % (tablename, t) for t in fields]
        return self.parse(rows, fields, colnames, False)

    def count(self,query,distinct=None):
        if distinct:
            raise RuntimeError, "COUNT DISTINCT not supported"
        (items, tablename, fields) = self.select_raw(query)
        try:
            return len(items)
        except TypeError:
            return items.count(limit=None)

    def delete(self,tablename, query):
        """
        This function was changed on 2010-05-04 because, according to
        http://code.google.com/p/googleappengine/issues/detail?id=3119
        GAE no longer supports deleting more than 1000 records.
        """
        (items, tablename, fields) = self.select_raw(query)
        # items can be a list of rows or a datastore query object
        if not isinstance(items,list):
            counter = items.count(limit=None)
            leftitems = items.fetch(1000)
            while len(leftitems):
                gae.delete(leftitems)
                leftitems = items.fetch(1000)
        else:
            counter = len(items)
            gae.delete(items)
        return counter

    def update(self,tablename,query,update_fields):
        (items, tablename, fields) = self.select_raw(query)
        counter = 0
        for item in items:
            for field, value in update_fields:
                setattr(item, field.name, self.represent(value,field.type))
            item.put()
            counter += 1
        logger.info(str(counter))
        return counter

    def insert(self,table,fields):
        dfields = dict((f.name,self.represent(v,f.type)) for f,v in fields)
        tmp = table._tableobj(**dfields)
        tmp.put()
        rid = Reference(tmp.key().id())
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self,table,items):
        parsed_items = []
        for item in items:
            dfields = dict((f.name,self.represent(v,f.type)) for f,v in item)
            parsed_items.append(table._tableobj(**dfields))
        gae.put(parsed_items)
        return True

def uuid2int(uuidv):
    return uuid.UUID(uuidv).int

def int2uuid(n):
    return str(uuid.UUID(int=n))
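
# Round-trip sketch for the two helpers above: a uuid string maps to a
# 128-bit integer (used below as a CouchDB document id) and back:
#   >>> int2uuid(uuid2int('12345678-1234-5678-1234-567812345678'))
#   '12345678-1234-5678-1234-567812345678'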

class CouchDBAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {
        'boolean': bool,
        'string': str,
        'text': str,
        'password': str,
        'blob': str,
        'upload': str,
        'integer': long,
        'double': float,
        'date': datetime.date,
        'time': datetime.time,
        'datetime': datetime.datetime,
        'id': long,
        'reference': long,
        'list:string': list,
        'list:integer': list,
        'list:reference': list,
        }

    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj, unlock=True): pass

    def expand(self,expression,field_type=None):
        if isinstance(expression,Field):
            if expression.type=='id':
                return "%s._id" % expression.tablename
        return BaseAdapter.expand(self,expression,field_type)

    def AND(self,first,second):
        return '(%s && %s)' % (self.expand(first),self.expand(second))

    def OR(self,first,second):
        return '(%s || %s)' % (self.expand(first),self.expand(second))

    def EQ(self,first,second):
        if second is None:
            return '(%s == null)' % self.expand(first)
        return '(%s == %s)' % (self.expand(first),self.expand(second,first.type))

    def NE(self,first,second):
        if second is None:
            return '(%s != null)' % self.expand(first)
        return '(%s != %s)' % (self.expand(first),self.expand(second,first.type))

    def COMMA(self,first,second):
        return '%s + %s' % (self.expand(first),self.expand(second))

    def represent(self, obj, fieldtype):
        value = NoSQLAdapter.represent(self, obj, fieldtype)
        if fieldtype=='id':
            return repr(str(int(value)))
        elif fieldtype in ('date','time','datetime','boolean'):
            return serializers.json(value)
        return repr(not isinstance(value,unicode) and value or value.encode('utf8'))

    def __init__(self,db,uri='couchdb://127.0.0.1:5984',
                 pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.uri = uri
        self.dbengine = 'couchdb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = pool_size

        url = 'http://'+uri[10:]
        def connect(url=url,driver_args=driver_args):
            return couchdb.Server(url,**driver_args)
        self.pool_connection(connect,cursor=False)

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        if migrate:
            try:
                self.connection.create(table._tablename)
            except:
                pass

    def insert(self,table,fields):
        id = uuid2int(web2py_uuid())
        ctable = self.connection[table._tablename]
        values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
        values['_id'] = str(id)
        ctable.save(values)
        return id

3917 - def _select(self,query,fields,attributes):
3918 if not isinstance(query,Query):
3919 raise SyntaxError, "Not Supported"
3920 for key in set(attributes.keys())-set(('orderby','groupby','limitby',
3921 'required','cache','left',
3922 'distinct','having')):
3923 raise SyntaxError, 'invalid select attribute: %s' % key
3924 new_fields=[]
3925 for item in fields:
3926 if isinstance(item,SQLALL):
3927 new_fields += item.table
3928 else:
3929 new_fields.append(item)
3930 def uid(fd):
3931 return fd=='id' and '_id' or fd
3932 def get(row,fd):
3933 return fd=='id' and int(row['_id']) or row.get(fd,None)
3934 fields = new_fields
3935 tablename = self.get_table(query)
3936 fieldnames = [f.name for f in (fields or self.db[tablename])]
3937 colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
3938 fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
3939 fn="function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);}" %\
3940 dict(t=tablename,
3941 query=self.expand(query),
3942 order='%s._id' % tablename,
3943 fields=fields)
3944 return fn, colnames
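# A rough sketch of the map function built above (table name hypothetical):
# for db(db.person.name=='Tim').select(db.person.name) the generated fn is
# approximately:
#   function(person){if(person.name=='Tim')emit(person._id,[person.name]);}
# CouchDB runs this view server-side and returns the emitted rows.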
3945
3946 - def select(self,query,fields,attributes):
3947 if not isinstance(query,Query):
3948 raise SyntaxError, "Not Supported"
3949 fn, colnames = self._select(query,fields,attributes)
3950 tablename = colnames[0].split('.')[0]
3951 ctable = self.connection[tablename]
3952 rows = [cols['value'] for cols in ctable.query(fn)]
3953 return self.parse(rows, fields, colnames, False)
3954
3955 - def delete(self,tablename,query):
3956 if not isinstance(query,Query):
3957 raise SyntaxError, "Not Supported"
3958 if query.first.type=='id' and query.op==self.EQ:
3959 id = query.second
3960 tablename = query.first.tablename
3961 assert(tablename == query.first.tablename)
3962 ctable = self.connection[tablename]
3963 try:
3964 del ctable[str(id)]
3965 return 1
3966 except couchdb.http.ResourceNotFound:
3967 return 0
3968 else:
3969 tablename = self.get_table(query)
3970 rows = self.select(query,[self.db[tablename]._id],{})
3971 ctable = self.connection[tablename]
3972 for row in rows:
3973 del ctable[str(row.id)]
3974 return len(rows)
3975
3976 - def update(self,tablename,query,fields):
3977 if not isinstance(query,Query):
3978 raise SyntaxError, "Not Supported"
3979 if query.first.type=='id' and query.op==self.EQ:
3980 id = query.second
3981 tablename = query.first.tablename
3982 ctable = self.connection[tablename]
3983 try:
3984 doc = ctable[str(id)]
3985 for key,value in fields:
3986 doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
3987 ctable.save(doc)
3988 return 1
3989 except couchdb.http.ResourceNotFound:
3990 return 0
3991 else:
3992 tablename = self.get_table(query)
3993 rows = self.select(query,[self.db[tablename]._id],{})
3994 ctable = self.connection[tablename]
3995 table = self.db[tablename]
3996 for row in rows:
3997 doc = ctable[str(row.id)]
3998 for key,value in fields:
3999 doc[key.name] = self.represent(value,table[key.name].type)
4000 ctable.save(doc)
4001 return len(rows)
4002
4003 - def count(self,query,distinct=None):
4004 if distinct:
4005 raise RuntimeError, "COUNT DISTINCT not supported"
4006 if not isinstance(query,Query):
4007 raise SyntaxError, "Not Supported"
4008 tablename = self.get_table(query)
4009 rows = self.select(query,[self.db[tablename]._id],{})
4010 return len(rows)
4011
4013 """
4014 validates that the given text is clean: only contains [0-9a-zA-Z_]
4015 """
4016
4017 if re.compile('[^0-9a-zA-Z_]').findall(text):
4018 raise SyntaxError, \
4019 'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
4020 % text
4021 return text
4022
4023 class MongoDBAdapter(NoSQLAdapter):
4024 uploads_in_blob = True
4025
4026 types = {
4027 'boolean': bool,
4028 'string': str,
4029 'text': str,
4030 'password': str,
4031 'blob': str,
4032 'upload': str,
4033 'integer': long,
4034 'double': float,
4035 'date': datetime.date,
4036 'time': datetime.time,
4037 'datetime': datetime.datetime,
4038 'id': long,
4039 'reference': long,
4040 'list:string': list,
4041 'list:integer': list,
4042 'list:reference': list,
4043 }
4044
4045 - def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
4046 pool_size=0,folder=None,db_codec ='UTF-8',
4047 credential_decoder=lambda x:x, driver_args={},
4048 adapter_args={}):
4049 m=None
4050 try:
4051
4052 import pymongo.uri_parser
4053 m = pymongo.uri_parser.parse_uri(uri)
4054 except ImportError:
4055 try:
4056
4057 import pymongo.connection
4058 m = pymongo.connection._parse_uri(uri)
4059 except ImportError:
4060 raise ImportError("URI parser for mongodb is not available")
4061 except:
4062 raise SyntaxError("This type of uri is not supported by the mongodb uri parser")
4063 self.db = db
4064 self.uri = uri
4065 self.dbengine = 'mongodb'
4066 self.folder = folder
4067 db['_lastsql'] = ''
4068 self.db_codec = 'UTF-8'
4069 self.pool_size = pool_size
4070
4071 self.minimumreplication = adapter_args.get('minimumreplication',0)
4072
4073
4074 self.safe = adapter_args.get('safe',True)
4075
4076
4077 if isinstance(m,tuple):
4078 m = {"database" : m[1]}
4079 if m.get('database') is None:
4080 raise SyntaxError("Database is required!")
4081 def connect(uri=self.uri,m=m):
4082 try:
4083 return pymongo.Connection(uri)[m.get('database')]
4084 except pymongo.errors.ConnectionFailure, inst:
4085 raise SyntaxError, "The connection to " + uri + " could not be made"
4086 except Exception, inst:
4087 if inst == "cannot specify database without a username and password":
4088 raise SyntaxError("You are probably running version 1.1 of pymongo, which contains a bug that requires authentication. Update your pymongo.")
4089 else:
4090 raise SyntaxError("This is not a valid MongoDB uri (see http://www.mongodb.org/display/DOCS/Connections). Error: %s" % inst)
4091 self.pool_connection(connect,cursor=False)
4092
4093
4094
4095 - def represent(self, obj, fieldtype):
4096 value = NoSQLAdapter.represent(self, obj, fieldtype)
4097 if fieldtype =='date':
4098 if value is None:
4099 return value
4100 t = datetime.time(0, 0, 0)
4101 return datetime.datetime.combine(value, t)
4102 elif fieldtype == 'time':
4104 if value is None:
4104 return value
4105 d = datetime.date(2000, 1, 1)
4106 return datetime.datetime.combine(d, value)
4107 elif fieldtype in ('list:string', 'list:integer', 'list:reference'):
4108 return value
4109 return value
4110
4111
4112
4113 - def insert(self,table,fields,safe=None):
4114 if safe is None:
4115 safe=self.safe
4116 ctable = self.connection[table._tablename]
4117 values = dict((k.name,self.represent(v,table[k.name].type)) for k,v in fields)
4118 ctable.insert(values,safe=safe)
4119 return int(str(values['_id']), 16)
4120
4121 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None, isCapped=False):
4122 if isCapped:
4123 raise RuntimeError, "Not implemented"
4124 else:
4125 pass
4126
4127 - def count(self,query,distinct=None,snapshot=True):
4128 if distinct:
4129 raise RuntimeError, "COUNT DISTINCT not supported"
4130 if not isinstance(query,Query):
4131 raise SyntaxError, "Not Supported"
4132 tablename = self.get_table(query)
4133 return int(self.select(query,[self.db[tablename]._id],{},count=True,snapshot=snapshot)['count'])
4134
4135
4136
4137 - def expand(self, expression, field_type=None):
4138 import pymongo.objectid
4139
4140
4141
4142 if isinstance(expression, Query):
4143 logger.debug("in expand and this is a query")
4144
4145
4146
4147
4148 if isinstance(expression.first,Field) and expression.first.type == 'id':
4149 expression.first.name = '_id'
4150 if expression.second != 0 and not isinstance(expression.second,pymongo.objectid.ObjectId):
4151 if isinstance(expression.second,int):
4152 try:
4153
4154 expression.second = pymongo.objectid.ObjectId(("%X" % expression.second))
4155 except:
4156 raise SyntaxError, 'The second argument must be an integer that can represent an objectid.'
4157 else:
4158 try:
4159
4160 expression.second = pymongo.objectid.ObjectId(expression.second)
4161 except:
4162 raise SyntaxError, 'The second argument must be of type bson.objectid.ObjectId or an integer that can represent an objectid.'
4163 elif expression.second == 0:
4164 expression.second = pymongo.objectid.ObjectId('000000000000000000000000')
4165 return expression.op(expression.first, expression.second)
4166 if isinstance(expression, Field):
4167 if expression.type=='id':
4168 return "_id"
4169 else:
4170 return expression.name
4171
4172 elif isinstance(expression, (Expression, Query)):
4173 if not expression.second is None:
4174 return expression.op(expression.first, expression.second)
4175 elif not expression.first is None:
4176 return expression.op(expression.first)
4177 elif not isinstance(expression.op, str):
4178 return expression.op()
4179 else:
4180 return expression.op
4181 elif field_type:
4182 return str(self.represent(expression,field_type))
4183 elif isinstance(expression,(list,tuple)):
4184 return ','.join(self.represent(item,field_type) for item in expression)
4185 else:
4186 return expression
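# Note on the id conversion above: integer ids produced by insert() are the
# hex value of a Mongo ObjectId, so a query such as db.person.id == rid
# (names hypothetical) has its second operand rebuilt with
# pymongo.objectid.ObjectId("%X" % rid) before the operator is applied.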
4187
4188 - def _select(self,query,fields,attributes):
4189 from pymongo import son
4190
4191 for key in set(attributes.keys())-set(('limitby','orderby')):
4192 raise SyntaxError, 'invalid select attribute: %s' % key
4193
4194 new_fields=[]
4195 mongosort_list = []
4196
4197
4198 orderby = attributes.get('orderby', False)
4199 limitby = attributes.get('limitby', False)
4200
4201 if orderby:
4202
4203 if isinstance(orderby, (list, tuple)):
4204 logger.debug("in xorify")
4205 orderby = xorify(orderby)
4206
4207
4208
4209 for f in self.expand(orderby).split(','):
4210 if f.startswith('-'):
4211 mongosort_list.append((f[1:],-1))
4212 else:
4213 mongosort_list.append((f,1))
4214 logger.debug("mongosort_list = %s" % mongosort_list)
4215
4216 if limitby:
4217
4218 limitby_skip,limitby_limit = limitby
4219 else:
4220 limitby_skip = 0
4221 limitby_limit = 0
4222
4223
4224
4225
4226
4227
4228
4229 mongofields_dict = son.SON()
4230 mongoqry_dict = {}
4231 for item in fields:
4232 if isinstance(item,SQLALL):
4233 new_fields += item.table
4234 else:
4235 new_fields.append(item)
4236 fields = new_fields
4237 if isinstance(query,Query):
4238 tablename = self.get_table(query)
4239 elif len(fields) != 0:
4240 tablename = fields[0].tablename
4241 else:
4242 raise SyntaxError, "The table name could not be found in the query or in the select statement."
4243 fieldnames = [f for f in (fields or self.db[tablename])]
4244 mongoqry_dict = self.expand(query)
4245 for f in fieldnames:
4246 mongofields_dict[f.name] = 1
4247 return tablename, mongoqry_dict, mongofields_dict, mongosort_list, limitby_limit, limitby_skip
4248
4249
4250
4251 - def select(self,query,fields,attributes,count=False,snapshot=False):
4252 withId=False
4253 tablename, mongoqry_dict , mongofields_dict, mongosort_list, limitby_limit, limitby_skip = self._select(query,fields,attributes)
4254 for key in mongofields_dict.keys():
4255 if key == 'id':
4256 withId = True
4257 break
4258 try:
4259 logger.debug("mongoqry_dict=%s" % mongoqry_dict)
4260 except:
4261 pass
4262 logger.debug("mongofields_dict=%s" % mongofields_dict)
4263 ctable = self.connection[tablename]
4264 if count:
4265 return {'count' : ctable.find(mongoqry_dict,mongofields_dict,skip=limitby_skip, limit=limitby_limit, sort=mongosort_list,snapshot=snapshot).count()}
4266 else:
4267 mongo_list_dicts = ctable.find(mongoqry_dict,mongofields_dict,skip=limitby_skip, limit=limitby_limit, sort=mongosort_list,snapshot=snapshot)
4268 logger.debug("mongo_list_dicts=%s" % mongo_list_dicts)
4269
4270
4271
4272
4273
4274
4275 rows = []
4276 for record in mongo_list_dicts:
4277 row=[]
4278 for column in record:
4279 if withId and (column == '_id'):
4280 if isinstance(record[column],pymongo.objectid.ObjectId):
4281 row.append( int(str(record[column]),16))
4282 else:
4283
4284 row.append( record[column] )
4285 elif not (column == '_id'):
4286 row.append(record[column])
4287 rows.append(row)
4288
4289
4290 return self.parse(rows,fields,mongofields_dict.keys(),False)
4291
4295
4296 - def drop(self, table, mode=''):
4297 ctable = self.connection[table._tablename]
4298 ctable.drop()
4299
4300
4301 - def truncate(self,table,mode,safe=None):
4302 if safe is None:
4303 safe=self.safe
4304 ctable = self.connection[table._tablename]
4305 ctable.remove(None, safe=safe)
4306
4307
4308 - def oupdate(self,tablename,query,fields):
4309 if not isinstance(query,Query):
4310 raise SyntaxError, "Not Supported"
4311 filter = None
4312 if query:
4313 filter = self.expand(query)
4314 f_v = []
4315
4316
4317 modify = { '$set' : dict(((k.name,self.represent(v,k.type)) for k,v in fields)) }
4318 return modify,filter
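# A sketch of what oupdate builds (collection and values hypothetical):
# for db(db.person.name=='Tim').update(name='Tom') it returns roughly
#   modify = {'$set': {'name': 'Tom'}}
#   filter = {'name': 'Tim'}
# which are then handed to pymongo's collection.update().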
4319
4320
4321
4322
4323
4324 - def update(self,tablename,query,fields,safe=None):
4326 if safe is None:
4326 safe=self.safe
4327
4328 if not isinstance(query,Query):
4329 raise RuntimeError, "Not implemented"
4330 amount = self.count(query,False)
4331 modify,filter = self.oupdate(tablename,query,fields)
4332 try:
4333 if safe:
4334 return self.connection[tablename].update(filter,modify,multi=True,safe=safe)["n"]
4335 else:
4336 amount =self.count(query)
4337 self.connection[tablename].update(filter,modify,multi=True,safe=safe)
4338 return amount
4339 except:
4340
4341 return 0
4342 """
4343 An special update operator that enables the update of specific field
4344 return a dict
4345 """
4346
4347
4348
4349
4350 - def _update(self,tablename,query,fields):
4352
4353 - def bulk_insert(self,table,items):
4354 return [self.insert(table,item) for item in items]
4355
4356
4357 - def NOT(self, first):
4358 result = {}
4359 result["$not"] = self.expand(first)
4360 return result
4361
4362 - def AND(self,first,second):
4367
4368 - def OR(self,first,second):
4369
4370 result = {}
4371 f = self.expand(first)
4372 s = self.expand(second)
4373 result['$or'] = [f,s]
4374 return result
4375
4376 - def BELONGS(self, first, second):
4377 if isinstance(second, str):
4378 return {self.expand(first) : {"$in" : [ second[:-1]]} }
4379 elif second==[] or second==():
4380 return {1:0}
4381 items = [self.expand(item, first.type) for item in second]
4382 return {self.expand(first) : {"$in" : items} }
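# e.g. db(db.person.age.belongs([20, 30])) (names hypothetical) expands
# to roughly {'age': {'$in': [20, 30]}}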
4383
4384 - def LIKE(self, first, second):
4385
4386 return {self.expand(first) : ('%s' % self.expand(second, 'string').replace('%','/'))}
4387
4391
4395
4399
4400 - def EQ(self,first,second):
4401 result = {}
4402
4403
4404
4405 result[self.expand(first)] = self.expand(second)
4406 return result
4407
4408 - def NE(self, first, second=None):
4409 logger.debug("in NE")
4410 result = {}
4411 result[self.expand(first)] = {'$ne': self.expand(second)}
4412 return result
4413
4414 - def LT(self,first,second=None):
4415 if second is None:
4416 raise RuntimeError, "Cannot compare %s < None" % first
4417 logger.debug("in LT")
4418 result = {}
4419 result[self.expand(first)] = {'$lt': self.expand(second)}
4420 return result
4421
4422 - def LE(self,first,second=None):
4423 if second is None:
4424 raise RuntimeError, "Cannot compare %s <= None" % first
4425 logger.debug("in LE")
4426 result = {}
4427 result[self.expand(first)] = {'$lte': self.expand(second)}
4428 return result
4429
4430 - def GT(self,first,second):
4431 logger.debug("in GT")
4432
4433 result = {}
4434
4435
4436
4437
4438
4439 result[self.expand(first)] = {'$gt': self.expand(second)}
4440 return result
4441
4442 - def GE(self,first,second=None):
4443 if second is None:
4444 raise RuntimeError, "Cannot compare %s >= None" % first
4445 logger.debug("in GE")
4446 result = {}
4447 result[self.expand(first)] = {'$gte': self.expand(second)}
4448 return result
4449
4450 - def ADD(self, first, second):
4451 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4452 return '%s + %s' % (self.expand(first), self.expand(second, first.type))
4453
4454 - def SUB(self, first, second):
4455 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4456 return '(%s - %s)' % (self.expand(first), self.expand(second, first.type))
4457
4458 - def MUL(self, first, second):
4459 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4460 return '(%s * %s)' % (self.expand(first), self.expand(second, first.type))
4461
4462 - def DIV(self, first, second):
4463 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4464 return '(%s / %s)' % (self.expand(first), self.expand(second, first.type))
4465
4466 - def MOD(self, first, second):
4467 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4468 return '(%s %% %s)' % (self.expand(first), self.expand(second, first.type))
4469
4470 - def AS(self, first, second):
4471 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4472 return '%s AS %s' % (self.expand(first), second)
4473
4474
4475 - def ON(self, first, second):
4476 raise NotImplementedError, "This is not possible in NoSQL, but can be simulated with a wrapper."
4477 return '%s ON %s' % (self.expand(first), self.expand(second))
4478
4479 - def COMMA(self, first, second):
4481
4482 - def bulk_insert(self,table,items):
4483 return [self.insert(table,item) for item in items]
4484
4485
4486 - def NOT(self, first):
4487 result = {}
4488 result["$not"] = self.expand(first)
4489 return result
4490
4491 - def AND(self,first,second):
4496
4497 - def OR(self,first,second):
4498
4499 result = {}
4500 f = self.expand(first)
4501 s = self.expand(second)
4502 result['$or'] = [f,s]
4503 return result
4504
4505 - def BELONGS(self, first, second):
4506 if isinstance(second, str):
4507 return {self.expand(first) : {"$in" : [ second[:-1]]} }
4508 elif second==[] or second==():
4509 return {1:0}
4510 items = [self.expand(item, first.type) for item in second]
4511 return {self.expand(first) : {"$in" : items} }
4512
4513
4514 - def LIKE(self, first, second):
4515 import re
4516 return {self.expand(first) : {'$regex' : re.escape(self.expand(second, 'string')).replace('%','.*')}}
4517
4518
4523
4524
4526
4527 - def ENDSWITH(self, first, second):
4528 import re
4529 return {self.expand(first) : {'$regex' : re.escape(self.expand(second, 'string')) + '$'}}
4530
4531
4536
4537 - def EQ(self,first,second):
4538 result = {}
4539
4540
4541
4542 result[self.expand(first)] = self.expand(second)
4543 return result
4544
4545 - def NE(self, first, second=None):
4546 logger.debug("in NE")
4547 result = {}
4548 result[self.expand(first)] = {'$ne': self.expand(second)}
4549 return result
4550
4551 - def LT(self,first,second=None):
4552 if second is None:
4553 raise RuntimeError, "Cannot compare %s < None" % first
4554 logger.debug("in LT")
4555 result = {}
4556 result[self.expand(first)] = {'$lt': self.expand(second)}
4557 return result
4558
4559 - def LE(self,first,second=None):
4560 if second is None:
4561 raise RuntimeError, "Cannot compare %s <= None" % first
4562 logger.debug("in LE")
4563 result = {}
4564 result[self.expand(first)] = {'$lte': self.expand(second)}
4565 return result
4566
4567 - def GT(self,first,second):
4568 logger.debug("in GT")
4569
4570 result = {}
4571
4572
4573
4574
4575
4576 result[self.expand(first)] = {'$gt': self.expand(second)}
4577 return result
4578
4579 - def GE(self,first,second=None):
4580 if second is None:
4581 raise RuntimeError, "Cannot compare %s >= None" % first
4582 logger.debug("in GE")
4583 result = {}
4584 result[self.expand(first)] = {'$gte': self.expand(second)}
4585 return result
4586
4587
4588 - def ADD(self, first, second):
4589 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4590 return '%s + %s' % (self.expand(first), self.expand(second, first.type))
4591
4592
4593 - def SUB(self, first, second):
4594 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4595 return '(%s - %s)' % (self.expand(first), self.expand(second, first.type))
4596
4597
4598 - def MUL(self, first, second):
4599 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4600 return '(%s * %s)' % (self.expand(first), self.expand(second, first.type))
4601
4602
4603 - def DIV(self, first, second):
4604 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4605 return '(%s / %s)' % (self.expand(first), self.expand(second, first.type))
4606
4607 - def MOD(self, first, second):
4608 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4609 return '(%s %% %s)' % (self.expand(first), self.expand(second, first.type))
4610
4611
4612 - def AS(self, first, second):
4613 raise NotImplementedError, "Not implemented yet: this operation would have to be expressed in server-side JavaScript"
4614 return '%s AS %s' % (self.expand(first), second)
4615
4616
4617 - def ON(self, first, second):
4618 raise NotImplementedError, "This is not possible in NoSQL, but can be simulated with a wrapper."
4619 return '%s ON %s' % (self.expand(first), self.expand(second))
4620
4621
4622 - def COMMA(self, first, second):
4624
4625
4626
4628 """ IMAP server adapter
4629
4630 This class is intended as an interface with
4631 email IMAP servers to perform simple queries in the
4632 web2py DAL query syntax, so email read, search and
4633 other related IMAP mail services (such as those implemented
4634 by brands like Google(r) and Yahoo!(r))
4635 can be managed from web2py applications.
4636
4637 The code uses examples by Yuji Tomita on this post:
4638 http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
4639 and is based on the docs for Python imaplib, the python email
4640 module and the email IETF RFCs (i.e. RFC2060 and RFC3501)
4641
4642 This adapter was tested with a small set of operations against Gmail(r).
4643 Requests to other services could raise command syntax and response data issues.
4644
4645 It creates its table and field names "statically",
4646 meaning that the developer should leave the table and field
4647 definitions to the DAL instance by calling the adapter's
4648 .define_tables() method. The tables are defined with the
4649 IMAP server mailbox list information.
4650
4651 Here is a list of supported fields:
4652
4653 Field Type Description
4654 ################################################################
4655 uid string
4656 answered boolean Flag
4657 created date
4658 content list:string A list of text or html parts
4659 to string
4660 cc string
4661 bcc string
4662 size integer the number of octets in the message*
4663 deleted boolean Flag
4664 draft boolean Flag
4665 flagged boolean Flag
4666 sender string
4667 recent boolean Flag
4668 seen boolean Flag
4669 subject string
4670 mime string The mime header declaration
4671 email string The complete RFC822 message**
4672 attachments list:string Each non text decoded part as string
4673
4674 *At the application side it is measured as the length of the RFC822
4675 message string
4676
4677 WARNING: As row ids are mapped to email sequence numbers,
4678 make sure your imap client web2py app does not delete messages
4679 during select or update actions, to prevent
4680 updating or deleting different messages.
4681 Sequence numbers change whenever the mailbox is updated.
4682 To avoid these sequence number issues, the use of uid fields
4683 in query references is recommended (although the update and delete
4684 in separate actions rule still applies).
4685
4686 # This is the code recommended to start imap support
4687 # at the app's model:
4688
4689 imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
4690 imapdb.define_tables()
4691
4692 Here is an (incomplete) list of possible imap commands:
4693
4694 # Count today's unseen messages
4695 # smaller than 6000 octets from the
4696 # inbox mailbox
4697
4698 q = imapdb.INBOX.seen == False
4699 q &= imapdb.INBOX.created == datetime.date.today()
4700 q &= imapdb.INBOX.size < 6000
4701 unread = imapdb(q).count()
4702
4703 # Fetch last query messages
4704 rows = imapdb(q).select()
4705
4706 # it is also possible to filter query select results with limitby and
4707 # sequences of mailbox fields
4708
4709 set.select(<fields sequence>, limitby=(<int>, <int>))
4710
4711 # Mark last query messages as seen
4712 messages = [row.uid for row in rows]
4713 seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
4714
4715 # Delete messages in the imap database that have mails from mr. Gumby
4716
4717 deleted = 0
4718 for mailbox in imapdb.tables:
4719 deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
4720
4721 # It is also possible to mark messages for deletion instead of erasing them
4722 # directly, with set.update(deleted=True)
4723
4724 """
4725
4726 types = {
4727 'string': str,
4728 'text': str,
4729 'date': datetime.date,
4730 'datetime': datetime.datetime,
4731 'id': long,
4732 'boolean': bool,
4733 'integer': int,
4734 'blob': str,
4735 'list:string': str,
4736 }
4737
4738 dbengine = 'imap'
4739 driver = globals().get('imaplib',None)
4740
4741 - def __init__(self,
4742 db,
4743 uri,
4744 pool_size=0,
4745 folder=None,
4746 db_codec ='UTF-8',
4747 credential_decoder=lambda x:x,
4748 driver_args={},
4749 adapter_args={}):
4750
4751
4752
4753
4754 uri = uri.split("://")[1]
4755 self.db = db
4756 self.uri = uri
4757 self.pool_size=pool_size
4758 self.folder = folder
4759 self.db_codec = db_codec
4760 self.credential_decoder = credential_decoder
4761 self.driver_args = driver_args
4762 self.adapter_args = adapter_args
4763 self.mailbox_size = None
4764 self.charset = sys.getfilesystemencoding()
4765
4766 self.imap4 = None
4767
4768 """ MESSAGE is an identifier for sequence number"""
4769
4770 self.flags = ['\\Deleted', '\\Draft', '\\Flagged',
4771 '\\Recent', '\\Seen', '\\Answered']
4772 self.search_fields = {
4773 'id': 'MESSAGE', 'created': 'DATE',
4774 'uid': 'UID', 'sender': 'FROM',
4775 'to': 'TO', 'cc': 'CC',
4776 'bcc': 'BCC', 'content': 'TEXT',
4777 'size': 'SIZE', 'deleted': '\\Deleted',
4778 'draft': '\\Draft', 'flagged': '\\Flagged',
4779 'recent': '\\Recent', 'seen': '\\Seen',
4780 'subject': 'SUBJECT', 'answered': '\\Answered',
4781 'mime': None, 'email': None,
4782 'attachments': None
4783 }
4784
4785 db['_lastsql'] = ''
4786
4787 m = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?$').match(uri)
4788 user = m.group('user')
4789 password = m.group('password')
4790 host = m.group('host')
4791 port = int(m.group('port'))
4792 over_ssl = False
4793 if port==993:
4794 over_ssl = True
4795
4796 driver_args.update(dict(host=host,port=port, password=password, user=user))
4797 def connect(driver_args=driver_args):
4798
4799
4800 if over_ssl:
4801 self.imap4 = self.driver.IMAP4_SSL
4802 else:
4803 self.imap4 = self.driver.IMAP4
4804 connection = self.imap4(driver_args["host"], driver_args["port"])
4805 data = connection.login(driver_args["user"], driver_args["password"])
4806
4807
4808
4809 connection.mailbox_names = None
4810
4811
4812 connection.cursor = lambda : True
4813
4814 return connection
4815
4816 self.pool_connection(connect)
4817 self.db.define_tables = self.define_tables
4818
4820 """
4821 IMAP4 Pool connection method
4822
4823 The imap connection lacks a cursor command.
4824 A custom command should be provided as a replacement
4825 for connection pooling, to prevent uncaught remote
4826 session closing
4827
4828 """
4829
4830 if not self.pool_size:
4831 self.connection = f()
4832 self.cursor = cursor and self.connection.cursor()
4833 else:
4834 uri = self.uri
4835
4836 while True:
4837 sql_locker.acquire()
4838 if not uri in ConnectionPool.pools:
4839 ConnectionPool.pools[uri] = []
4840 if ConnectionPool.pools[uri]:
4841 self.connection = ConnectionPool.pools[uri].pop()
4842 sql_locker.release()
4843 self.cursor = cursor and self.connection.cursor()
4844
4845 if self.cursor and self.check_active_connection:
4846 try:
4847
4848 result, data = self.connection.list()
4849
4850
4851
4852 except:
4853
4854
4855
4856 self.connection = f()
4857 break
4858 else:
4859 sql_locker.release()
4860 self.connection = f()
4861 self.cursor = cursor and self.connection.cursor()
4862 break
4863
4864 if not hasattr(thread,'instances'):
4865 thread.instances = []
4866 thread.instances.append(self)
4867
4868 - def get_last_message(self, tablename):
4869 last_message = None
4870
4871
4872 if not isinstance(self.connection.mailbox_names, dict):
4873 self.get_mailboxes()
4874 try:
4875 result = self.connection.select(self.connection.mailbox_names[tablename])
4876 last_message = int(result[1][0])
4877 except (IndexError, ValueError, TypeError, KeyError), e:
4878 logger.debug("Error retrieving the last mailbox sequence number. %s" % str(e))
4879 return last_message
4880
4881 - def get_uid_bounds(self, tablename):
4882 if not isinstance(self.connection.mailbox_names, dict):
4883 self.get_mailboxes()
4884
4885
4886 last_message = self.get_last_message(tablename)
4887 result, data = self.connection.uid("search", None, "(ALL)")
4888 uid_list = data[0].strip().split()
4889 if len(uid_list) <= 0:
4890 return None
4891 else:
4892 return (uid_list[0], uid_list[-1])
4893
4894 - def convert_date(self, date, add=None):
4895 """ Convert a date object to a string
4896 with d-Mon-Y style for IMAP, or the inverse
4897 (parse an IMAP date string into a datetime)
4898
4899 add <timedelta> is added to the date object
4900 """
4901 if add is None:
4902 add = datetime.timedelta()
4903 months = [None, "Jan","Feb","Mar","Apr","May","Jun",
4904 "Jul", "Aug","Sep","Oct","Nov","Dec"]
4905 if isinstance(date, basestring):
4906
4907 try:
4908 dayname, datestring = date.split(",")
4909 except (ValueError):
4910 logger.debug("Could not parse date text: %s" % date)
4911 return None
4912 date_list = datestring.strip().split()
4913 year = int(date_list[2])
4914 month = months.index(date_list[1])
4915 day = int(date_list[0])
4916 hms = [int(value) for value in date_list[3].split(":")]
4917 return datetime.datetime(year, month, day,
4918 hms[0], hms[1], hms[2]) + add
4919 elif isinstance(date, (datetime.datetime, datetime.date)):
4920 return (date + add).strftime("%d-%b-%Y")
4921
4922 else:
4923 return None
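# Examples (values illustrative):
#   convert_date("Tue, 02 Aug 2011 13:15:22 +0000")
#       -> datetime.datetime(2011, 8, 2, 13, 15, 22)
#   convert_date(datetime.date(2011, 8, 2))
#       -> "02-Aug-2011"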
4924
4925 - def encode_text(self, text, charset, errors="replace"):
4926 """ convert text for mail to unicode"""
4927 if text is None:
4928 text = ""
4929 else:
4930 if isinstance(text, str):
4931 if charset is not None:
4932 text = unicode(text, charset, errors)
4933 else:
4934 text = unicode(text, "utf-8", errors)
4935 else:
4936 raise Exception("Unsupported mail text type %s" % type(text))
4937 return text.encode("utf-8")
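# e.g. encode_text('caf\xe9', 'latin-1') decodes the byte string from
# latin-1 and returns the utf-8 encoded result 'caf\xc3\xa9'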
4938
4939 - def get_charset(self, message):
4940 charset = message.get_content_charset()
4941 return charset
4942
4944 self.connection.mailbox_names = None
4945 self.get_mailboxes()
4946
4948 """ Query the mail database for mailbox names """
4949 mailboxes_list = self.connection.list()
4950 self.connection.mailbox_names = dict()
4951 mailboxes = list()
4952 for item in mailboxes_list[1]:
4953 item = item.strip()
4954 if not "NOSELECT" in item.upper():
4955 sub_items = item.split("\"")
4956 sub_items = [sub_item for sub_item in sub_items if len(sub_item.strip()) > 0]
4957 mailbox = sub_items[len(sub_items) - 1]
4958
4959 mailbox_name = mailbox.replace("[", "").replace("]", "").replace("/", "_")
4960 mailboxes.append(mailbox_name)
4961 self.connection.mailbox_names[mailbox_name] = mailbox
4962
4963 return mailboxes
4964
4965 - def get_query_mailbox(self, query):
4966 nofield = True
4967 tablename = None
4968 attr = query
4969 while nofield:
4970 if hasattr(attr, "first"):
4971 attr = attr.first
4972 if isinstance(attr, Field):
4973 return attr.tablename
4974 elif isinstance(attr, Query):
4975 pass
4976 else:
4977 return None
4978 else:
4979 return None
4980 return tablename
4981
4982 - def is_flag(self, flag):
4983 if self.search_fields.get(flag, None) in self.flags:
4984 return True
4985 else:
4986 return False
4987
4989 """
4990 Auto create common IMAP fileds
4991
4992 This function creates fields definitions "statically"
4993 meaning that custom fields as in other adapters should
4994 not be supported and definitions handled on a service/mode
4995 basis (local syntax for Gmail(r), Ymail(r)
4996 """
4997 if not isinstance(self.connection.mailbox_names, dict):
4998 self.get_mailboxes()
4999 mailboxes = self.connection.mailbox_names.keys()
5000 for mailbox_name in mailboxes:
5001 self.db.define_table("%s" % mailbox_name,
5002 Field("uid", "string", writable=False),
5003 Field("answered", "boolean"),
5004 Field("created", "datetime", writable=False),
5005 Field("content", "list:string", writable=False),
5006 Field("to", "string", writable=False),
5007 Field("cc", "string", writable=False),
5008 Field("bcc", "string", writable=False),
5009 Field("size", "integer", writable=False),
5010 Field("deleted", "boolean"),
5011 Field("draft", "boolean"),
5012 Field("flagged", "boolean"),
5013 Field("sender", "string", writable=False),
5014 Field("recent", "boolean", writable=False),
5015 Field("seen", "boolean"),
5016 Field("subject", "string", writable=False),
5017 Field("mime", "string", writable=False),
5018 Field("email", "string", writable=False, readable=False),
5019 Field("attachments", "list:string", writable=False, readable=False),
5020 )
5021
5022 - def create_table(self, *args, **kwargs):
5024 logger.debug("Create table feature is not implemented for %s" % type(self))
5025
5026 - def _select(self,query,fields,attributes):
5027 """ Search and Fetch records and return web2py
5028 rows
5029 """
5030
5031 if use_common_filters(query):
5032 query = self.common_filter(query, [self.get_query_mailbox(query),])
5033
5034
5035 import email
5036 import email.header
5037 decode_header = email.header.decode_header
5038
5039
5040 tablename = None
5041 fetch_results = list()
5042 if isinstance(query, (Expression, Query)):
5043 tablename = self.get_table(query)
5044 mailbox = self.connection.mailbox_names.get(tablename, None)
5045 if isinstance(query, Expression):
5046 pass
5047 elif isinstance(query, Query):
5048 if mailbox is not None:
5049
5050 selected = self.connection.select(mailbox, True)
5051 self.mailbox_size = int(selected[1][0])
5052 search_query = "(%s)" % str(query).strip()
5053
5054
5055 search_result = self.connection.uid("search", None, search_query)
5056
5057
5058
5059 if search_result[0] == "OK":
5060
5061
5062
5063
5064
5065 limitby = attributes.get('limitby', None)
5066 messages_set = search_result[1][0].split()
5067
5068 messages_set.reverse()
5069 if limitby is not None:
5070
5071 messages_set = messages_set[int(limitby[0]):int(limitby[1])]
5072
5073
5074
5075 imap_fields = "(RFC822)"
5076 if len(messages_set) > 0:
5077
5078
5079
5080
5081 for uid in messages_set:
5082
5083 typ, data = self.connection.uid("fetch", uid, imap_fields)
5084 if typ == "OK":
5085 fr = {"message": int(data[0][0].split()[0]),
5086 "uid": int(uid),
5087 "email": email.message_from_string(data[0][1]),
5088 "raw_message": data[0][1]
5089 }
5090 fr["multipart"] = fr["email"].is_multipart()
5091
5092 ftyp, fdata = self.connection.uid("fetch", uid, "(FLAGS)")
5093 if ftyp == "OK":
5094
5095 fr["flags"] = self.driver.ParseFlags(fdata[0])
5096
5097 fetch_results.append(fr)
5098 else:
5099
5100 pass
5101 else:
5102
5103 pass
5104
5105 elif isinstance(query, basestring):
5106
5107 pass
5108 else:
5109 pass
5110
5111 imapqry_dict = {}
5112 imapfields_dict = {}
5113
5114 if len(fields) == 1 and isinstance(fields[0], SQLALL):
5115 allfields = True
5116 elif len(fields) == 0:
5117 allfields = True
5118 else:
5119 allfields = False
5120 if allfields:
5121 fieldnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
5122 else:
5123 fieldnames = ["%s.%s" % (tablename, field.name) for field in fields]
5124
5125 for k in fieldnames:
5126 imapfields_dict[k] = k
5127
5128 imapqry_list = list()
5129 imapqry_array = list()
5130 for fr in fetch_results:
5131 attachments = []
5132 content = []
5133 size = 0
5134 n = int(fr["message"])
5135 item_dict = dict()
5136 message = fr["email"]
5137 uid = fr["uid"]
5138 charset = self.get_charset(message)
5139 flags = fr["flags"]
5140 raw_message = fr["raw_message"]
5141
5142
5143
5144
5145
5146
5147
5148
5149 if "%s.id" % tablename in fieldnames:
5150 item_dict["%s.id" % tablename] = n
5151 if "%s.created" % tablename in fieldnames:
5152 item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
5153 if "%s.uid" % tablename in fieldnames:
5154 item_dict["%s.uid" % tablename] = uid
5155 if "%s.sender" % tablename in fieldnames:
5156
5157
5158
5159
5160
5161
5162 item_dict["%s.sender" % tablename] = self.encode_text(message["From"], charset)
5163 if "%s.to" % tablename in fieldnames:
5164 item_dict["%s.to" % tablename] = self.encode_text(message["To"], charset)
5165 if "%s.cc" % tablename in fieldnames:
5166 if "Cc" in message.keys():
5167
5168 item_dict["%s.cc" % tablename] = self.encode_text(message["Cc"], charset)
5169 else:
5170 item_dict["%s.cc" % tablename] = ""
5171 if "%s.bcc" % tablename in fieldnames:
5172 if "Bcc" in message.keys():
5173
5174 item_dict["%s.bcc" % tablename] = self.encode_text(message["Bcc"], charset)
5175 else:
5176 item_dict["%s.bcc" % tablename] = ""
5177 if "%s.deleted" % tablename in fieldnames:
5178 item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
5179 if "%s.draft" % tablename in fieldnames:
5180 item_dict["%s.draft" % tablename] = "\\Draft" in flags
5181 if "%s.flagged" % tablename in fieldnames:
5182 item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
5183 if "%s.recent" % tablename in fieldnames:
5184 item_dict["%s.recent" % tablename] = "\\Recent" in flags
5185 if "%s.seen" % tablename in fieldnames:
5186 item_dict["%s.seen" % tablename] = "\\Seen" in flags
5187 if "%s.subject" % tablename in fieldnames:
5188 subject = message["Subject"]
5189 decoded_subject = decode_header(subject)
5190 text = decoded_subject[0][0]
5191 encoding = decoded_subject[0][1]
5192 if encoding in (None, ""):
5193 encoding = charset
5194 item_dict["%s.subject" % tablename] = self.encode_text(text, encoding)
5195 if "%s.answered" % tablename in fieldnames:
5196 item_dict["%s.answered" % tablename] = "\\Answered" in flags
5197 if "%s.mime" % tablename in fieldnames:
5198 item_dict["%s.mime" % tablename] = message.get_content_type()
5199
5200
5201
5202
5203
5204 if "%s.email" % tablename in fieldnames:
5205 item_dict["%s.email" % tablename] = self.encode_text(raw_message, charset)
5206
5207
5208
5209
5210
5211 for part in message.walk():
5212 if "%s.attachments" % tablename in fieldnames:
5213 if not "text" in part.get_content_maintype():
5214 attachments.append(part.get_payload(decode=True))
5215 if "%s.content" % tablename in fieldnames:
5216 if "text" in part.get_content_maintype():
5217 payload = self.encode_text(part.get_payload(decode=True), charset)
5218 content.append(payload)
5219 if "%s.size" % tablename in fieldnames:
5220 if part is not None:
5221 size += len(str(part))
5222
5223 item_dict["%s.content" % tablename] = bar_encode(content)
5224 item_dict["%s.attachments" % tablename] = bar_encode(attachments)
5225 item_dict["%s.size" % tablename] = size
5226
5227 imapqry_list.append(item_dict)
5228
5229
5230
5231 for item_dict in imapqry_list:
5232 imapqry_array_item = list()
5233 for fieldname in fieldnames:
5234 imapqry_array_item.append(item_dict[fieldname])
5235 imapqry_array.append(imapqry_array_item)
5236
5237 return tablename, imapqry_array, fieldnames
5238
5239 - def select(self,query,fields,attributes):
5240 tablename, imapqry_array , fieldnames = self._select(query,fields,attributes)
5241
5242 colnames = fieldnames
5243 result = self.parse(imapqry_array, fields, colnames)
5244 return result
5245
5246 - def update(self, tablename, query, fields):
5247
5248
5249 if use_common_filters(query):
5250 query = self.common_filter(query, [tablename,])
5251
5252 mark = []
5253 unmark = []
5254 rowcount = 0
5255 query = str(query)
5256 if query:
5257 for item in fields:
5258 field = item[0]
5259 name = field.name
5260 value = item[1]
5261 if self.is_flag(name):
5262 flag = self.search_fields[name]
5263 if (value is not None) and (flag != "\\Recent"):
5264 if value:
5265 mark.append(flag)
5266 else:
5267 unmark.append(flag)
5268
5269
5270 result, data = self.connection.select(self.connection.mailbox_names[tablename])
5271
5272 string_query = "(%s)" % query
5273
5274 result, data = self.connection.search(None, string_query)
5275 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
5276
5277
5278 for number in store_list:
5279 result = None
5280 if len(mark) > 0:
5281
5282 result, data = self.connection.store(number, "+FLAGS", "(%s)" % " ".join(mark))
5283 if len(unmark) > 0:
5284
5285 result, data = self.connection.store(number, "-FLAGS", "(%s)" % " ".join(unmark))
5286 if result == "OK":
5287 rowcount += 1
5288 return rowcount
5289
5290 - def count(self,query,distinct=None):
5291 counter = 0
5292 tablename = self.get_query_mailbox(query)
5293 if query and tablename is not None:
5294 if use_common_filters(query):
5295 query = self.common_filter(query, [tablename,])
5296
5297 result, data = self.connection.select(self.connection.mailbox_names[tablename])
5298
5299 string_query = "(%s)" % query
5300 result, data = self.connection.search(None, string_query)
5301 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
5302 counter = len(store_list)
5303 return counter
5304
5305 - def delete(self, tablename, query):
5306 counter = 0
5307 if query:
5308
5309 if use_common_filters(query):
5310 query = self.common_filter(query, [tablename,])
5311 result, data = self.connection.select(self.connection.mailbox_names[tablename])
5312
5313 string_query = "(%s)" % query
5314 result, data = self.connection.search(None, string_query)
5315 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
5316 for number in store_list:
5317 result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
5318
5319 if result == "OK":
5320 counter += 1
5321 if counter > 0:
5322
5323 result, data = self.connection.expunge()
5324 return counter
5325
5326 - def BELONGS(self, first, second):
5327 result = None
5328 name = self.search_fields[first.name]
5329 if name == "MESSAGE":
5330 values = [str(val) for val in second if str(val).isdigit()]
5331 result = "%s" % ",".join(values).strip()
5332
5333 elif name == "UID":
5334 values = [str(val) for val in second if str(val).isdigit()]
5335 result = "UID %s" % ",".join(values).strip()
5336
5337 else:
5338 raise Exception("Operation not supported")
5339
5340 return result
5341
5342 - def CONTAINS(self, first, second):
5343 result = None
5344 name = self.search_fields[first.name]
5345
5346 if name in ("FROM", "TO", "SUBJECT", "TEXT"):
5347 result = "%s \"%s\"" % (name, self.expand(second))
5348 else:
5349 if first.name in ("cc", "bcc"):
5350 result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
5351 elif first.name == "mime":
5352 result = "HEADER Content-Type \"%s\"" % self.expand(second)
5353 else:
5354 raise Exception("Operation not supported")
5355 return result
5356
5357 - def GT(self, first, second):
5358 result = None
5359 name = self.search_fields[first.name]
5360 if name == "MESSAGE":
5361 last_message = self.get_last_message(first.tablename)
5362 result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
5363 elif name == "UID":
5364
5365
5366
5367 try:
5368 pedestal, threshold = self.get_uid_bounds(first.tablename)
5369 except TypeError, e:
5370 logger.debug("Error requesting uid bounds: %s", str(e))
5371 return ""
5372 try:
5373 lower_limit = int(self.expand(second)) + 1
5374 except (ValueError, TypeError), e:
5375 raise Exception("Operation not supported (non integer UID)")
5376 result = "UID %s:%s" % (lower_limit, threshold)
5377 elif name == "DATE":
5378 result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
5379 elif name == "SIZE":
5380 result = "LARGER %s" % self.expand(second)
5381 else:
5382 raise Exception("Operation not supported")
5383 return result
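# e.g. imapdb.INBOX.size > 6000 translates to the IMAP search key
# "LARGER 6000"; a created > <date> query becomes "SINCE <d-Mon-Y>"
# with one day added, since IMAP SINCE is inclusive.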
5384
5385 - def GE(self, first, second):
5386 result = None
5387 name = self.search_fields[first.name]
5388 if name == "MESSAGE":
5389 last_message = self.get_last_message(first.tablename)
5390 result = "%s:%s" % (self.expand(second), last_message)
5391 elif name == "UID":
5392
5393
5394
5395 try:
5396 pedestal, threshold = self.get_uid_bounds(first.tablename)
5397 except TypeError, e:
5398 logger.debug("Error requesting uid bounds: %s", str(e))
5399 return ""
5400 lower_limit = self.expand(second)
5401 result = "UID %s:%s" % (lower_limit, threshold)
5402 elif name == "DATE":
5403 result = "SINCE %s" % self.convert_date(second)
5404 else:
5405 raise Exception("Operation not supported")
5406 return result
5407
5408 - def LT(self, first, second):
5409 result = None
5410 name = self.search_fields[first.name]
5411 if name == "MESSAGE":
5412 result = "%s:%s" % (1, int(self.expand(second)) - 1)
5413 elif name == "UID":
5414 try:
5415 pedestal, threshold = self.get_uid_bounds(first.tablename)
5416 except TypeError, e:
5417 logger.debug("Error requesting uid bounds: %s", str(e))
5418 return ""
5419 try:
5420 upper_limit = int(self.expand(second)) - 1
5421 except (ValueError, TypeError), e:
5422 raise Exception("Operation not supported (non integer UID)")
5423 result = "UID %s:%s" % (pedestal, upper_limit)
5424 elif name == "DATE":
5425 result = "BEFORE %s" % self.convert_date(second)
5426 elif name == "SIZE":
5427 result = "SMALLER %s" % self.expand(second)
5428 else:
5429 raise Exception("Operation not supported")
5430 return result
5431
5432 - def LE(self, first, second):
5433 result = None
5434 name = self.search_fields[first.name]
5435 if name == "MESSAGE":
5436 result = "%s:%s" % (1, self.expand(second))
5437 elif name == "UID":
5438 try:
5439 pedestal, threshold = self.get_uid_bounds(first.tablename)
5440 except TypeError, e:
5441 logger.debug("Error requesting uid bounds: %s", str(e))
5442 return ""
5443 upper_limit = int(self.expand(second))
5444 result = "UID %s:%s" % (pedestal, upper_limit)
5445 elif name == "DATE":
5446 result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
5447 else:
5448 raise Exception("Operation not supported")
5449 return result
5450
5451 - def NE(self, first, second):
5452 result = self.NOT(self.EQ(first, second))
5453 result = result.replace("NOT NOT", "").strip()
5454 return result
5455
5456 - def EQ(self,first,second):
5457 name = self.search_fields[first.name]
5458 result = None
5459 if name is not None:
5460 if name == "MESSAGE":
5461
5462 result = "%s" % self.expand(second)
5463 elif name == "UID":
5464 result = "UID %s" % self.expand(second)
5465 elif name == "DATE":
5466 result = "ON %s" % self.convert_date(second)
5467
5468 elif name in self.flags:
5469 if second:
5470 result = "%s" % (name.upper()[1:])
5471 else:
5472 result = "NOT %s" % (name.upper()[1:])
5473 else:
5474 raise Exception("Operation not supported")
5475 else:
5476 raise Exception("Operation not supported")
5477 return result
5478
5479 - def AND(self, first, second):
5480 result = "%s %s" % (self.expand(first), self.expand(second))
5481 return result
5482
5483 - def OR(self, first, second):
5484 result = "OR %s %s" % (self.expand(first), self.expand(second))
5485 return "%s" % result.replace("OR OR", "OR")
5486
5487 - def NOT(self, first):
5488 result = "NOT %s" % self.expand(first)
5489 return result
5490
5491
5492
5493
5494
5495 ADAPTERS = {
5496 'sqlite': SQLiteAdapter,
5497 'sqlite:memory': SQLiteAdapter,
5498 'mysql': MySQLAdapter,
5499 'postgres': PostgreSQLAdapter,
5500 'postgres:psycopg2': PostgreSQLAdapter,
5501 'postgres:pg8000': PostgreSQLAdapter,
5502 'oracle': OracleAdapter,
5503 'mssql': MSSQLAdapter,
5504 'mssql2': MSSQL2Adapter,
5505 'db2': DB2Adapter,
5506 'teradata': TeradataAdapter,
5507 'informix': InformixAdapter,
5508 'firebird': FireBirdAdapter,
5509 'firebird_embedded': FireBirdAdapter,
5510 'ingres': IngresAdapter,
5511 'ingresu': IngresUnicodeAdapter,
5512 'sapdb': SAPDBAdapter,
5513 'cubrid': CubridAdapter,
5514 'jdbc:sqlite': JDBCSQLiteAdapter,
5515 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
5516 'jdbc:postgres': JDBCPostgreSQLAdapter,
5517 'gae': GoogleDatastoreAdapter,
5518 'google:datastore': GoogleDatastoreAdapter,
5519 'google:sql': GoogleSQLAdapter,
5520 'couchdb': CouchDBAdapter,
5521 'mongodb': MongoDBAdapter,
5522 'imap': IMAPAdapter
5523 }
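# The uri scheme selects the adapter from this table, e.g. (illustrative):
#   DAL('couchdb://127.0.0.1:5984')  uses CouchDBAdapter
#   DAL('jdbc:sqlite://test.db')     uses JDBCSQLiteAdapter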
5524
5525
5527 """
5528 Field type validation, using web2py's validators mechanism.
5529
5530 makes sure the content of a field is in line with the declared
5531 fieldtype
5532 """
5533 if not have_validators:
5534 return []
5535 field_type, field_length = field.type, field.length
5536 if isinstance(field_type, SQLCustomType):
5537 if hasattr(field_type, 'validator'):
5538 return field_type.validator
5539 else:
5540 field_type = field_type.type
5541 elif not isinstance(field_type,str):
5542 return []
5543 requires=[]
5544 def ff(r,id):
5545 row=r(id)
5546 if not row:
5547 return id
5548 elif hasattr(r, '_format') and isinstance(r._format,str):
5549 return r._format % row
5550 elif hasattr(r, '_format') and callable(r._format):
5551 return r._format(row)
5552 else:
5553 return id
5554 if field_type == 'string':
5555 requires.append(validators.IS_LENGTH(field_length))
5556 elif field_type == 'text':
5557 requires.append(validators.IS_LENGTH(field_length))
5558 elif field_type == 'password':
5559 requires.append(validators.IS_LENGTH(field_length))
5560 elif field_type == 'double':
5561 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
5562 elif field_type == 'integer':
5563 requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
5564 elif field_type.startswith('decimal'):
5565 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
5566 elif field_type == 'date':
5567 requires.append(validators.IS_DATE())
5568 elif field_type == 'time':
5569 requires.append(validators.IS_TIME())
5570 elif field_type == 'datetime':
5571 requires.append(validators.IS_DATETIME())
5572 elif field.db and field_type.startswith('reference') and \
5573 field_type.find('.') < 0 and \
5574 field_type[10:] in field.db.tables:
5575 referenced = field.db[field_type[10:]]
5576 def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
5577 field.represent = field.represent or repr_ref
5578 if hasattr(referenced, '_format') and referenced._format:
5579 requires = validators.IS_IN_DB(field.db,referenced._id,
5580 referenced._format)
5581 if field.unique:
5582 requires._and = validators.IS_NOT_IN_DB(field.db,field)
5583 if field.tablename == field_type[10:]:
5584 return validators.IS_EMPTY_OR(requires)
5585 return requires
5586 elif field.db and field_type.startswith('list:reference') and \
5587 field_type.find('.') < 0 and \
5588 field_type[15:] in field.db.tables:
5589 referenced = field.db[field_type[15:]]
5590 def list_ref_repr(ids, row=None, r=referenced, f=ff):
5591 if not ids:
5592 return None
5593 refs = r._db(r._id.belongs(ids)).select(r._id)
5594 return (refs and ', '.join(str(f(r,ref.id)) for ref in refs) or '')
5595 field.represent = field.represent or list_ref_repr
5596 if hasattr(referenced, '_format') and referenced._format:
5597 requires = validators.IS_IN_DB(field.db,referenced._id,
5598 referenced._format,multiple=True)
5599 else:
5600 requires = validators.IS_IN_DB(field.db,referenced._id,
5601 multiple=True)
5602 if field.unique:
5603 requires._and = validators.IS_NOT_IN_DB(field.db,field)
5604 return requires
5605 elif field_type.startswith('list:'):
5606 def repr_list(values,row=None): return ', '.join(str(v) for v in (values or []))
5607 field.represent = field.represent or repr_list
5608 if field.unique:
5609 requires.insert(0,validators.IS_NOT_IN_DB(field.db,field))
5610 sff = ['in', 'do', 'da', 'ti', 'de', 'bo'] # prefixes of integer, double, date/datetime, time, decimal, boolean
5611 if field.notnull and not field_type[:2] in sff:
5612 requires.insert(0, validators.IS_NOT_EMPTY())
5613 elif not field.notnull and field_type[:2] in sff and requires:
5614 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
5615 return requires
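# Usage sketch: this is what the DAL consults to build default validators
# for a field with no explicit requires, e.g. (hypothetical table):
#   db.define_table('person', Field('name', 'string', length=32))
#   # db.person.name now carries validators.IS_LENGTH(32)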
5616
5617
5618 - def bar_escape(item):
5619 return str(item).replace('|', '||')
5620
5621 - def bar_encode(items):
5622 return '|%s|' % '|'.join(bar_escape(item) for item in items)
5623
5624 - def bar_decode_integer(value):
5625 return [int(x) for x in value.split('|') if x.strip()]
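# e.g. bar_escape('a|b') -> 'a||b'
#      bar_decode_integer('|1|2|') -> [1, 2]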
5626
5629
5630
5631 class Row(dict):
5632
5633 """
5634 a dictionary that lets you do d['a'] as well as d.a
5635 this is only used to store a Row
5636 """
5637
5638 - def __getitem__(self, key):
5639 key = str(key)
5640 m = regex_table_field.match(key)
5641 if key in self.get('_extra',{}):
5642 return self._extra[key]
5643 elif m:
5644 try:
5645 return dict.__getitem__(self, m.group(1))[m.group(2)]
5646 except (KeyError,TypeError):
5647 key = m.group(2)
5648 return dict.__getitem__(self, key)
5649
5652
5655
5658
5661
5662 - def __repr__(self):
5663 return '<Row ' + dict.__repr__(self) + '>'
5664
5667
5668 - def __eq__(self,other):
5669 try:
5670 return self.as_dict() == other.as_dict()
5671 except AttributeError:
5672 return False
5673
5674 - def __ne__(self,other):
5675 return not (self == other)
5676
5677 - def __copy__(self):
5678 return Row(dict(self))
5679
5680 - def as_dict(self,datetime_to_str=False):
5681 SERIALIZABLE_TYPES = (str,unicode,int,long,float,bool,list)
5682 d = dict(self)
5683 for k in copy.copy(d.keys()):
5684 v=d[k]
5685 if d[k] is None:
5686 continue
5687 elif isinstance(v,Row):
5688 d[k]=v.as_dict()
5689 elif isinstance(v,Reference):
5690 d[k]=int(v)
5691 elif isinstance(v,decimal.Decimal):
5692 d[k]=float(v)
5693 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
5694 if datetime_to_str:
5695 d[k] = v.isoformat().replace('T',' ')[:19]
5696 elif not isinstance(v,SERIALIZABLE_TYPES):
5697 del d[k]
5698 return d
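# e.g. (illustrative) Row(id=1, created=datetime.datetime(2011, 1, 1)
#      ).as_dict(datetime_to_str=True)
#      -> {'id': 1, 'created': '2011-01-01 00:00:00'}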
5699
5700
5701 - def Row_unpickler(data):
5702 return Row(cPickle.loads(data))
5703
5704 - def Row_pickler(data):
5705 return Row_unpickler, (cPickle.dumps(data.as_dict(datetime_to_str=False)),)
5706
5707 copy_reg.pickle(Row, Row_pickler, Row_unpickler)
5708
5709
5710
5711
5712
5713
5714
5715 class SQLCallableList(list):
5716 - def __call__(self):
5717 return copy.copy(self)
5718
5719 - def smart_query(fields,text):
5720 if not isinstance(fields,(list,tuple)):
5721 fields = [fields]
5722 new_fields = []
5723 for field in fields:
5724 if isinstance(field,Field):
5725 new_fields.append(field)
5726 elif isinstance(field,Table):
5727 for ofield in field:
5728 new_fields.append(ofield)
5729 else:
5730 raise RuntimeError, "fields must be a list of fields"
5731 fields = new_fields
5732 field_map = {}
5733 for field in fields:
5734 n = field.name.lower()
5735 if not n in field_map:
5736 field_map[n] = field
5737 n = str(field).lower()
5738 if not n in field_map:
5739 field_map[n] = field
5740 re_constants = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
5741 constants = {}
5742 i = 0
5743 while True:
5744 m = re_constants.search(text)
5745 if not m: break
5746 text = text[:m.start()]+('#%i' % i)+text[m.end():]
5747 constants[str(i)] = m.group()[1:-1]
5748 i+=1
5749 text = re.sub('\s+',' ',text).lower()
5750 for a,b in [('&','and'),
5751 ('|','or'),
5752 ('~','not'),
5753 ('==','='),
5754 ('<','<'),
5755 ('>','>'),
5756 ('<=','<='),
5757 ('>=','>='),
5758 ('<>','!='),
5759 ('=<','<='),
5760 ('=>','>='),
5761 ('=','='),
5762 (' less or equal than ','<='),
5763 (' greater or equal than ','>='),
5764 (' equal or less than ','<='),
5765 (' equal or greater than ','>='),
5766 (' less or equal ','<='),
5767 (' greater or equal ','>='),
5768 (' equal or less ','<='),
5769 (' equal or greater ','>='),
5770 (' not equal to ','!='),
5771 (' not equal ','!='),
5772 (' equal to ','='),
5773 (' equal ','='),
5774 (' equals ','='),
5775 (' less than ','<'),
5776 (' greater than ','>'),
5777 (' starts with ','startswith'),
5778 (' ends with ','endswith'),
5779 (' is ','=')]:
5780 if a[0]==' ':
5781 text = text.replace(' is'+a,' %s ' % b)
5782 text = text.replace(a,' %s ' % b)
5783 text = re.sub('\s+',' ',text).lower()
5784 text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
5785 query = field = neg = op = logic = None
5786 for item in text.split():
5787 if field is None:
5788 if item == 'not':
5789 neg = True
5790 elif not neg and not logic and item in ('and','or'):
5791 logic = item
5792 elif item in field_map:
5793 field = field_map[item]
5794 else:
5795 raise RuntimeError, "Invalid syntax"
5796 elif not field is None and op is None:
5797 op = item
5798 elif not op is None:
5799 if item.startswith('#'):
5800 if not item[1:] in constants:
5801 raise RuntimeError, "Invalid syntax"
5802 value = constants[item[1:]]
5803 else:
5804 value = item
5805 if op == '=': op = 'like'
5806 if op == '=': new_query = field==value
5807 elif op == '<': new_query = field<value
5808 elif op == '>': new_query = field>value
5809 elif op == '<=': new_query = field<=value
5810 elif op == '>=': new_query = field>=value
5811 elif op == '!=': new_query = field!=value
5812 elif field.type in ('text','string'):
5813 if op == 'contains': new_query = field.contains(value)
5814 elif op == 'like': new_query = field.like(value)
5815 elif op == 'startswith': new_query = field.startswith(value)
5816 elif op == 'endswith': new_query = field.endswith(value)
5817 else: raise RuntimeError, "Invalid operation"
5818 else: raise RuntimeError, "Invalid operation"
5819 if neg: new_query = ~new_query
5820 if query is None:
5821 query = new_query
5822 elif logic == 'and':
5823 query &= new_query
5824 elif logic == 'or':
5825 query |= new_query
5826 field = op = neg = logic = None
5827 return query
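# Usage sketch (hypothetical table):
#   db.define_table('person', Field('name'), Field('age', 'integer'))
#   q = smart_query([db.person],
#                   'person.name starts with "J" and person.age greater than 18')
#   rows = db(q).select()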
5828
5829
5830 class DAL(dict):
5831
5832 """
5833 an instance of this class represents a database connection
5834
5835 Example::
5836
5837 db = DAL('sqlite://test.db')
5838 db.define_table('tablename', Field('fieldname1'),
5839 Field('fieldname2'))
5840 """
5841
5842 @staticmethod
5844 """
5845 # ## this allows gluon to set a folder for this thread
5846 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
5847 """
5848 BaseAdapter.set_folder(folder)
5849
5850 @staticmethod
5863
5864 @staticmethod
5886
5887
5888 - def __init__(self, uri='sqlite://dummy.db',
5889 pool_size=0, folder=None,
5890 db_codec='UTF-8', check_reserved=None,
5891 migrate=True, fake_migrate=False,
5892 migrate_enabled=True, fake_migrate_all=False,
5893 decode_credentials=False, driver_args=None,
5894 adapter_args=None, attempts=5, auto_import=False):
5895 """
5896 Creates a new Database Abstraction Layer instance.
5897
5898 Keyword arguments:
5899
5900 :uri: string that contains information for connecting to a database.
5901 (default: 'sqlite://dummy.db')
5902 :pool_size: How many open connections to make to the database object.
5903 :folder: location where migration .table files (and sqlite databases) are stored; normally set for you by web2py
5904 :db_codec: string encoding of the database (default: 'UTF-8')
5905 :check_reserved: list of adapters to check tablenames and column names
5906 against sql reserved keywords. (Default None)
5907
5908 * 'common' List of sql keywords that are common to all database types
5909 such as "SELECT, INSERT". (recommended)
5910 * 'all' Checks against all known SQL keywords. (not recommended)
5911 * '<adaptername>' Checks against the specific adapters list of keywords
5912 (recommended)
5913 * '<adaptername>_nonreserved' Checks against the specific adapters
5914 list of nonreserved keywords. (if available)
5915 :migrate: (defaults to True) sets default migrate behavior for all tables
5916 :fake_migrate: (defaults to False) sets default fake_migrate behavior for all tables
5917 :migrate_enabled: (defaults to True) if set to False disables ALL migrations
5918 :fake_migrate_all: (defaults to False) if set to True fake migrates ALL tables
5919 :attempts: (defaults to 5) number of times to attempt connecting
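
Example (a minimal sketch; assumes a writable application folder)::

    db = DAL('sqlite://storage.sqlite', pool_size=10,
             check_reserved=['common'], migrate=True, attempts=3)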
5920 """
5921 if not decode_credentials:
5922 credential_decoder = lambda cred: cred
5923 else:
5924 credential_decoder = lambda cred: urllib.unquote(cred)
5925 if folder:
5926 self.set_folder(folder)
5927 self._uri = uri
5928 self._pool_size = pool_size
5929 self._db_codec = db_codec
5930 self._lastsql = ''
5931 self._timings = []
5932 self._pending_references = {}
5933 self._request_tenant = 'request_tenant'
5934 self._common_fields = []
5935 self._referee_name = '%(table)s'
5936 if not str(attempts).isdigit() or attempts < 0:
5937 attempts = 5
5938 if uri:
5939 uris = isinstance(uri,(list,tuple)) and uri or [uri]
5940 error = ''
5941 connected = False
5942 for k in range(attempts):
5943 for uri in uris:
5944 try:
5945 if is_jdbc and not uri.startswith('jdbc:'):
5946 uri = 'jdbc:'+uri
5947 self._dbname = regex_dbname.match(uri).group()
5948 if not self._dbname in ADAPTERS:
5949 raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
5950
5951
5952 args = (self,uri,pool_size,folder,
5953 db_codec, credential_decoder,
5954 driver_args or {}, adapter_args or {})
5955 self._adapter = ADAPTERS[self._dbname](*args)
5956 connected = True
5957 break
5958 except SyntaxError:
5959 raise
5960 except Exception, error:
5961 tb = traceback.format_exc()
5962 sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
5963 if connected:
5964 break
5965 else:
5966 time.sleep(1)
5967 if not connected:
5968 raise RuntimeError, "Failure to connect, tried %d times:\n%s" % (attempts, tb)
5969 else:
5970 args = (self,'None',0,folder,db_codec)
5971 self._adapter = BaseAdapter(*args)
5972 migrate = fake_migrate = False
5973 adapter = self._adapter
5974 self._uri_hash = hashlib.md5(adapter.uri).hexdigest()
5975 self._tables = SQLCallableList()
5976 self.check_reserved = check_reserved
5977 if self.check_reserved:
5978 from reserved_sql_keywords import ADAPTERS as RSK
5979 self.RSK = RSK
5980 self._migrate = migrate
5981 self._fake_migrate = fake_migrate
5982 self._migrate_enabled = migrate_enabled
5983 self._fake_migrate_all = fake_migrate_all
5984 if auto_import:
5985 self.import_table_definitions(adapter.folder)
5986
5987 @property
5990
5991 - def import_table_definitions(self, path, migrate=False, fake_migrate=False):
5992 pattern = os.path.join(path,self._uri_hash+'_*.table')
5993 for filename in glob.glob(pattern):
5994 tfile = self._adapter.file_open(filename, 'r')
5995 try:
5996 sql_fields = cPickle.load(tfile)
5997 name = filename[len(pattern)-7:-6]
5998 mf = [(value['sortable'],Field(key,type=value['type'])) \
5999 for key, value in sql_fields.items()]
6000 mf.sort(lambda a,b: cmp(a[0],b[0]))
6001 self.define_table(name,*[item[1] for item in mf],
6002 **dict(migrate=migrate,fake_migrate=fake_migrate))
6003 finally:
6004 self._adapter.file_close(tfile)
6005
6006 - def check_reserved_keyword(self, name):
6007 """
6008 Validates ``name`` against SQL keywords
6009 Uses self.check_reserved, which is a list of
6010 adapter names to check against, e.g.:
6011 self.check_reserved
6012 ['common', 'postgres', 'mysql']
6013 self.check_reserved
6014 ['all']
6015 """
6016 for backend in self.check_reserved:
6017 if name.upper() in self.RSK[backend]:
6018 raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())
6019
6020 - def __contains__(self, tablename):
6021 if self.has_key(tablename):
6022 return True
6023 else:
6024 return False
6025
6026 - def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
6027 """
6028 EXAMPLE:
6029
6030 db.define_table('person',Field('name'),Field('info'))
6031 db.define_table('pet',Field('owner',db.person),Field('name'),Field('info'))
6032
6033 @request.restful()
6034 def index():
6035 def GET(*args,**vars):
6036 patterns = [
6037 "/friends[person]",
6038 "/{friend.name.startswith}",
6039 "/{friend.name}/:field",
6040 "/{friend.name}/pets[pet.owner]",
6041 "/{friend.name}/pet[pet.owner]/{pet.name}",
6042 "/{friend.name}/pet[pet.owner]/{pet.name}/:field"
6043 ]
6044 parser = db.parse_as_rest(patterns,args,vars)
6045 if parser.status == 200:
6046 return dict(content=parser.response)
6047 else:
6048 raise HTTP(parser.status,parser.error)
6049 def POST(table_name,**vars):
6050 if table_name == 'person':
6051 return db.person.validate_and_insert(**vars)
6052 elif table_name == 'pet':
6053 return db.pet.validate_and_insert(**vars)
6054 else:
6055 raise HTTP(400)
6056 return locals()
6057 """
6058
6059 db = self
6060 re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
6061 re2 = re.compile('^.+\[.+\]$')
6062
6063 def auto_table(table,base='',depth=0):
6064 patterns = []
6065 for field in db[table].fields:
6066 if base:
6067 tag = '%s/%s' % (base,field.replace('_','-'))
6068 else:
6069 tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
6070 f = db[table][field]
6071 if not f.readable: continue
6072 if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
6073 tag += '/{%s.%s}' % (table,field)
6074 patterns.append(tag)
6075 patterns.append(tag+'/:field')
6076 elif f.type.startswith('boolean'):
6077 tag += '/{%s.%s}' % (table,field)
6078 patterns.append(tag)
6079 patterns.append(tag+'/:field')
6080 elif f.type.startswith('double') or f.type.startswith('integer'):
6081 tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
6082 patterns.append(tag)
6083 patterns.append(tag+'/:field')
6084 elif f.type.startswith('list:'):
6085 tag += '/{%s.%s.contains}' % (table,field)
6086 patterns.append(tag)
6087 patterns.append(tag+'/:field')
6088 elif f.type in ('date','datetime'):
6089 tag+= '/{%s.%s.year}' % (table,field)
6090 patterns.append(tag)
6091 patterns.append(tag+'/:field')
6092 tag+='/{%s.%s.month}' % (table,field)
6093 patterns.append(tag)
6094 patterns.append(tag+'/:field')
6095 tag+='/{%s.%s.day}' % (table,field)
6096 patterns.append(tag)
6097 patterns.append(tag+'/:field')
6098 if f.type in ('datetime','time'):
6099 tag+= '/{%s.%s.hour}' % (table,field)
6100 patterns.append(tag)
6101 patterns.append(tag+'/:field')
6102 tag+='/{%s.%s.minute}' % (table,field)
6103 patterns.append(tag)
6104 patterns.append(tag+'/:field')
6105 tag+='/{%s.%s.second}' % (table,field)
6106 patterns.append(tag)
6107 patterns.append(tag+'/:field')
6108 if depth>0:
6109 for rtable,rfield in db[table]._referenced_by:
6110 tag+='/%s[%s.%s]' % (rtable,rtable,rfield)
6111 patterns.append(tag)
6112 patterns += auto_table(rtable,base=tag,depth=depth-1)
6113 return patterns
6114
6115 if patterns=='auto':
6116 patterns=[]
6117 for table in db.tables:
6118 if not table.startswith('auth_'):
6119 patterns.append('/%s[%s]' % (table,table))
6120 patterns += auto_table(table,base='',depth=1)
6121 else:
6122 i = 0
6123 while i<len(patterns):
6124 pattern = patterns[i]
6125 tokens = pattern.split('/')
6126 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
6127 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
6128 '/'.join(tokens[:-1]))
6129 patterns = patterns[:i]+new_patterns+patterns[i+1:]
6130 i += len(new_patterns)
6131 else:
6132 i += 1
6133 if '/'.join(args) == 'patterns':
6134 return Row({'status':200,'pattern':'list',
6135 'error':None,'response':patterns})
6136 for pattern in patterns:
6137 otable=table=None
6138 if not isinstance(queries,dict):
6139 dbset=db(queries)
6140 i=0
6141 tags = pattern[1:].split('/')
6142 if len(tags)!=len(args):
6143 continue
6144 for tag in tags:
6145 if re1.match(tag):
6146
6147 tokens = tag[1:-1].split('.')
6148 table, field = tokens[0], tokens[1]
6149 if not otable or table == otable:
6150 if len(tokens)==2 or tokens[2]=='eq':
6151 query = db[table][field]==args[i]
6152 elif tokens[2]=='ne':
6153 query = db[table][field]!=args[i]
6154 elif tokens[2]=='lt':
6155 query = db[table][field]<args[i]
6156 elif tokens[2]=='gt':
6157 query = db[table][field]>args[i]
6158 elif tokens[2]=='ge':
6159 query = db[table][field]>=args[i]
6160 elif tokens[2]=='le':
6161 query = db[table][field]<=args[i]
6162 elif tokens[2]=='year':
6163 query = db[table][field].year()==args[i]
6164 elif tokens[2]=='month':
6165 query = db[table][field].month()==args[i]
6166 elif tokens[2]=='day':
6167 query = db[table][field].day()==args[i]
6168 elif tokens[2]=='hour':
6169 query = db[table][field].hour()==args[i]
6170 elif tokens[2]=='minute':
6171 query = db[table][field].minutes()==args[i]
6172 elif tokens[2]=='second':
6173 query = db[table][field].seconds()==args[i]
6174 elif tokens[2]=='startswith':
6175 query = db[table][field].startswith(args[i])
6176 elif tokens[2]=='contains':
6177 query = db[table][field].contains(args[i])
6178 else:
6179 raise RuntimeError, "invalid pattern: %s" % pattern
6180 if len(tokens)==4 and tokens[3]=='not':
6181 query = ~query
6182 elif len(tokens)>=4:
6183 raise RuntimeError, "invalid pattern: %s" % pattern
6184 if not otable and isinstance(queries,dict):
6185 dbset = db(queries[table])
6186 dbset=dbset(query)
6187 else:
6188 raise RuntimeError, "missing relation in pattern: %s" % pattern
6189 elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
6190 ref = tag[tag.find('[')+1:-1]
6191 if '.' in ref and otable:
6192 table,field = ref.split('.')
6193
6194 if nested_select:
6195 try:
6196 dbset=db(db[table][field].belongs(dbset._select(db[otable]._id)))
6197 except ValueError:
6198 return Row({'status':400,'pattern':pattern,
6199 'error':'invalid path','response':None})
6200 else:
6201 items = [item.id for item in dbset.select(db[otable]._id)]
6202 dbset=db(db[table][field].belongs(items))
6203 else:
6204 table = ref
6205 if not otable and isinstance(queries,dict):
6206 dbset = db(queries[table])
6207 dbset=dbset(db[table])
6208 elif tag==':field' and table:
6209
6210 field = args[i]
6211 if not field in db[table]: break
6212 try:
6213 item = dbset.select(db[table][field],limitby=(0,1)).first()
6214 except ValueError:
6215 return Row({'status':400,'pattern':pattern,
6216 'error':'invalid path','response':None})
6217 if not item:
6218 return Row({'status':404,'pattern':pattern,
6219 'error':'record not found','response':None})
6220 else:
6221 return Row({'status':200,'response':item[field],
6222 'pattern':pattern})
6223 elif tag != args[i]:
6224 break
6225 otable = table
6226 i += 1
6227 if i==len(tags) and table:
6228 ofields = vars.get('order',db[table]._id.name).split('|')
6229 try:
6230 orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
6231 except KeyError:
6232 return Row({'status':400,'error':'invalid orderby','response':None})
6233 fields = [field for field in db[table] if field.readable]
6234 count = dbset.count()
6235 try:
6236 offset = int(vars.get('offset',None) or 0)
6237 limits = (offset,int(vars.get('limit',None) or 1000)+offset)
6238 except ValueError:
6239 return Row({'status':400,'error':'invalid limits','response':None})
6240 if count > limits[1]-limits[0]:
6241 return Row({'status':400,'error':'too many records','response':None})
6242 try:
6243 response = dbset.select(limitby=limits,orderby=orderby,*fields)
6244 except ValueError:
6245 return Row({'status':400,'pattern':pattern,
6246 'error':'invalid path','response':None})
6247 return Row({'status':200,'response':response,'pattern':pattern})
6248 return Row({'status':400,'error':'no matching pattern','response':None})
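
# Usage sketch for parse_as_rest (hedged: request.args/request.vars as
# populated by web2py's request object are assumed):
#
#   parser = db.parse_as_rest('auto', request.args, request.vars)
#   # patterns='auto' exposes every non-auth_* table, one level deep;
#   # on success parser.status is 200 and parser.response holds the rows,
#   # otherwise parser.error explains which pattern check failed.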
6249
6250
6251 - def define_table(
6252 self,
6253 tablename,
6254 *fields,
6255 **args
6256 ):
6257
6258 for key in args:
6259 if key not in [
6260 'migrate',
6261 'primarykey',
6262 'fake_migrate',
6263 'format',
6264 'singular',
6265 'plural',
6266 'trigger_name',
6267 'sequence_name',
6268 'common_filter',
6269 'polymodel',
6270 'table_class']:
6271 raise SyntaxError, 'invalid table "%s" attribute: %s' \
6272 % (tablename, key)
6273 if not isinstance(tablename,str):
6274 raise SyntaxError, "missing table name"
6275 tablename = cleanup(tablename)
6276 migrate = self._migrate_enabled and args.get('migrate',
6277 self._migrate)
6278 fake_migrate = self._fake_migrate_all or args.get('fake_migrate',
6279 self._fake_migrate)
6280 table_class = args.get('table_class',Table)
6281 format = args.get('format',None)
6282 trigger_name = args.get('trigger_name', None)
6283 sequence_name = args.get('sequence_name', None)
6284 primarykey =args.get('primarykey',None)
6285 polymodel = args.get('polymodel',None)
6286 singular = args.get('singular',tablename.replace('_',' ').capitalize())
6287 plural = args.get('plural',pluralize(singular.lower()).capitalize())
6288 lowertablename = tablename.lower()
6289
6290 if tablename.startswith('_') or hasattr(self,lowertablename) or \
6291 regex_python_keywords.match(tablename):
6292 raise SyntaxError, 'invalid table name: %s' % tablename
6293 elif lowertablename in self.tables:
6294 raise SyntaxError, 'table already defined: %s' % tablename
6295 elif self.check_reserved:
6296 self.check_reserved_keyword(tablename)
6297
6298 if self._common_fields:
6299 fields = [f for f in fields] + [f for f in self._common_fields]
6300
6301 common_filter = args.get('common_filter', None)
6302
6303 t = self[tablename] = table_class(self, tablename, *fields,
6304 **dict(primarykey=primarykey,
6305 trigger_name=trigger_name,
6306 sequence_name=sequence_name,
6307 common_filter=common_filter))
6308
6309
6310 if self._uri in (None,'None'):
6311 return t
6312
6313 t._create_references()
6314
6315 if migrate or self._adapter.dbengine=='google:datastore':
6316 try:
6317 sql_locker.acquire()
6318 self._adapter.create_table(t,migrate=migrate,
6319 fake_migrate=fake_migrate,
6320 polymodel=polymodel)
6321 finally:
6322 sql_locker.release()
6323 else:
6324 t._dbt = None
6325 self.tables.append(tablename)
6326 t._format = format
6327 t._singular = singular
6328 t._plural = plural
6329 t._actual = True
6330 return t
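
# A sketch of the optional keyword arguments accepted by define_table
# (assumes the 'person' table from the examples above already exists):
#
#   db.define_table('dog',
#       Field('name'),
#       Field('owner', 'reference person'),
#       format='%(name)s',      # how referencing records represent a dog
#       singular='Dog', plural='Dogs',
#       migrate=True,           # create/alter the physical table
#       common_filter=lambda query: db.dog.name != None)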
6331
6332 - def __iter__(self):
6333 for tablename in self.tables:
6334 yield self[tablename]
6335
6338
6341
6344
6345 - def __setattr__(self, key, value):
6346 if key[:1]!='_' and key in self:
6347 raise SyntaxError, \
6348 'Object %s exists and cannot be redefined' % key
6349 self[key] = value
6350
6351 - def __repr__(self):
6352 return '<DAL ' + dict.__repr__(self) + '>'
6353
6356
6357 - def __call__(self, query=None, ignore_common_filters=None):
6358 if isinstance(query,Table):
6359 query = query._id>0
6360 elif isinstance(query,Field):
6361 query = query!=None
6362 return Set(self, query, ignore_common_filters=ignore_common_filters)
6363
6366
6369
6370 - def executesql(self, query, placeholders=None, as_dict=False):
6371 """
6372 placeholders is optional and will always be None when using DAL.
6373 If using raw SQL with placeholders, placeholders may be
6374 a sequence of values to be substituted in
6375 or, (if supported by the DB driver), a dictionary with keys
6376 matching named placeholders in your SQL.
6377
6378 Added 2009-12-05 "as_dict" optional argument. It defaults to
6379 False when using DAL. If using raw SQL it can be set to True
6380 and the results cursor returned by the DB driver will be
6381 converted to a sequence of dictionaries keyed with the db
6382 field names. Tested with SQLite but should work with any database
6383 since the cursor.description used to get field names is part of the
6384 Python dbi 2.0 specs. Results returned with as_dict=True are
6385 the same as those returned when applying .to_list() to a DAL query.
6386
6387 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
6388
6389 --bmeredyk
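
Example (a sketch; the placeholder style, %s vs ?, depends on the driver)::

    rows = db.executesql('SELECT * FROM person WHERE name=%s;',
                         placeholders=('James',), as_dict=True)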
6390 """
6391 if placeholders:
6392 self._adapter.execute(query, placeholders)
6393 else:
6394 self._adapter.execute(query)
6395 if as_dict:
6396 if not hasattr(self._adapter.cursor,'description'):
6397 raise RuntimeError, "database does not support executesql(...,as_dict=True)"
6398
6399
6400
6401 columns = self._adapter.cursor.description
6402
6403 fields = [f[0] for f in columns]
6404
6405 data = self._adapter.cursor.fetchall()
6406
6407
6408 return [dict(zip(fields,row)) for row in data]
6409
6410 try:
6411 return self._adapter.cursor.fetchall()
6412 except:
6413 return None
6414
6416 for tablename in self.tables:
6417 by = self[tablename]._referenced_by
6418 by[:] = [item for item in by if not item[0] == other]
6419
6421 step = int(kwargs.get('max_fetch_rows',500))
6422 write_colnames = kwargs['write_colnames'] = \
6423 kwargs.get("write_colnames", True)
6424 for table in self.tables:
6425 ofile.write('TABLE %s\r\n' % table)
6426 query = self[table]._id > 0
6427 nrows = self(query).count()
6428 kwargs['write_colnames'] = write_colnames
6429 for k in range(0,nrows,step):
6430 self(query).select(limitby=(k,k+step)).export_to_csv_file(
6431 ofile, *args, **kwargs)
6432 kwargs['write_colnames'] = False
6433 ofile.write('\r\n\r\n')
6434 ofile.write('END')
6435
6436 - def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
6437 unique='uuid', *args, **kwargs):
6438 if id_map is None: id_map={}
6439 for line in ifile:
6440 line = line.strip()
6441 if not line:
6442 continue
6443 elif line == 'END':
6444 return
6445 elif not line.startswith('TABLE ') or not line[6:] in self.tables:
6446 raise SyntaxError, 'invalid file format'
6447 else:
6448 tablename = line[6:]
6449 self[tablename].import_from_csv_file(ifile, id_map, null,
6450 unique, *args, **kwargs)
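
# Round-trip sketch for the two methods above (hedged: assumes each table
# has the 'uuid' field that the default unique='uuid' keys records on):
#
#   db.export_to_csv_file(open('backup.csv', 'wb'))
#   # later, possibly against a different database:
#   other_db.import_from_csv_file(open('backup.csv', 'rb'))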
6451
6452 -class SQLALL(object):
6453 """
6454 Helper class providing a comma-separated string having all the field names
6455 (prefixed by table name and '.')
6456
6457 normally only called from within gluon.sql
6458 """
6459
6462
6464 return ', '.join([str(field) for field in self.table])
6465
6466
6467 -class Reference(int):
6468
6469 - def __allocate(self):
6470 if not self._record:
6471 self._record = self._table[int(self)]
6472 if not self._record:
6473 raise RuntimeError, "Using a recursive select but encountered a broken reference: %s %d"%(self._table, int(self))
6474
6475 - def __getattr__(self, key):
6476 if key == 'id':
6477 return int(self)
6478 self.__allocate()
6479 return self._record.get(key, None)
6480
6481 - def get(self, key):
6483
6490
6491 - def __getitem__(self, key):
6492 if key == 'id':
6493 return int(self)
6494 self.__allocate()
6495 return self._record.get(key, None)
6496
6498 self.__allocate()
6499 self._record[key] = value
6500
6501
6502 -def Reference_unpickler(data):
6503 return marshal.loads(data)
6504
6505 -def Reference_pickler(data):
6506 try:
6507 marshal_dump = marshal.dumps(int(data))
6508 except AttributeError:
6509 marshal_dump = 'i%s' % struct.pack('<i', int(data))
6510 return (Reference_unpickler, (marshal_dump,))
6511
6512 copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
6513
6514
6515 -class Table(dict):
6516
6517 """
6518 an instance of this class represents a database table
6519
6520 Example::
6521
6522 db = DAL(...)
6523 db.define_table('users', Field('name'))
6524 db.users.insert(name='me') # print db.users._insert(...) to see SQL
6525 db.users.drop()
6526 """
6527
6528 - def __init__(
6529 self,
6530 db,
6531 tablename,
6532 *fields,
6533 **args
6534 ):
6535 """
6536 Initializes the table and performs checking on the provided fields.
6537
6538 Each table will have automatically an 'id'.
6539
6540 If a field is of type Table, the fields (excluding 'id') from that table
6541 will be used instead.
6542
6543 :raises SyntaxError: when a supplied field is of incorrect type.
6544 """
6545 self._actual = False
6546 self._tablename = tablename
6547 self._sequence_name = args.get('sequence_name',None) or \
6548 db and db._adapter.sequence_name(tablename)
6549 self._trigger_name = args.get('trigger_name',None) or \
6550 db and db._adapter.trigger_name(tablename)
6551 self._common_filter = args.get('common_filter', None)
6552 primarykey = args.get('primarykey', None)
6553 fieldnames,newfields=set(),[]
6554 if primarykey:
6555 if not isinstance(primarykey,list):
6556 raise SyntaxError, \
6557 "primarykey must be a list of fields from table '%s'" \
6558 % tablename
6559 self._primarykey = primarykey
6560 elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
6561 field = Field('id', 'id')
6562 newfields.append(field)
6563 fieldnames.add('id')
6564 self._id = field
6565 for field in fields:
6566 if not isinstance(field, (Field, Table)):
6567 raise SyntaxError, \
6568 'define_table argument is not a Field or Table: %s' % field
6569 elif isinstance(field, Field) and not field.name in fieldnames:
6570 if hasattr(field, '_db'):
6571 field = copy.copy(field)
6572 newfields.append(field)
6573 fieldnames.add(field.name)
6574 if field.type=='id':
6575 self._id = field
6576 elif isinstance(field, Table):
6577 table = field
6578 for field in table:
6579 if not field.name in fieldnames and not field.type=='id':
6580 field = copy.copy(field)
6581
6582 if not table._actual and field.type == 'reference '+table._tablename:
6583 field.type = 'reference '+self._tablename
6584 newfields.append(field)
6585 fieldnames.add(field.name)
6586 else:
6587
6588 pass
6589 fields = newfields
6590 self._db = db
6591 tablename = tablename
6592 self._fields = SQLCallableList()
6593 self.virtualfields = []
6594 fields = list(fields)
6595
6596 if db and self._db._adapter.uploads_in_blob==True:
6597 for field in fields:
6598 if isinstance(field, Field) and field.type == 'upload'\
6599 and field.uploadfield is True:
6600 tmp = field.uploadfield = '%s_blob' % field.name
6601 fields.append(self._db.Field(tmp, 'blob', default=''))
6602
6603 lower_fieldnames = set()
6604 reserved = dir(Table) + ['fields']
6605 for field in fields:
6606 if db and db.check_reserved:
6607 db.check_reserved_keyword(field.name)
6608 elif field.name in reserved:
6609 raise SyntaxError, "field name %s not allowed" % field.name
6610
6611 if field.name.lower() in lower_fieldnames:
6612 raise SyntaxError, "duplicate field %s in table %s" \
6613 % (field.name, tablename)
6614 else:
6615 lower_fieldnames.add(field.name.lower())
6616
6617 self.fields.append(field.name)
6618 self[field.name] = field
6619 if field.type == 'id':
6620 self['id'] = field
6621 field.tablename = field._tablename = tablename
6622 field.table = field._table = self
6623 field.db = field._db = self._db
6624 if self._db and not field.type in ('text','blob') and \
6625 self._db._adapter.maxcharlength < field.length:
6626 field.length = self._db._adapter.maxcharlength
6627 if field.requires is DEFAULT:
6628 field.requires = sqlhtml_validators(field)
6629 self.ALL = SQLALL(self)
6630
6631 if hasattr(self,'_primarykey'):
6632 for k in self._primarykey:
6633 if k not in self.fields:
6634 raise SyntaxError, \
6635 "primarykey must be a list of fields from table '%s " % tablename
6636 else:
6637 self[k].notnull = True
6638
6639 @property
6642
6643 - def update(self,*args,**kwargs):
6644 raise RuntimeError, "Syntax Not Supported"
6645
6646 - def _validate(self,**vars):
6647 errors = Row()
6648 for key,value in vars.items():
6649 value,error = self[key].validate(value)
6650 if error:
6651 errors[key] = error
6652 return errors
6653
6654 - def _create_references(self):
6655 pr = self._db._pending_references
6656 self._referenced_by = []
6657 for fieldname in self.fields:
6658 field=self[fieldname]
6659 if isinstance(field.type,str) and field.type[:10] == 'reference ':
6660 ref = field.type[10:].strip()
6661 if not ref.split():
6662 raise SyntaxError, 'Table: reference to nothing: %s' %ref
6663 refs = ref.split('.')
6664 rtablename = refs[0]
6665 if not rtablename in self._db:
6666 pr[rtablename] = pr.get(rtablename,[]) + [field]
6667 continue
6668 rtable = self._db[rtablename]
6669 if len(refs)==2:
6670 rfieldname = refs[1]
6671 if not hasattr(rtable,'_primarykey'):
6672 raise SyntaxError,\
6673 'keyed tables can only reference other keyed tables (for now)'
6674 if rfieldname not in rtable.fields:
6675 raise SyntaxError,\
6676 "invalid field '%s' for referenced table '%s' in table '%s'" \
6677 % (rfieldname, rtablename, self._tablename)
6678 rtable._referenced_by.append((self._tablename, field.name))
6679 for referee in pr.get(self._tablename,[]):
6680 self._referenced_by.append((referee._tablename,referee.name))
6681
6682 - def _filter_fields(self, record, id=False):
6683 return dict([(k, v) for (k, v) in record.items() if k
6684 in self.fields and (self[k].type!='id' or id)])
6685
6686 - def _build_query(self,key):
6687 """ for keyed table only """
6688 query = None
6689 for k,v in key.iteritems():
6690 if k in self._primarykey:
6691 if query:
6692 query = query & (self[k] == v)
6693 else:
6694 query = (self[k] == v)
6695 else:
6696 raise SyntaxError, \
6697 'Field %s is not part of the primary key of %s' % \
6698 (k,self._tablename)
6699 return query
6700
6701 - def __getitem__(self, key):
6702 if not key:
6703 return None
6704 elif isinstance(key, dict):
6705 """ for keyed table """
6706 query = self._build_query(key)
6707 rows = self._db(query).select()
6708 if rows:
6709 return rows[0]
6710 return None
6711 elif str(key).isdigit():
6712 return self._db(self._id == key).select(limitby=(0,1)).first()
6713 elif key:
6714 return dict.__getitem__(self, str(key))
6715
6717 for_update = kwargs.get('_for_update',False)
6718 if '_for_update' in kwargs: del kwargs['_for_update']
6719 if not key is DEFAULT:
6720 if isinstance(key, Query):
6721 record = self._db(key).select(
6722 limitby=(0,1),for_update=for_update).first()
6723 elif not str(key).isdigit():
6724 record = None
6725 else:
6726 record = self._db(self._id == key).select(
6727 limitby=(0,1),for_update=for_update).first()
6728 if record:
6729 for k,v in kwargs.items():
6730 if record[k]!=v: return None
6731 return record
6732 elif kwargs:
6733 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.items()])
6734 return self._db(query).select(limitby=(0,1),for_update=for_update).first()
6735 else:
6736 return None
6737
6738 - def __setitem__(self, key, value):
6739 if isinstance(key, dict) and isinstance(value, dict):
6740 """ option for keyed table """
6741 if set(key.keys()) == set(self._primarykey):
6742 value = self._filter_fields(value)
6743 kv = {}
6744 kv.update(value)
6745 kv.update(key)
6746 if not self.insert(**kv):
6747 query = self._build_query(key)
6748 self._db(query).update(**self._filter_fields(value))
6749 else:
6750 raise SyntaxError,\
6751 'key must have all fields from primary key: %s'%\
6752 (self._primarykey)
6753 elif str(key).isdigit():
6754 if key == 0:
6755 self.insert(**self._filter_fields(value))
6756 elif self._db(self._id == key)\
6757 .update(**self._filter_fields(value)) is None:
6758 raise SyntaxError, 'No such record: %s' % key
6759 else:
6760 if isinstance(key, dict):
6761 raise SyntaxError,\
6762 'value must be a dictionary: %s' % value
6763 dict.__setitem__(self, str(key), value)
6764
6765 - def __delitem__(self, key):
6766 if isinstance(key, dict):
6767 query = self._build_query(key)
6768 if not self._db(query).delete():
6769 raise SyntaxError, 'No such record: %s' % key
6770 elif not str(key).isdigit() or not self._db(self._id == key).delete():
6771 raise SyntaxError, 'No such record: %s' % key
6772
6775
6777 if key[:1]!='_' and key in self:
6778 raise SyntaxError, 'Object exists and cannot be redefined: %s' % key
6779 self[key] = value
6780
6781 - def __iter__(self):
6782 for fieldname in self.fields:
6783 yield self[fieldname]
6784
6785 - def __repr__(self):
6786 return '<Table ' + dict.__repr__(self) + '>'
6787
6788 - def __str__(self):
6789 if self.get('_ot', None):
6790 return '%s AS %s' % (self._ot, self._tablename)
6791 return self._tablename
6792
6793 - def _drop(self, mode = ''):
6794 return self._db._adapter._drop(self, mode)
6795
6796 - def drop(self, mode = ''):
6797 return self._db._adapter.drop(self,mode)
6798
6799 - def _listify(self,fields,update=False):
6800 new_fields = []
6801 new_fields_names = []
6802 for name in fields:
6803 if not name in self.fields:
6804 if name != 'id':
6805 raise SyntaxError, 'Field %s does not belong to the table' % name
6806 else:
6807 new_fields.append((self[name],fields[name]))
6808 new_fields_names.append(name)
6809 for ofield in self:
6810 if not ofield.name in new_fields_names:
6811 if not update and not ofield.default is None:
6812 new_fields.append((ofield,ofield.default))
6813 elif update and not ofield.update is None:
6814 new_fields.append((ofield,ofield.update))
6815 for ofield in self:
6816 if not ofield.name in new_fields_names and ofield.compute:
6817 try:
6818 new_fields.append((ofield,ofield.compute(Row(fields))))
6819 except KeyError:
6820 pass
6821 if not update and ofield.required and not ofield.name in new_fields_names:
6822 raise SyntaxError,'Table: missing required field: %s' % ofield.name
6823 return new_fields
6824
6827
6830
6846
6847 - def update_or_insert(self, _key=DEFAULT, **values):
6848 if _key is DEFAULT:
6849 record = self(**values)
6850 else:
6851 record = self(_key)
6852 if record:
6853 record.update_record(**values)
6854 newid = None
6855 else:
6856 newid = self.insert(**values)
6857 return newid
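
# Sketch for the method above (update_or_insert): _key may be a Query or
# omitted, in which case the values themselves are used for the lookup:
#
#   db.person.update_or_insert(name='James')   # inserts if no match
#   db.person.update_or_insert(db.person.name=='James', name='Jim')
#   # returns the new id on insert, None when an existing record was updated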
6858
6860 """
6861 here items is a list of dictionaries
6862 """
6863 items = [self._listify(item) for item in items]
6864 return self._db._adapter.bulk_insert(self,items)
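
# Sketch: each dictionary maps field names to values; adapters without a
# native bulk operation may fall back to one insert per item.
#
#   db.person.bulk_insert([dict(name='Alex'), dict(name='John')])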
6865
6866 - def _truncate(self, mode = ''):
6867 return self._db._adapter._truncate(self, mode)
6868
6869 - def truncate(self, mode = ''):
6870 return self._db._adapter.truncate(self, mode)
6871
6872 - def import_from_csv_file(
6873 self,
6874 csvfile,
6875 id_map=None,
6876 null='<NULL>',
6877 unique='uuid',
6878 *args, **kwargs
6879 ):
6880 """
6881 import records from csv file. Column headers must have same names as
6882 table fields. field 'id' is ignored. If column names read 'table.field',
6883 the 'table.' prefix is ignored.
6884 'unique' argument is a field which must be unique
6885 (typically a uuid field)
6886 """
6887
6888 delimiter = kwargs.get('delimiter', ',')
6889 quotechar = kwargs.get('quotechar', '"')
6890 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
6891
6892 reader = csv.reader(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=quoting)
6893 colnames = None
6894 if isinstance(id_map, dict):
6895 if not self._tablename in id_map:
6896 id_map[self._tablename] = {}
6897 id_map_self = id_map[self._tablename]
6898
6899 def fix(field, value, id_map):
6900 list_reference_s='list:reference'
6901 if value == null:
6902 value = None
6903 elif field.type=='blob':
6904 value = base64.b64decode(value)
6905 elif field.type=='double':
6906 if not value.strip():
6907 value = None
6908 else:
6909 value = float(value)
6910 elif field.type=='integer':
6911 if not value.strip():
6912 value = None
6913 else:
6914 value = int(value)
6915 elif field.type.startswith('list:string'):
6916 value = bar_decode_string(value)
6917 elif field.type.startswith(list_reference_s):
6918 ref_table = field.type[len(list_reference_s):].strip()
6919 value = [id_map[ref_table][int(v)] \
6920 for v in bar_decode_string(value)]
6921 elif field.type.startswith('list:'):
6922 value = bar_decode_integer(value)
6923 elif id_map and field.type.startswith('reference'):
6924 try:
6925 value = id_map[field.type[9:].strip()][int(value)]
6926 except KeyError:
6927 pass
6928 return (field.name, value)
6929
6930 def is_id(colname):
6931 if colname in self:
6932 return self[colname].type == 'id'
6933 else:
6934 return False
6935
6936 for line in reader:
6937 if not line:
6938 break
6939 if not colnames:
6940 colnames = [x.split('.',1)[-1] for x in line][:len(line)]
6941 cols, cid = [], []
6942 for i,colname in enumerate(colnames):
6943 if is_id(colname):
6944 cid = i
6945 else:
6946 cols.append(i)
6947 if colname == unique:
6948 unique_idx = i
6949 else:
6950 items = [fix(self[colnames[i]], line[i], id_map) \
6951 for i in cols if colnames[i] in self.fields]
6952
6953
6954 if not unique or unique not in colnames:
6955 new_id = self.insert(**dict(items))
6956 else:
6957 unique_value = line[unique_idx]
6958 query = self._db[self][unique] == unique_value
6959 record = self._db(query).select().first()
6960 if record:
6961 record.update_record(**dict(items))
6962 new_id = record[self._id.name]
6963 else:
6964 new_id = self.insert(**dict(items))
6965 if id_map and cid != []:
6966 id_map_self[int(line[cid])] = new_id
6967
6970
6971 - def on(self, query):
6972 return Expression(self._db,self._db._adapter.ON,self,query)
6973
6974
6975
6976 -class Expression(object):
6977
6978 - def __init__(
6979 self,
6980 db,
6981 op,
6982 first=None,
6983 second=None,
6984 type=None,
6985 ):
6986
6987 self.db = db
6988 self.op = op
6989 self.first = first
6990 self.second = second
6991
6992 if not type and first and hasattr(first,'type'):
6993 self.type = first.type
6994 else:
6995 self.type = type
6996
6999
7002
7005
7008
7009 - def lower(self):
7010 return Expression(self.db, self.db._adapter.LOWER, self, None, self.type)
7011
7012 - def upper(self):
7013 return Expression(self.db, self.db._adapter.UPPER, self, None, self.type)
7014
7017
7020
7023
7026
7029
7032
7035
7038
7039 - def __getslice__(self, start, stop):
7040 if start < 0:
7041 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
7042 else:
7043 pos0 = start + 1
7044
7045 if stop < 0:
7046 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
7047 elif stop == sys.maxint:
7048 length = self.len()
7049 else:
7050 length = '(%s - %s)' % (stop + 1, pos0)
7051 return Expression(self.db,self.db._adapter.SUBSTRING,
7052 self, (pos0, length), self.type)
7053
7054 - def __getitem__(self, i):
7055 return self[i:i + 1]
7056
7057 - def __str__(self):
7058 return self.db._adapter.expand(self,self.type)
7059
7060 - def __or__(self, other):
7061 return Expression(self.db,self.db._adapter.COMMA,self,other,self.type)
7062
7063 - def __invert__(self):
7064 if self.op == self.db._adapter.INVERT:
7065 return self.first
7066 return Expression(self.db,self.db._adapter.INVERT,self,type=self.type)
7067
7068 - def __add__(self, other):
7069 return Expression(self.db,self.db._adapter.ADD,self,other,self.type)
7070
7071 - def __sub__(self, other):
7072 if self.type == 'integer':
7073 result_type = 'integer'
7074 elif self.type in ['date','time','datetime','double']:
7075 result_type = 'double'
7076 else:
7077 raise SyntaxError, "subtraction operation not supported for type"
7078 return Expression(self.db,self.db._adapter.SUB,self,other,
7079 result_type)
7080 - def __mul__(self, other):
7081 return Expression(self.db,self.db._adapter.MUL,self,other,self.type)
7082
7083 - def __div__(self, other):
7084 return Expression(self.db,self.db._adapter.DIV,self,other,self.type)
7085
7086 - def __mod__(self, other):
7087 return Expression(self.db,self.db._adapter.MOD,self,other,self.type)
7088
7089 - def __eq__(self, value):
7090 return Query(self.db, self.db._adapter.EQ, self, value)
7091
7092 - def __ne__(self, value):
7093 return Query(self.db, self.db._adapter.NE, self, value)
7094
7095 - def __lt__(self, value):
7096 return Query(self.db, self.db._adapter.LT, self, value)
7097
7098 - def __le__(self, value):
7099 return Query(self.db, self.db._adapter.LE, self, value)
7100
7101 - def __gt__(self, value):
7102 return Query(self.db, self.db._adapter.GT, self, value)
7103
7104 - def __ge__(self, value):
7105 return Query(self.db, self.db._adapter.GE, self, value)
7106
7107 - def like(self, value, case_sensitive=False):
7108 op = case_sensitive and self.db._adapter.LIKE or self.db._adapter.ILIKE
7109 return Query(self.db, op, self, value)
7110
7111 - def regexp(self, value):
7112 return Query(self.db, self.db._adapter.REGEXP, self, value)
7113
7114 - def belongs(self, value):
7115 if isinstance(value,Query):
7116 value = self.db(value)._select(value.first._table._id)
7117 return Query(self.db, self.db._adapter.BELONGS, self, value)
7118
7119 - def startswith(self, value):
7120 if not self.type in ('string', 'text'):
7121 raise SyntaxError, "startswith used with incompatible field type"
7122 return Query(self.db, self.db._adapter.STARTSWITH, self, value)
7123
7124 - def endswith(self, value):
7125 if not self.type in ('string', 'text'):
7126 raise SyntaxError, "endswith used with incompatible field type"
7127 return Query(self.db, self.db._adapter.ENDSWITH, self, value)
7128
7129 - def contains(self, value, all=False):
7130 if isinstance(value,(list, tuple)):
7131 subqueries = [self.contains(str(v).strip()) for v in value if str(v).strip()]
7132 return reduce(all and AND or OR, subqueries)
7133 if not self.type in ('string', 'text') and not self.type.startswith('list:'):
7134 raise SyntaxError, "contains used with incompatible field type"
7135 return Query(self.db, self.db._adapter.CONTAINS, self, value)
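
# The comparison helpers above return Query objects, so they compose with
# & | ~ (a sketch, reusing the person table from earlier examples):
#
#   q = db.person.name.startswith('J') | db.person.name.contains('im')
#   q &= db.person.id.belongs(db(db.person.id > 0)._select(db.person.id))
#   rows = db(~q).select()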
7136
7139
7140
7141
7142
7143 -class SQLCustomType(object):
7144 """
7145 allows defining of custom SQL types
7146
7147 Example::
7148
7149 decimal = SQLCustomType(
7150 type ='double',
7151 native ='integer',
7152 encoder =(lambda x: int(float(x) * 100)),
7153 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
7154 )
7155
7156 db.define_table(
7157 'example',
7158 Field('value', type=decimal)
7159 )
7160
7161 :param type: the web2py type (default = 'string')
7162 :param native: the backend type
7163 :param encoder: how to encode the value to store it in the backend
7164 :param decoder: how to decode the value retrieved from the backend
7165 :param validator: what validators to use ( default = None, will use the
7166 default validator for type)
7167 """
7168
7169 - def __init__(
7170 self,
7171 type='string',
7172 native=None,
7173 encoder=None,
7174 decoder=None,
7175 validator=None,
7176 _class=None,
7177 ):
7178
7179 self.type = type
7180 self.native = native
7181 self.encoder = encoder or (lambda x: x)
7182 self.decoder = decoder or (lambda x: x)
7183 self.validator = validator
7184 self._class = _class or type
7185
7186 - def startswith(self, text=None):
7187 try:
7188 return self.type.startswith(text)
7189 except TypeError:
7190 return False
7191
7194
7197
7200
7204
7209
7210
7211 -class Field(Expression):
7212
7213 Virtual = FieldVirtual
7214 Lazy = FieldLazy
7215
7216 """
7217 an instance of this class represents a database field
7218
7219 example::
7220
7221 a = Field(name, 'string', length=32, default=None, required=False,
7222 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
7223 notnull=False, unique=False,
7224 widget=None, label=None, comment=None,
7225 uploadfield=True, # True means store on disk,
7226 # 'a_field_name' means store in this field in db
7227 # False means file content will be discarded.
7228 writable=True, readable=True, update=None, authorize=None,
7229 autodelete=False, represent=None, uploadfolder=None,
7230 uploadseparate=False # upload to separate directories by uuid_keys
7231 # first 2 character and tablename.fieldname
7232 # False - old behavior
7233 # True - put uploaded file in
7234 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
7235 # directory)
7236
7237 to be used as argument of DAL.define_table
7238
7239 allowed field types:
7240 string, boolean, integer, double, text, blob,
7241 date, time, datetime, upload, password
7242
7243 strings have a length of Adapter.maxcharlength by default (512; 255 for MySQL)
7244 fields should have a default or they will be required in SQLFORMs
7245 the requires argument is used to validate the field input in SQLFORMs
7246
7247 """
7248
7249 - def __init__(
7250 self,
7251 fieldname,
7252 type='string',
7253 length=None,
7254 default=DEFAULT,
7255 required=False,
7256 requires=DEFAULT,
7257 ondelete='CASCADE',
7258 notnull=False,
7259 unique=False,
7260 uploadfield=True,
7261 widget=None,
7262 label=DEFAULT,
7263 comment=None,
7264 writable=True,
7265 readable=True,
7266 update=None,
7267 authorize=None,
7268 autodelete=False,
7269 represent=None,
7270 uploadfolder=None,
7271 uploadseparate=False,
7272 compute=None,
7273 custom_store=None,
7274 custom_retrieve=None,
7275 custom_delete=None,
7276 ):
7277 self.db = None
7278 self.op = None
7279 self.first = None
7280 self.second = None
7281 if not isinstance(fieldname,str):
7282 raise SyntaxError, "missing field name"
7283 self.name = fieldname = cleanup(fieldname)
7284 if hasattr(Table,fieldname) or fieldname[0] == '_' or \
7285 regex_python_keywords.match(fieldname):
7286 raise SyntaxError, 'Field: invalid field name: %s' % fieldname
7287 if isinstance(type, Table):
7288 type = 'reference ' + type._tablename
7289 self.type = type
7290 self.length = (length is None) and DEFAULTLENGTH.get(type,512) or length
7291 if default is DEFAULT:
7292 self.default = update or None
7293 else:
7294 self.default = default
7295 self.required = required
7296 self.ondelete = ondelete.upper()
7297 self.notnull = notnull
7298 self.unique = unique
7299 self.uploadfield = uploadfield
7300 self.uploadfolder = uploadfolder
7301 self.uploadseparate = uploadseparate
7302 self.widget = widget
7303 if label is DEFAULT:
7304 self.label = fieldname.replace('_', ' ').title()
7305 else:
7306 self.label = label or ''
7307 self.comment = comment
7308 self.writable = writable
7309 self.readable = readable
7310 self.update = update
7311 self.authorize = authorize
7312 self.autodelete = autodelete
7313 if not represent and type in ('list:integer','list:string'):
7314 represent=lambda x,r=None: ', '.join(str(y) for y in x or [])
7315 self.represent = represent
7316 self.compute = compute
7317 self.isattachment = True
7318 self.custom_store = custom_store
7319 self.custom_retrieve = custom_retrieve
7320 self.custom_delete = custom_delete
7321 if self.label is None:
7322 self.label = fieldname.replace('_',' ').title()
7323 if requires is None:
7324 self.requires = []
7325 else:
7326 self.requires = requires
7327
7328 - def store(self, file, filename=None, path=None):
7329 if self.custom_store:
7330 return self.custom_store(file,filename,path)
7331 if isinstance(file, cgi.FieldStorage):
7332 file = file.file
7333 filename = filename or file.filename
7334 elif not filename:
7335 filename = file.name
7336 filename = os.path.basename(filename.replace('/', os.sep)\
7337 .replace('\\', os.sep))
7338 m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
7339 extension = m and m.group('e') or 'txt'
7340 uuid_key = web2py_uuid().replace('-', '')[-16:]
7341 encoded_filename = base64.b16encode(filename).lower()
7342 newfilename = '%s.%s.%s.%s' % \
7343 (self._tablename, self.name, uuid_key, encoded_filename)
7344 newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
7345 if isinstance(self.uploadfield,Field):
7346 blob_uploadfield_name = self.uploadfield.uploadfield
7347 keys={self.uploadfield.name: newfilename,
7348 blob_uploadfield_name: file.read()}
7349 self.uploadfield.table.insert(**keys)
7350 elif self.uploadfield == True:
7351 if path:
7352 pass
7353 elif self.uploadfolder:
7354 path = self.uploadfolder
7355 elif self.db._adapter.folder:
7356 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
7357 else:
7358 raise RuntimeError, "you must specify a Field(...,uploadfolder=...)"
7359 if self.uploadseparate:
7360 path = os.path.join(path,"%s.%s" % (self._tablename, self.name),uuid_key[:2])
7361 if not os.path.exists(path):
7362 os.makedirs(path)
7363 pathfilename = os.path.join(path, newfilename)
7364 dest_file = open(pathfilename, 'wb')
7365 try:
7366 shutil.copyfileobj(file, dest_file)
7367 except IOError:
7368 raise IOError, 'Unable to store file "%s" because invalid permissions, readonly file system, or filename too long' % pathfilename
7369 dest_file.close()
7370 return newfilename
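
# Sketch of store() in a controller (hedged: assumes an 'upload' field
# db.person.image and request.vars.myfile as posted by an upload form):
#
#   filename = db.person.image.store(request.vars.myfile)
#   id = db.person.insert(image=filename)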
7371
7372 - def retrieve(self, name, path=None):
7373 if self.custom_retrieve:
7374 return self.custom_retrieve(name, path)
7375 import http
7376 if self.authorize or isinstance(self.uploadfield, str):
7377 row = self.db(self == name).select().first()
7378 if not row:
7379 raise http.HTTP(404)
7380 if self.authorize and not self.authorize(row):
7381 raise http.HTTP(403)
7382 try:
7383 m = regex_content.match(name)
7384 if not m or not self.isattachment:
7385 raise TypeError, 'Can\'t retrieve %s' % name
7386 filename = base64.b16decode(m.group('name'), True)
7387 filename = regex_cleanup_fn.sub('_', filename)
7388 except (TypeError, AttributeError):
7389 filename = name
7390 if isinstance(self.uploadfield, str):
7391 return (filename, cStringIO.StringIO(row[self.uploadfield] or ''))
7392 elif isinstance(self.uploadfield,Field):
7393 blob_uploadfield_name = self.uploadfield.uploadfield
7394 query = self.uploadfield == name
7395 data = self.uploadfield.table(query)[blob_uploadfield_name]
7396 return (filename, cStringIO.StringIO(data))
7397 else:
7398
7399 if path:
7400 pass
7401 elif self.uploadfolder:
7402 path = self.uploadfolder
7403 else:
7404 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
7405 if self.uploadseparate:
7406 t = m.group('table')
7407 f = m.group('field')
7408 u = m.group('uuidkey')
7409 path = os.path.join(path,"%s.%s" % (t,f),u[:2])
7410 return (filename, open(os.path.join(path, name), 'rb'))
7411
7426
7438
7441
7444
7445 - def __str__(self):
7446 try:
7447 return '%s.%s' % (self.tablename, self.name)
7448 except:
7449 return '<no table>.%s' % self.name
7450
7451
7453
7454 -class Query(object):
7455
7456 """
7457 a query object necessary to define a set.
7458 it can be stored or can be passed to DAL.__call__() to obtain a Set
7459
7460 Example::
7461
7462 query = db.users.name=='Max'
7463 set = db(query)
7464 records = set.select()
7465
7466 """
7467
7468 - def __init__(
7469 self,
7470 db,
7471 op,
7472 first=None,
7473 second=None,
7474 ignore_common_filters = False,
7475 ):
7476 self.db = self._db = db
7477 self.op = op
7478 self.first = first
7479 self.second = second
7480 self.ignore_common_filters = ignore_common_filters
7481
7483 return self.db._adapter.expand(self)
7484
7486 return Query(self.db,self.db._adapter.AND,self,other)
7487
7489 return Query(self.db,self.db._adapter.OR,self,other)
7490
7492 if self.op==self.db._adapter.NOT:
7493 return self.first
7494 return Query(self.db,self.db._adapter.NOT,self)
7495
7496
7497 regex_quotes = re.compile("'[^']*'")
7498
7499
7500 -def xorify(orderby):
7501 if not orderby:
7502 return None
7503 orderby2 = orderby[0]
7504 for item in orderby[1:]:
7505 orderby2 = orderby2 | item
7506 return orderby2
7507
7508 -def use_common_filters(query):
7509 return (query and hasattr(query,'ignore_common_filters') and \
7510 not query.ignore_common_filters)
7511
7512 -class Set(object):
7513
7514 """
7515 a Set represents a set of records in the database,
7516 the records are identified by the query=Query(...) object.
7517 normally the Set is generated by DAL.__call__(Query(...))
7518
7519 given a set, for example
7520 set = db(db.users.name=='Max')
7521 you can:
7522 set.update(db.users.name='Massimo')
7523 set.delete() # all elements in the set
7524 set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
7525 and take subsets:
7526 subset = set(db.users.id<5)
7527 """
7528
7529 - def __init__(self, db, query, ignore_common_filters = None):
7530 self.db = db
7531 self._db = db
7532 if not ignore_common_filters is None and \
7533 use_common_filters(query) == ignore_common_filters:
7534 query = copy.copy(query)
7535 query.ignore_common_filters = ignore_common_filters
7536 self.query = query
7537
7538 - def __call__(self, query, ignore_common_filters=False):
7539 if isinstance(query,Table):
7540 query = query._id>0
7541 elif isinstance(query,str):
7542 query = raw(query)
7543 elif isinstance(query,Field):
7544 query = query!=None
7545 if self.query:
7546 return Set(self.db, self.query & query,
7547 ignore_common_filters = ignore_common_filters)
7548 else:
7549 return Set(self.db, query,
7550 ignore_common_filters = ignore_common_filters)
7551
7552 - def _count(self,distinct=None):
7553 return self.db._adapter._count(self.query,distinct)
7554
7555 - def _select(self, *fields, **attributes):
7559
7561 tablename=self.db._adapter.get_table(self.query)
7562 return self.db._adapter._delete(tablename,self.query)
7563
7564 - def _update(self, **update_fields):
7568
7570 return not self.select(limitby=(0,1))
7571
7572 - def count(self,distinct=None):
7573 return self.db._adapter.count(self.query,distinct)
7574
7575 - def select(self, *fields, **attributes):
7579
7584
7585 - def update(self, **update_fields):
7592
7613
7615 table = self.db[self.db._adapter.tables(self.query)[0]]
7616
7617 if upload_fields:
7618 fields = upload_fields.keys()
7619 else:
7620 fields = table.fields
7621 fields = [f for f in fields if table[f].type == 'upload'
7622 and table[f].uploadfield == True
7623 and table[f].autodelete]
7624 if not fields:
7625 return
7626 for record in self.select(*[table[f] for f in fields]):
7627 for fieldname in fields:
7628 field = table[fieldname]
7629 oldname = record.get(fieldname, None)
7630 if not oldname:
7631 continue
7632 if upload_fields and oldname == upload_fields[fieldname]:
7633 continue
7634 if field.custom_delete:
7635 field.custom_delete(oldname)
7636 else:
7637 uploadfolder = field.uploadfolder
7638 if not uploadfolder:
7639 uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
7640 if field.uploadseparate:
7641 items = oldname.split('.')
7642 uploadfolder = os.path.join(uploadfolder,
7643 "%s.%s" % (items[0], items[1]),
7644 items[2][:2])
7645 oldpath = os.path.join(uploadfolder, oldname)
7646 if os.path.exists(oldpath):
7647 os.unlink(oldpath)
7648
7649 -def update_record(pack, a=None):
7650 (colset, table, id) = pack
7651 b = a or dict(colset)
7652 c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
7653 table._db(table._id==id).update(**c)
7654 for (k, v) in c.items():
7655 colset[k] = v
7656
7657 -class VirtualCommand(object):
7658 - def __init__(self,method,row):
7659 self.method=method
7660 self.row=row
7661 - def __call__(self,*args,**kwargs):
7662 return self.method(self.row,*args,**kwargs)
7663
7664 -def lazy_virtualfield(f):
7665 f.__lazy__ = True
7666 return f
7667
7668 -class Rows(object):
7669
7670 """
7671 A wrapper for the return value of a select. It basically represents a table.
7672 It has an iterator and each row is represented as a dictionary.
7673 """
7674
7675
7676
7677 - def __init__(
7678 self,
7679 db=None,
7680 records=[],
7681 colnames=[],
7682 compact=True,
7683 rawrows=None
7684 ):
7685 self.db = db
7686 self.records = records
7687 self.colnames = colnames
7688 self.compact = compact
7689 self.response = rawrows
7690
7692 """
7693 db.define_table('x',Field('number','integer'))
7694 if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]
7695
7696 from gluon.dal import lazy_virtualfield
7697
7698 class MyVirtualFields(object):
7699 # normal virtual field (backward compatible, discouraged)
7700 def normal_shift(self): return self.x.number+1
7701 # lazy virtual field (because of @staticmethod)
7702 @lazy_virtualfield
7703 def lazy_shift(instance,row,delta=4): return row.x.number+delta
7704 db.x.virtualfields.append(MyVirtualFields())
7705
7706 for row in db(db.x).select():
7707 print row.number, row.normal_shift, row.lazy_shift(delta=7)
7708 """
7709 if not keyed_virtualfields:
7710 return self
7711 for row in self.records:
7712 for (tablename,virtualfields) in keyed_virtualfields.items():
7713 attributes = dir(virtualfields)
7714 if not tablename in row:
7715 box = row[tablename] = Row()
7716 else:
7717 box = row[tablename]
7718 updated = False
7719 for attribute in attributes:
7720 if attribute[0] != '_':
7721 method = getattr(virtualfields,attribute)
7722 if hasattr(method,'__lazy__'):
7723 box[attribute]=VirtualCommand(method,row)
7724 elif type(method)==types.MethodType:
7725 if not updated:
7726 virtualfields.__dict__.update(row)
7727 updated = True
7728 box[attribute]=method()
7729 return self
7730
7731 - def __and__(self,other):
7732 if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects'
7733 records = self.records+other.records
7734 return Rows(self.db,records,self.colnames)
7735
7736 - def __or__(self,other):
7737 if self.colnames!=other.colnames: raise Exception, 'Cannot | incompatible Rows objects'
7738 records = self.records
7739 records += [record for record in other.records \
7740 if not record in records]
7741 return Rows(self.db,records,self.colnames)
7742
7743 - def __nonzero__(self):
7744 if len(self.records):
7745 return 1
7746 return 0
7747
7748 - def __len__(self):
7749 return len(self.records)
7750
7751 - def __getslice__(self, a, b):
7752 return Rows(self.db,self.records[a:b],self.colnames)
7753
7754 - def __getitem__(self, i):
7755 row = self.records[i]
7756 keys = row.keys()
7757 if self.compact and len(keys) == 1 and keys[0] != '_extra':
7758 return row[row.keys()[0]]
7759 return row
7760
7762 """
7763 iterator over records
7764 """
7765
7766 for i in xrange(len(self)):
7767 yield self[i]
7768
7770 """
7771 serializes the table into a csv file
7772 """
7773
7774 s = cStringIO.StringIO()
7775 self.export_to_csv_file(s)
7776 return s.getvalue()
7777
7778 - def first(self):
7779 if not self.records:
7780 return None
7781 return self[0]
7782
7783 - def last(self):
7784 if not self.records:
7785 return None
7786 return self[-1]
7787
7789 """
7790 returns a new Rows object, a subset of the original object,
7791 filtered by the function f
7792 """
7793 if not self.records:
7794 return Rows(self.db, [], self.colnames)
7795 records = []
7796 for i in range(0,len(self)):
7797 row = self[i]
7798 if f(row):
7799 records.append(self.records[i])
7800 return Rows(self.db, records, self.colnames)
7801
7803 """
7804 removes elements from the calling Rows object, filtered by the function f,
7805 and returns a new Rows object containing the removed elements
7806 """
7807 if not self.records:
7808 return Rows(self.db, [], self.colnames)
7809 removed = []
7810 i=0
7811 while i<len(self):
7812 row = self[i]
7813 if f(row):
7814 removed.append(self.records[i])
7815 del self.records[i]
7816 else:
7817 i += 1
7818 return Rows(self.db, removed, self.colnames)
7819
7820 - def sort(self, f, reverse=False):
7821 """
7822 returns a list of sorted elements (not sorted in place)
7823 """
7824 return Rows(self.db,sorted(self,key=f,reverse=reverse),self.colnames)
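
# find/exclude/sort act on the already-fetched Rows in Python, without
# another trip to the database (a sketch):
#
#   rows = db(db.person).select()
#   js = rows.find(lambda row: row.name.startswith('J'))
#   rows.exclude(lambda row: row.name == 'Jim')  # also removed from rows
#   by_name = rows.sort(lambda row: row.name)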
7825
7827 """
7828 regroups the rows, by one of the fields
7829 """
7830 if not self.records:
7831 return {}
7832 key = str(field)
7833 grouped_row_group = dict()
7834
7835 for row in self:
7836 value = row[key]
7837 if not value in grouped_row_group:
7838 grouped_row_group[value] = [row]
7839 else:
7840 grouped_row_group[value].append(row)
7841 return grouped_row_group
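
# Sketch: bucket an in-memory Rows object by one field's value:
#
#   rows = db(db.person).select()
#   groups = rows.group_by_value(db.person.name)
#   # groups maps each distinct name to the list of matching Row objects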
7842
7843 - def as_list(self,
7844 compact=True,
7845 storage_to_dict=True,
7846 datetime_to_str=True):
7847 """
7848 returns the data as a list or dictionary.
7849 :param storage_to_dict: when True returns a dict, otherwise a list (default True)
7850 :param datetime_to_str: convert datetime fields to strings (default True)
7851 """
7852 (oc, self.compact) = (self.compact, compact)
7853 if storage_to_dict:
7854 items = [item.as_dict(datetime_to_str) for item in self]
7855 else:
7856 items = [item for item in self]
7857 self.compact = compact
7858 return items
7859
7860
7861 - def as_dict(self,
7862 key='id',
7863 compact=True,
7864 storage_to_dict=True,
7865 datetime_to_str=True):
7866 """
7867 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)
7868
7869 :param key: the name of the field to be used as dict key, normally the id
7870 :param compact: whether to use the compact row representation (default True)
7871 :param storage_to_dict: when True returns a dict, otherwise a list (default True)
7872 :param datetime_to_str: convert datetime fields to strings (default True)
7873 """
7874 rows = self.as_list(compact, storage_to_dict, datetime_to_str)
7875 if isinstance(key,str) and key.count('.')==1:
7876 (table, field) = key.split('.')
7877 return dict([(r[table][field],r) for r in rows])
7878 elif isinstance(key,str):
7879 return dict([(r[key],r) for r in rows])
7880 else:
7881 return dict([(key(r),r) for r in rows])
7882
7884 """
7885 export data to csv, the first line contains the column names
7886
7887 :param ofile: where the csv must be exported to
7888 :param null: how null values must be represented (default '<NULL>')
7889 :param delimiter: delimiter to separate values (default ',')
7890 :param quotechar: character to use to quote string values (default '"')
7891 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
7892 :param represent: use the fields .represent value (default False)
7893 :param colnames: list of column names to use (default self.colnames)
7894 This will only work when exporting rows objects!
7895 DO NOT use this with db.export_to_csv_file()
7896 """
7897 delimiter = kwargs.get('delimiter', ',')
7898 quotechar = kwargs.get('quotechar', '"')
7899 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
7900 represent = kwargs.get('represent', False)
7901 writer = csv.writer(ofile, delimiter=delimiter,
7902 quotechar=quotechar, quoting=quoting)
7903 colnames = kwargs.get('colnames', self.colnames)
7904 write_colnames = kwargs.get('write_colnames',True)
7905
7906 if write_colnames:
7907 writer.writerow(colnames)
7908
7909 def none_exception(value):
7910 """
7911 returns a cleaned up value that can be used for csv export:
7912 - unicode text is encoded as such
7913 - None values are replaced with the given representation (default <NULL>)
7914 """
7915 if value is None:
7916 return null
7917 elif isinstance(value, unicode):
7918 return value.encode('utf8')
7919 elif isinstance(value,Reference):
7920 return int(value)
7921 elif hasattr(value, 'isoformat'):
7922 return value.isoformat()[:19].replace('T', ' ')
7923 elif isinstance(value, (list,tuple)):
7924 return bar_encode(value)
7925 return value
7926
7927 for record in self:
7928 row = []
7929 for col in colnames:
7930 if not regex_table_field.match(col):
7931 row.append(record._extra[col])
7932 else:
7933 (t, f) = col.split('.')
7934 field = self.db[t][f]
7935 if isinstance(record.get(t, None), (Row,dict)):
7936 value = record[t][f]
7937 else:
7938 value = record[f]
7939 if field.type=='blob' and not value is None:
7940 value = base64.b64encode(value)
7941 elif represent and field.represent:
7942 value = field.represent(value)
7943 row.append(none_exception(value))
7944 writer.writerow(row)
7945
7947 """
7948 serializes the table using sqlhtml.SQLTABLE (if present)
7949 """
7950
7951 import sqlhtml
7952 return sqlhtml.SQLTABLE(self).xml()

    def json(self, mode='object', default=None):
        """
        serializes the table to a JSON list of objects ('object' mode) or
        to a list of value lists ('array' mode)
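
        Example (a sketch, not a doctest):

            rows = db(db.person).select()
            rows.json()              # '[{"id": 1, "name": "James"}, ...]'
            rows.json(mode='array')  # '[[1, "James"], ...]' (values only)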
        """
        mode = mode.lower()
        if not mode in ['object', 'array']:
            raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode

        def inner_loop(record, col):
            (t, f) = col.split('.')
            res = None
            if not regex_table_field.match(col):
                key = col
                res = record._extra[col]
            else:
                key = f
                if isinstance(record.get(t, None), Row):
                    res = record[t][f]
                else:
                    res = record[f]
            if mode == 'object':
                return (key, res)
            else:
                return res

        if mode == 'object':
            items = [dict([inner_loop(record, col) for col in
                     self.colnames]) for record in self]
        else:
            items = [[inner_loop(record, col) for col in self.colnames]
                     for record in self]
        if have_serializers:
            return serializers.json(items,default=default or serializers.custom_json)
        else:
            import simplejson
            return simplejson.dumps(items)


def Rows_unpickler(data):
    return cPickle.loads(data)

def Rows_pickler(data):
    return Rows_unpickler, \
        (cPickle.dumps(data.as_list(storage_to_dict=False,
                                    datetime_to_str=False)),)

# register the two helpers above so that Rows objects can be pickled
copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
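
# Usage sketch (not part of the original source; assumes a connected db and
# a 'person' table): once the pickler is registered, a Rows object survives
# a cPickle round trip as a list of Row objects:
#
#     rows = db(db.person).select()
#     rows2 = cPickle.loads(cPickle.dumps(rows))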


def test_all():
    """

    >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
             Field('stringf', 'string', length=32, required=True),\
             Field('booleanf', 'boolean', default=False),\
             Field('passwordf', 'password', notnull=True),\
             Field('uploadf', 'upload'),\
             Field('blobf', 'blob'),\
             Field('integerf', 'integer', unique=True),\
             Field('doublef', 'double', unique=True,notnull=True),\
             Field('datef', 'date', default=datetime.date.today()),\
             Field('timef', 'time'),\
             Field('datetimef', 'datetime'),\
             migrate='test_user.table')

    Insert a record

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                        uploadf=None, integerf=5, doublef=3.14,\
                        datef=datetime.date(2001, 1, 1),\
                        timef=datetime.time(12, 30, 15),\
                        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
             Field('name'),\
             Field('birth','date'),\
             migrate='test_person.table')
    >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
    >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name=\"Max\")
    >>> me.name
    'Max'

    Examples of complex search conditions

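    (note the parentheses around each condition: & and | bind more tightly
    than comparison operators such as == and <)
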
    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

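    (orderby fields are chained with |; a leading ~ sorts that field in
    descending order)
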
    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of a one-to-many relation

    >>> tmp = db.define_table('dog',\
             Field('name'),\
             Field('birth','date'),\
             Field('owner',db.person),\
             migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1
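
    (the left argument produces a LEFT OUTER JOIN, so persons without a dog
    would still be listed)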

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of a many-to-many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
             migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
             migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
             Field('author_id', db.author),\
             Field('paper_id', db.paper),\
             migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD
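
    (a Set stores only the query; no SQL is issued until select, update,
    count or delete is called on it)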

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD
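
    (_select builds and returns the SQL string without executing it, which
    is what allows it to be embedded in belongs as a subquery)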

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber.id>0).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber.id>0).select(mynumber.x.sum())[0](mynumber.x.sum())
    45

    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5
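
    (an expression passed to select can also be used as a key into the
    returned row to read the computed value, as the two calls above do)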

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Commit the distributed transaction and delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """


# legacy aliases, kept for backward compatibility with pre-DAL applications

SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field
DAL.Table = Table


if __name__ == '__main__':
    # running this file directly executes the doctests above
    import doctest
    doctest.testmod()