
Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
# =======================================================================
__author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL v2 or later (details at http://www.gnu.org)'

# stdlib
import time
import sys
import os
import codecs
import types
import logging
import datetime as pydt
import re as regex


# GNUmed
if __name__ == '__main__':
	sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character

_log = logging.getLogger('gm.db')

# 3rd party
try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
	raise


_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes
# =======================================================================
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# timezone things
_default_client_timezone = None			# default time zone for connections
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"		# MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

postgresql_version_string = None
postgresql_version = None			# accuracy: major.minor

__ro_conn_pool = None

auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',		# ... old (v1) style hashes
	6: '90e2026ac2efd236da9c8608b8685b2d',		# new (v2) style hashes ...
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161',
	17: '161428ee97a00e3bf56168c3a15b7b50',
	18: 'a0f9efcabdecfb4ddb6d8c0b69c02092'
}

map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16,
	'161428ee97a00e3bf56168c3a15b7b50': 17,
	'a0f9efcabdecfb4ddb6d8c0b69c02092': 18
}

map_client_branch2required_db_version = {
	u'GIT tree': 0,
	u'0.3': 9,
	u'0.4': 10,
	u'0.5': 11,
	u'0.6': 12,
	u'0.7': 13,
	u'0.8': 14,
	u'0.9': 15,
	u'1.0': 16,		# intentional duplicate with 1.1
	u'1.1': 16,
	u'1.2': 17,
	u'1.3': 18
}

# get columns and data types for a given table
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# get column names only for a given table
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# =======================================================================
# module globals API
# =======================================================================
def set_default_client_encoding(encoding = None):
	# check whether psycopg2 can handle this encoding
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# check whether Python can handle this encoding
	py_enc = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
#---------------------------------------------------
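# Editor's usage sketch (illustrative, not part of upstream gmPG2): pick a
# wire encoding psycopg2 knows about; unknown names raise ValueError.
def __example_set_encoding():
	return set_default_client_encoding(encoding = 'UTF8')
#---------------------------------------------------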
def set_default_client_timezone(timezone = None):
	# FIXME: use __validate
	global _default_client_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone

	global _sql_set_timezone
	_sql_set_timezone = u'set timezone to %s'

	return True
#---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):

	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return is_valid
#---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""Some timezone definitions are abbreviations, so try to expand
	them, because "set time zone" doesn't accept abbreviations."""

	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
#---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection."""

	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	try:
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)

# =======================================================================
# login API
# =======================================================================
def __request_login_params_tui():
	"""Text mode request of database login parameters"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = 'gnumed_v19')
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		login.password = getpass.getpass(tmp)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
#---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog.
	# If wx was not initialized / there is no main App loop, an exception
	# should be raised anyway.
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
#---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection."""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't, wxGTK will crash hard at
	#  C-level with "can't open Display")
	if os.environ.has_key('DISPLAY'):
		# try wxPython GUI
		try:
			return __request_login_params_gui_wx()
		except:
			pass

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()

# =======================================================================
# DSN API
# -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	dsn_parts = []

	if (database is not None) and (database.strip() != ''):
		dsn_parts.append('dbname=%s' % database)

	if (host is not None) and (host.strip() != ''):
		dsn_parts.append('host=%s' % host)

	if (port is not None) and (str(port).strip() != ''):
		dsn_parts.append('port=%s' % port)

	if (user is not None) and (user.strip() != ''):
		dsn_parts.append('user=%s' % user)

	if (password is not None) and (password.strip() != ''):
		dsn_parts.append('password=%s' % password)

	dsn_parts.append('sslmode=prefer')

	return ' '.join(dsn_parts)
# ------------------------------------------------------
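# Editor's usage sketch (illustrative, not part of upstream gmPG2; the
# database name and credentials shown are hypothetical):
def __example_make_dsn():
	dsn = make_psycopg2_dsn (
		database = 'gnumed_v18',
		host = 'publicdb.gnumed.de',
		port = 5432,
		user = 'any-doc',
		password = 'any-doc'
	)
	# -> 'dbname=gnumed_v18 host=publicdb.gnumed.de port=5432 user=any-doc password=any-doc sslmode=prefer'
	return dsn
# ------------------------------------------------------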
def get_default_login():
	# make sure we do have a login
	get_default_dsn()
	return _default_login
# ------------------------------------------------------
def get_default_dsn():
	global _default_dsn
	if _default_dsn is not None:
		return _default_dsn

	login = request_login_params()
	set_default_login(login = login)

	return _default_dsn
# ------------------------------------------------------
def set_default_login(login=None):
	if login is None:
		return False

	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info (
		'setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True

# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	expected_hash = known_schema_hashes[version]
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj = link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj = link_obj).split('\n'):
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj = link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
#------------------------------------------------------------------------
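# Editor's usage sketch (illustrative, not part of upstream gmPG2): abort
# early if the connected database does not carry the expected v18 schema.
def __example_require_v18_schema():
	if not database_schema_compatible(version = 18):
		raise gmExceptions.ConnectionError('database schema does not match the v18 hash')
#------------------------------------------------------------------------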
def get_schema_version(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	try:
		return map_schema_hash2version[rows[0]['md5']]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	return rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):

	if table_exists(link_obj = link_obj, schema = 'gm', table = 'schema_revision'):
		cmd = u"""
			SELECT
				imported::text,
				version,
				filename
			FROM gm.schema_revision
			ORDER BY imported"""
	elif table_exists(link_obj = link_obj, schema = 'public', table = 'gm_schema_revision'):
		cmd = u"""
			SELECT
				imported::text,
				version,
				filename
			FROM public.gm_schema_revision
			ORDER BY imported"""
	else:
		return []

	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows
#------------------------------------------------------------------------
def get_current_user():
	rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""

	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
#------------------------------------------------------------------------
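# Editor's usage sketch (illustrative, not part of upstream gmPG2; the
# schema/table/column names are hypothetical):
def __example_list_referrers():
	rows = get_foreign_keys2column(schema = u'clin', table = u'encounter', column = u'pk')
	for row in rows:
		print row['referencing_table'], row['referencing_column']
#------------------------------------------------------------------------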
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows
#------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	args = {'schema': schema}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]
#------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns False or True."""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
	return rows[0][0]
#------------------------------------------------------------------------
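# Editor's usage sketch (illustrative, not part of upstream gmPG2):
def __example_table_exists():
	return table_exists(schema = u'clin', table = u'encounter')		# -> True or False
#------------------------------------------------------------------------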
def get_col_indices(cursor = None):
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	col_index = 0
	for col_desc in cursor.description:
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_indices.has_key(col_name):
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
		col_index += 1

	return col_indices
#------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# map array types
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	col_defs = []
	col_defs.append(col_names)
	col_defs.append(col_type)
	return col_defs
#------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	cols = []
	for row in rows:
		cols.append(row[0])
	return cols

#------------------------------------------------------------------------
# i18n functions
#------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user database language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off;\n\n')
	tx_file.write(u"set client_encoding to 'utf-8';\n\n")
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		line = u"select i18n.upd_tx(E'%s', E'%s', E'%s');\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	tx_file.write(u'\\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
#------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
	args = {'lang': language, 'orig': original}
	run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
	return True

#------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
	return args

#------------------------------------------------------------------------
def get_translation_languages():
	rows, idx = run_ro_queries (
		queries = [{'cmd': u'select distinct lang from i18n.translations'}]
	)
	return [ r[0] for r in rows ]

#------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):

	args = {'lang': language}
	_log.debug('language [%s]', language)

	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
		SELECT DISTINCT ON (orig, lang)
			lang, orig, trans
		FROM ((

			-- strings stored as translation keys whether translated or not
			SELECT
				NULL as lang,
				ik.orig,
				NULL AS trans
			FROM
				i18n.keys ik

		) UNION ALL (

			-- already translated strings
			SELECT
				it.lang,
				it.orig,
				it.trans
			FROM
				i18n.translations it

		)) as translatable_strings
		%s""" % order_by
	else:
		cmd = u"""
		SELECT DISTINCT ON (orig, lang)
			lang, orig, trans
		FROM ((

			-- strings stored as translation keys whether translated or not
			SELECT
				%%(lang)s as lang,
				ik.orig,
				i18n._(ik.orig, %%(lang)s) AS trans
			FROM
				i18n.keys ik

		) UNION ALL (

			-- already translated strings
			SELECT
				%%(lang)s as lang,
				it.orig,
				i18n._(it.orig, %%(lang)s) AS trans
			FROM
				i18n.translations it

		)) AS translatable_strings
		%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows

#------------------------------------------------------------------------
def get_current_user_language():
	cmd = u'select i18n.get_curr_lang()'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
	return rows[0][0]

#------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
#------------------------------------------------------------------------
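# Editor's usage sketch (illustrative, not part of upstream gmPG2): set,
# then unset, the database language for the current user; the return value
# is False when no translation is available for that language.
def __example_toggle_user_language():
	set_user_language(language = u'de_DE')
	set_user_language(language = None)		# unset again
#------------------------------------------------------------------------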
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)

	run_rw_queries(queries = [{
		'cmd': u'select i18n.force_curr_lang(%(lang)s)',
		'args': {'lang': language}
	}])

# =======================================================================
# query runners and helpers
# =======================================================================
def send_maintenance_notification():
	cmd = u'notify "db_maintenance_warning:"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def send_maintenance_shutdown():
	cmd = u'notify "db_maintenance_disconnect:"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	cmd = u'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except:
		cmd = u'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except:
			return False
#------------------------------------------------------------------------
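# Editor's usage sketch (illustrative, not part of upstream gmPG2; assumes
# a reachable database since the check is done by the server):
def __example_interval_checks():
	assert is_pg_interval(candidate = u'2 weeks')
	assert not is_pg_interval(candidate = u'not an interval')
#------------------------------------------------------------------------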
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	outfile = open(filename, 'wb')
	result = bytea2file_object(data_query = data_query, file_obj = outfile, chunk_size = chunk_size, data_size = data_size, data_size_query = data_size_query)
	outfile.close()
	return result
#------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file_obj>
	- must be a file-like Python object
	<data_size>
	- integer of the total size of the expected data, or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	conn = get_raw_connection(readonly = True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
		if data_size in [None, 0]:
			conn.rollback()
			return True

	max_chunk_size = 1024 * 1024 * 20		# 20 MB, works for typical CR DICOMs
	if chunk_size == 0:
		chunk_size = min(data_size, max_chunk_size)

	_log.debug('expecting %s bytes of BYTEA data in chunks of %s bytes', data_size, chunk_size)

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj = conn, queries = [data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id + 1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj = conn, queries = [data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	conn.rollback()
	return True
#------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea
	"""
	# read data from file
	infile = open(filename, 'rb')
	data_as_byte_string = infile.read()
	infile.close()
	if args is None:
		args = {}
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = True
	else:
		close_conn = False

	run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

	if close_conn:
		conn.close()

	return
#------------------------------------------------------------------------
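# Editor's usage sketch (illustrative, not part of upstream gmPG2; the
# table, column, and primary key shown are hypothetical):
def __example_store_scan():
	cmd = u'UPDATE blobs.doc_obj SET data = %(data)s::bytea WHERE pk = %(pk)s'
	file2bytea(query = cmd, filename = '/tmp/scan.png', args = {'pk': 1})
#------------------------------------------------------------------------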
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: try to escape *all* metacharacters
		False: only escape those which render the regex invalid
	"""
	return expression.replace (
		'(', '\('
	).replace (
		')', '\)'
	).replace (
		'[', '\['
	).replace (
		'+', '\+'
	).replace (
		'.', '\.'
	).replace (
		'*', '\*'
	)
	#']', '\]',		# not needed
#------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly = True, verbose = verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				if curs.statusmessage != u'':
					_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except dbapi.Error as pg_exc:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = u'[%s]' % pg_exc.pgcode
				else:
					msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
				raise gmExceptions.AccessDenied (
					msg,
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
#------------------------------------------------------------------------
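# Editor's usage sketch (illustrative, not part of upstream gmPG2; assumes
# login parameters are available so a pooled read-only connection can be made):
def __example_ro_query():
	rows, idx = run_ro_queries (
		queries = [{'cmd': u'SELECT CURRENT_USER AS "user", now() AS "when"'}],
		get_col_idx = True
	)
	return rows[0][idx['user']]
#------------------------------------------------------------------------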
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		tx_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			tx_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			tx_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly = False)
		conn_close = conn.close
		conn_commit = conn.commit
		tx_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except dbapi.Error as pg_exc:
			_log.error('RW query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
				tx_rollback()		# just for good measure
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = u'[%s]' % pg_exc.pgcode
				else:
					msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
				raise gmExceptions.AccessDenied (
					msg,
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
#------------------------------------------------------------------------
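# Editor's usage sketch (illustrative, not part of upstream gmPG2; the table,
# column, and key names are hypothetical): two statements in one transaction,
# the second of which returns a value.
def __example_rw_tx():
	queries = [
		{'cmd': u'INSERT INTO clin.waiting_list (comment) VALUES (%(cmt)s)', 'args': {'cmt': u'test'}},
		{'cmd': u"SELECT currval(pg_get_serial_sequence('clin.waiting_list', 'pk'))"}
	]
	rows, idx = run_rw_queries(queries = queries, return_data = True)
	return rows[0][0]
#------------------------------------------------------------------------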
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generates SQL for an INSERT query.

	values: dict of values keyed by field to insert them into
	"""
	if schema is None:
		schema = u'public'

	fields = values.keys()		# that way val_snippets and fields really should end up in the same order
	val_snippets = []
	for field in fields:
		val_snippets.append(u'%%(%s)s' % field)

	if returning is None:
		returning = u''
		return_data = False
	else:
		returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
		return_data = True

	cmd = u"""\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)

# =======================================================================
# connection handling API
# -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):

		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly = True)

		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close

		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
# -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn = dsn, connection_factory = psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			pass
		if verbose:
			__log_PG_settings(curs = curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	conn.is_decorated = False

	return conn
# =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		conn = get_raw_connection(dsn = dsn, verbose = verbose, readonly = False)

	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	if readonly:
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	conn.is_decorated = True

	return conn
#-----------------------------------------------------------------------
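# Editor's usage sketch (illustrative, not part of upstream gmPG2): fetch a
# decorated read-only connection from the pool and hand it to the query API.
def __example_use_pooled_conn():
	conn = get_connection(readonly = True)
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': u'SELECT 1'}])
	return rows[0][0]
#-----------------------------------------------------------------------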
def shutdown():
	if __ro_conn_pool is None:
		return
	__ro_conn_pool.shutdown()

# ======================================================================
# internal helpers
#-----------------------------------------------------------------------
def __noop():
	pass
#-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
	raise TypeError(u'close() called on read-only connection')
#-----------------------------------------------------------------------
def log_database_access(action=None):
	run_insert (
		schema = u'gm',
		table = u'access_log',
		values = {u'user_action': action},
		end_tx = True
	)
#-----------------------------------------------------------------------
1542 -def sanity_check_time_skew(tolerance=60):
1543 """Check server time and local time to be within 1544 the given tolerance of each other. 1545 1546 tolerance: seconds 1547 """ 1548 _log.debug('maximum skew tolerance (seconds): %s', tolerance) 1549 1550 cmd = u"SELECT now() at time zone 'UTC'" 1551 conn = get_raw_connection(readonly=True) 1552 curs = conn.cursor() 1553 1554 start = time.time() 1555 rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}]) 1556 end = time.time() 1557 client_now_as_utc = pydt.datetime.utcnow() 1558 1559 curs.close() 1560 conn.commit() 1561 1562 server_now_as_utc = rows[0][0] 1563 query_duration = end - start 1564 _log.info('server "now" (UTC): %s', server_now_as_utc) 1565 _log.info('client "now" (UTC): %s', client_now_as_utc) 1566 _log.debug('wire roundtrip (seconds): %s', query_duration) 1567 1568 if query_duration > tolerance: 1569 _log.error('useless to check client/server time skew, wire roundtrip > tolerance') 1570 return False 1571 1572 if server_now_as_utc > client_now_as_utc: 1573 real_skew = server_now_as_utc - client_now_as_utc 1574 else: 1575 real_skew = client_now_as_utc - server_now_as_utc 1576 1577 _log.debug('client/server time skew: %s', real_skew) 1578 1579 if real_skew > pydt.timedelta(seconds = tolerance): 1580 _log.error('client/server time skew > tolerance') 1581 return False 1582 1583 return True
#-----------------------------------------------------------------------
def sanity_check_database_settings():
        """Checks database settings.

        returns (status, message)
        status:
                0: no problem
                1: non-fatal problem
                2: fatal problem
        """
        _log.debug('checking database settings')

        conn = get_connection()

        # - version string
        global postgresql_version_string
        if postgresql_version_string is None:
                curs = conn.cursor()
                curs.execute('SELECT version()')
                postgresql_version_string = curs.fetchone()['version']
                curs.close()
                _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

        options2check = {
                # setting: [expected value, risk, fatal ?]
                u'allow_system_table_mods': [u'off', u'system breakage', False],
                u'check_function_bodies': [u'on', u'suboptimal error detection', False],
                u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
                u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
                u'default_transaction_read_only': [u'on', u'accidental database writes', False],
                u'fsync': [u'on', u'data loss/corruption', True],
                u'full_page_writes': [u'on', u'data loss/corruption', False],
                u'lc_messages': [u'C', u'suboptimal error detection', False],
                u'password_encryption': [u'on', u'breach of confidentiality', False],
                u'regex_flavor': [u'advanced', u'query breakage', False],           # 9.0 doesn't support this anymore, default now "advanced" anyway
                u'synchronous_commit': [u'on', u'data loss/corruption', False],
                u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
        }

        from Gnumed.pycommon import gmCfg2
        _cfg = gmCfg2.gmCfgData()
        if _cfg.get(option = u'hipaa'):
                options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
                options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
        else:
                options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
                options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

        cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
        rows, idx = run_ro_queries (
                link_obj = conn,
                queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
                get_col_idx = False
        )

        found_error = False
        found_problem = False
        msg = []
        for row in rows:
                option = row['name']
                value_found = row['setting']
                value_expected = options2check[option][0]
                risk = options2check[option][1]
                fatal_setting = options2check[option][2]
                if value_found != value_expected:
                        if fatal_setting is True:
                                found_error = True
                        elif fatal_setting is False:
                                found_problem = True
                        elif fatal_setting is None:
                                pass
                        else:
                                _log.error(options2check[option])
                                raise ValueError(u'invalid database configuration sanity check')
                        msg.append(_(' option [%s]: %s') % (option, value_found))
                        msg.append(_(' risk: %s') % risk)
                        _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

        if found_error:
                return 2, u'\n'.join(msg)

        if found_problem:
                return 1, u'\n'.join(msg)

        return 0, u''
#------------------------------------------------------------------------
def __log_PG_settings(curs=None):
        # don't use any of the run_*()s since that might
        # create a loop if we fail here
        # FIXME: use pg_settings
        try:
                curs.execute(u'show all')
        except Exception:
                _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
                return False
        settings = curs.fetchall()
        if settings is None:
                _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
                return False
        for setting in settings:
                _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
        return True
# =======================================================================
def extract_msg_from_pg_exception(exc=None):

        try:
                msg = exc.args[0]
        except (AttributeError, IndexError, TypeError):
                return u'cannot extract message from exception'

        # already unicode ? (decoding a unicode object would raise TypeError)
        if isinstance(msg, unicode):
                return msg

        return unicode(msg, gmI18N.get_encoding(), 'replace')
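# A minimal usage sketch (not executed), say, inside an error handler:
#
#       try:
#               curs.execute(cmd, args)
#       except dbapi.Error, e:
#               _log.error(extract_msg_from_pg_exception(exc = e))
#               raise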
# =======================================================================
class cAuthenticationError(dbapi.OperationalError):

        def __init__(self, dsn=None, prev_val=None):
                self.dsn = dsn
                self.prev_val = prev_val

        def __str__(self):
                _log.warning('%s.__str__() called', self.__class__.__name__)
                tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
                _log.error(tmp)
                return tmp.encode(gmI18N.get_encoding(), 'replace')

        def __unicode__(self):
                return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)

# =======================================================================
# custom psycopg2 extensions
# =======================================================================
class cEncodingError(dbapi.OperationalError):

        def __init__(self, encoding=None, prev_val=None):
                self.encoding = encoding
                self.prev_val = prev_val

        def __str__(self):
                _log.warning('%s.__str__() called', self.__class__.__name__)
                # format first, then encode: prev_val may be an exception
                # instance rather than a string
                return (u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)).encode(gmI18N.get_encoding(), 'replace')

        def __unicode__(self):
                return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)

# -----------------------------------------------------------------------
# Python -> PostgreSQL
# -----------------------------------------------------------------------
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
class cAdapterPyDateTime(object):

        def __init__(self, dt):
                if dt.tzinfo is None:
                        raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % dt.isoformat())
                self.__dt = dt

        def getquoted(self):
                return _timestamp_template % self.__dt.isoformat()
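# A minimal sketch (not executed) of what this adapter does once it has
# been registered in the "main" section below; the exact shape of
# _timestamp_template is defined earlier in this module, and
# "some_tzinfo" is a placeholder for any tzinfo instance:
#
#       dt = pydt.datetime.now(some_tzinfo)
#       print psycopg2.extensions.adapt(dt).getquoted()         # -> _timestamp_template % dt.isoformat()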

## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##      def __init__(self, dt):
##              if dt.tz == '???':
##                      _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##              self.__dt = dt
##
##      def getquoted(self):
##              # under some locale settings the mx.DateTime ISO formatter
##              # will insert "," into the ISO string;
##              # while this is allowed per the ISO 8601 spec, PostgreSQL
##              # cannot currently handle it,
##              # so map those "," to "." to make things work:
##              return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#       import mx.DateTime as mxDT
##      psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#       _log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (...) ..." syntax because we want
# them adapted to "... ARRAY[...] ..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

        if len(sys.argv) < 2:
                sys.exit()

        if sys.argv[1] != 'test':
                sys.exit()

        logging.basicConfig(level=logging.DEBUG)
        #--------------------------------------------------------------------
        def test_file2bytea():
                run_rw_queries(queries = [
                        {'cmd': u'create table test_bytea (data bytea)'}
                ])

                cmd = u'insert into test_bytea values (%(data)s::bytea)'
                try:
                        file2bytea(query = cmd, filename = sys.argv[2])
                except Exception:
                        _log.exception('error')

                run_rw_queries(queries = [
                        {'cmd': u'drop table test_bytea'}
                ])
        #--------------------------------------------------------------------
        def test_get_connection():
                print "testing get_connection()"

                dsn = 'foo'
                try:
                        conn = get_connection(dsn=dsn)
                except dbapi.OperationalError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=abc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn=dsn, readonly=True)

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn=dsn, readonly=False)

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                encoding = 'foo'
                try:
                        conn = get_connection(dsn=dsn, encoding=encoding)
                except cEncodingError:
                        print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v
        #--------------------------------------------------------------------
        def test_exceptions():
                print "testing exceptions"

                try:
                        raise cAuthenticationError('no dsn', 'no previous exception')
                except cAuthenticationError:
                        t, v, tb = sys.exc_info()
                        print t
                        print v
                        print tb

                try:
                        raise cEncodingError('no dsn', 'no previous exception')
                except cEncodingError:
                        t, v, tb = sys.exc_info()
                        print t
                        print v
                        print tb
        #--------------------------------------------------------------------
        def test_ro_queries():
                print "testing run_ro_queries()"

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn, readonly=True)

                data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx
                data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True)
                print data
                print idx

                curs = conn.cursor()

                data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx

                data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx

                try:
                        data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
                        print data
                        print idx
                except psycopg2.ProgrammingError:
                        print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                curs.close()
        #--------------------------------------------------------------------
        def test_request_dsn():
                conn = get_connection()
                print conn
                conn.close()
        #--------------------------------------------------------------------
        def test_set_encoding():
                print "testing set_default_client_encoding()"

                enc = 'foo'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = ''
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'latin1'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'utf8'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'unicode'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'UNICODE'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v
        #--------------------------------------------------------------------
        def test_connection_pool():
                dsn = get_default_dsn()
                pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False)
                print pool
                print pool.getconn()
                print pool.getconn()
                print pool.getconn()
                print type(pool.getconn())
        #--------------------------------------------------------------------
        def test_list_args():
                dsn = get_default_dsn()
                conn = get_connection(dsn, readonly=True)
                curs = conn.cursor()
                curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
        #--------------------------------------------------------------------
        def test_sanitize_pg_regex():
                tests = [
                        ['(', '\\(']
                        , ['[', '\\[']
                        , [')', '\\)']
                ]
                for test in tests:
                        result = sanitize_pg_regex(test[0])
                        if result != test[1]:
                                print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
        #--------------------------------------------------------------------
        def test_is_pg_interval():
                status = True
                tests = [
                        [None, True],           # None == NULL == succeeds !
                        [1, True],
                        ['1', True],
                        ['abc', False]
                ]

                if not is_pg_interval():
                        print 'ERROR: is_pg_interval() returned "False", expected "True"'
                        status = False

                for test in tests:
                        result = is_pg_interval(test[0])
                        if result != test[1]:
                                print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
                                status = False

                return status
        #--------------------------------------------------------------------
        def test_sanity_check_time_skew():
                sanity_check_time_skew()
        #--------------------------------------------------------------------
        def test_get_foreign_key_details():
                for row in get_foreign_keys2column (
                        schema = u'dem',
                        table = u'identity',
                        column = u'pk'
                ):
                        print '%s.%s references %s.%s.%s' % (
                                row['referencing_table'],
                                row['referencing_column'],
                                row['referenced_schema'],
                                row['referenced_table'],
                                row['referenced_column']
                        )
        #--------------------------------------------------------------------
        def test_set_user_language():
                # (user, language, expected result; None == exception expected)
                tests = [
                        # current user
                        [None, 'de_DE', True],
                        [None, 'lang_w/o_tx', False],
                        [None, None, True],
                        # valid user
                        ['any-doc', 'de_DE', True],
                        ['any-doc', 'lang_w/o_tx', False],
                        ['any-doc', None, True],
                        # invalid user
                        ['invalid user', 'de_DE', None],
                        ['invalid user', 'lang_w/o_tx', False],         # language checking happens before user checking
                        ['invalid user', None, True]
                ]
                for test in tests:
                        try:
                                result = set_user_language(user = test[0], language = test[1])
                                if result != test[2]:
                                        print "test:", test
                                        print "result:", result, "expected:", test[2]
                        except psycopg2.IntegrityError, e:
                                if test[2] is None:
                                        continue
                                print "test:", test
                                print "unexpected exception:", e
        #--------------------------------------------------------------------
        def test_get_schema_revision_history():
                for line in get_schema_revision_history():
                        print u' - '.join(line)
        #--------------------------------------------------------------------
        def test_run_query():
                gmDateTime.init()
                args = {'dt': gmDateTime.pydt_max_here()}
                cmd = u"SELECT %(dt)s"

                #cmd = u"SELECT 'infinity'::timestamp with time zone"
                rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
                print rows
        #--------------------------------------------------------------------
        def test_schema_exists():
                print schema_exists()

        #--------------------------------------------------------------------
        # run tests
        #test_file2bytea()
        #test_get_connection()
        #test_exceptions()
        #test_ro_queries()
        #test_request_dsn()
        #test_set_encoding()
        #test_connection_pool()
        #test_list_args()
        #test_sanitize_pg_regex()
        #test_is_pg_interval()
        #test_sanity_check_time_skew()
        #test_get_foreign_key_details()
        #test_set_user_language()
        #test_get_schema_revision_history()
        #test_run_query()
        test_schema_exists()

# ======================================================================