
Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
# =======================================================================
__author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL v2 or later (details at http://www.gnu.org)'

# stdlib
import time
import sys
import os
import codecs
import types
import logging
import datetime as pydt
import re as regex


# GNUmed
if __name__ == '__main__':
    sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character

_log = logging.getLogger('gm.db')


# 3rd party
try:
    import psycopg2 as dbapi
except ImportError:
    _log.exception("Python database adapter psycopg2 not found.")
    print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
    raise


_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
    raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
    raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
    raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
try:
    dbapi.__version__.index('dt')
except ValueError:
    raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
    dbapi.__version__.index('ext')
except ValueError:
    raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
    dbapi.__version__.index('pq3')
except ValueError:
    raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes

# =======================================================================
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None         # default time zone for connections
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"      # MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

postgresql_version_string = None
postgresql_version = None           # accuracy: major.minor

__ro_conn_pool = None

auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

known_schema_hashes = {
    0: 'not released, testing only',
    2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
    3: 'e73718eaf230d8f1d2d01afa8462e176',
    4: '4428ccf2e54c289136819e701bb095ea',
    5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes
    6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ...
    7: '6c9f6d3981483f8e9433df99d1947b27',
    8: '89b13a7af83337c3aad153b717e52360',
    9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
    10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
    11: '03042ae24f3f92877d986fb0a6184d76',
    12: '06183a6616db62257e22814007a8ed07',
    13: 'fab7c1ae408a6530c47f9b5111a0841e',
    14: 'e170d543f067d1ea60bfe9076b1560cf',
    15: '70012ff960b77ecdff4981c94b5b55b6',
    16: '0bcf44ca22c479b52976e5eda1de8161',
    17: '161428ee97a00e3bf56168c3a15b7b50',
    18: 'a0f9efcabdecfb4ddb6d8c0b69c02092'
}

map_schema_hash2version = {
    'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
    'e73718eaf230d8f1d2d01afa8462e176': 3,
    '4428ccf2e54c289136819e701bb095ea': 4,
    '7e7b093af57aea48c288e76632a382e5': 5,
    '90e2026ac2efd236da9c8608b8685b2d': 6,
    '6c9f6d3981483f8e9433df99d1947b27': 7,
    '89b13a7af83337c3aad153b717e52360': 8,
    '641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
    '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
    '03042ae24f3f92877d986fb0a6184d76': 11,
    '06183a6616db62257e22814007a8ed07': 12,
    'fab7c1ae408a6530c47f9b5111a0841e': 13,
    'e170d543f067d1ea60bfe9076b1560cf': 14,
    '70012ff960b77ecdff4981c94b5b55b6': 15,
    '0bcf44ca22c479b52976e5eda1de8161': 16,
    '161428ee97a00e3bf56168c3a15b7b50': 17,
    'a0f9efcabdecfb4ddb6d8c0b69c02092': 18
}

map_client_branch2required_db_version = {
    u'GIT tree': 0,
    u'0.3': 9,
    u'0.4': 10,
    u'0.5': 11,
    u'0.6': 12,
    u'0.7': 13,
    u'0.8': 14,
    u'0.9': 15,
    u'1.0': 16,     # intentional duplicate with 1.1
    u'1.1': 16,
    u'1.2': 17,
    u'1.3': 18
}

# get columns and data types for a given table
query_table_col_defs = u"""select
    cols.column_name,
    cols.udt_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

query_table_attributes = u"""select
    cols.column_name
from
    information_schema.columns cols
where
    cols.table_schema = %s
        and
    cols.table_name = %s
order by
    cols.ordinal_position"""

# only works for single-column FKs but that's fine
# needs gm-dbo, any-doc won't work
SQL_foreign_key_name = u"""SELECT tc.constraint_schema, tc.constraint_name
FROM
    information_schema.table_constraints tc
        INNER JOIN information_schema.constraint_column_usage ccu USING (constraint_catalog, constraint_schema, constraint_name)
            INNER JOIN information_schema.key_column_usage kcu USING (constraint_catalog, constraint_schema, constraint_name)
WHERE
    tc.constraint_type = 'FOREIGN KEY'
        AND
    kcu.table_schema = %(src_schema)s
        AND
    kcu.table_name = %(src_tbl)s
        AND
    kcu.column_name = %(src_col)s
        AND
    ccu.table_schema = %(target_schema)s
        AND
    ccu.table_name = %(target_tbl)s
        AND
    ccu.column_name = %(target_col)s"""

# =======================================================================
# module globals API
# =======================================================================
def set_default_client_encoding(encoding = None):
    # check whether psycopg2 can handle this encoding
    if encoding not in psycopg2.extensions.encodings:
        raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
    # check whether Python can handle this encoding
    py_enc = psycopg2.extensions.encodings[encoding]
    try:
        codecs.lookup(py_enc)
    except LookupError:
        _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
        raise
    # FIXME: check encoding against the database
    # FIXME: - but we may not yet have access
    # FIXME: - psycopg2 will pull its encodings from the database eventually
    # it seems safe to set it
    global _default_client_encoding
    _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
    _default_client_encoding = encoding
    return True
#---------------------------------------------------
def set_default_client_timezone(timezone = None):

    # FIXME: use __validate
    global _default_client_timezone
    _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
    _default_client_timezone = timezone

    global _sql_set_timezone
    _sql_set_timezone = u'set timezone to %s'

    return True
#---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):

    _log.debug(u'validating time zone [%s]', timezone)

    cmd = u'set timezone to %(tz)s'
    args = {u'tz': timezone}

    conn.commit()
    curs = conn.cursor()
    is_valid = False
    try:
        curs.execute(cmd, args)
        _log.info(u'time zone [%s] is settable', timezone)
        # can we actually use it, though ?
        cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
        try:
            curs.execute(cmd)
            curs.fetchone()
            _log.info(u'time zone [%s] is usable', timezone)
            is_valid = True
        except:
            _log.error('error using time zone [%s]', timezone)
    except dbapi.DataError:
        _log.warning(u'time zone [%s] is not settable', timezone)
    except:
        _log.error(u'failed to set time zone to [%s]', timezone)
        _log.exception(u'')

    curs.close()
    conn.rollback()

    return is_valid
#---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
    """Some timezone definitions are abbreviations, so try to expand
    them, because "set time zone" doesn't take abbreviations."""

    cmd = u"""
        select distinct on (abbrev) name
        from pg_timezone_names
        where
            abbrev = %(tz)s and
            name ~ '^[^/]+/[^/]+$' and
            name !~ '^Etc/'
    """
    args = {u'tz': timezone}

    conn.commit()
    curs = conn.cursor()

    result = timezone
    try:
        curs.execute(cmd, args)
        rows = curs.fetchall()
        if len(rows) > 0:
            result = rows[0][0]
            _log.debug(u'[%s] maps to [%s]', timezone, result)
    except:
        _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

    curs.close()
    conn.rollback()

    return result
#---------------------------------------------------
def __detect_client_timezone(conn=None):
    """This is run on the very first connection."""

    # FIXME: check whether server.timezone is the same
    # FIXME: value as what we eventually detect

    # we need gmDateTime to be initialized
    if gmDateTime.current_local_iso_numeric_timezone_string is None:
        gmDateTime.init()

    _log.debug('trying to detect timezone from system')

    tz_candidates = []
    try:
        tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
        tz_candidates.append(tz)
        expanded = __expand_timezone(conn = conn, timezone = tz)
        if expanded != tz:
            tz_candidates.append(expanded)
    except KeyError:
        pass

    tz_candidates.append(gmDateTime.current_local_timezone_name)
    expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
    if expanded != gmDateTime.current_local_timezone_name:
        tz_candidates.append(expanded)

    _log.debug('candidates: %s', str(tz_candidates))

    # find best among candidates
    global _default_client_timezone
    global _sql_set_timezone
    found = False
    for tz in tz_candidates:
        if __validate_timezone(conn = conn, timezone = tz):
            _default_client_timezone = tz
            _sql_set_timezone = u'set timezone to %s'
            found = True
            break

    if not found:
        _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
        _sql_set_timezone = u"set time zone interval %s hour to minute"

    _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
# =======================================================================
# login API
# =======================================================================
def __request_login_params_tui():
    """Text mode request of database login parameters"""
    import getpass
    login = gmLoginInfo.LoginInfo()

    print "\nPlease enter the required login parameters:"
    try:
        login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
        login.database = prompted_input(prompt = "database", default = 'gnumed_v19')
        login.user = prompted_input(prompt = "user name", default = '')
        tmp = 'password for "%s" (not shown): ' % login.user
        login.password = getpass.getpass(tmp)
        login.port = prompted_input(prompt = "port", default = 5432)
    except KeyboardInterrupt:
        _log.warning("user cancelled text mode login dialog")
        print "user cancelled text mode login dialog"
        raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

    return login
#---------------------------------------------------
def __request_login_params_gui_wx():
    """GUI (wx) input request for database login parameters.

    Returns gmLoginInfo.LoginInfo object
    """
    import wx
    # OK, wxPython was already loaded. But has the main Application instance
    # been initialized yet ? if not, the exception will kick us out
    if wx.GetApp() is None:
        raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

    # Let's launch the login dialog.
    # If wx was not initialized / there is no main App loop, an exception
    # should be raised anyway.
    import gmAuthWidgets
    dlg = gmAuthWidgets.cLoginDialog(None, -1)
    dlg.ShowModal()
    login = dlg.panel.GetLoginInfo()
    dlg.Destroy()

    # if user cancelled or something else went wrong, raise an exception
    if login is None:
        raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

    return login
#---------------------------------------------------
def request_login_params():
    """Request login parameters for database connection."""
    # do we auto-request parameters at all ?
    if not auto_request_login_params:
        raise Exception('Cannot request login parameters.')

    # are we inside X ?
    # (if we aren't, wxGTK will crash hard at
    # C-level with "can't open Display")
    if os.environ.has_key('DISPLAY'):
        # try wxPython GUI
        try: return __request_login_params_gui_wx()
        except: pass

    # well, either we are on the console or
    # wxPython does not work, use text mode
    return __request_login_params_tui()

# =======================================================================
# DSN API
# -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
    dsn_parts = []

    if (database is not None) and (database.strip() != ''):
        dsn_parts.append('dbname=%s' % database)

    if (host is not None) and (host.strip() != ''):
        dsn_parts.append('host=%s' % host)

    if (port is not None) and (str(port).strip() != ''):
        dsn_parts.append('port=%s' % port)

    if (user is not None) and (user.strip() != ''):
        dsn_parts.append('user=%s' % user)

    if (password is not None) and (password.strip() != ''):
        dsn_parts.append('password=%s' % password)

    dsn_parts.append('sslmode=prefer')

    return ' '.join(dsn_parts)
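# Example (illustrative values only):
#
#   make_psycopg2_dsn(database = 'gnumed_v19', host = 'publicdb.gnumed.de', user = 'any-doc', password = 'secret')
#
# returns the libpq keyword/value string
#
#   'dbname=gnumed_v19 host=publicdb.gnumed.de port=5432 user=any-doc password=secret sslmode=prefer'
#
# None or empty parts are dropped, so a local Unix-socket connection
# needs only database and user.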
# ------------------------------------------------------
def get_default_login():
    # make sure we do have a login
    get_default_dsn()
    return _default_login
# ------------------------------------------------------
def get_default_dsn():
    global _default_dsn
    if _default_dsn is not None:
        return _default_dsn

    login = request_login_params()
    set_default_login(login=login)

    return _default_dsn
# ------------------------------------------------------
def set_default_login(login=None):
    if login is None:
        return False

    if login.host is not None:
        if login.host.strip() == u'':
            login.host = None

    global _default_login
    # log the old value before overwriting it
    _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
    _default_login = login

    dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

    global _default_dsn
    if _default_dsn is None:
        old_dsn = u'None'
    else:
        old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
    _log.info (
        'setting default DSN from [%s] to [%s]',
        old_dsn,
        regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
    )
    _default_dsn = dsn

    return True
# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
    expected_hash = known_schema_hashes[version]
    if version == 0:
        args = {'ver': 9999}
    else:
        args = {'ver': version}
    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{
            'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
            'args': args
        }]
    )
    if rows[0]['md5'] != expected_hash:
        _log.error('database schema version mismatch')
        _log.error('expected: %s (%s)' % (version, expected_hash))
        _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
        if verbose:
            _log.debug('schema dump follows:')
            for line in get_schema_structure(link_obj = link_obj).split():
                _log.debug(line)
            _log.debug('schema revision history dump follows:')
            for line in get_schema_revision_history(link_obj = link_obj):
                _log.debug(u' - '.join(line))
        return False
    _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
    return True
#------------------------------------------------------------------------
def get_schema_version(link_obj=None):
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
    try:
        return map_schema_hash2version[rows[0]['md5']]
    except KeyError:
        return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
    return rows[0][0]
#------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
    return rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):

    if table_exists(link_obj = link_obj, schema = 'gm', table = 'schema_revision'):
        cmd = u"""
            SELECT
                imported::text,
                version,
                filename
            FROM gm.schema_revision
            ORDER BY imported"""
    elif table_exists(link_obj = link_obj, schema = 'public', table = 'gm_schema_revision'):
        cmd = u"""
            SELECT
                imported::text,
                version,
                filename
            FROM public.gm_schema_revision
            ORDER BY imported"""
    else:
        return []

    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
    return rows
#------------------------------------------------------------------------
def get_current_user():
    rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
    return rows[0][0]
#------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
    """Get the foreign keys pointing to schema.table.column.

    Does not properly work with multi-column FKs.
    GNUmed doesn't use any, however.
    """
    cmd = u"""
select
    %(schema)s as referenced_schema,
    %(tbl)s as referenced_table,
    %(col)s as referenced_column,
    pgc.confkey as referenced_column_list,
    pgc.conrelid::regclass as referencing_table,
    pgc.conkey as referencing_column_list,
    (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
    pg_constraint pgc
where
    pgc.contype = 'f'
        and
    pgc.confrelid = (
        select oid from pg_class where relname = %(tbl)s and relnamespace = (
            select oid from pg_namespace where nspname = %(schema)s
        )
    ) and
    (
        select attnum
        from pg_attribute
        where
            attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
                select oid from pg_namespace where nspname = %(schema)s
            ))
                and
            attname = %(col)s
    ) = any(pgc.confkey)
"""

    args = {
        'schema': schema,
        'tbl': table,
        'col': column
    }

    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [
            {'cmd': cmd, 'args': args}
        ]
    )

    return rows

#------------------------------------------------------------------------
def get_foreign_key_names(src_schema=None, src_table=None, src_column=None, target_schema=None, target_table=None, target_column=None, link_obj=None):

    args = {
        'src_schema': src_schema,
        'src_tbl': src_table,
        'src_col': src_column,
        'target_schema': target_schema,
        'target_tbl': target_table,
        'target_col': target_column
    }

    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': SQL_foreign_key_name, 'args': args}],
        get_col_idx = False
    )

    return rows

#------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
    """Return child tables of <table>."""
    cmd = u"""
select
    pgn.nspname as namespace,
    pgc.relname as table
from
    pg_namespace pgn,
    pg_class pgc
where
    pgc.relnamespace = pgn.oid
        and
    pgc.oid in (
        select inhrelid from pg_inherits where inhparent = (
            select oid from pg_class where
                relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
                relname = %(table)s
        )
    )"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
    return rows
#------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
    cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
    args = {'schema': schema}
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
    return rows[0][0]
#------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
    """Returns True if the table exists, False otherwise."""
    cmd = u"""
select exists (
    select 1 from information_schema.tables
    where
        table_schema = %s and
        table_name = %s and
        table_type = 'BASE TABLE'
)"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
    return rows[0][0]
#------------------------------------------------------------------------
def get_col_indices(cursor = None):
    if cursor.description is None:
        _log.error('no result description available: unused cursor or last query did not select rows')
        return None
    col_indices = {}
    col_index = 0
    for col_desc in cursor.description:
        col_name = col_desc[0]
        # a query like "select 1,2;" will return two columns of the same name !
        # hence adjust to that, note, however, that dict-style access won't work
        # on results of such queries ...
        if col_indices.has_key(col_name):
            col_name = '%s_%s' % (col_name, col_index)
        col_indices[col_name] = col_index
        col_index += 1

    return col_indices
#------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
    col_names = []
    col_type = {}
    for row in rows:
        col_names.append(row[0])
        # map array types
        if row[1].startswith('_'):
            col_type[row[0]] = row[1][1:] + '[]'
        else:
            col_type[row[0]] = row[1]
    col_defs = []
    col_defs.append(col_names)
    col_defs.append(col_type)
    return col_defs
#------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
    """Return column attributes of table"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
    cols = []
    for row in rows:
        cols.append(row[0])
    return cols

#------------------------------------------------------------------------
# i18n functions
#------------------------------------------------------------------------
def export_translations_from_database(filename=None):
    tx_file = codecs.open(filename, 'wb', 'utf8')
    tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
    tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
    tx_file.write(u'-- - user database language is set to [%s]\n\n' % get_current_user_language())
    tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
    tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
    tx_file.write(u'set default_transaction_read_only to off;\n\n')
    tx_file.write(u"set client_encoding to 'utf-8';\n\n")
    tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

    cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
    for row in rows:
        line = u"select i18n.upd_tx(E'%s', E'%s', E'%s');\n" % (
            row['lang'].replace("'", "\\'"),
            row['orig'].replace("'", "\\'"),
            row['trans'].replace("'", "\\'")
        )
        tx_file.write(line)
    tx_file.write(u'\n')

    tx_file.write(u'\\set ON_ERROR_STOP 1\n')
    tx_file.close()

    return True
#------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
    cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
    args = {'lang': language, 'orig': original}
    run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
    return True

#------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
    cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
    args = {'lang': language, 'orig': original, 'trans': translation}
    run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
    return args

#------------------------------------------------------------------------
def get_translation_languages():
    rows, idx = run_ro_queries (
        queries = [{'cmd': u'select distinct lang from i18n.translations'}]
    )
    return [ r[0] for r in rows ]

#------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):

    args = {'lang': language}
    _log.debug('language [%s]', language)

    # use the default ordering unless the caller asked for one
    if order_by is None:
        order_by = u'ORDER BY lang, orig'
    else:
        order_by = u'ORDER BY %s' % order_by

    if language is None:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
    lang, orig, trans
FROM ((

    -- strings stored as translation keys whether translated or not
    SELECT
        NULL as lang,
        ik.orig,
        NULL AS trans
    FROM
        i18n.keys ik

) UNION ALL (

    -- already translated strings
    SELECT
        it.lang,
        it.orig,
        it.trans
    FROM
        i18n.translations it

)) as translatable_strings
%s""" % order_by
    else:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
    lang, orig, trans
FROM ((

    -- strings stored as translation keys whether translated or not
    SELECT
        %%(lang)s as lang,
        ik.orig,
        i18n._(ik.orig, %%(lang)s) AS trans
    FROM
        i18n.keys ik

) UNION ALL (

    -- already translated strings
    SELECT
        %%(lang)s as lang,
        it.orig,
        i18n._(it.orig, %%(lang)s) AS trans
    FROM
        i18n.translations it

)) AS translatable_strings
%s""" % order_by

    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

    if rows is None:
        _log.error('no translatable strings found')
    else:
        _log.debug('%s translatable strings found', len(rows))

    return rows

#------------------------------------------------------------------------
def get_current_user_language():
    cmd = u'select i18n.get_curr_lang()'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
    return rows[0][0]

#------------------------------------------------------------------------
def set_user_language(user=None, language=None):
    """Set the user language in the database.

    user = None: current db user
    language = None: unset
    """
    _log.info('setting database language for user [%s] to [%s]', user, language)

    args = {
        'usr': user,
        'lang': language
    }

    if language is None:
        if user is None:
            queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
        else:
            queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
        queries.append({'cmd': u'select True'})
    else:
        if user is None:
            queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
        else:
            queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

    rows, idx = run_rw_queries(queries = queries, return_data = True)

    if not rows[0][0]:
        _log.error('cannot set database language to [%s] for user [%s]', language, user)

    return rows[0][0]
#------------------------------------------------------------------------
def force_user_language(language=None):
    """Set the user language in the database.

    - regardless of whether there is any translation available
    - only for the current user
    """
    _log.info('forcing database language for current db user to [%s]', language)

    run_rw_queries(queries = [{
        'cmd': u'select i18n.force_curr_lang(%(lang)s)',
        'args': {'lang': language}
    }])

# =======================================================================
# query runners and helpers
# =======================================================================
def send_maintenance_notification():
    cmd = u'notify "db_maintenance_warning"'
    run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def send_maintenance_shutdown():
    cmd = u'notify "db_maintenance_disconnect"'
    run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
    cmd = u'SELECT %(candidate)s::interval'
    try:
        rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
        return True
    except:
        cmd = u'SELECT %(candidate)s::text::interval'
        try:
            rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
            return True
        except:
            return False
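# Example (illustrative): the server does the parsing, the second cast
# via ::text covers values psycopg2 does not adapt to interval directly:
#
#   is_pg_interval(u'2 weeks')      # True - PostgreSQL parses it as an interval
#   is_pg_interval(u'yesterday')    # False - neither cast succeeds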

#------------------------------------------------------------------------
def lock_row(link_obj=None, table=None, pk=None, exclusive=False):
    """Uses pg_advisory(_shared).

    - locks stack and need one unlock per lock
    - same connection:
        - all locks succeed
    - different connections:
        - shared + shared succeed
        - shared + exclusive fail
    """
    _log.debug('locking row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
    if exclusive:
        cmd = u"""SELECT pg_try_advisory_lock('%s'::regclass::oid::int, %s)""" % (table, pk)
    else:
        cmd = u"""SELECT pg_try_advisory_lock_shared('%s'::regclass::oid::int, %s)""" % (table, pk)
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
    if rows[0][0]:
        return True
    _log.warning('cannot lock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
    return False

#------------------------------------------------------------------------
def unlock_row(link_obj=None, table=None, pk=None, exclusive=False):
    """Uses pg_advisory_unlock(_shared).

    - each lock needs one unlock
    """
    _log.debug('trying to unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
    if exclusive:
        cmd = u"SELECT pg_advisory_unlock('%s'::regclass::oid::int, %s)" % (table, pk)
    else:
        cmd = u"SELECT pg_advisory_unlock_shared('%s'::regclass::oid::int, %s)" % (table, pk)
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
    if rows[0][0]:
        return True
    _log.warning('cannot unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
    return False

#------------------------------------------------------------------------
def row_is_locked(table=None, pk=None):
    """Looks at pg_locks

    - does not take into account locks other than 'advisory', however
    """
    cmd = u"""SELECT EXISTS (
        SELECT 1 FROM pg_locks WHERE
            classid = '%s'::regclass::oid::int
                AND
            objid = %s
                AND
            locktype = 'advisory'
    )""" % (table, pk)
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
    if rows[0][0]:
        _log.debug('row is locked: [%s] [%s]', table, pk)
        return True
    _log.debug('row is NOT locked: [%s] [%s]', table, pk)
    return False

#------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
    outfile = file(filename, 'wb')
    result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
    outfile.close()
    return result
#------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
    """Store data from a bytea field into a file.

    <data_query>
    - dict {'cmd': ..., 'args': ...}
    - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
    - 'args' must be a dict
    - must return one row with one field of type bytea
    <file_obj>
    - must be a file-like Python object
    <data_size>
    - integer of the total size of the expected data or None
    <data_size_query>
    - dict {'cmd': ..., 'args': ...}
    - cmd must be unicode
    - must return one row with one field with the octet_length() of the data field
    - used only when <data_size> is None
    """
    if data_size == 0:
        return True

    # If the client sets an encoding other than the default we
    # will receive encoding-parsed data which isn't the binary
    # content we want. Hence we need to get our own connection.
    # It must be a read-write one so that we don't affect the
    # encoding for other users of the shared read-only
    # connections.
    # Actually, encodings shouldn't be applied to binary data
    # (eg. bytea types) in the first place but that is only
    # reported to be fixed > v7.4.
    # Further tests reveal that at least on PG 8.0 this bug still
    # manifests itself.
    conn = get_raw_connection(readonly=True)

    if data_size is None:
        rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
        data_size = rows[0][0]
        if data_size in [None, 0]:
            conn.rollback()
            return True

    max_chunk_size = 1024 * 1024 * 20       # 20 MB, works for typical CR DICOMs
    if chunk_size == 0:
        chunk_size = min(data_size, max_chunk_size)

    _log.debug('expecting %s bytes of BYTEA data in chunks of %s bytes', data_size, chunk_size)

    # Windoze sucks: it can't transfer objects of arbitrary size,
    # anyways, we need to split the transfer,
    # however, only possible if postgres >= 7.2
    needed_chunks, remainder = divmod(data_size, chunk_size)
    _log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)

    # try setting "bytea_output"
    # - fails if not necessary
    # - succeeds if necessary
    try:
        run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
    except dbapi.ProgrammingError:
        _log.debug('failed to set bytea_output to "escape", not necessary')

    # retrieve chunks, skipped if data size < chunk size,
    # does this not carry the danger of cutting up multi-byte escape sequences ?
    # no, since bytea is binary,
    # yes, since in bytea there are *some* escaped values, still
    # no, since those are only escaped during *transfer*, not on-disk, hence
    # only complete escape sequences are put on the wire
    for chunk_id in range(needed_chunks):
        chunk_start = (chunk_id * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = chunk_size
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    # retrieve remainder
    if remainder > 0:
        chunk_start = (needed_chunks * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = remainder
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve remaining [%s] bytes' % remainder)
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    conn.rollback()
    return True
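# Example (illustrative table/column names): a <data_query> for the above
# must select exactly one bytea value and leave %(start)s/%(size)s to be
# filled in by bytea2file_object() itself:
#
#   data_query = {
#       'cmd': u'SELECT substring(data from %(start)s for %(size)s) FROM blobs.doc_obj WHERE pk = %(pk)s',
#       'args': {'pk': 12}
#   }
#   data_size_query = {
#       'cmd': u'SELECT octet_length(data) FROM blobs.doc_obj WHERE pk = %(pk)s',
#       'args': {'pk': 12}
#   }
#   bytea2file(data_query = data_query, filename = '/tmp/doc.obj', data_size_query = data_size_query)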
#------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
    """Store data from a file into a bytea field.

    The query must:
    - be in unicode
    - contain a format spec identifying the row (eg a primary key)
      matching <args> if it is an UPDATE
    - contain a format spec %(data)s::bytea
    """
    # read data from file
    infile = file(filename, "rb")
    data_as_byte_string = infile.read()
    infile.close()
    if args is None:
        args = {}
    args['data'] = buffer(data_as_byte_string)
    del(data_as_byte_string)

    # insert the data
    if conn is None:
        conn = get_raw_connection(readonly=False)
        close_conn = True
    else:
        close_conn = False

    run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

    if close_conn:
        conn.close()

    return
#------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
    """Escape input for use in a PostgreSQL regular expression.

    If a fragment comes from user input and is to be used
    as a regular expression we need to make sure it doesn't
    contain invalid regex patterns such as unbalanced ('s.

    <escape_all>
        True: try to escape *all* metacharacters
        False: only escape those which render the regex invalid
    """
    return expression.replace (
        '(', '\('
    ).replace (
        ')', '\)'
    ).replace (
        '[', '\['
    ).replace (
        '+', '\+'
    ).replace (
        '.', '\.'
    ).replace (
        '*', '\*'
    )
    #']', '\]',     # not needed
#------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
    """Run read-only queries.

    <queries> must be a list of dicts:
        [
            {'cmd': <string>, 'args': <dict> or <tuple>},
            {...},
            ...
        ]
    """
    if isinstance(link_obj, dbapi._psycopg.cursor):
        curs = link_obj
        curs_close = __noop
        tx_rollback = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        curs = link_obj.cursor()
        curs_close = curs.close
        tx_rollback = link_obj.rollback
    elif link_obj is None:
        conn = get_connection(readonly=True, verbose=verbose)
        curs = conn.cursor()
        curs_close = curs.close
        tx_rollback = conn.rollback
    else:
        raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

    if verbose:
        _log.debug('cursor: %s', curs)

    for query in queries:
        if type(query['cmd']) is not types.UnicodeType:
            print "run_ro_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
            if verbose:
                _log.debug('ran query: [%s]', curs.query)
                if curs.statusmessage != u'':
                    _log.debug('PG status message: %s', curs.statusmessage)
                _log.debug('cursor description: %s', str(curs.description))
        except dbapi.Error as pg_exc:
            _log.error('query failed: [%s]', curs.query)
            if curs.statusmessage != u'':
                _log.error('PG status message: %s', curs.statusmessage)
            _log.error('PG error code: %s', pg_exc.pgcode)
            if pg_exc.pgerror is not None:
                _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()       # need to rollback so ABORT state isn't preserved in pooled conns
            if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
                details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
                if curs.statusmessage != u'':
                    details = u'Status: %s\n%s' % (
                        curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
                        details
                    )
                if pg_exc.pgerror is None:
                    msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror)
                else:
                    msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
                raise gmExceptions.AccessDenied (
                    msg,
                    source = u'PostgreSQL',
                    code = pg_exc.pgcode,
                    details = details
                )
            raise
        except:
            _log.error('query failed: [%s]', curs.query)
            if curs.statusmessage != u'':
                _log.error('PG status message: %s', curs.statusmessage)
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()       # need to rollback so ABORT state isn't preserved in pooled conns
            raise

    data = None
    col_idx = None
    if return_data:
        data = curs.fetchall()
        if verbose:
            _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
            _log.debug('cursor description: %s', str(curs.description))
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    tx_rollback()       # rollback just so that we don't stay IDLE IN TRANSACTION forever
    return (data, col_idx)
#------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
    """Convenience function for running a transaction
    that is supposed to get committed.

    <link_obj>
        can be either:
        - a cursor
        - a connection

    <queries>
        is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
        to be executed as a single transaction, the last
        query may usefully return rows (such as a
        "SELECT currval('some_sequence')" statement)

    <end_tx>
        - controls whether the transaction is finalized (eg.
          committed/rolled back) or not, this allows the
          call to run_rw_queries() to be part of a framing
          transaction
        - if link_obj is a connection then <end_tx> will
          default to False unless it is explicitly set to
          True which is taken to mean "yes, you do have full
          control over the transaction" in which case the
          transaction is properly finalized
        - if link_obj is a cursor we CANNOT finalize the
          transaction because we would need the connection for that
        - if link_obj is None <end_tx> will, of course, always be True

    <return_data>
        - if true, the returned data will include the rows
          the last query selected
        - if false, it returns None instead

    <get_col_idx>
        - if true, the returned data will include a dictionary
          mapping field names to column positions
        - if false, the returned data returns None instead

    method result:
        - returns a tuple (data, idx)
        - <data>:
            * (None, None) if last query did not return rows
            * ("fetchall() result", <index>) if last query returned any rows
            * for <index> see <get_col_idx>
    """
    if isinstance(link_obj, dbapi._psycopg.cursor):
        conn_close = __noop
        conn_commit = __noop
        tx_rollback = __noop
        curs = link_obj
        curs_close = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        conn_close = __noop
        if end_tx:
            conn_commit = link_obj.commit
            tx_rollback = link_obj.rollback
        else:
            conn_commit = __noop
            tx_rollback = __noop
        curs = link_obj.cursor()
        curs_close = curs.close
    elif link_obj is None:
        conn = get_connection(readonly=False)
        conn_close = conn.close
        conn_commit = conn.commit
        tx_rollback = conn.rollback
        curs = conn.cursor()
        curs_close = curs.close
    else:
        raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

    for query in queries:
        if type(query['cmd']) is not types.UnicodeType:
            print "run_rw_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
        except dbapi.Error as pg_exc:
            _log.error('RW query failed: [%s]', curs.query)
            if curs.statusmessage != u'':
                _log.error('PG status message: %s', curs.statusmessage)
            _log.error('PG error code: %s', pg_exc.pgcode)
            if pg_exc.pgerror is not None:
                _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
            try:
                curs_close()
                tx_rollback()       # just for good measure
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
            if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
                details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
                if curs.statusmessage != u'':
                    details = u'Status: %s\n%s' % (
                        curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
                        details
                    )
                if pg_exc.pgerror is None:
                    msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror)
                else:
                    msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
                raise gmExceptions.AccessDenied (
                    msg,
                    source = u'PostgreSQL',
                    code = pg_exc.pgcode,
                    details = details
                )
            raise
        except:
            _log.exception('error running RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                tx_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise

    data = None
    col_idx = None
    if return_data:
        try:
            data = curs.fetchall()
        except:
            _log.exception('error fetching data from RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                tx_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    conn_commit()
    conn_close()

    return (data, col_idx)
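# Example (illustrative table names, assuming suitable write access):
# insert a row and fetch a value back in the same, self-contained
# transaction:
#
#   rows, idx = run_rw_queries (
#       queries = [
#           {'cmd': u'INSERT INTO i18n.keys (orig) VALUES (%(orig)s)', 'args': {'orig': u'Episode'}},
#           {'cmd': u'SELECT lastval()'}
#       ],
#       return_data = True
#   )
#
# With link_obj = None the transaction is committed (or rolled back on
# error) by run_rw_queries() itself.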
#------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
    """Generates SQL for an INSERT query.

    values: dict of values keyed by field to insert them into
    """
    if schema is None:
        schema = u'public'

    fields = values.keys()      # that way val_snippets and fields really should end up in the same order
    val_snippets = []
    for field in fields:
        val_snippets.append(u'%%(%s)s' % field)

    if returning is None:
        returning = u''
        return_data = False
    else:
        returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
        return_data = True

    cmd = u"""\nINSERT INTO %s.%s (
        %s
    ) VALUES (
        %s
    )%s""" % (
        schema,
        table,
        u',\n\t\t'.join(fields),
        u',\n\t\t'.join(val_snippets),
        returning
    )

    _log.debug(u'running SQL: >>>%s<<<', cmd)

    return run_rw_queries (
        link_obj = link_obj,
        queries = [{'cmd': cmd, 'args': values}],
        end_tx = end_tx,
        return_data = return_data,
        get_col_idx = get_col_idx,
        verbose = verbose
    )
# =======================================================================
# connection handling API
# -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
    """GNUmed database connection pool.

    Extends psycopg2's PersistentConnectionPool with
    a custom _connect() function. Supports one connection
    per thread - which also ties it to one particular DSN.
    """
    #--------------------------------------------------
    def _connect(self, key=None):

        conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

        conn.original_close = conn.close
        conn.close = _raise_exception_on_ro_conn_close

        if key is not None:
            self._used[key] = conn
            self._rused[id(conn)] = key
        else:
            self._pool.append(conn)

        return conn
    #--------------------------------------------------
    def shutdown(self):
        for conn_key in self._used.keys():
            _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
            self._used[conn_key].original_close()
# -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
    """Get a raw, unadorned connection.

    - this will not set any parameters such as encoding, timezone, datestyle
    - the only requirement is a valid DSN
    - hence it can be used for "service" connections
      for verifying encodings etc
    """
    # FIXME: support verbose
    if dsn is None:
        dsn = get_default_dsn()

    if u'host=salaam.homeunix' in dsn:
        raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

    try:
        conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
    except dbapi.OperationalError, e:

        t, v, tb = sys.exc_info()
        try:
            msg = e.args[0]
        except (AttributeError, IndexError, TypeError):
            raise

        msg = unicode(msg, gmI18N.get_encoding(), 'replace')

        if msg.find('fe_sendauth') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        if regex.search('user ".*" does not exist', msg) is not None:
            raise cAuthenticationError, (dsn, msg), tb

        if msg.find('uthenti') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        raise

    _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

    # do first-time stuff
    global postgresql_version
    if postgresql_version is None:
        curs = conn.cursor()
        curs.execute("""
            SELECT
                substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
            FROM
                pg_settings
            WHERE
                name = 'server_version'
        """)
        postgresql_version = curs.fetchone()['version']
        _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
        try:
            curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
            _log.info('database size: %s', curs.fetchone()[0])
        except:
            pass
        if verbose:
            __log_PG_settings(curs=curs)
        curs.close()
        conn.commit()

    if _default_client_timezone is None:
        __detect_client_timezone(conn = conn)

    curs = conn.cursor()

    # set access mode
    if readonly:
        _log.debug('access mode [READ ONLY]')
        cmd = 'set session characteristics as transaction READ ONLY'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to on'
        curs.execute(cmd)
    else:
        _log.debug('access mode [READ WRITE]')
        cmd = 'set session characteristics as transaction READ WRITE'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to off'
        curs.execute(cmd)

    curs.close()
    conn.commit()

    conn.is_decorated = False

    return conn
# =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
    """Get a new connection.

    This assumes the locale system has been initialized
    unless an encoding is specified.
    """
    # FIXME: support pooled on RW, too
    # FIXME: for now, support the default DSN only
    if pooled and readonly and (dsn is None):
        global __ro_conn_pool
        if __ro_conn_pool is None:
            __ro_conn_pool = cConnectionPool (
                minconn = 1,
                maxconn = 2,
                dsn = dsn,
                verbose = verbose
            )
        conn = __ro_conn_pool.getconn()
    else:
        conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

    if conn.is_decorated:
        return conn

    if encoding is None:
        encoding = _default_client_encoding
    if encoding is None:
        encoding = gmI18N.get_encoding()
        _log.warning('client encoding not specified')
        _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
        _log.warning('for this to work properly the application MUST have called locale.setlocale() before')

    # set connection properties
    # - client encoding
    try:
        conn.set_client_encoding(encoding)
    except dbapi.OperationalError:
        t, v, tb = sys.exc_info()
        if str(v).find("can't set encoding to") != -1:
            raise cEncodingError, (encoding, v), tb
        raise

    # - transaction isolation level
    if readonly:
        iso_level = u'read committed'
    else:
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
        iso_level = u'serializable'

    _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

    curs = conn.cursor()

    # - client time zone
    curs.execute(_sql_set_timezone, [_default_client_timezone])

    conn.commit()

    # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
    # FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
    #
    # - bytea data format
    # PG 9.0 switched to - by default - using "hex" rather than "escape",
    # however, psycopg2 linked with a pre-9.0 libpq does assume "escape"
    # as the transmission mode for bytea output,
    # so try to set this setting back to "escape",
    # if that's not possible the reason will be that PG < 9.0 does not support
    # that setting - which also means we don't need it and can ignore the
    # failure
    cmd = "set bytea_output to 'escape'"
    try:
        curs.execute(cmd)
    except dbapi.ProgrammingError:
        _log.error('cannot set bytea_output format')

    curs.close()
    conn.commit()

    conn.is_decorated = True

    return conn
1624 #-----------------------------------------------------------------------
1625 -def shutdown():
1626          if __ro_conn_pool is None:
1627                  return
1628          __ro_conn_pool.shutdown()
1629  # ======================================================================
1630  # internal helpers
1631  #-----------------------------------------------------------------------
1632 -def __noop():
1633          pass
1634 #-----------------------------------------------------------------------
1635 -def _raise_exception_on_ro_conn_close():
1636          raise TypeError(u'close() called on read-only connection')
1637 #-----------------------------------------------------------------------
1638 -def log_database_access(action=None):
1639          run_insert (
1640                  schema = u'gm',
1641                  table = u'access_log',
1642                  values = {u'user_action': action},
1643                  end_tx = True
1644          )
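
A one-line usage sketch (the action string is illustrative only):

        log_database_access(action = u'user logged in')
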
1645 #-----------------------------------------------------------------------
1646 -def sanity_check_time_skew(tolerance=60):
1647 """Check server time and local time to be within 1648 the given tolerance of each other. 1649 1650 tolerance: seconds 1651 """ 1652 _log.debug('maximum skew tolerance (seconds): %s', tolerance) 1653 1654 cmd = u"SELECT now() at time zone 'UTC'" 1655 conn = get_raw_connection(readonly=True) 1656 curs = conn.cursor() 1657 1658 start = time.time() 1659 rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}]) 1660 end = time.time() 1661 client_now_as_utc = pydt.datetime.utcnow() 1662 1663 curs.close() 1664 conn.commit() 1665 1666 server_now_as_utc = rows[0][0] 1667 query_duration = end - start 1668 _log.info('server "now" (UTC): %s', server_now_as_utc) 1669 _log.info('client "now" (UTC): %s', client_now_as_utc) 1670 _log.debug('wire roundtrip (seconds): %s', query_duration) 1671 1672 if query_duration > tolerance: 1673 _log.error('useless to check client/server time skew, wire roundtrip > tolerance') 1674 return False 1675 1676 if server_now_as_utc > client_now_as_utc: 1677 real_skew = server_now_as_utc - client_now_as_utc 1678 else: 1679 real_skew = client_now_as_utc - server_now_as_utc 1680 1681 _log.debug('client/server time skew: %s', real_skew) 1682 1683 if real_skew > pydt.timedelta(seconds = tolerance): 1684 _log.error('client/server time skew > tolerance') 1685 return False 1686 1687 return True
1688 #-----------------------------------------------------------------------
1689 -def sanity_check_database_settings():
1690 """Checks database settings. 1691 1692 returns (status, message) 1693 status: 1694 0: no problem 1695 1: non-fatal problem 1696 2: fatal problem 1697 """ 1698 _log.debug('checking database settings') 1699 1700 conn = get_connection() 1701 1702 # - version string 1703 global postgresql_version_string 1704 if postgresql_version_string is None: 1705 curs = conn.cursor() 1706 curs.execute('SELECT version()') 1707 postgresql_version_string = curs.fetchone()['version'] 1708 curs.close() 1709 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string) 1710 1711 options2check = { 1712 # setting: [expected value, risk, fatal?] 1713 u'allow_system_table_mods': [u'off', u'system breakage', False], 1714 u'check_function_bodies': [u'on', u'suboptimal error detection', False], 1715 u'datestyle': [u'ISO', u'faulty timestamp parsing', True], 1716 u'default_transaction_isolation': [u'read committed', u'faulty database reads', True], 1717 u'default_transaction_read_only': [u'on', u'accidental database writes', False], 1718 u'fsync': [u'on', u'data loss/corruption', True], 1719 u'full_page_writes': [u'on', u'data loss/corruption', False], 1720 u'lc_messages': [u'C', u'suboptimal error detection', False], 1721 u'password_encryption': [u'on', u'breach of confidentiality', False], 1722 u'regex_flavor': [u'advanced', u'query breakage', False], # 9.0 doesn't support this anymore, default now advanced anyway 1723 u'synchronous_commit': [u'on', u'data loss/corruption', False], 1724 u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True] 1725 } 1726 1727 from Gnumed.pycommon import gmCfg2 1728 _cfg = gmCfg2.gmCfgData() 1729 if _cfg.get(option = u'hipaa'): 1730 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True] 1731 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True] 1732 else: 1733 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None] 1734 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None] 1735 1736 cmd = u"SELECT name, setting from pg_settings where name in %(settings)s" 1737 rows, idx = run_ro_queries ( 1738 link_obj = conn, 1739 queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}], 1740 get_col_idx = False 1741 ) 1742 1743 found_error = False 1744 found_problem = False 1745 msg = [] 1746 for row in rows: 1747 option = row['name'] 1748 value_found = row['setting'] 1749 value_expected = options2check[option][0] 1750 risk = options2check[option][1] 1751 fatal_setting = options2check[option][2] 1752 if value_found != value_expected: 1753 if fatal_setting is True: 1754 found_error = True 1755 elif fatal_setting is False: 1756 found_problem = True 1757 elif fatal_setting is None: 1758 pass 1759 else: 1760 _log.error(options2check[option]) 1761 raise ValueError(u'invalid database configuration sanity check') 1762 msg.append(_(' option [%s]: %s') % (option, value_found)) 1763 msg.append(_(' risk: %s') % risk) 1764 _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk)) 1765 1766 if found_error: 1767 return 2, u'\n'.join(msg) 1768 1769 if found_problem: 1770 return 1, u'\n'.join(msg) 1771 1772 return 0, u''
1773 #------------------------------------------------------------------------
1774 -def __log_PG_settings(curs=None):
1775          # don't use any of the run_*()s since that might
1776          # create a loop if we fail here
1777          # FIXME: use pg_settings
1778          try:
1779                  curs.execute(u'show all')
1780          except:
1781                  _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
1782                  return False
1783          settings = curs.fetchall()
1784          if settings is None:
1785                  _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
1786                  return False
1787          for setting in settings:
1788                  _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
1789          return True
1790 # =======================================================================
1791 -def extract_msg_from_pg_exception(exc=None):
1792  
1793          try:
1794                  msg = exc.args[0]
1795          except (AttributeError, IndexError, TypeError):
1796                  return u'cannot extract message from exception'
1797  
1798          return unicode(msg, gmI18N.get_encoding(), 'replace')
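
A sketch of typical use when reporting a failed query (the query is contrived to provoke a server-side error):

        conn = get_connection()
        curs = conn.cursor()
        try:
                curs.execute(u'SELECT 1/0')             # division by zero on the server
        except dbapi.Error, e:
                print extract_msg_from_pg_exception(exc = e)
                conn.rollback()
        curs.close()
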
1799 # =======================================================================
1800 -class cAuthenticationError(dbapi.OperationalError):
1801
1802 - def __init__(self, dsn=None, prev_val=None):
1803                  self.dsn = dsn
1804                  self.prev_val = prev_val
1805
1806 - def __str__(self):
1807                  _log.warning('%s.__str__() called', self.__class__.__name__)
1808                  tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1809                  _log.error(tmp)
1810                  return tmp.encode(gmI18N.get_encoding(), 'replace')
1811
1812 - def __unicode__(self):
1813                  return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1814  
1815  # =======================================================================
1816  # custom psycopg2 extensions
1817  # =======================================================================
1818 -class cEncodingError(dbapi.OperationalError):
1819
1820 - def __init__(self, encoding=None, prev_val=None):
1821                  self.encoding = encoding
1822                  self.prev_val = prev_val
1823
1824 - def __str__(self):
1825                  _log.warning('%s.__str__() called', self.__class__.__name__)
1826                  return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))
1827
1828 - def __unicode__(self):
1829                  return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1830  
1831  # -----------------------------------------------------------------------
1832  # Python -> PostgreSQL
1833  # -----------------------------------------------------------------------
1834  # test when Squeeze (and thus psycopg2 2.2) becomes stable
1835 -class cAdapterPyDateTime(object):
1836
1837 - def __init__(self, dt):
1838                  if dt.tzinfo is None:
1839                          raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % dt.isoformat())
1840                  self.__dt = dt
1841
1842 - def getquoted(self):
1843                  return _timestamp_template % self.__dt.isoformat()
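
Once the register_adapter() call in the main section below has run (that is, after this module has been imported), psycopg2 quotes timezone-aware datetimes through this class. A small sketch (FixedOffsetTimezone is psycopg2's stock tzinfo; the exact literal printed depends on _timestamp_template):

        import psycopg2.tz
        dt = pydt.datetime(2014, 1, 31, 12, 0, 0, tzinfo = psycopg2.tz.FixedOffsetTimezone(offset = 60))
        print psycopg2.extensions.adapt(dt).getquoted()
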
1844  
1845  ## remove for 0.9
1846  ## ----------------------------------------------------------------------
1847  ##class cAdapterMxDateTime(object):
1848  ##
1849  ##      def __init__(self, dt):
1850  ##              if dt.tz == '???':
1851  ##                      _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
1852  ##              self.__dt = dt
1853  ##
1854  ##      def getquoted(self):
1855  ##              # under some locale settings the mx.DateTime ISO formatter
1856  ##              # will insert "," into the ISO string;
1857  ##              # while this is allowed per the ISO 8601 spec, PostgreSQL
1858  ##              # cannot currently handle that,
1859  ##              # so map those "," to "." to make things work:
1860  ##              return mxDT.ISO.str(self.__dt).replace(',', '.')
1861  ##
1862  ## ----------------------------------------------------------------------
1863  ## PostgreSQL -> Python
1864  ## ----------------------------------------------------------------------
1865  
1866  #=======================================================================
1867  # main
1868  #-----------------------------------------------------------------------
1869  
1870  # make sure psycopg2 knows how to handle unicode ...
1871  # intended to become standard
1872  # test when Squeeze (and thus psycopg2 2.2) becomes stable
1873  psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
1874  psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
1875  
1876  # tell psycopg2 how to adapt datetime types with timestamps when locales are in use
1877  # check in 0.9:
1878  psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)
1879  
1880  # do NOT adapt *lists* to "... IN (*) ..." syntax because we want
1881  # them adapted to "... ARRAY[] ..." so we can support PG arrays
1882  
1883  #=======================================================================
1884  if __name__ == "__main__":
1885  
1886          if len(sys.argv) < 2:
1887                  sys.exit()
1888  
1889          if sys.argv[1] != 'test':
1890                  sys.exit()
1891  
1892          logging.basicConfig(level=logging.DEBUG)
1893          #---------------------------------------------------------------------
1894 - def test_file2bytea():
1895                  run_rw_queries(queries = [
1896                          {'cmd': u'create table test_bytea (data bytea)'}
1897                  ])
1898  
1899                  cmd = u'insert into test_bytea values (%(data)s::bytea)'
1900                  try:
1901                          file2bytea(query = cmd, filename = sys.argv[2])
1902                  except:
1903                          _log.exception('error')
1904  
1905                  run_rw_queries(queries = [
1906                          {'cmd': u'drop table test_bytea'}
1907                  ])
1908 #--------------------------------------------------------------------
1909 - def test_get_connection():
1910 print "testing get_connection()" 1911 1912 dsn = 'foo' 1913 try: 1914 conn = get_connection(dsn=dsn) 1915 except dbapi.OperationalError, e: 1916 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1917 t, v = sys.exc_info()[:2] 1918 print ' ', t 1919 print ' ', v 1920 1921 dsn = 'dbname=gnumed_v9' 1922 try: 1923 conn = get_connection(dsn=dsn) 1924 except cAuthenticationError: 1925 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1926 t, v = sys.exc_info()[:2] 1927 print ' ', t 1928 print ' ', v 1929 1930 dsn = 'dbname=gnumed_v9 user=abc' 1931 try: 1932 conn = get_connection(dsn=dsn) 1933 except cAuthenticationError: 1934 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1935 t, v = sys.exc_info()[:2] 1936 print ' ', t 1937 print ' ', v 1938 1939 dsn = 'dbname=gnumed_v9 user=any-doc' 1940 try: 1941 conn = get_connection(dsn=dsn) 1942 except cAuthenticationError: 1943 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1944 t, v = sys.exc_info()[:2] 1945 print ' ', t 1946 print ' ', v 1947 1948 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1949 try: 1950 conn = get_connection(dsn=dsn) 1951 except cAuthenticationError: 1952 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1953 t, v = sys.exc_info()[:2] 1954 print ' ', t 1955 print ' ', v 1956 1957 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1958 conn = get_connection(dsn=dsn, readonly=True) 1959 1960 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1961 conn = get_connection(dsn=dsn, readonly=False) 1962 1963 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1964 encoding = 'foo' 1965 try: 1966 conn = get_connection(dsn=dsn, encoding=encoding) 1967 except cEncodingError: 1968 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1969 t, v = sys.exc_info()[:2] 1970 print ' ', t 1971 print ' ', v
1972 #--------------------------------------------------------------------
1973 - def test_exceptions():
1974 print "testing exceptions" 1975 1976 try: 1977 raise cAuthenticationError('no dsn', 'no previous exception') 1978 except cAuthenticationError: 1979 t, v, tb = sys.exc_info() 1980 print t 1981 print v 1982 print tb 1983 1984 try: 1985 raise cEncodingError('no dsn', 'no previous exception') 1986 except cEncodingError: 1987 t, v, tb = sys.exc_info() 1988 print t 1989 print v 1990 print tb
1991 #--------------------------------------------------------------------
1992 - def test_ro_queries():
1993 print "testing run_ro_queries()" 1994 1995 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1996 conn = get_connection(dsn, readonly=True) 1997 1998 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1999 print data 2000 print idx 2001 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 2002 print data 2003 print idx 2004 2005 curs = conn.cursor() 2006 2007 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 2008 print data 2009 print idx 2010 2011 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 2012 print data 2013 print idx 2014 2015 try: 2016 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 2017 print data 2018 print idx 2019 except psycopg2.ProgrammingError: 2020 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 2021 t, v = sys.exc_info()[:2] 2022 print ' ', t 2023 print ' ', v 2024 2025 curs.close()
2026 #--------------------------------------------------------------------
2027 - def test_request_dsn():
2028                  conn = get_connection()
2029                  print conn
2030                  conn.close()
2031 #--------------------------------------------------------------------
2032 - def test_set_encoding():
2033 print "testing set_default_client_encoding()" 2034 2035 enc = 'foo' 2036 try: 2037 set_default_client_encoding(enc) 2038 print "SUCCESS: encoding [%s] worked" % enc 2039 except ValueError: 2040 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2041 t, v = sys.exc_info()[:2] 2042 print ' ', t 2043 print ' ', v 2044 2045 enc = '' 2046 try: 2047 set_default_client_encoding(enc) 2048 print "SUCCESS: encoding [%s] worked" % enc 2049 except ValueError: 2050 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2051 t, v = sys.exc_info()[:2] 2052 print ' ', t 2053 print ' ', v 2054 2055 enc = 'latin1' 2056 try: 2057 set_default_client_encoding(enc) 2058 print "SUCCESS: encoding [%s] worked" % enc 2059 except ValueError: 2060 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2061 t, v = sys.exc_info()[:2] 2062 print ' ', t 2063 print ' ', v 2064 2065 enc = 'utf8' 2066 try: 2067 set_default_client_encoding(enc) 2068 print "SUCCESS: encoding [%s] worked" % enc 2069 except ValueError: 2070 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2071 t, v = sys.exc_info()[:2] 2072 print ' ', t 2073 print ' ', v 2074 2075 enc = 'unicode' 2076 try: 2077 set_default_client_encoding(enc) 2078 print "SUCCESS: encoding [%s] worked" % enc 2079 except ValueError: 2080 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2081 t, v = sys.exc_info()[:2] 2082 print ' ', t 2083 print ' ', v 2084 2085 enc = 'UNICODE' 2086 try: 2087 set_default_client_encoding(enc) 2088 print "SUCCESS: encoding [%s] worked" % enc 2089 except ValueError: 2090 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2091 t, v = sys.exc_info()[:2] 2092 print ' ', t 2093 print ' ', v
2094 #--------------------------------------------------------------------
2095 - def test_connection_pool():
2096                  dsn = get_default_dsn()
2097                  pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False)
2098                  print pool
2099                  print pool.getconn()
2100                  print pool.getconn()
2101                  print pool.getconn()
2102                  print type(pool.getconn())
2103 #--------------------------------------------------------------------
2104 - def test_list_args():
2105                  dsn = get_default_dsn()
2106                  conn = get_connection(dsn, readonly=True)
2107                  curs = conn.cursor()
2108                  curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
2109 #--------------------------------------------------------------------
2110 - def test_sanitize_pg_regex():
2111                  tests = [
2112                          ['(', '\\(']
2113                          , ['[', '\\[']
2114                          , [')', '\\)']
2115                  ]
2116                  for test in tests:
2117                          result = sanitize_pg_regex(test[0])
2118                          if result != test[1]:
2119                                  print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2120 #--------------------------------------------------------------------
2121 - def test_is_pg_interval():
2122                  status = True
2123                  tests = [
2124                          [None, True],                   # None == NULL == succeeds !
2125                          [1, True],
2126                          ['1', True],
2127                          ['abc', False]
2128                  ]
2129  
2130                  if not is_pg_interval():
2131                          print 'ERROR: is_pg_interval() returned "False", expected "True"'
2132                          status = False
2133  
2134                  for test in tests:
2135                          result = is_pg_interval(test[0])
2136                          if result != test[1]:
2137                                  print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2138                                  status = False
2139  
2140                  return status
2141 #--------------------------------------------------------------------
2142 - def test_sanity_check_time_skew():
2143                  sanity_check_time_skew()
2144 #--------------------------------------------------------------------
2145 - def test_get_foreign_key_details():
2146                  for row in get_foreign_keys2column (
2147                          schema = u'dem',
2148                          table = u'identity',
2149                          column = u'pk'
2150                  ):
2151                          print '%s.%s references %s.%s.%s' % (
2152                                  row['referencing_table'],
2153                                  row['referencing_column'],
2154                                  row['referenced_schema'],
2155                                  row['referenced_table'],
2156                                  row['referenced_column']
2157                          )
2158 #--------------------------------------------------------------------
2159 - def test_set_user_language():
2160                  # (user, language, expected result)
2161                  tests = [
2162                          # current user
2163                          [None, 'de_DE', True],
2164                          [None, 'lang_w/o_tx', False],
2165                          [None, None, True],
2166                          # valid user
2167                          ['any-doc', 'de_DE', True],
2168                          ['any-doc', 'lang_w/o_tx', False],
2169                          ['any-doc', None, True],
2170                          # invalid user
2171                          ['invalid user', 'de_DE', None],
2172                          ['invalid user', 'lang_w/o_tx', False],        # language checking happens before user checking
2173                          ['invalid user', None, True]
2174                  ]
2175                  for test in tests:
2176                          try:
2177                                  result = set_user_language(user = test[0], language = test[1])
2178                                  if result != test[2]:
2179                                          print "test:", test
2180                                          print "result:", result, "expected:", test[2]
2181                          except psycopg2.IntegrityError, e:
2182                                  if test[2] is None:
2183                                          continue
2184                                  print "test:", test
2185                                  print "unexpected exception:"
2186                                  print "result:", e
2187 #--------------------------------------------------------------------
2188 - def test_get_schema_revision_history():
2189                  for line in get_schema_revision_history():
2190                          print u' - '.join(line)
2191 #--------------------------------------------------------------------
2192 - def test_run_query():
2193                  gmDateTime.init()
2194                  args = {'dt': gmDateTime.pydt_max_here()}
2195                  cmd = u"SELECT %(dt)s"
2196  
2197                  #cmd = u"SELECT 'infinity'::timestamp with time zone"
2198  
2199                  cmd = u"""
2200                  SELECT to_timestamp (foofoo, 'YYMMDD.HH24MI') FROM (
2201                          SELECT REGEXP_REPLACE (
2202                                  't1.130729.0902.tif',                   -- string
2203                                  E'(.1)\.([0-9\.]+)(\.tif)',             -- pattern
2204                                  E'\\\\2'                                -- replacement
2205                          ) AS foofoo
2206                  ) AS foo"""
2207                  rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
2208                  print rows
2209                  print rows[0]
2210                  print rows[0][0]
2211 #--------------------------------------------------------------------
2212 - def test_schema_exists():
2213                  print schema_exists()
2214 #--------------------------------------------------------------------
2215 - def test_row_locks():
2216                  row_is_locked(table = 'dem.identity', pk = 12)
2217  
2218                  print "1st connection:"
2219                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2220                  print " 1st shared lock succeeded:", lock_row(table = 'dem.identity', pk = 12, exclusive = False)
2221                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2222  
2223                  print " 2nd shared lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = False)
2224                  print " `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False)
2225                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2226                  print " exclusive lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = True)
2227                  print " `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = True)
2228                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2229  
2230                  print "2nd connection:"
2231                  conn = get_raw_connection(readonly=True)
2232                  print " shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False)
2233                  print " `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False)
2234                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2235                  print " exclusive lock succeeded ?", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True), "(should fail)"
2236                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2237  
2238                  print "1st connection:"
2239                  print " unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False)
2240                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2241  
2242                  print "2nd connection:"
2243                  print " exclusive lock should succeed", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True)
2244                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2245                  print " shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False)
2246                  print " `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False)
2247                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2248                  print " unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False)
2249                  print " locked:", row_is_locked(table = 'dem.identity', pk = 12)
2250  
2251                  conn.close()
2252 #--------------------------------------------------------------------
2253 - def test_get_foreign_key_names():
2254                  print get_foreign_key_names (
2255                          src_schema = 'dem',
2256                          src_table = 'names',
2257                          src_column = 'id_identity',
2258                          target_schema = 'dem',
2259                          target_table = 'identity',
2260                          target_column = 'pk'
2261                  )
2262  
2263          #---------------------------------------------------------------------
2264          # run tests
2265          #test_file2bytea()
2266          #test_get_connection()
2267          #test_exceptions()
2268          #test_ro_queries()
2269          #test_request_dsn()
2270          #test_set_encoding()
2271          #test_connection_pool()
2272          #test_list_args()
2273          #test_sanitize_pg_regex()
2274          #test_is_pg_interval()
2275          #test_sanity_check_time_skew()
2276          #test_get_foreign_key_details()
2277          #test_set_user_language()
2278          #test_get_schema_revision_history()
2279          #test_run_query()
2280          #test_schema_exists()
2281          #test_get_foreign_key_names()
2282          test_row_locks()
2283  
2284  # ======================================================================
2285  