
Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
# =======================================================================
__author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL v2 or later (details at http://www.gnu.org)'

# stdlib
import time
import sys
import os
import codecs
import types
import logging
import datetime as pydt
import re as regex


# GNUmed
if __name__ == '__main__':
	sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character

_log = logging.getLogger('gm.db')


# 3rd party
try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
	raise


_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes

# =======================================================================
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None			# default time zone for connections
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"		# MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

postgresql_version_string = None
postgresql_version = None			# accuracy: major.minor

__ro_conn_pool = None

auto_request_login_params = True
# =======================================================================
# global data
# =======================================================================

known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',	# ... old (v1) style hashes
	6: '90e2026ac2efd236da9c8608b8685b2d',	# new (v2) style hashes ...
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161',
	17: '161428ee97a00e3bf56168c3a15b7b50',
	18: 'a0f9efcabdecfb4ddb6d8c0b69c02092'
}

map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16,
	'161428ee97a00e3bf56168c3a15b7b50': 17,
	'a0f9efcabdecfb4ddb6d8c0b69c02092': 18
}

map_client_branch2required_db_version = {
	u'GIT tree': 0,
	u'0.3': 9,
	u'0.4': 10,
	u'0.5': 11,
	u'0.6': 12,
	u'0.7': 13,
	u'0.8': 14,
	u'0.9': 15,
	u'1.0': 16,		# intentional duplicate with 1.1
	u'1.1': 16,
	u'1.2': 17,
	u'1.3': 18
}
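# Editor's sketch (not part of the original module): the two hash tables
# above are intended to be mutual inverses for all released versions, which
# a quick check can confirm (version 0 is a testing placeholder with no
# reverse entry):
#
#	for version, md5 in known_schema_hashes.items():
#		if version != 0:
#			assert map_schema_hash2version[md5] == version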
# get columns and data types for a given table
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# =======================================================================
# module globals API
# =======================================================================
def set_default_client_encoding(encoding = None):
	# check whether psycopg2 can handle this encoding
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# check whether Python can handle this encoding
	py_enc = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
#---------------------------------------------------
def set_default_client_timezone(timezone = None):

	# FIXME: use __validate
	global _default_client_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone

	global _sql_set_timezone
	_sql_set_timezone = u'set timezone to %s'

	return True
#---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):

	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return is_valid
#---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations"""

	cmd = u"""
		select distinct on (abbrev) name
		from pg_timezone_names
		where
			abbrev = %(tz)s and
			name ~ '^[^/]+/[^/]+$' and
			name !~ '^Etc/'
	"""
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
#---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection."""

	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	try:
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
# =======================================================================
# login API
# =======================================================================
def __request_login_params_tui():
	"""Text mode request of database login parameters"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = 'gnumed_v19')
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		login.password = getpass.getpass(tmp)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
#---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog
	# if wx was not initialized/no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
#---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection."""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't, wxGTK will crash hard at
	# C-level with "can't open Display")
	if os.environ.has_key('DISPLAY'):
		# try wxPython GUI
		try: return __request_login_params_gui_wx()
		except: pass

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()

# =======================================================================
# DSN API
# -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	dsn_parts = []

	if (database is not None) and (database.strip() != ''):
		dsn_parts.append('dbname=%s' % database)

	if (host is not None) and (host.strip() != ''):
		dsn_parts.append('host=%s' % host)

	if (port is not None) and (str(port).strip() != ''):
		dsn_parts.append('port=%s' % port)

	if (user is not None) and (user.strip() != ''):
		dsn_parts.append('user=%s' % user)

	if (password is not None) and (password.strip() != ''):
		dsn_parts.append('password=%s' % password)

	dsn_parts.append('sslmode=prefer')

	return ' '.join(dsn_parts)
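# Editor's sketch (not part of the original module): a typical DSN produced
# by make_psycopg2_dsn(); all values shown are hypothetical:
#
#	make_psycopg2_dsn(database = 'gnumed_v19', host = 'localhost', port = 5432, user = 'any-doc', password = 'secret')
#	# -> 'dbname=gnumed_v19 host=localhost port=5432 user=any-doc password=secret sslmode=prefer'
#
# None or empty parts are skipped, so a local socket connection simply omits
# host= and relies on the libpq default.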
# ------------------------------------------------------
def get_default_login():
	# make sure we do have a login
	get_default_dsn()
	return _default_login
# ------------------------------------------------------
def get_default_dsn():
	global _default_dsn
	if _default_dsn is not None:
		return _default_dsn

	login = request_login_params()
	set_default_login(login=login)

	return _default_dsn
# ------------------------------------------------------
def set_default_login(login=None):
	if login is None:
		return False

	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info ('setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True
# =======================================================================
# metadata API
# =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	expected_hash = known_schema_hashes[version]
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj = link_obj).split():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj = link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
#------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	try:
		return map_schema_hash2version[rows[0]['md5']]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	return rows[0]['md5']
#------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):

	if table_exists(link_obj = link_obj, schema = 'gm', table = 'schema_revision'):
		cmd = u"""
			SELECT
				imported::text,
				version,
				filename
			FROM gm.schema_revision
			ORDER BY imported"""
	elif table_exists(link_obj = link_obj, schema = 'public', table = 'gm_schema_revision'):
		cmd = u"""
			SELECT
				imported::text,
				version,
				filename
			FROM public.gm_schema_revision
			ORDER BY imported"""
	else:
		return []

	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows
#------------------------------------------------------------------------
def get_current_user():
	rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""

	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
#------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows
#------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	args = {'schema': schema}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]
#------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns false, true."""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
	return rows[0][0]
#------------------------------------------------------------------------
def get_col_indices(cursor = None):
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	col_index = 0
	for col_desc in cursor.description:
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_indices.has_key(col_name):
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
		col_index += 1

	return col_indices
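# Editor's sketch (not part of the original module): what get_col_indices()
# returns for a result with duplicate column names, eg "select 1, 2;" where
# PostgreSQL names both columns ?column?:
#
#	{'?column?': 0, '?column?_1': 1}
#
# which is why the in-code comment warns that dict-style row access cannot
# work for such queries.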
#------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# map array types
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	col_defs = []
	col_defs.append(col_names)
	col_defs.append(col_type)
	return col_defs
#------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	cols = []
	for row in rows:
		cols.append(row[0])
	return cols

#------------------------------------------------------------------------
# i18n functions
#------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user database language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off;\n\n')
	tx_file.write(u"set client_encoding to 'utf-8';\n\n")
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		line = u"select i18n.upd_tx(E'%s', E'%s', E'%s');\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	tx_file.write(u'\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
#------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
	args = {'lang': language, 'orig': original}
	run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
	return True

#------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
	return args

#------------------------------------------------------------------------
def get_translation_languages():
	rows, idx = run_ro_queries (
		queries = [{'cmd': u'select distinct lang from i18n.translations'}]
	)
	return [ r[0] for r in rows ]

#------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):

	args = {'lang': language}
	_log.debug('language [%s]', language)

	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
		SELECT DISTINCT ON (orig, lang)
			lang, orig, trans
		FROM ((

			-- strings stored as translation keys whether translated or not
			SELECT
				NULL as lang,
				ik.orig,
				NULL AS trans
			FROM
				i18n.keys ik

		) UNION ALL (

			-- already translated strings
			SELECT
				it.lang,
				it.orig,
				it.trans
			FROM
				i18n.translations it

		)) as translatable_strings
		%s""" % order_by
	else:
		cmd = u"""
		SELECT DISTINCT ON (orig, lang)
			lang, orig, trans
		FROM ((

			-- strings stored as translation keys whether translated or not
			SELECT
				%%(lang)s as lang,
				ik.orig,
				i18n._(ik.orig, %%(lang)s) AS trans
			FROM
				i18n.keys ik

		) UNION ALL (

			-- already translated strings
			SELECT
				%%(lang)s as lang,
				it.orig,
				i18n._(it.orig, %%(lang)s) AS trans
			FROM
				i18n.translations it

		)) AS translatable_strings
		%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows

#------------------------------------------------------------------------
def get_current_user_language():
	cmd = u'select i18n.get_curr_lang()'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
	return rows[0][0]

#------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
#------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available.
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)

	run_rw_queries(queries = [{
		'cmd': u'select i18n.force_curr_lang(%(lang)s)',
		'args': {'lang': language}
	}])

# =======================================================================
# query runners and helpers
# =======================================================================
def send_maintenance_notification():
	cmd = u'notify "db_maintenance_warning:"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def send_maintenance_shutdown():
	cmd = u'notify "db_maintenance_disconnect:"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	cmd = u'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except:
		cmd = u'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except:
			return False
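# Editor's sketch (not part of the original module): expected behaviour,
# assuming a reachable database:
#
#	is_pg_interval(candidate = '2 weeks')		# -> True
#	is_pg_interval(candidate = 'not a span')	# -> False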

#------------------------------------------------------------------------
def lock_row(link_obj=None, table=None, pk=None, exclusive=False):
	"""Uses pg_advisory(_shared).

	- locks stack and need one unlock per lock
	- same connection:
		- all locks succeed
	- different connections:
		- shared + shared succeed
		- shared + exclusive fail
	"""
	_log.debug('locking row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	if exclusive:
		cmd = u"""SELECT pg_try_advisory_lock('%s'::regclass::oid::int, %s)""" % (table, pk)
	else:
		cmd = u"""SELECT pg_try_advisory_lock_shared('%s'::regclass::oid::int, %s)""" % (table, pk)
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		return True
	_log.warning('cannot lock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	return False

#------------------------------------------------------------------------
def unlock_row(link_obj=None, table=None, pk=None, exclusive=False):
	"""Uses pg_advisory_unlock(_shared).

	- each lock needs one unlock
	"""
	_log.debug('trying to unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	if exclusive:
		cmd = u"SELECT pg_advisory_unlock('%s'::regclass::oid::int, %s)" % (table, pk)
	else:
		cmd = u"SELECT pg_advisory_unlock_shared('%s'::regclass::oid::int, %s)" % (table, pk)
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		return True
	_log.warning('cannot unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	return False

#------------------------------------------------------------------------
def row_is_locked(table=None, pk=None):
	"""Looks at pg_locks.

	- does not take into account locks other than 'advisory', however
	"""
	cmd = u"""SELECT EXISTS (
		SELECT 1 FROM pg_locks WHERE
			classid = '%s'::regclass::oid::int
				AND
			objid = %s
				AND
			locktype = 'advisory'
	)""" % (table, pk)
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		_log.debug('row is locked: [%s] [%s]', table, pk)
		return True
	_log.debug('row is NOT locked: [%s] [%s]', table, pk)
	return False

#------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	outfile = file(filename, 'wb')
	result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	outfile.close()
	return result
#------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file_obj>
	- must be a file-like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	conn = get_raw_connection(readonly=True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	max_chunk_size = 1024 * 1024 * 20		# 20 MB, works for typical CR DICOMs
	if chunk_size == 0:
		chunk_size = min(data_size, max_chunk_size)

	_log.debug('expecting %s bytes of BYTEA data in chunks of %s bytes', data_size, chunk_size)

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)

	# try setting "bytea_output"
	# - fails if not necessary
	# - succeeds if necessary
	try:
		run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
	except dbapi.ProgrammingError:
		_log.debug('failed to set bytea_output to "escape", not necessary')

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	conn.rollback()
	return True
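# Editor's sketch (not part of the original module): the shape of queries
# suitable for bytea2file(), per the docstring above; the table, column and
# pk are hypothetical:
#
#	data_query = {
#		'cmd': u'select substring(data from %(start)s for %(size)s) from blobs.doc_obj where pk = %(pk)s',
#		'args': {'pk': 1234}
#	}
#	data_size_query = {
#		'cmd': u'select octet_length(data) from blobs.doc_obj where pk = %(pk)s',
#		'args': {'pk': 1234}
#	}
#	bytea2file(data_query = data_query, filename = '/tmp/dump.bin', data_size_query = data_size_query)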
#------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea
	"""
	# read data from file
	infile = file(filename, "rb")
	data_as_byte_string = infile.read()
	infile.close()
	if args is None:
		args = {}
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		close_conn = False

	run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

	if close_conn:
		conn.close()

	return
#------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
	True: try to escape *all* metacharacters
	False: only escape those which render the regex invalid
	"""
	return expression.replace (
			'(', '\('
		).replace (
			')', '\)'
		).replace (
			'[', '\['
		).replace (
			'+', '\+'
		).replace (
			'.', '\.'
		).replace (
			'*', '\*'
		)
	#']', '\]',	# not needed
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				if curs.statusmessage != u'':
					_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except dbapi.Error as pg_exc:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = u'[%s]' % pg_exc.pgcode
				else:
					msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
				raise gmExceptions.AccessDenied (
					msg,
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.error('query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()		# need to rollback so ABORT state isn't preserved in pooled conns
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	tx_rollback()		# rollback just so that we don't stay IDLE IN TRANSACTION forever
	return (data, col_idx)
#------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		tx_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		if end_tx:
			conn_commit = link_obj.commit
			tx_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			tx_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		tx_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except dbapi.Error as pg_exc:
			_log.error('RW query failed: [%s]', curs.query)
			if curs.statusmessage != u'':
				_log.error('PG status message: %s', curs.statusmessage)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
			try:
				curs_close()
				tx_rollback()		# just for good measure
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
				if curs.statusmessage != u'':
					details = u'Status: %s\n%s' % (
						curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = u'[%s]' % pg_exc.pgcode
				else:
					msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
				raise gmExceptions.AccessDenied (
					msg,
					source = u'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except:
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
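# Editor's sketch (not part of the original module): a two-statement
# transaction where the last query returns the new primary key; table and
# sequence names are hypothetical:
#
#	rows, idx = run_rw_queries (
#		queries = [
#			{'cmd': u'insert into clin.allergy_state (has_allergy) values (%(state)s)', 'args': {'state': 0}},
#			{'cmd': u"select currval(pg_get_serial_sequence('clin.allergy_state', 'pk'))"}
#		],
#		return_data = True
#	)
#	pk_new = rows[0][0]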
#------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generates SQL for an INSERT query.

	values: dict of values keyed by field to insert them into
	"""
	if schema is None:
		schema = u'public'

	fields = values.keys()		# that way val_snippets and fields really should end up in the same order
	val_snippets = []
	for field in fields:
		val_snippets.append(u'%%(%s)s' % field)

	if returning is None:
		returning = u''
		return_data = False
	else:
		returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
		return_data = True

	cmd = u"""\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
		schema,
		table,
		u',\n\t\t'.join(fields),
		u',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug(u'running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)
# =======================================================================
# connection handling API
# -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
	"""
	GNUmed database connection pool.

	Extends psycopg2's PersistentConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN.
	"""
	#--------------------------------------------------
	def _connect(self, key=None):

		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close

		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)

		return conn
	#--------------------------------------------------
	def shutdown(self):
		for conn_key in self._used.keys():
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
			self._used[conn_key].original_close()
# -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections
	  for verifying encodings etc
	"""
	# FIXME: support verbose
	if dsn is None:
		dsn = get_default_dsn()

	if u'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	try:
		conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
	except dbapi.OperationalError, e:

		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise

		msg = unicode(msg, gmI18N.get_encoding(), 'replace')

		if msg.find('fe_sendauth') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError, (dsn, msg), tb

		if msg.find('uthenti') != -1:
			raise cAuthenticationError, (dsn, msg), tb

		raise

	_log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

	# do first-time stuff
	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except:
			pass
		if verbose:
			__log_PG_settings(curs=curs)
		curs.close()
		conn.commit()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	curs = conn.cursor()

	# set access mode
	if readonly:
		_log.debug('access mode [READ ONLY]')
		cmd = 'set session characteristics as transaction READ ONLY'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to on'
		curs.execute(cmd)
	else:
		_log.debug('access mode [READ WRITE]')
		cmd = 'set session characteristics as transaction READ WRITE'
		curs.execute(cmd)
		cmd = 'set default_transaction_read_only to off'
		curs.execute(cmd)

	curs.close()
	conn.commit()

	conn.is_decorated = False

	return conn
# =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.
	"""
	# FIXME: support pooled on RW, too
	# FIXME: for now, support the default DSN only
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		conn = __ro_conn_pool.getconn()
	else:
		conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	# set connection properties
	# - client encoding
	try:
		conn.set_client_encoding(encoding)
	except dbapi.OperationalError:
		t, v, tb = sys.exc_info()
		if str(v).find("can't set encoding to") != -1:
			raise cEncodingError, (encoding, v), tb
		raise

	# - transaction isolation level
	if readonly:
		iso_level = u'read committed'
	else:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
		iso_level = u'serializable'

	_log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

	curs = conn.cursor()

	# - client time zone
	curs.execute(_sql_set_timezone, [_default_client_timezone])

	conn.commit()

	# FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
	# FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
	#
	# - bytea data format
	# PG 9.0 switched to - by default - using "hex" rather than "escape",
	# however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
	# as the transmission mode for bytea output,
	# so try to set this setting back to "escape",
	# if that's not possible the reason will be that PG < 9.0 does not support
	# that setting - which also means we don't need it and can ignore the
	# failure
	cmd = "set bytea_output to 'escape'"
	try:
		curs.execute(cmd)
	except dbapi.ProgrammingError:
		_log.error('cannot set bytea_output format')

	curs.close()
	conn.commit()

	conn.is_decorated = True

	return conn
1581 #-----------------------------------------------------------------------
1582  def shutdown():
1583 if __ro_conn_pool is None: 1584 return 1585 __ro_conn_pool.shutdown()
1586 # ====================================================================== 1587 # internal helpers 1588 #-----------------------------------------------------------------------
1589  def __noop():
1590 pass
1591 #-----------------------------------------------------------------------
1592  def _raise_exception_on_ro_conn_close():
1593 raise TypeError(u'close() called on read-only connection')
1594 #-----------------------------------------------------------------------
1595  def log_database_access(action=None):
1596 run_insert ( 1597 schema = u'gm', 1598 table = u'access_log', 1599 values = {u'user_action': action}, 1600 end_tx = True 1601 )
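# For illustration only: the statement run_insert() generates from the
# above arguments boils down to the following (a sketch, the exact SQL
# run_insert() emits may differ in detail):
#
#	INSERT INTO gm.access_log (user_action) VALUES (%(user_action)s)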
1602 #-----------------------------------------------------------------------
1603  def sanity_check_time_skew(tolerance=60):
1604 """Check server time and local time to be within 1605 the given tolerance of each other. 1606 1607 tolerance: seconds 1608 """ 1609 _log.debug('maximum skew tolerance (seconds): %s', tolerance) 1610 1611 cmd = u"SELECT now() at time zone 'UTC'" 1612 conn = get_raw_connection(readonly=True) 1613 curs = conn.cursor() 1614 1615 start = time.time() 1616 rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}]) 1617 end = time.time() 1618 client_now_as_utc = pydt.datetime.utcnow() 1619 1620 curs.close() 1621 conn.commit() 1622 1623 server_now_as_utc = rows[0][0] 1624 query_duration = end - start 1625 _log.info('server "now" (UTC): %s', server_now_as_utc) 1626 _log.info('client "now" (UTC): %s', client_now_as_utc) 1627 _log.debug('wire roundtrip (seconds): %s', query_duration) 1628 1629 if query_duration > tolerance: 1630 _log.error('useless to check client/server time skew, wire roundtrip > tolerance') 1631 return False 1632 1633 if server_now_as_utc > client_now_as_utc: 1634 real_skew = server_now_as_utc - client_now_as_utc 1635 else: 1636 real_skew = client_now_as_utc - server_now_as_utc 1637 1638 _log.debug('client/server time skew: %s', real_skew) 1639 1640 if real_skew > pydt.timedelta(seconds = tolerance): 1641 _log.error('client/server time skew > tolerance') 1642 return False 1643 1644 return True
1645 #-----------------------------------------------------------------------
1646  def sanity_check_database_settings():
1647 """Checks database settings. 1648 1649 returns (status, message) 1650 status: 1651 0: no problem 1652 1: non-fatal problem 1653 2: fatal problem 1654 """ 1655 _log.debug('checking database settings') 1656 1657 conn = get_connection() 1658 1659 # - version string 1660 global postgresql_version_string 1661 if postgresql_version_string is None: 1662 curs = conn.cursor() 1663 curs.execute('SELECT version()') 1664 postgresql_version_string = curs.fetchone()['version'] 1665 curs.close() 1666 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string) 1667 1668 options2check = { 1669 # setting: [expected value, risk, fatal?] 1670 u'allow_system_table_mods': [u'off', u'system breakage', False], 1671 u'check_function_bodies': [u'on', u'suboptimal error detection', False], 1672 u'datestyle': [u'ISO', u'faulty timestamp parsing', True], 1673 u'default_transaction_isolation': [u'read committed', u'faulty database reads', True], 1674 u'default_transaction_read_only': [u'on', u'accidental database writes', False], 1675 u'fsync': [u'on', u'data loss/corruption', True], 1676 u'full_page_writes': [u'on', u'data loss/corruption', False], 1677 u'lc_messages': [u'C', u'suboptimal error detection', False], 1678 u'password_encryption': [u'on', u'breach of confidentiality', False], 1679 u'regex_flavor': [u'advanced', u'query breakage', False], # 9.0 doesn't support this anymore, default now advanced anyway 1680 u'synchronous_commit': [u'on', u'data loss/corruption', False], 1681 u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True] 1682 } 1683 1684 from Gnumed.pycommon import gmCfg2 1685 _cfg = gmCfg2.gmCfgData() 1686 if _cfg.get(option = u'hipaa'): 1687 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True] 1688 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True] 1689 else: 1690 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None] 1691 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None] 1692 1693 cmd = u"SELECT name, setting from pg_settings where name in %(settings)s" 1694 rows, idx = run_ro_queries ( 1695 link_obj = conn, 1696 queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}], 1697 get_col_idx = False 1698 ) 1699 1700 found_error = False 1701 found_problem = False 1702 msg = [] 1703 for row in rows: 1704 option = row['name'] 1705 value_found = row['setting'] 1706 value_expected = options2check[option][0] 1707 risk = options2check[option][1] 1708 fatal_setting = options2check[option][2] 1709 if value_found != value_expected: 1710 if fatal_setting is True: 1711 found_error = True 1712 elif fatal_setting is False: 1713 found_problem = True 1714 elif fatal_setting is None: 1715 pass 1716 else: 1717 _log.error(options2check[option]) 1718 raise ValueError(u'invalid database configuration sanity check') 1719 msg.append(_(' option [%s]: %s') % (option, value_found)) 1720 msg.append(_(' risk: %s') % risk) 1721 _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk)) 1722 1723 if found_error: 1724 return 2, u'\n'.join(msg) 1725 1726 if found_problem: 1727 return 1, u'\n'.join(msg) 1728 1729 return 0, u''
1730 #------------------------------------------------------------------------
1731  def __log_PG_settings(curs=None):
1732  	# don't use any of the run_*() helpers since that
1733  	# might create a loop if we fail here
1734  	# FIXME: use pg_settings
1735  	try:
1736  		curs.execute(u'show all')
1737  	except Exception:
1738  		_log.exception(u'cannot log PG settings (>>>show all<<< failed)')
1739  		return False
1740  	settings = curs.fetchall()
1741  	if settings is None:
1742  		_log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
1743  		return False
1744  	for setting in settings:
1745  		_log.debug(u'PG option [%s]: %s', setting[0], setting[1])
1746  	return True
1747 # =======================================================================
1748  def extract_msg_from_pg_exception(exc=None):
1749 1750 try: 1751 msg = exc.args[0] 1752 except (AttributeError, IndexError, TypeError): 1753 return u'cannot extract message from exception' 1754 1755 return unicode(msg, gmI18N.get_encoding(), 'replace')
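# Usage sketch (commented out): turn any psycopg2 error into a
# displayable unicode string:
#
#conn = get_connection(readonly = True)
#curs = conn.cursor()
#try:
#	curs.execute(u'SELECT 1/0')		# division by zero -> a dbapi.Error subclass
#except dbapi.Error, e:
#	print extract_msg_from_pg_exception(exc = e)
#curs.close()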
1756 # =======================================================================
1757  class cAuthenticationError(dbapi.OperationalError):
1758
1759  	def __init__(self, dsn=None, prev_val=None):
1760 self.dsn = dsn 1761 self.prev_val = prev_val
1762
1763  	def __str__(self):
1764 _log.warning('%s.__str__() called', self.__class__.__name__) 1765 tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn) 1766 _log.error(tmp) 1767 return tmp.encode(gmI18N.get_encoding(), 'replace')
1768
1769  	def __unicode__(self):
1770 return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1771 1772 # ======================================================================= 1773 # custom psycopg2 extensions 1774 # =======================================================================
1775  class cEncodingError(dbapi.OperationalError):
1776
1777  	def __init__(self, encoding=None, prev_val=None):
1778 self.encoding = encoding 1779 self.prev_val = prev_val
1780
1781  	def __str__(self):
1782 _log.warning('%s.__str__() called', self.__class__.__name__) 1783 return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))
1784
1785  	def __unicode__(self):
1786 return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1787  
1788  # -----------------------------------------------------------------------
1789  # Python -> PostgreSQL
1790  # -----------------------------------------------------------------------
1791  # test when Squeeze (and thus psycopg2 2.2) becomes Stable
1792  class cAdapterPyDateTime(object):
1793
1794  	def __init__(self, dt):
1795 if dt.tzinfo is None: 1796 raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat()) 1797 self.__dt = dt
1798
1799  	def getquoted(self):
1800 return _timestamp_template % self.__dt.isoformat()
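# Sketch of what this adapter produces (commented out; assumes that
# _timestamp_template, defined earlier in this module, wraps the ISO
# string in a timestamp cast):
#
#from psycopg2.tz import FixedOffsetTimezone
#aware_dt = pydt.datetime(2010, 1, 1, 12, 0, tzinfo = FixedOffsetTimezone(offset = 60))
#print cAdapterPyDateTime(aware_dt).getquoted()
#
#naive_dt = pydt.datetime(2010, 1, 1, 12, 0)
#cAdapterPyDateTime(naive_dt)		# raises ValueError: lacking a time zone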
1801  
1802  ## remove for 0.9
1803  ## ----------------------------------------------------------------------
1804  ##class cAdapterMxDateTime(object):
1805  ##
1806  ##	def __init__(self, dt):
1807  ##		if dt.tz == '???':
1808  ##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
1809  ##		self.__dt = dt
1810  ##
1811  ##	def getquoted(self):
1812  ##		# under some locale settings the mx.DateTime ISO formatter
1813  ##		# will insert "," into the ISO string,
1814  ##		# while this is allowed per the ISO8601 spec PostgreSQL
1815  ##		# cannot currently handle that,
1816  ##		# so map those "," to "." to make things work:
1817  ##		return mxDT.ISO.str(self.__dt).replace(',', '.')
1818  ##
1819  ## ----------------------------------------------------------------------
1820  ## PostgreSQL -> Python
1821  ## ----------------------------------------------------------------------
1822  
1823  #=======================================================================
1824  # main
1825  #-----------------------------------------------------------------------
1826  
1827  # make sure psycopg2 knows how to handle unicode ...
1828  # intended to become standard
1829  # test when Squeeze (and thus psycopg2 2.2) becomes Stable
1830  psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
1831  psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)
1832  
1833  # tell psycopg2 how to adapt datetime types with timestamps when locales are in use
1834  # check in 0.9:
1835  psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)
1836  
1837  ## remove for 0.9
1838  #try:
1839  #	import mx.DateTime as mxDT
1840  ##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
1841  #except ImportError:
1842  #	_log.warning('cannot import mx.DateTime')
1843  
1844  # do NOT adapt *lists* to "... IN (*) ..." syntax because we want
1845  # them adapted to "... ARRAY[]..." so we can support PG arrays
1846  
1847  #=======================================================================
1848  if __name__ == "__main__":
1849  
1850  	if len(sys.argv) < 2:
1851  		sys.exit()
1852  
1853  	if sys.argv[1] != 'test':
1854  		sys.exit()
1855  
1856  	logging.basicConfig(level=logging.DEBUG)
1857  	#--------------------------------------------------------------------
1858  	def test_file2bytea():
1859 run_rw_queries(queries = [ 1860 {'cmd': u'create table test_bytea (data bytea)'} 1861 ]) 1862 1863 cmd = u'insert into test_bytea values (%(data)s::bytea)' 1864 try: 1865 file2bytea(query = cmd, filename = sys.argv[2]) 1866 except: 1867 _log.exception('error') 1868 1869 run_rw_queries(queries = [ 1870 {'cmd': u'drop table test_bytea'} 1871 ])
1872 #--------------------------------------------------------------------
1873  	def test_get_connection():
1874 print "testing get_connection()" 1875 1876 dsn = 'foo' 1877 try: 1878 conn = get_connection(dsn=dsn) 1879 except dbapi.OperationalError, e: 1880 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1881 t, v = sys.exc_info()[:2] 1882 print ' ', t 1883 print ' ', v 1884 1885 dsn = 'dbname=gnumed_v9' 1886 try: 1887 conn = get_connection(dsn=dsn) 1888 except cAuthenticationError: 1889 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1890 t, v = sys.exc_info()[:2] 1891 print ' ', t 1892 print ' ', v 1893 1894 dsn = 'dbname=gnumed_v9 user=abc' 1895 try: 1896 conn = get_connection(dsn=dsn) 1897 except cAuthenticationError: 1898 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1899 t, v = sys.exc_info()[:2] 1900 print ' ', t 1901 print ' ', v 1902 1903 dsn = 'dbname=gnumed_v9 user=any-doc' 1904 try: 1905 conn = get_connection(dsn=dsn) 1906 except cAuthenticationError: 1907 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1908 t, v = sys.exc_info()[:2] 1909 print ' ', t 1910 print ' ', v 1911 1912 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1913 try: 1914 conn = get_connection(dsn=dsn) 1915 except cAuthenticationError: 1916 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1917 t, v = sys.exc_info()[:2] 1918 print ' ', t 1919 print ' ', v 1920 1921 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1922 conn = get_connection(dsn=dsn, readonly=True) 1923 1924 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1925 conn = get_connection(dsn=dsn, readonly=False) 1926 1927 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1928 encoding = 'foo' 1929 try: 1930 conn = get_connection(dsn=dsn, encoding=encoding) 1931 except cEncodingError: 1932 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1933 t, v = sys.exc_info()[:2] 1934 print ' ', t 1935 print ' ', v
1936 #--------------------------------------------------------------------
1937  	def test_exceptions():
1938 print "testing exceptions" 1939 1940 try: 1941 raise cAuthenticationError('no dsn', 'no previous exception') 1942 except cAuthenticationError: 1943 t, v, tb = sys.exc_info() 1944 print t 1945 print v 1946 print tb 1947 1948 try: 1949 raise cEncodingError('no dsn', 'no previous exception') 1950 except cEncodingError: 1951 t, v, tb = sys.exc_info() 1952 print t 1953 print v 1954 print tb
1955 #--------------------------------------------------------------------
1956  	def test_ro_queries():
1957 print "testing run_ro_queries()" 1958 1959 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1960 conn = get_connection(dsn, readonly=True) 1961 1962 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1963 print data 1964 print idx 1965 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 1966 print data 1967 print idx 1968 1969 curs = conn.cursor() 1970 1971 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1972 print data 1973 print idx 1974 1975 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 1976 print data 1977 print idx 1978 1979 try: 1980 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 1981 print data 1982 print idx 1983 except psycopg2.ProgrammingError: 1984 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 1985 t, v = sys.exc_info()[:2] 1986 print ' ', t 1987 print ' ', v 1988 1989 curs.close()
1990 #--------------------------------------------------------------------
1991  	def test_request_dsn():
1992 conn = get_connection() 1993 print conn 1994 conn.close()
1995 #--------------------------------------------------------------------
1996  	def test_set_encoding():
1997 print "testing set_default_client_encoding()" 1998 1999 enc = 'foo' 2000 try: 2001 set_default_client_encoding(enc) 2002 print "SUCCESS: encoding [%s] worked" % enc 2003 except ValueError: 2004 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2005 t, v = sys.exc_info()[:2] 2006 print ' ', t 2007 print ' ', v 2008 2009 enc = '' 2010 try: 2011 set_default_client_encoding(enc) 2012 print "SUCCESS: encoding [%s] worked" % enc 2013 except ValueError: 2014 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2015 t, v = sys.exc_info()[:2] 2016 print ' ', t 2017 print ' ', v 2018 2019 enc = 'latin1' 2020 try: 2021 set_default_client_encoding(enc) 2022 print "SUCCESS: encoding [%s] worked" % enc 2023 except ValueError: 2024 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2025 t, v = sys.exc_info()[:2] 2026 print ' ', t 2027 print ' ', v 2028 2029 enc = 'utf8' 2030 try: 2031 set_default_client_encoding(enc) 2032 print "SUCCESS: encoding [%s] worked" % enc 2033 except ValueError: 2034 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2035 t, v = sys.exc_info()[:2] 2036 print ' ', t 2037 print ' ', v 2038 2039 enc = 'unicode' 2040 try: 2041 set_default_client_encoding(enc) 2042 print "SUCCESS: encoding [%s] worked" % enc 2043 except ValueError: 2044 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2045 t, v = sys.exc_info()[:2] 2046 print ' ', t 2047 print ' ', v 2048 2049 enc = 'UNICODE' 2050 try: 2051 set_default_client_encoding(enc) 2052 print "SUCCESS: encoding [%s] worked" % enc 2053 except ValueError: 2054 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2055 t, v = sys.exc_info()[:2] 2056 print ' ', t 2057 print ' ', v
2058 #--------------------------------------------------------------------
2059  	def test_connection_pool():
2060  		dsn = get_default_dsn()
2061  		pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False)
2062  		print pool
2063  		print pool.getconn()
2064  		print pool.getconn()
2065  		print pool.getconn()
2066  		print type(pool.getconn())
2067 #--------------------------------------------------------------------
2068  	def test_list_args():
2069 dsn = get_default_dsn() 2070 conn = get_connection(dsn, readonly=True) 2071 curs = conn.cursor() 2072 curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
2073 #--------------------------------------------------------------------
2074  	def test_sanitize_pg_regex():
2075 tests = [ 2076 ['(', '\\('] 2077 , ['[', '\\['] 2078 , [')', '\\)'] 2079 ] 2080 for test in tests: 2081 result = sanitize_pg_regex(test[0]) 2082 if result != test[1]: 2083 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2084 #--------------------------------------------------------------------
2085  	def test_is_pg_interval():
2086 status = True 2087 tests = [ 2088 [None, True], # None == NULL == succeeds ! 2089 [1, True], 2090 ['1', True], 2091 ['abc', False] 2092 ] 2093 2094 if not is_pg_interval(): 2095 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2096 status = False 2097 2098 for test in tests: 2099 result = is_pg_interval(test[0]) 2100 if result != test[1]: 2101 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2102 status = False 2103 2104 return status
2105 #--------------------------------------------------------------------
2106  	def test_sanity_check_time_skew():
2107 sanity_check_time_skew()
2108 #--------------------------------------------------------------------
2109  	def test_get_foreign_key_details():
2110 for row in get_foreign_keys2column ( 2111 schema = u'dem', 2112 table = u'identity', 2113 column = u'pk' 2114 ): 2115 print '%s.%s references %s.%s.%s' % ( 2116 row['referencing_table'], 2117 row['referencing_column'], 2118 row['referenced_schema'], 2119 row['referenced_table'], 2120 row['referenced_column'] 2121 )
2122 #--------------------------------------------------------------------
2123  	def test_set_user_language():
2124  		# (user, language, expected result); an expected result of None means an exception is expected
2125  		tests = [
2126  			# current user
2127  			[None, 'de_DE', True],
2128  			[None, 'lang_w/o_tx', False],
2129  			[None, None, True],
2130  			# valid user
2131  			['any-doc', 'de_DE', True],
2132  			['any-doc', 'lang_w/o_tx', False],
2133  			['any-doc', None, True],
2134  			# invalid user
2135  			['invalid user', 'de_DE', None],
2136  			['invalid user', 'lang_w/o_tx', False],	# language checking happens before user checking
2137  			['invalid user', None, True]
2138  		]
2139  		for test in tests:
2140  			try:
2141  				result = set_user_language(user = test[0], language = test[1])
2142  				if result != test[2]:
2143  					print "test:", test
2144  					print "result:", result, "expected:", test[2]
2145  			except psycopg2.IntegrityError, e:
2146  				if test[2] is None:
2147  					continue
2148  				print "test:", test
2149  				print "unexpected exception"
2150  				print "result:", e
2151 #--------------------------------------------------------------------
2152  	def test_get_schema_revision_history():
2153 for line in get_schema_revision_history(): 2154 print u' - '.join(line)
2155 #--------------------------------------------------------------------
2156  	def test_run_query():
2157 gmDateTime.init() 2158 args = {'dt': gmDateTime.pydt_max_here()} 2159 cmd = u"SELECT %(dt)s" 2160 2161 #cmd = u"SELECT 'infinity'::timestamp with time zone" 2162 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2163 print rows
2164 #--------------------------------------------------------------------
2165  	def test_schema_exists():
2166 print schema_exists()
2167 #--------------------------------------------------------------------
2168  	def test_row_locks():
2169 row_is_locked(table = 'dem.identity', pk = 12) 2170 2171 print "1st connection:" 2172 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2173 print " 1st shared lock succeeded:", lock_row(table = 'dem.identity', pk = 12, exclusive = False) 2174 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2175 2176 print " 2nd shared lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = False) 2177 print " `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False) 2178 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2179 print " exclusive lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = True) 2180 print " `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = True) 2181 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2182 2183 print "2nd connection:" 2184 conn = get_raw_connection(readonly=True) 2185 print " shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False) 2186 print " `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False) 2187 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2188 print " exclusive lock succeeded ?", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True), "(should fail)" 2189 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2190 2191 print "1st connection:" 2192 print " unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False) 2193 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2194 2195 print "2nd connection:" 2196 print " exclusive lock should succeed", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True) 2197 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2198 print " shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False) 2199 print " `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False) 2200 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2201 print " unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False) 2202 print " locked:", row_is_locked(table = 'dem.identity', pk = 12) 2203 2204 conn.close()
2205 #-------------------------------------------------------------------- 2206 # run tests 2207 #test_file2bytea() 2208 #test_get_connection() 2209 #test_exceptions() 2210 #test_ro_queries() 2211 #test_request_dsn() 2212 #test_set_encoding() 2213 #test_connection_pool() 2214 #test_list_args() 2215 #test_sanitize_pg_regex() 2216 #test_is_pg_interval() 2217 #test_sanity_check_time_skew() 2218 #test_get_foreign_key_details() 2219 #test_set_user_language() 2220 #test_get_schema_revision_history() 2221 #test_run_query() 2222 #test_schema_exists() 2223 test_row_locks() 2224 2225 # ====================================================================== 2226