Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | [no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  15  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  16   
  17  # stdlib 
  18  import time 
  19  import sys 
  20  import os 
  21  import codecs 
  22  import types 
  23  import logging 
  24  import datetime as pydt 
  25  import re as regex 
  26   
  27   
  28  # GNUmed 
  29  if __name__ == '__main__': 
  30          sys.path.insert(0, '../../') 
  31  from Gnumed.pycommon import gmLoginInfo 
  32  from Gnumed.pycommon import gmExceptions 
  33  from Gnumed.pycommon import gmDateTime 
  34  from Gnumed.pycommon import gmBorg 
  35  from Gnumed.pycommon import gmI18N 
  36  from Gnumed.pycommon import gmLog2 
  37  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  38   
  39  _log = logging.getLogger('gm.db') 
  40   
  41   
  42  # 3rd party 
  43  try: 
  44          import psycopg2 as dbapi 
  45  except ImportError: 
  46          _log.exception("Python database adapter psycopg2 not found.") 
  47          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  48          raise 
  49   
  50   
  51  _log.info('psycopg2 version: %s' % dbapi.__version__) 
  52  _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle)) 
  53  if not (float(dbapi.apilevel) >= 2.0): 
  54          raise ImportError('gmPG2: supported DB-API level too low') 
  55  if not (dbapi.threadsafety > 0): 
  56          raise ImportError('gmPG2: lacking minimum thread safety in psycopg2') 
  57  if not (dbapi.paramstyle == 'pyformat'): 
  58          raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2') 
  59  try: 
  60          dbapi.__version__.index('dt') 
  61  except ValueError: 
  62          raise ImportError('gmPG2: lacking datetime support in psycopg2') 
  63  try: 
  64          dbapi.__version__.index('ext') 
  65  except ValueError: 
  66          raise ImportError('gmPG2: lacking extensions support in psycopg2') 
  67  try: 
  68          dbapi.__version__.index('pq3') 
  69  except ValueError: 
  70          raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2') 
  71   
  72  import psycopg2.extras 
  73  import psycopg2.extensions 
  74  import psycopg2.pool 
  75  import psycopg2.errorcodes as sql_error_codes 
  76   
  77  # ======================================================================= 
  78  _default_client_encoding = 'UTF8' 
  79  _log.info('assuming default client encoding of [%s]' % _default_client_encoding) 
  80   
  81  # things timezone 
  82  _default_client_timezone = None                 # default time zone for connections 
  83  _sql_set_timezone = None 
   84  _timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted will not work 
  85  FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone 
  86   
  87  _default_dsn = None 
  88  _default_login = None 
  89   
  90  postgresql_version_string = None 
  91  postgresql_version = None                       # accuracy: major.minor 
  92   
  93  __ro_conn_pool = None 
  94   
  95  auto_request_login_params = True 
  96  # ======================================================================= 
  97  # global data 
  98  # ======================================================================= 
  99   
 100  known_schema_hashes = { 
 101          0: 'not released, testing only', 
 102          2: 'b09d50d7ed3f91ddf4c4ddb8ea507720', 
 103          3: 'e73718eaf230d8f1d2d01afa8462e176', 
 104          4: '4428ccf2e54c289136819e701bb095ea', 
 105          5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes 
 106          6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ... 
 107          7: '6c9f6d3981483f8e9433df99d1947b27', 
 108          8: '89b13a7af83337c3aad153b717e52360', 
 109          9: '641a9b2be3c378ffc2bb2f0b1c9f051d', 
 110          10: '7ef42a8fb2bd929a2cdd0c63864b4e8a', 
 111          11: '03042ae24f3f92877d986fb0a6184d76', 
 112          12: '06183a6616db62257e22814007a8ed07', 
 113          13: 'fab7c1ae408a6530c47f9b5111a0841e', 
 114          14: 'e170d543f067d1ea60bfe9076b1560cf', 
 115          15: '70012ff960b77ecdff4981c94b5b55b6', 
 116          16: '0bcf44ca22c479b52976e5eda1de8161', 
 117          17: '161428ee97a00e3bf56168c3a15b7b50' 
 118  } 
 119   
 120  map_schema_hash2version = { 
 121          'b09d50d7ed3f91ddf4c4ddb8ea507720': 2, 
 122          'e73718eaf230d8f1d2d01afa8462e176': 3, 
 123          '4428ccf2e54c289136819e701bb095ea': 4, 
 124          '7e7b093af57aea48c288e76632a382e5': 5, 
 125          '90e2026ac2efd236da9c8608b8685b2d': 6, 
 126          '6c9f6d3981483f8e9433df99d1947b27': 7, 
 127          '89b13a7af83337c3aad153b717e52360': 8, 
 128          '641a9b2be3c378ffc2bb2f0b1c9f051d': 9, 
 129          '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10, 
 130          '03042ae24f3f92877d986fb0a6184d76': 11, 
 131          '06183a6616db62257e22814007a8ed07': 12, 
 132          'fab7c1ae408a6530c47f9b5111a0841e': 13, 
 133          'e170d543f067d1ea60bfe9076b1560cf': 14, 
 134          '70012ff960b77ecdff4981c94b5b55b6': 15, 
 135          '0bcf44ca22c479b52976e5eda1de8161': 16, 
 136          '161428ee97a00e3bf56168c3a15b7b50': 17 
 137  } 
 138   
 139  map_client_branch2required_db_version = { 
 140          u'GIT tree': 0, 
 141          u'0.3': 9, 
 142          u'0.4': 10, 
 143          u'0.5': 11, 
 144          u'0.6': 12, 
 145          u'0.7': 13, 
 146          u'0.8': 14, 
 147          u'0.9': 15, 
 148          u'1.0': 16,             # intentional duplicate with 1.1 
 149          u'1.1': 16, 
 150          u'1.2': 17, 
 151          u'1.3': 18 
 152  } 
 153   
 154  # get columns and data types for a given table 
 155  query_table_col_defs = u"""select 
 156          cols.column_name, 
 157          cols.udt_name 
 158  from 
 159          information_schema.columns cols 
 160  where 
 161          cols.table_schema = %s 
 162                  and 
 163          cols.table_name = %s 
 164  order by 
 165          cols.ordinal_position""" 
 166   
 167  query_table_attributes = u"""select 
 168          cols.column_name 
 169  from 
 170          information_schema.columns cols 
 171  where 
 172          cols.table_schema = %s 
 173                  and 
 174          cols.table_name = %s 
 175  order by 
 176          cols.ordinal_position""" 
 177   
 178  # ======================================================================= 
 179  # module globals API 
 180  # ======================================================================= 
def set_default_client_encoding(encoding = None):
	"""Remember <encoding> as the default client (wire) encoding.

	Verifies that both psycopg2 and the Python codecs machinery
	can handle the encoding before accepting it.

	Raises ValueError (unknown to psycopg2) or LookupError
	(unknown to Python) on invalid input.
	"""
	# reject anything psycopg2 itself cannot map
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# make sure Python has a codec for the mapped name, too
	python_encoding = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(python_encoding)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, python_encoding))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
200 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
	"""Remember <timezone> as the default connection time zone.

	NOTE: the value is NOT validated here.
	"""
	# FIXME: use __validate_timezone
	global _default_client_timezone, _sql_set_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone
	_sql_set_timezone = u'set timezone to %s'
	return True
212 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
	"""Check whether <timezone> is both settable AND usable on <conn>.

	Runs inside a throw-away transaction which is rolled back afterwards.
	Returns True/False.
	"""
	_log.debug(u'validating time zone [%s]', timezone)

	conn.commit()
	curs = conn.cursor()
	usable = False
	try:
		curs.execute(u'set timezone to %(tz)s', {u'tz': timezone})
		_log.info(u'time zone [%s] is settable', timezone)
		# settable is not enough - can we actually use it, though ?
		try:
			curs.execute(u"""select '1920-01-19 23:00:00+01'::timestamp with time zone""")
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			usable = True
		except:
			# deliberate catch-all: any failure just means "not usable"
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		# deliberate catch-all: log and report "invalid"
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return usable
245 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""Some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations.

	Returns the expanded name, or <timezone> unchanged when no
	expansion is found (or the lookup fails).
	"""
	SQL = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	conn.commit()
	curs = conn.cursor()

	expansion = timezone
	try:
		curs.execute(SQL, {u'tz': timezone})
		rows = curs.fetchall()
		if len(rows) > 0:
			expansion = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, expansion)
	except:
		# deliberate catch-all: expansion is best-effort only
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return expansion
277 #---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection.

	Collects time zone candidates ($TZ, gmDateTime's detection,
	plus their server-side expansions) and remembers the first
	one the server accepts; falls back to a numeric ISO offset.
	"""
	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	candidates = []
	# candidate 1: explicit $TZ (if set), plus its expansion
	try:
		env_tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
	except KeyError:
		env_tz = None
	if env_tz is not None:
		candidates.append(env_tz)
		expanded = __expand_timezone(conn = conn, timezone = env_tz)
		if expanded != env_tz:
			candidates.append(expanded)

	# candidate 2: whatever gmDateTime detected, plus its expansion
	local_tz = gmDateTime.current_local_timezone_name
	candidates.append(local_tz)
	expanded = __expand_timezone(conn = conn, timezone = local_tz)
	if expanded != local_tz:
		candidates.append(expanded)

	_log.debug('candidates: %s', str(candidates))

	# pick the first candidate the server accepts
	global _default_client_timezone
	global _sql_set_timezone
	for tz in candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			break
	else:
		# no symbolic zone worked: fall back to numeric ISO offset
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
323 # ======================================================================= 324 # login API 325 # =======================================================================
326 -def __request_login_params_tui():
327 """Text mode request of database login parameters""" 328 import getpass 329 login = gmLoginInfo.LoginInfo() 330 331 print "\nPlease enter the required login parameters:" 332 try: 333 login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '') 334 login.database = prompted_input(prompt = "database", default = 'gnumed_v17') 335 login.user = prompted_input(prompt = "user name", default = '') 336 tmp = 'password for "%s" (not shown): ' % login.user 337 login.password = getpass.getpass(tmp) 338 login.port = prompted_input(prompt = "port", default = 5432) 339 except KeyboardInterrupt: 340 _log.warning("user cancelled text mode login dialog") 341 print "user cancelled text mode login dialog" 342 raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!")) 343 344 return login
345 #---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# wx must not only be importable, the main Application
	# instance must already exist - otherwise bail out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# launch the login dialog
	# if wx was not initialized /no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# user cancelled or something else went wrong
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
370 #---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection.

	Tries the wxPython GUI dialog first (only when a DISPLAY is
	available), falling back to text mode on the console.

	Raises Exception when auto_request_login_params is disabled.
	"""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't wxGTK will crash hard at
	#  C-level with "can't open Display")
	# FIX: "in" replaces Python-2-only os.environ.has_key()
	if 'DISPLAY' in os.environ:
		# try wxPython GUI
		try:
			return __request_login_params_gui_wx()
		except:
			# deliberate best-effort: any GUI failure
			# drops us through to text mode below
			pass

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()
388 389 # ======================================================================= 390 # DSN API 391 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a psycopg2/libpq DSN string from its components.

	Parts which are None or blank are left out entirely;
	'sslmode=prefer' is always appended.
	"""
	def _has_content(value):
		# a part is usable if it is set and not whitespace-only
		return (value is not None) and (str(value).strip() != '')

	keyed_parts = (
		('dbname', database),
		('host', host),
		('port', port),
		('user', user),
		('password', password)
	)
	dsn_parts = [ '%s=%s' % (key, val) for key, val in keyed_parts if _has_content(val) ]
	dsn_parts.append('sslmode=prefer')
	return ' '.join(dsn_parts)
413 # ------------------------------------------------------
def get_default_login():
	"""Return the default login, requesting one first if necessary."""
	# get_default_dsn() bootstraps _default_login as a side effect
	get_default_dsn()
	return _default_login
418 # ------------------------------------------------------
def get_default_dsn():
	"""Return the default DSN, interactively requesting login parameters on first use."""
	global _default_dsn
	if _default_dsn is None:
		# not set yet: ask the user and derive the DSN from the answer
		set_default_login(login = request_login_params())
	return _default_dsn
428 # ------------------------------------------------------
def set_default_login(login=None):
	"""Remember <login> as the default login and derive the default DSN from it.

	Returns False when <login> is None, True otherwise.
	Passwords are masked in the DSN log output.
	"""
	if login is None:
		return False

	# normalize: an empty host means a non-TCP/IP (local socket) connection
	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	# FIX: log BEFORE overwriting the global so the OLD value
	# actually shows up (previously this logged the new value twice)
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		# mask the password when logging
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info (
		'setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True
455 # ======================================================================= 456 # metadata API 457 # =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	"""Compare the live database schema hash against the known hash for <version>.

	Returns True on match, False (with diagnostic logging) otherwise.
	"""
	expected_hash = known_schema_hashes[version]
	# version 0 is the development tree: hash "everything" (9999)
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	md5_found = rows[0]['md5']
	if md5_found == expected_hash:
		_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[md5_found], md5_found))
		return True

	_log.error('database schema version mismatch')
	_log.error('expected: %s (%s)' % (version, expected_hash))
	_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), md5_found))
	if verbose:
		_log.debug('schema dump follows:')
		for line in get_schema_structure(link_obj=link_obj).split():
			_log.debug(line)
		_log.debug('schema revision history dump follows:')
		for line in get_schema_revision_history(link_obj=link_obj):
			_log.debug(u' - '.join(line))
	return False
485 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	"""Map the current schema hash to a version number.

	Returns a descriptive unicode string for unknown hashes.
	"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	md5_db = rows[0]['md5']
	if md5_db in map_schema_hash2version:
		return map_schema_hash2version[md5_db]
	return u'unknown database schema version, MD5 hash is [%s]' % md5_db
492 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	"""Return the full schema structure dump as a single string."""
	structure_rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
	return structure_rows[0][0]
496 #------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
	"""Return the MD5 hash over the current schema structure."""
	hash_rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	return hash_rows[0]['md5']
500 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	"""List all applied schema revisions, oldest first.

	Returns rows of (imported, version, filename).
	"""
	SQL = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	history_rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': SQL}])
	return history_rows
512 #------------------------------------------------------------------------
def get_current_user():
	"""Return the database account GNUmed is currently connected as."""
	user_rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return user_rows[0][0]
516 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	SQL = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	query_args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}
	fk_rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': SQL, 'args': query_args}]
	)
	return fk_rows
568 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	SQL = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	child_rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': SQL, 'args': {'schema': schema, 'table': table}}])
	return child_rows
590 #------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	"""True if schema <schema> exists in the connected database."""
	SQL = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	exists_rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': SQL, 'args': {'schema': schema}}])
	return exists_rows[0][0]
596 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns false, true."""
	SQL = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	exists_rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': SQL, 'args': (schema, table)}])
	return exists_rows[0][0]
609 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
	"""Map column names of the last query on <cursor> to their positional index.

	Returns None when the cursor carries no result description
	(unused cursor, or last query did not select rows).
	"""
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	# FIX: enumerate replaces the hand-rolled counter,
	# "in" replaces Python-2-only dict.has_key()
	for col_index, col_desc in enumerate(cursor.description):
		col_name = col_desc[0]
		# a query like "select 1,2;" will return two columns of the same name !
		# hence adjust to that, note, however, that dict-style access won't work
		# on results of such queries ...
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
	return col_indices
627 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [ [column names], {column name: data type} ] for a table."""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		name = row[0]
		udt = row[1]
		col_names.append(name)
		# a leading "_" on the udt name denotes an array type
		if udt.startswith('_'):
			col_type[name] = udt[1:] + '[]'
		else:
			col_type[name] = udt
	return [col_names, col_type]
643 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	return [ row[0] for row in rows ]
651 652 #------------------------------------------------------------------------ 653 # i18n functions 654 #------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	"""Dump all database string translations into the SQL script <filename>.

	The script re-imports via i18n.upd_tx() and is meant to be
	mailed to the GNUmed developers.

	Returns True on success.
	"""
	tx_file = codecs.open(filename, 'wb', 'utf8')
	# FIX: close the file even when a query or write raises mid-export
	try:
		tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
		tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
		tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
		tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
		tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
		tx_file.write(u'set default_transaction_read_only to off;\n\n')
		tx_file.write(u"set client_encoding to 'utf-8';\n\n")
		tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

		cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
		rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
		for row in rows:
			line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
				row['lang'].replace("'", "\\'"),
				row['orig'].replace("'", "\\'"),
				row['trans'].replace("'", "\\'")
			)
			tx_file.write(line)
		tx_file.write(u'\n')

		tx_file.write(u'\set ON_ERROR_STOP 1\n')
	finally:
		tx_file.close()

	return True
681 #------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	"""Delete one translation (by language and original string) from the database."""
	run_rw_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s',
			'args': {'lang': language, 'orig': original}
		}],
		return_data = False,
		end_tx = True
	)
	return True
687 688 #------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	"""Insert/update one translation; returns the args dict that was stored."""
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries (
		queries = [{'cmd': u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)', 'args': args}],
		return_data = False
	)
	return args
694 695 #------------------------------------------------------------------------
def get_translation_languages():
	"""Return the list of languages any translations exist for."""
	lang_rows, idx = run_ro_queries (
		queries = [{'cmd': u'select distinct lang from i18n.translations'}]
	)
	return [ row[0] for row in lang_rows ]
701 702 #------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
	"""Retrieve translatable/translated strings from the database.

	language = None: return rows for all languages (untranslated keys included)
	order_by = None: default ordering by lang, orig

	Returns the rows found.
	"""
	args = {'lang': language}
	_log.debug('language [%s]', language)

	# FIX: this condition used to be inverted, producing
	# "ORDER BY None" whenever no explicit order was given
	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows
774 775 #------------------------------------------------------------------------
def get_current_user_language():
	"""Return the i18n language currently set for the database user."""
	lang_rows, idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
	return lang_rows[0][0]
780 781 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		# unset_curr_lang() returns void, so append a
		# dummy "select True" to have a result to check
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		queries.append({'cmd': u'select True'})
	elif user is None:
		queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
	else:
		queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
813 #------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available.
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)
	run_rw_queries (
		queries = [{
			'cmd': u'select i18n.force_curr_lang(%(lang)s)',
			'args': {'lang': language}
		}]
	)
826 #------------------------------------------------------------------------ 827 #------------------------------------------------------------------------ 828 text_expansion_keywords = None 829
def get_text_expansion_keywords():
	"""Return all text expansion keyword rows, caching them module-wide."""
	global text_expansion_keywords
	if text_expansion_keywords is None:
		# cache miss: load from the database
		cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
		text_expansion_keywords = rows
		_log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
	return text_expansion_keywords
842 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
	"""Return the current user's expansion for <keyword>, None when unknown."""
	# Easter Egg ;-)
	if keyword == u'$$steffi':
		return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

	cmd = u"""SELECT expansion FROM clin.v_your_keyword_expansions WHERE keyword = %(kwd)s"""
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) == 0:
		return None
	return rows[0]['expansion']
856 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
	"""Return known expansion keywords starting with <keyword>."""
	if keyword is None:
		return []
	# make sure the module-level cache is filled
	get_text_expansion_keywords()
	return [ kwd['keyword'] for kwd in text_expansion_keywords if kwd['keyword'].startswith(keyword) ]
870 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
    """Add a keyword/expansion pair, either public or private.

    Returns False when an expansion of the requested visibility
    already exists for <keyword>, True once the row is inserted.
    """
    if public:
        dupe_check_cmd = u"SELECT 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
    else:
        dupe_check_cmd = u"SELECT 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
    rows, idx = run_ro_queries(queries = [{'cmd': dupe_check_cmd, 'args': {'kwd': keyword}}])
    if len(rows) != 0:
        # an expansion of this visibility already exists
        return False

    if public:
        insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
    else:
        insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (SELECT pk from dem.staff where db_user = current_user))"""
    rows, idx = run_rw_queries(queries = [{'cmd': insert_cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

    # invalidate the module-level cache
    global text_expansion_keywords
    text_expansion_keywords = None

    return True
897 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
    """Delete the expansion for <keyword> owned by the current user."""
    cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (SELECT pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
    args = {'kwd': keyword}
    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

    # invalidate the module-level cache
    global text_expansion_keywords
    text_expansion_keywords = None
910 #------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):
    """Replace the current user's private expansion for <keyword>.

    Runs the DELETE and the INSERT in one transaction.
    """
    args = {'kwd': keyword, 'exp': expansion}
    delete_cmd = u"""
DELETE FROM clin.keyword_expansion
WHERE
	keyword = %(kwd)s
	AND
	fk_staff = (SELECT pk FROM dem.staff WHERE db_user = current_user)"""
    insert_cmd = u"""
INSERT INTO clin.keyword_expansion (
	keyword, expansion, fk_staff
) VALUES (
	%(kwd)s,
	%(exp)s,
	(SELECT pk FROM dem.staff WHERE db_user = current_user)
)"""
    rows, idx = run_rw_queries(queries = [
        {'cmd': delete_cmd, 'args': args},
        {'cmd': insert_cmd, 'args': args},
    ])

    # invalidate the module-level cache
    global text_expansion_keywords
    text_expansion_keywords = None
936 # ======================================================================= 937 # query runners and helpers 938 # =======================================================================
def send_maintenance_notification():
    """NOTIFY all listeners that database maintenance is imminent."""
    run_rw_queries (
        queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
        return_data = False
    )
942 #------------------------------------------------------------------------
def send_maintenance_shutdown():
    """NOTIFY all listeners to disconnect for database maintenance."""
    run_rw_queries (
        queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
        return_data = False
    )
946 #------------------------------------------------------------------------
def is_pg_interval(candidate=None):
    """Check whether <candidate> can be cast to a PostgreSQL interval.

    Tries a direct ::interval cast first and, failing that, a
    ::text::interval cast. Returns True/False.
    """
    cmd = u'SELECT %(candidate)s::interval'
    try:
        rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
        return True
    except Exception:
        # was a bare "except:" - do not swallow SystemExit/KeyboardInterrupt
        pass

    cmd = u'SELECT %(candidate)s::text::interval'
    try:
        rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
        return True
    except Exception:
        return False
959 #------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
    """Retrieve bytea data and store it into <filename>.

    See bytea2file_object() for the meaning of the arguments.

    Fixes over the previous version:
    - the output file is closed even when retrieval raises
    - uses open() rather than the deprecated file() builtin
    """
    outfile = open(filename, 'wb')
    try:
        result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
    finally:
        outfile.close()
    return result
965 #------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
    """Store data from a bytea field into a file.

    <data_query>
    - dict {'cmd': ..., 'args': ...}
    - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
    - 'args' must be a dict
    - must return one row with one field of type bytea
    <file>
    - must be a file like Python object
    <data_size>
    - integer of the total size of the expected data or None
    <data_size_query>
    - dict {'cmd': ..., 'args': ...}
    - cmd must be unicode
    - must return one row with one field with the octet_length() of the data field
    - used only when <data_size> is None
    """
    # nothing to transfer
    if data_size == 0:
        return True

    # If the client sets an encoding other than the default we
    # will receive encoding-parsed data which isn't the binary
    # content we want. Hence we need to get our own connection.
    # It must be a read-write one so that we don't affect the
    # encoding for other users of the shared read-only
    # connections.
    # Actually, encodings shouldn't be applied to binary data
    # (eg. bytea types) in the first place but that is only
    # reported to be fixed > v7.4.
    # further tests reveal that at least on PG 8.0 this bug still
    # manifests itself
    # NOTE(review): the comment above says a read-write connection is
    # needed but readonly=True is passed here - confirm intended
    conn = get_raw_connection(readonly=True)

    # determine the size server-side if the caller did not supply it
    if data_size is None:
        rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
        data_size = rows[0][0]
    if data_size in [None, 0]:
        conn.rollback()
        return True

    _log.debug('expecting bytea data of size: [%s] bytes' % data_size)
    _log.debug('using chunk size of: [%s] bytes' % chunk_size)

    # chunk size of 0 means "retrieve whole field at once"
    if chunk_size == 0:
        chunk_size = data_size
        _log.debug('chunk size [0] bytes: retrieving all data at once')

    # Windoze sucks: it can't transfer objects of arbitrary size,
    # anyways, we need to split the transfer,
    # however, only possible if postgres >= 7.2
    needed_chunks, remainder = divmod(data_size, chunk_size)
    _log.debug('chunks to retrieve: [%s]' % needed_chunks)
    _log.debug('remainder to retrieve: [%s] bytes' % remainder)

    # try setting "bytea_output"
    # - fails if not necessary
    # - succeeds if necessary
    try:
        run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
    except dbapi.ProgrammingError:
        _log.debug('failed to set bytea_output to "escape", not necessary')

    # retrieve chunks, skipped if data size < chunk size,
    # does this not carry the danger of cutting up multi-byte escape sequences ?
    # no, since bytea is binary,
    # yes, since in bytea there are *some* escaped values, still
    # no, since those are only escaped during *transfer*, not on-disk, hence
    # only complete escape sequences are put on the wire
    for chunk_id in range(needed_chunks):
        # substring() is 1-based, hence the +1
        chunk_start = (chunk_id * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = chunk_size
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    # retrieve remainder
    if remainder > 0:
        chunk_start = (needed_chunks * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = remainder
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve remaining [%s] bytes' % remainder)
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    # read-only work: rollback releases the transaction
    conn.rollback()
    return True
1065 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
    """Store data from a file into a bytea field.

    The query must:
    - be in unicode
    - contain a format spec identifying the row (eg a primary key)
      matching <args> if it is an UPDATE
    - contain a format spec %(data)s::bytea

    Fixes over the previous version:
    - the input file is closed even when reading raises
    - a locally created connection is closed even when the query raises
    """
    # read data from file
    infile = open(filename, 'rb')
    try:
        data_as_byte_string = infile.read()
    finally:
        infile.close()
    if args is None:
        args = {}
    # buffer() makes psycopg2 treat the payload as binary (Python 2)
    args['data'] = buffer(data_as_byte_string)
    del(data_as_byte_string)

    # insert the data
    if conn is None:
        conn = get_raw_connection(readonly=False)
        close_conn = True
    else:
        close_conn = False

    try:
        run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
    finally:
        # only close what we created ourselves
        if close_conn:
            conn.close()

    return
1097 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
    """Escape input for use in a PostgreSQL regular expression.

    If a fragment comes from user input and is to be used
    as a regular expression we need to make sure it doesn't
    contain invalid regex patterns such as unbalanced ('s.

    <escape_all>
        True: try to escape *all* metacharacters
        False: only escape those which render the regex invalid

    Fix: <escape_all> was previously accepted but ignored; it now
    additionally escapes backslash, ], {, }, ?, |, ^ and $. The
    default (False) behavior is unchanged.
    """
    # metacharacters which render a regex outright invalid when unbalanced
    invalidating_chars = '()[+.*'
    # remaining ARE metacharacters, only escaped on request
    other_meta_chars = ']{}?|^$'

    if escape_all:
        # escape the escape character first lest we double-escape below
        expression = expression.replace('\\', '\\\\')

    sanitized = expression
    for char in invalidating_chars:
        sanitized = sanitized.replace(char, '\\' + char)
    if escape_all:
        for char in other_meta_chars:
            sanitized = sanitized.replace(char, '\\' + char)
    return sanitized
#------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
    """Run read-only queries.

    <queries> must be a list of dicts:
        [
            {'cmd': <string>, 'args': <dict> or <tuple>},
            {...},
            ...
        ]

    <link_obj> may be a cursor, a connection, or None (in which
    case a pooled read-only connection is obtained).
    <return_data>: if True, fetchall() the last query's rows.
    <get_col_idx>: if True, also build a {column name: index} map.

    Returns a tuple (data, col_idx).
    """
    # pick cleanup callables depending on link ownership: we must
    # not close/rollback a cursor or connection the caller owns
    if isinstance(link_obj, dbapi._psycopg.cursor):
        curs = link_obj
        curs_close = __noop
        tx_rollback = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        curs = link_obj.cursor()
        curs_close = curs.close
        tx_rollback = link_obj.rollback
    elif link_obj is None:
        conn = get_connection(readonly=True, verbose=verbose)
        curs = conn.cursor()
        curs_close = curs.close
        tx_rollback = conn.rollback
    else:
        raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

    if verbose:
        _log.debug('cursor: %s', curs)

    for query in queries:
        # Python 2: queries are expected to be unicode strings
        if type(query['cmd']) is not types.UnicodeType:
            print "run_ro_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
            if verbose:
                _log.debug('ran query: [%s]', curs.query)
                if curs.statusmessage != u'':
                    _log.debug('PG status message: %s', curs.statusmessage)
                _log.debug('cursor description: %s', str(curs.description))
        except dbapi.Error as pg_exc:
            # log all available PG detail, then clean up before re-raising
            _log.error('query failed: [%s]', curs.query)
            if curs.statusmessage != u'':
                _log.error('PG status message: %s', curs.statusmessage)
            _log.error('PG error code: %s', pg_exc.pgcode)
            _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()        # need to rollback so ABORT state isn't preserved in pooled conns
            # translate insufficient privileges into a dedicated exception
            if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
                details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
                if curs.statusmessage != u'':
                    details = u'Status: %s\n%s' % (
                        curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
                        details
                    )
                raise gmExceptions.AccessDenied (
                    u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n')),
                    source = u'PostgreSQL',
                    code = pg_exc.pgcode,
                    details = details
                )
            raise
        except:
            # non-PG errors: log, clean up, re-raise unchanged
            _log.error('query failed: [%s]', curs.query)
            if curs.statusmessage != u'':
                _log.error('PG status message: %s', curs.statusmessage)
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()        # need to rollback so ABORT state isn't preserved in pooled conns
            raise

    data = None
    col_idx = None
    if return_data:
        data = curs.fetchall()
        if verbose:
            _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
            _log.debug('cursor description: %s', str(curs.description))
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    tx_rollback()        # rollback just so that we don't stay IDLE IN TRANSACTION forever
    return (data, col_idx)
1217 #------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
    """Convenience function for running a transaction
    that is supposed to get committed.

    <link_obj>
        can be either:
        - a cursor
        - a connection

    <queries>
        is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
        to be executed as a single transaction, the last
        query may usefully return rows (such as a
        "SELECT currval('some_sequence')" statement)

    <end_tx>
        - controls whether the transaction is finalized (eg.
          committed/rolled back) or not, this allows the
          call to run_rw_queries() to be part of a framing
          transaction
        - if link_obj is a connection then <end_tx> will
          default to False unless it is explicitly set to
          True which is taken to mean "yes, you do have full
          control over the transaction" in which case the
          transaction is properly finalized
        - if link_obj is a cursor we CANNOT finalize the
          transaction because we would need the connection for that
        - if link_obj is None <end_tx> will, of course, always be True

    <return_data>
        - if true, the returned data will include the rows
          the last query selected
        - if false, it returns None instead

    <get_col_idx>
        - if true, the returned data will include a dictionary
          mapping field names to column positions
        - if false, the returned data returns None instead

    method result:
        - returns a tuple (data, idx)
        - <data>:
            * (None, None) if last query did not return rows
            * ("fetchall() result", <index>) if last query returned any rows
            * for <index> see <get_col_idx>
    """
    # pick commit/rollback/close callables based on link ownership:
    # only finalize what we created (or were given control over via end_tx)
    if isinstance(link_obj, dbapi._psycopg.cursor):
        conn_close = __noop
        conn_commit = __noop
        tx_rollback = __noop
        curs = link_obj
        curs_close = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        conn_close = __noop
        if end_tx:
            conn_commit = link_obj.commit
            tx_rollback = link_obj.rollback
        else:
            conn_commit = __noop
            tx_rollback = __noop
        curs = link_obj.cursor()
        curs_close = curs.close
    elif link_obj is None:
        conn = get_connection(readonly=False)
        conn_close = conn.close
        conn_commit = conn.commit
        tx_rollback = conn.rollback
        curs = conn.cursor()
        curs_close = curs.close
    else:
        raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

    for query in queries:
        # Python 2: queries are expected to be unicode strings
        if type(query['cmd']) is not types.UnicodeType:
            print "run_rw_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
        except dbapi.Error as pg_exc:
            # log all available PG detail, then clean up before re-raising
            _log.error('RW query failed: [%s]', curs.query)
            if curs.statusmessage != u'':
                _log.error('PG status message: %s', curs.statusmessage)
            _log.error('PG error code: %s', pg_exc.pgcode)
            _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
            try:
                curs_close()
                tx_rollback()        # just for good measure
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
            # translate insufficient privileges into a dedicated exception
            if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
                details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
                if curs.statusmessage != u'':
                    details = u'Status: %s\n%s' % (
                        curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
                        details
                    )
                raise gmExceptions.AccessDenied (
                    u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n')),
                    source = u'PostgreSQL',
                    code = pg_exc.pgcode,
                    details = details
                )
            raise
        except:
            _log.exception('error running RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                tx_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                # NOTE(review): this re-raises the cleanup InterfaceError,
                # thereby masking the original query error - confirm intended
                raise
            raise

    data = None
    col_idx = None
    if return_data:
        try:
            data = curs.fetchall()
        except:
            _log.exception('error fetching data from RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                tx_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                # NOTE(review): same masking of the original error as above
                raise
            raise
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    conn_commit()
    conn_close()

    return (data, col_idx)
1362 #------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
    """Generate and run SQL for an INSERT query.

    values: dict of values keyed by field to insert them into
    returning: list of field names to RETURN, or None
    """
    if schema is None:
        schema = u'public'

    # iterate .keys() once so fields and placeholders line up
    fields = values.keys()
    val_snippets = [ u'%%(%s)s' % field for field in fields ]

    if returning is None:
        returning = u''
        return_data = False
    else:
        returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
        return_data = True

    cmd = u'\nINSERT INTO %s.%s (\n\t\t%s\n\t) VALUES (\n\t\t%s\n\t)%s' % (
        schema,
        table,
        u',\n\t\t'.join(fields),
        u',\n\t\t'.join(val_snippets),
        returning
    )

    _log.debug(u'running SQL: >>>%s<<<', cmd)

    return run_rw_queries (
        link_obj = link_obj,
        queries = [{'cmd': cmd, 'args': values}],
        end_tx = end_tx,
        return_data = return_data,
        get_col_idx = get_col_idx,
        verbose = verbose
    )
1405 # ======================================================================= 1406 # connection handling API 1407 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
    """
    GNUmed database connection pool.

    Extends psycopg2's PersistentConnectionPool with
    a custom _connect() function. Supports one connection
    per thread - which also ties it to one particular DSN.
    """
    #--------------------------------------------------
    def _connect(self, key=None):
        # create a read-only connection whose close() raises, so that
        # pooled connections cannot be closed behind the pool's back
        conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

        conn.original_close = conn.close
        conn.close = _raise_exception_on_ro_conn_close

        # bookkeeping mirrors psycopg2's pool internals:
        # _used: key -> conn, _rused: id(conn) -> key, _pool: free list
        # NOTE(review): relies on psycopg2 private attributes - verify on upgrades
        if key is not None:
            self._used[key] = conn
            self._rused[id(conn)] = key
        else:
            self._pool.append(conn)

        return conn
    #--------------------------------------------------
    def shutdown(self):
        # close all handed-out connections, bypassing the overridden
        # close() via the saved original_close()
        for conn_key in self._used.keys():
            _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
            self._used[conn_key].original_close()
1436 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
    """Get a raw, unadorned connection.

    - this will not set any parameters such as encoding, timezone, datestyle
    - the only requirement is a valid DSN
    - hence it can be used for "service" connections
      for verifying encodings etc
    """
    # FIXME: support verbose
    if dsn is None:
        dsn = get_default_dsn()

    # the public demo database moved hosts at some point
    if u'host=salaam.homeunix' in dsn:
        raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

    try:
        conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
    except dbapi.OperationalError, e:

        t, v, tb = sys.exc_info()
        try:
            msg = e.args[0]
        except (AttributeError, IndexError, TypeError):
            raise

        msg = unicode(msg, gmI18N.get_encoding(), 'replace')

        # heuristics to recognize authentication failures in the
        # (localized) error text and map them to cAuthenticationError
        if msg.find('fe_sendauth') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        if regex.search('user ".*" does not exist', msg) is not None:
            raise cAuthenticationError, (dsn, msg), tb

        if msg.find('uthenti') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        raise

    _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

    # do first-time stuff: detect server version and log database size
    # (cached in module-level globals so this runs only once per process)
    global postgresql_version
    if postgresql_version is None:
        curs = conn.cursor()
        curs.execute("""
            SELECT
                substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
            FROM
                pg_settings
            WHERE
                name = 'server_version'
        """)
        postgresql_version = curs.fetchone()['version']
        _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
        try:
            curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
            _log.info('database size: %s', curs.fetchone()[0])
        except:
            # size logging is best-effort only
            pass
        if verbose:
            __log_PG_settings(curs=curs)
        curs.close()
        conn.commit()

    if _default_client_timezone is None:
        __detect_client_timezone(conn = conn)

    curs = conn.cursor()

    # set access mode
    if readonly:
        _log.debug('access mode [READ ONLY]')
        cmd = 'set session characteristics as transaction READ ONLY'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to on'
        curs.execute(cmd)
    else:
        _log.debug('access mode [READ WRITE]')
        cmd = 'set session characteristics as transaction READ WRITE'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to off'
        curs.execute(cmd)

    curs.close()
    conn.commit()

    # not yet decorated with encoding/isolation level/timezone
    # (that is get_connection()'s job)
    conn.is_decorated = False

    return conn
1526 # =======================================================================
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
    """Get a new connection.

    This assumes the locale system has been initialized
    unless an encoding is specified.
    """
    # FIXME: support pooled on RW, too
    # FIXME: for now, support the default DSN only
    if pooled and readonly and (dsn is None):
        global __ro_conn_pool
        if __ro_conn_pool is None:
            __ro_conn_pool = cConnectionPool (
                minconn = 1,
                maxconn = 2,
                dsn = dsn,
                verbose = verbose
            )
        conn = __ro_conn_pool.getconn()
    else:
        # NOTE(review): readonly is forced to False here even when the
        # caller asked for an unpooled read-only connection - confirm intended
        conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)

    # pooled connections are decorated once and then reused as-is
    if conn.is_decorated:
        return conn

    if encoding is None:
        encoding = _default_client_encoding
    if encoding is None:
        encoding = gmI18N.get_encoding()
        _log.warning('client encoding not specified')
        _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
        _log.warning('for this to work properly the application MUST have called locale.setlocale() before')

    # set connection properties
    # - client encoding
    try:
        conn.set_client_encoding(encoding)
    except dbapi.OperationalError:
        t, v, tb = sys.exc_info()
        if str(v).find("can't set encoding to") != -1:
            raise cEncodingError, (encoding, v), tb
        raise

    # - transaction isolation level
    if readonly:
        iso_level = u'read committed'
    else:
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
        iso_level = u'serializable'

    _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)

    curs = conn.cursor()

    # - client time zone
    curs.execute(_sql_set_timezone, [_default_client_timezone])

    conn.commit()

    # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
    # FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
    #
    # - bytea data format
    # PG 9.0 switched to - by default - using "hex" rather than "escape",
    # however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
    # as the transmission mode for bytea output,
    # so try to set this setting back to "escape",
    # if that's not possible the reason will be that PG < 9.0 does not support
    # that setting - which also means we don't need it and can ignore the
    # failure
    cmd = "set bytea_output to 'escape'"
    try:
        curs.execute(cmd)
    except dbapi.ProgrammingError:
        _log.error('cannot set bytea_output format')

    curs.close()
    conn.commit()

    conn.is_decorated = True

    return conn
1608 #-----------------------------------------------------------------------
def shutdown():
    """Close all pooled read-only connections, if a pool was ever created."""
    if __ro_conn_pool is not None:
        __ro_conn_pool.shutdown()
1613 # ====================================================================== 1614 # internal helpers 1615 #-----------------------------------------------------------------------
def __noop():
    # stand-in for close()/commit()/rollback() callables in the
    # run_*_queries() helpers when the caller provided the
    # cursor/connection and therefore owns its lifecycle
    pass
1618 #-----------------------------------------------------------------------
1619 -def _raise_exception_on_ro_conn_close():
1620 raise TypeError(u'close() called on read-only connection')
1621 #-----------------------------------------------------------------------
def log_database_access(action=None):
    """Record <action> in the gm.access_log audit table."""
    audit_row = {u'user_action': action}
    run_insert (
        schema = u'gm',
        table = u'access_log',
        values = audit_row,
        end_tx = True
    )
1629 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
    """Check server time and local time to be within
    the given tolerance of each other.

    tolerance: seconds

    Returns False when the wire roundtrip alone exceeds the
    tolerance (measurement useless) or when the skew does.
    """
    _log.debug('maximum skew tolerance (seconds): %s', tolerance)

    cmd = u"SELECT now() at time zone 'UTC'"
    conn = get_raw_connection(readonly=True)
    curs = conn.cursor()

    start = time.time()
    rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
    end = time.time()
    client_now_as_utc = pydt.datetime.utcnow()

    curs.close()
    conn.commit()

    server_now_as_utc = rows[0][0]
    query_duration = end - start
    _log.info('server "now" (UTC): %s', server_now_as_utc)
    _log.info('client "now" (UTC): %s', client_now_as_utc)
    _log.debug('wire roundtrip (seconds): %s', query_duration)

    if query_duration > tolerance:
        _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
        return False

    # direction of the skew does not matter, only its magnitude
    real_skew = abs(server_now_as_utc - client_now_as_utc)
    _log.debug('client/server time skew: %s', real_skew)

    if real_skew > pydt.timedelta(seconds = tolerance):
        _log.error('client/server time skew > tolerance')
        return False

    return True
1672 #-----------------------------------------------------------------------
def sanity_check_database_settings():
    """Checks database settings.

    returns (status, message)
    status:
        0: no problem
        1: non-fatal problem
        2: fatal problem
    """
    _log.debug('checking database settings')

    conn = get_connection()

    # - version string (cached in a module-level global, fetched once)
    global postgresql_version_string
    if postgresql_version_string is None:
        curs = conn.cursor()
        curs.execute('SELECT version()')
        postgresql_version_string = curs.fetchone()['version']
        curs.close()
        _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

    options2check = {
        # setting: [expected value, risk, fatal?]
        # fatal?: True = fatal, False = warn only, None = log only
        u'allow_system_table_mods': [u'off', u'system breakage', False],
        u'check_function_bodies': [u'on', u'suboptimal error detection', False],
        u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
        u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
        u'default_transaction_read_only': [u'on', u'accidental database writes', False],
        u'fsync': [u'on', u'data loss/corruption', True],
        u'full_page_writes': [u'on', u'data loss/corruption', False],
        u'lc_messages': [u'C', u'suboptimal error detection', False],
        u'password_encryption': [u'on', u'breach of confidentiality', False],
        u'regex_flavor': [u'advanced', u'query breakage', False],    # 9.0 doesn't support this anymore, default now advanced anyway
        u'synchronous_commit': [u'on', u'data loss/corruption', False],
        u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
    }

    from Gnumed.pycommon import gmCfg2
    _cfg = gmCfg2.gmCfgData()
    # HIPAA mode upgrades connection logging from "log only" to fatal
    if _cfg.get(option = u'hipaa'):
        options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
        options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
    else:
        options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
        options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

    cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
    rows, idx = run_ro_queries (
        link_obj = conn,
        queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
        get_col_idx = False
    )

    found_error = False
    found_problem = False
    msg = []
    for row in rows:
        option = row['name']
        value_found = row['setting']
        value_expected = options2check[option][0]
        risk = options2check[option][1]
        fatal_setting = options2check[option][2]
        if value_found != value_expected:
            if fatal_setting is True:
                found_error = True
            elif fatal_setting is False:
                found_problem = True
            elif fatal_setting is None:
                # log only, does not affect the return status
                pass
            else:
                _log.error(options2check[option])
                raise ValueError(u'invalid database configuration sanity check')
            msg.append(_(' option [%s]: %s') % (option, value_found))
            msg.append(_(' risk: %s') % risk)
            _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

    if found_error:
        return 2, u'\n'.join(msg)

    if found_problem:
        return 1, u'\n'.join(msg)

    return 0, u''
1757 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
    """Log all PG runtime settings via >>>show all<<<.

    Deliberately avoids the run_*() helpers since those might
    recurse into connection setup and loop if we fail here.
    Returns True on success, False otherwise.
    """
    # FIXME: use pg_settings
    try:
        curs.execute(u'show all')
    except Exception:
        # was a bare "except:" - do not swallow SystemExit/KeyboardInterrupt
        _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
        return False
    settings = curs.fetchall()
    if settings is None:
        _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
        return False
    for setting in settings:
        _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
    return True
1774 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
	"""Return the first argument of <exc> decoded to unicode.

	Falls back to a fixed message when <exc> carries no usable args.
	"""
	fallback = u'cannot extract message from exception'
	try:
		raw_msg = exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return fallback
	# decode with the client encoding, replacing undecodable bytes
	return unicode(raw_msg, gmI18N.get_encoding(), 'replace')
1783 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
	"""Raised when authenticating against the PostgreSQL server fails.

	Carries the DSN that was tried and the original error text.
	"""

	def __init__(self, dsn=None, prev_val=None):
		self.dsn = dsn				# the DSN the connect attempt used
		self.prev_val = prev_val	# original error from psycopg2

	def _format_msg(self):
		# single place for the message layout
		return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		msg = self._format_msg()
		_log.error(msg)
		return msg.encode(gmI18N.get_encoding(), 'replace')

	def __unicode__(self):
		return self._format_msg()
1798 1799 # ======================================================================= 1800 # custom psycopg2 extensions 1801 # =======================================================================
class cEncodingError(dbapi.OperationalError):
	"""Raised when the requested client encoding cannot be set.

	Carries the encoding that was requested and the original error text.
	"""

	def __init__(self, encoding=None, prev_val=None):
		self.encoding = encoding	# the encoding that was requested
		self.prev_val = prev_val	# original error from psycopg2

	def __str__(self):
		_log.warning('%s.__str__() called', self.__class__.__name__)
		enc = gmI18N.get_encoding()
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(enc, 'replace'), self.encoding.encode(enc, 'replace'))

	def __unicode__(self):
		return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1814 1815 # ----------------------------------------------------------------------- 1816 # Python -> PostgreSQL 1817 # ----------------------------------------------------------------------- 1818 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
class cAdapterPyDateTime(object):
	"""psycopg2 adapter rendering datetime.datetime values as SQL timestamps.

	Only timezone-aware datetimes are accepted -- naive ones would
	silently be interpreted in whatever zone the server assumes.
	"""

	def __init__(self, dt):
		if dt.tzinfo is None:
			# note the %-chain: the template is embedded first,
			# then its own %s slot is filled with the ISO string
			raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		# produce the SQL literal psycopg2 inlines into the query
		iso_string = self.__dt.isoformat()
		return _timestamp_template % iso_string
## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##	def __init__(self, dt):
##		if dt.tz == '???':
##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##		self.__dt = dt
##
##	def getquoted(self):
##		# under some locale settings the mx.DateTime ISO formatter
##		# will insert "," into the ISO string,
##		# while this is allowed per the ISO8601 spec PostgreSQL
##		# cannot currently handle that,
##		# so map those "," to "." to make things work:
##		return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
# NOTE(review): these lines use the name <psycopg2> while the import at
# the top of this file reads <import psycopg2 as dbapi> -- presumably a
# plain <import psycopg2> exists elsewhere in the file; verify.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#	import mx.DateTime as mxDT
##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

	# only run the self tests when explicitly asked to:  <script> test
	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#--------------------------------------------------------------------
1885 - def test_file2bytea():
1886 run_rw_queries(queries = [ 1887 {'cmd': u'create table test_bytea (data bytea)'} 1888 ]) 1889 1890 cmd = u'insert into test_bytea values (%(data)s::bytea)' 1891 try: 1892 file2bytea(query = cmd, filename = sys.argv[2]) 1893 except: 1894 _log.exception('error') 1895 1896 run_rw_queries(queries = [ 1897 {'cmd': u'drop table test_bytea'} 1898 ])
1899 #--------------------------------------------------------------------
1900 - def test_get_connection():
1901 print "testing get_connection()" 1902 1903 dsn = 'foo' 1904 try: 1905 conn = get_connection(dsn=dsn) 1906 except dbapi.OperationalError, e: 1907 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1908 t, v = sys.exc_info()[:2] 1909 print ' ', t 1910 print ' ', v 1911 1912 dsn = 'dbname=gnumed_v9' 1913 try: 1914 conn = get_connection(dsn=dsn) 1915 except cAuthenticationError: 1916 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1917 t, v = sys.exc_info()[:2] 1918 print ' ', t 1919 print ' ', v 1920 1921 dsn = 'dbname=gnumed_v9 user=abc' 1922 try: 1923 conn = get_connection(dsn=dsn) 1924 except cAuthenticationError: 1925 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1926 t, v = sys.exc_info()[:2] 1927 print ' ', t 1928 print ' ', v 1929 1930 dsn = 'dbname=gnumed_v9 user=any-doc' 1931 try: 1932 conn = get_connection(dsn=dsn) 1933 except cAuthenticationError: 1934 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1935 t, v = sys.exc_info()[:2] 1936 print ' ', t 1937 print ' ', v 1938 1939 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1940 try: 1941 conn = get_connection(dsn=dsn) 1942 except cAuthenticationError: 1943 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1944 t, v = sys.exc_info()[:2] 1945 print ' ', t 1946 print ' ', v 1947 1948 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1949 conn = get_connection(dsn=dsn, readonly=True) 1950 1951 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1952 conn = get_connection(dsn=dsn, readonly=False) 1953 1954 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1955 encoding = 'foo' 1956 try: 1957 conn = get_connection(dsn=dsn, encoding=encoding) 1958 except cEncodingError: 1959 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1960 t, v = sys.exc_info()[:2] 1961 print ' ', t 1962 print ' ', v
1963 #--------------------------------------------------------------------
1964 - def test_exceptions():
1965 print "testing exceptions" 1966 1967 try: 1968 raise cAuthenticationError('no dsn', 'no previous exception') 1969 except cAuthenticationError: 1970 t, v, tb = sys.exc_info() 1971 print t 1972 print v 1973 print tb 1974 1975 try: 1976 raise cEncodingError('no dsn', 'no previous exception') 1977 except cEncodingError: 1978 t, v, tb = sys.exc_info() 1979 print t 1980 print v 1981 print tb
1982 #--------------------------------------------------------------------
1983 - def test_ro_queries():
1984 print "testing run_ro_queries()" 1985 1986 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1987 conn = get_connection(dsn, readonly=True) 1988 1989 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1990 print data 1991 print idx 1992 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 1993 print data 1994 print idx 1995 1996 curs = conn.cursor() 1997 1998 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1999 print data 2000 print idx 2001 2002 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 2003 print data 2004 print idx 2005 2006 try: 2007 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 2008 print data 2009 print idx 2010 except psycopg2.ProgrammingError: 2011 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 2012 t, v = sys.exc_info()[:2] 2013 print ' ', t 2014 print ' ', v 2015 2016 curs.close()
2017 #--------------------------------------------------------------------
2018 - def test_request_dsn():
2019 conn = get_connection() 2020 print conn 2021 conn.close()
2022 #--------------------------------------------------------------------
2023 - def test_set_encoding():
2024 print "testing set_default_client_encoding()" 2025 2026 enc = 'foo' 2027 try: 2028 set_default_client_encoding(enc) 2029 print "SUCCESS: encoding [%s] worked" % enc 2030 except ValueError: 2031 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2032 t, v = sys.exc_info()[:2] 2033 print ' ', t 2034 print ' ', v 2035 2036 enc = '' 2037 try: 2038 set_default_client_encoding(enc) 2039 print "SUCCESS: encoding [%s] worked" % enc 2040 except ValueError: 2041 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2042 t, v = sys.exc_info()[:2] 2043 print ' ', t 2044 print ' ', v 2045 2046 enc = 'latin1' 2047 try: 2048 set_default_client_encoding(enc) 2049 print "SUCCESS: encoding [%s] worked" % enc 2050 except ValueError: 2051 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2052 t, v = sys.exc_info()[:2] 2053 print ' ', t 2054 print ' ', v 2055 2056 enc = 'utf8' 2057 try: 2058 set_default_client_encoding(enc) 2059 print "SUCCESS: encoding [%s] worked" % enc 2060 except ValueError: 2061 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2062 t, v = sys.exc_info()[:2] 2063 print ' ', t 2064 print ' ', v 2065 2066 enc = 'unicode' 2067 try: 2068 set_default_client_encoding(enc) 2069 print "SUCCESS: encoding [%s] worked" % enc 2070 except ValueError: 2071 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2072 t, v = sys.exc_info()[:2] 2073 print ' ', t 2074 print ' ', v 2075 2076 enc = 'UNICODE' 2077 try: 2078 set_default_client_encoding(enc) 2079 print "SUCCESS: encoding [%s] worked" % enc 2080 except ValueError: 2081 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2082 t, v = sys.exc_info()[:2] 2083 print ' ', t 2084 print ' ', v
2085 #--------------------------------------------------------------------
2086 - def test_connection_pool():
2087 dsn = get_default_dsn() 2088 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 2089 print pool 2090 print pool.getconn() 2091 print pool.getconn() 2092 print pool.getconn() 2093 print type(pool.getconn())
2094 #--------------------------------------------------------------------
2095 - def test_list_args():
2096 dsn = get_default_dsn() 2097 conn = get_connection(dsn, readonly=True) 2098 curs = conn.cursor() 2099 curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
2100 #--------------------------------------------------------------------
2101 - def test_sanitize_pg_regex():
2102 tests = [ 2103 ['(', '\\('] 2104 , ['[', '\\['] 2105 , [')', '\\)'] 2106 ] 2107 for test in tests: 2108 result = sanitize_pg_regex(test[0]) 2109 if result != test[1]: 2110 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2111 #--------------------------------------------------------------------
2112 - def test_is_pg_interval():
2113 status = True 2114 tests = [ 2115 [None, True], # None == NULL == succeeds ! 2116 [1, True], 2117 ['1', True], 2118 ['abc', False] 2119 ] 2120 2121 if not is_pg_interval(): 2122 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2123 status = False 2124 2125 for test in tests: 2126 result = is_pg_interval(test[0]) 2127 if result != test[1]: 2128 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2129 status = False 2130 2131 return status
2132 #--------------------------------------------------------------------
2133 - def test_sanity_check_time_skew():
2134 sanity_check_time_skew()
2135 #--------------------------------------------------------------------
2136 - def test_keyword_expansion():
2137 print "keywords, from database:" 2138 print get_text_expansion_keywords() 2139 print "keywords, cached:" 2140 print get_text_expansion_keywords() 2141 print "'$keyword' expands to:" 2142 print expand_keyword(keyword = u'$dvt')
2143 #--------------------------------------------------------------------
2144 - def test_get_foreign_key_details():
2145 for row in get_foreign_keys2column ( 2146 schema = u'dem', 2147 table = u'identity', 2148 column = u'pk' 2149 ): 2150 print '%s.%s references %s.%s.%s' % ( 2151 row['referencing_table'], 2152 row['referencing_column'], 2153 row['referenced_schema'], 2154 row['referenced_table'], 2155 row['referenced_column'] 2156 )
2157 #--------------------------------------------------------------------
2158 - def test_set_user_language():
2159 # (user, language, result, exception type) 2160 tests = [ 2161 # current user 2162 [None, 'de_DE', True], 2163 [None, 'lang_w/o_tx', False], 2164 [None, None, True], 2165 # valid user 2166 ['any-doc', 'de_DE', True], 2167 ['any-doc', 'lang_w/o_tx', False], 2168 ['any-doc', None, True], 2169 # invalid user 2170 ['invalid user', 'de_DE', None], 2171 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 2172 ['invalid user', None, True] 2173 ] 2174 for test in tests: 2175 try: 2176 result = set_user_language(user = test[0], language = test[1]) 2177 if result != test[2]: 2178 print "test:", test 2179 print "result:", result, "expected:", test[2] 2180 except psycopg2.IntegrityError, e: 2181 if test[2] is None: 2182 continue 2183 print "test:", test 2184 print "expected exception" 2185 print "result:", e
2186 #--------------------------------------------------------------------
2187 - def test_get_schema_revision_history():
2188 for line in get_schema_revision_history(): 2189 print u' - '.join(line)
2190 #--------------------------------------------------------------------
2191 - def test_run_query():
2192 gmDateTime.init() 2193 args = {'dt': gmDateTime.pydt_max_here()} 2194 cmd = u"SELECT %(dt)s" 2195 2196 #cmd = u"SELECT 'infinity'::timestamp with time zone" 2197 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2198 print rows
2199 #--------------------------------------------------------------------
2200 - def test_schema_exists():
2201 print schema_exists()
	#--------------------------------------------------------------------
	# run tests -- uncomment the test(s) to execute
	#test_file2bytea()
	#test_get_connection()
	#test_exceptions()
	#test_ro_queries()
	#test_request_dsn()
	#test_set_encoding()
	#test_connection_pool()
	#test_list_args()
	#test_sanitize_pg_regex()
	#test_is_pg_interval()
	#test_sanity_check_time_skew()
	#test_keyword_expansion()
	#test_get_foreign_key_details()
	#test_set_user_language()
	#test_get_schema_revision_history()
	#test_run_query()
	test_schema_exists()

# ======================================================================