
Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  15  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  16   
  17  # stdlib 
  18  import time 
  19  import sys 
  20  import os 
  21  import codecs 
  22  import types 
  23  import logging 
  24  import datetime as pydt 
  25  import re as regex 
  26   
  27   
  28  # GNUmed 
  29  if __name__ == '__main__': 
  30          sys.path.insert(0, '../../') 
  31  from Gnumed.pycommon import gmLoginInfo 
  32  from Gnumed.pycommon import gmExceptions 
  33  from Gnumed.pycommon import gmDateTime 
  34  from Gnumed.pycommon import gmBorg 
  35  from Gnumed.pycommon import gmI18N 
  36  from Gnumed.pycommon import gmLog2 
  37  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  38   
  39  _log = logging.getLogger('gm.db') 
  40   
  41   
  42  # 3rd party 
  43  try: 
  44          import psycopg2 as dbapi 
  45  except ImportError: 
  46          _log.exception("Python database adapter psycopg2 not found.") 
  47          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  48          raise 
  49   
  50   
  51  _log.info('psycopg2 version: %s' % dbapi.__version__) 
  52  _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle)) 
  53  if not (float(dbapi.apilevel) >= 2.0): 
  54          raise ImportError('gmPG2: supported DB-API level too low') 
  55  if not (dbapi.threadsafety > 0): 
  56          raise ImportError('gmPG2: lacking minimum thread safety in psycopg2') 
  57  if not (dbapi.paramstyle == 'pyformat'): 
  58          raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2') 
  59  try: 
  60          dbapi.__version__.index('dt') 
  61  except ValueError: 
  62          raise ImportError('gmPG2: lacking datetime support in psycopg2') 
  63  try: 
  64          dbapi.__version__.index('ext') 
  65  except ValueError: 
  66          raise ImportError('gmPG2: lacking extensions support in psycopg2') 
  67  try: 
  68          dbapi.__version__.index('pq3') 
  69  except ValueError: 
  70          raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2') 
  71   
  72  import psycopg2.extras 
  73  import psycopg2.extensions 
  74  import psycopg2.pool 
  75  import psycopg2.errorcodes as sql_error_codes 
  76   
  77  # ======================================================================= 
  78  _default_client_encoding = 'UTF8' 
  79  _log.info('assuming default client encoding of [%s]' % _default_client_encoding) 
  80   
  81  # timezone handling 
  82  _default_client_timezone = None                 # default time zone for connections 
  83  _sql_set_timezone = None 
  84  _timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted will not work 
  85  FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone 
  86   
  87  _default_dsn = None 
  88  _default_login = None 
  89   
  90  postgresql_version_string = None 
  91  postgresql_version = None                       # accuracy: major.minor 
  92   
  93  __ro_conn_pool = None 
  94   
  95  auto_request_login_params = True 
  96  # ======================================================================= 
  97  # global data 
  98  # ======================================================================= 
  99   
 100  known_schema_hashes = { 
 101          0: 'not released, testing only', 
 102          2: 'b09d50d7ed3f91ddf4c4ddb8ea507720', 
 103          3: 'e73718eaf230d8f1d2d01afa8462e176', 
 104          4: '4428ccf2e54c289136819e701bb095ea', 
 105          5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes 
 106          6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ... 
 107          7: '6c9f6d3981483f8e9433df99d1947b27', 
 108          8: '89b13a7af83337c3aad153b717e52360', 
 109          9: '641a9b2be3c378ffc2bb2f0b1c9f051d', 
 110          10: '7ef42a8fb2bd929a2cdd0c63864b4e8a', 
 111          11: '03042ae24f3f92877d986fb0a6184d76', 
 112          12: '06183a6616db62257e22814007a8ed07', 
 113          13: 'fab7c1ae408a6530c47f9b5111a0841e', 
 114          14: 'e170d543f067d1ea60bfe9076b1560cf', 
 115          15: '70012ff960b77ecdff4981c94b5b55b6', 
 116          16: '0bcf44ca22c479b52976e5eda1de8161', 
 117          17: '161428ee97a00e3bf56168c3a15b7b50', 
 118          18: 'a0f9efcabdecfb4ddb6d8c0b69c02092' 
 119  } 
 120   
 121  map_schema_hash2version = { 
 122          'b09d50d7ed3f91ddf4c4ddb8ea507720': 2, 
 123          'e73718eaf230d8f1d2d01afa8462e176': 3, 
 124          '4428ccf2e54c289136819e701bb095ea': 4, 
 125          '7e7b093af57aea48c288e76632a382e5': 5, 
 126          '90e2026ac2efd236da9c8608b8685b2d': 6, 
 127          '6c9f6d3981483f8e9433df99d1947b27': 7, 
 128          '89b13a7af83337c3aad153b717e52360': 8, 
 129          '641a9b2be3c378ffc2bb2f0b1c9f051d': 9, 
 130          '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10, 
 131          '03042ae24f3f92877d986fb0a6184d76': 11, 
 132          '06183a6616db62257e22814007a8ed07': 12, 
 133          'fab7c1ae408a6530c47f9b5111a0841e': 13, 
 134          'e170d543f067d1ea60bfe9076b1560cf': 14, 
 135          '70012ff960b77ecdff4981c94b5b55b6': 15, 
 136          '0bcf44ca22c479b52976e5eda1de8161': 16, 
 137          '161428ee97a00e3bf56168c3a15b7b50': 17, 
 138          'a0f9efcabdecfb4ddb6d8c0b69c02092': 18 
 139  } 
 140   
 141  map_client_branch2required_db_version = { 
 142          u'GIT tree': 0, 
 143          u'0.3': 9, 
 144          u'0.4': 10, 
 145          u'0.5': 11, 
 146          u'0.6': 12, 
 147          u'0.7': 13, 
 148          u'0.8': 14, 
 149          u'0.9': 15, 
 150          u'1.0': 16,             # intentional duplicate with 1.1 
 151          u'1.1': 16, 
 152          u'1.2': 17, 
 153          u'1.3': 18 
 154  } 
 155   
 156  # get columns and data types for a given table 
 157  query_table_col_defs = u"""select 
 158          cols.column_name, 
 159          cols.udt_name 
 160  from 
 161          information_schema.columns cols 
 162  where 
 163          cols.table_schema = %s 
 164                  and 
 165          cols.table_name = %s 
 166  order by 
 167          cols.ordinal_position""" 
 168   
 169  query_table_attributes = u"""select 
 170          cols.column_name 
 171  from 
 172          information_schema.columns cols 
 173  where 
 174          cols.table_schema = %s 
 175                  and 
 176          cols.table_name = %s 
 177  order by 
 178          cols.ordinal_position""" 
 179   
 180  # ======================================================================= 
 181  # module globals API 
 182  # ======================================================================= 
 183  def set_default_client_encoding(encoding = None):
 184          # check whether psycopg2 can handle this encoding
 185          if encoding not in psycopg2.extensions.encodings:
 186                  raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
 187          # check whether Python can handle this encoding
 188          py_enc = psycopg2.extensions.encodings[encoding]
 189          try:
 190                  codecs.lookup(py_enc)
 191          except LookupError:
 192                  _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
 193                  raise
 194          # FIXME: check encoding against the database
 195          # FIXME: - but we may not yet have access
 196          # FIXME: - psycopg2 will pull its encodings from the database eventually
 197          # it seems safe to set it
 198          global _default_client_encoding
 199          _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
 200          _default_client_encoding = encoding
 201          return True
202 #---------------------------------------------------
 203  def set_default_client_timezone(timezone = None):
 204  
 205          # FIXME: use __validate
 206          global _default_client_timezone
 207          _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
 208          _default_client_timezone = timezone
 209  
 210          global _sql_set_timezone
 211          _sql_set_timezone = u'set timezone to %s'
 212  
 213          return True
214 #---------------------------------------------------
 215  def __validate_timezone(conn=None, timezone=None):
 216  
 217          _log.debug(u'validating time zone [%s]', timezone)
 218  
 219          cmd = u'set timezone to %(tz)s'
 220          args = {u'tz': timezone}
 221  
 222          conn.commit()
 223          curs = conn.cursor()
 224          is_valid = False
 225          try:
 226                  curs.execute(cmd, args)
 227                  _log.info(u'time zone [%s] is settable', timezone)
 228                  # can we actually use it, though ?
 229                  cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
 230                  try:
 231                          curs.execute(cmd)
 232                          curs.fetchone()
 233                          _log.info(u'time zone [%s] is usable', timezone)
 234                          is_valid = True
 235                  except:
 236                          _log.error('error using time zone [%s]', timezone)
 237          except dbapi.DataError:
 238                  _log.warning(u'time zone [%s] is not settable', timezone)
 239          except:
 240                  _log.error(u'failed to set time zone to [%s]', timezone)
 241                  _log.exception(u'')
 242  
 243          curs.close()
 244          conn.rollback()
 245  
 246          return is_valid
247 #---------------------------------------------------
 248  def __expand_timezone(conn=None, timezone=None):
 249          """some timezone defs are abbreviations so try to expand
 250          them because "set time zone" doesn't take abbreviations"""
 251  
 252          cmd = u"""
 253  select distinct on (abbrev) name
 254  from pg_timezone_names
 255  where
 256          abbrev = %(tz)s and
 257          name ~ '^[^/]+/[^/]+$' and
 258          name !~ '^Etc/'
 259  """
 260          args = {u'tz': timezone}
 261  
 262          conn.commit()
 263          curs = conn.cursor()
 264  
 265          result = timezone
 266          try:
 267                  curs.execute(cmd, args)
 268                  rows = curs.fetchall()
 269                  if len(rows) > 0:
 270                          result = rows[0][0]
 271                          _log.debug(u'[%s] maps to [%s]', timezone, result)
 272          except:
 273                  _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)
 274  
 275          curs.close()
 276          conn.rollback()
 277  
 278          return result
279 #---------------------------------------------------
 280  def __detect_client_timezone(conn=None):
 281          """This is run on the very first connection."""
 282  
 283          # FIXME: check whether server.timezone is the same
 284          # FIXME: value as what we eventually detect
 285  
 286          # we need gmDateTime to be initialized
 287          if gmDateTime.current_local_iso_numeric_timezone_string is None:
 288                  gmDateTime.init()
 289  
 290          _log.debug('trying to detect timezone from system')
 291  
 292          tz_candidates = []
 293          try:
 294                  tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
 295                  tz_candidates.append(tz)
 296                  expanded = __expand_timezone(conn = conn, timezone = tz)
 297                  if expanded != tz:
 298                          tz_candidates.append(expanded)
 299          except KeyError:
 300                  pass
 301  
 302          tz_candidates.append(gmDateTime.current_local_timezone_name)
 303          expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
 304          if expanded != gmDateTime.current_local_timezone_name:
 305                  tz_candidates.append(expanded)
 306  
 307          _log.debug('candidates: %s', str(tz_candidates))
 308  
 309          # find best among candidates
 310          global _default_client_timezone
 311          global _sql_set_timezone
 312          found = False
 313          for tz in tz_candidates:
 314                  if __validate_timezone(conn = conn, timezone = tz):
 315                          _default_client_timezone = tz
 316                          _sql_set_timezone = u'set timezone to %s'
 317                          found = True
 318                          break
 319  
 320          if not found:
 321                  _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
 322                  _sql_set_timezone = u"set time zone interval %s hour to minute"
 323  
 324          _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
 325  # =======================================================================
 326  # login API
 327  # =======================================================================
 328  def __request_login_params_tui():
 329          """Text mode request of database login parameters"""
 330          import getpass
 331          login = gmLoginInfo.LoginInfo()
 332  
 333          print "\nPlease enter the required login parameters:"
 334          try:
 335                  login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
 336                  login.database = prompted_input(prompt = "database", default = 'gnumed_v18')
 337                  login.user = prompted_input(prompt = "user name", default = '')
 338                  tmp = 'password for "%s" (not shown): ' % login.user
 339                  login.password = getpass.getpass(tmp)
 340                  login.port = prompted_input(prompt = "port", default = 5432)
 341          except KeyboardInterrupt:
 342                  _log.warning("user cancelled text mode login dialog")
 343                  print "user cancelled text mode login dialog"
 344                  raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))
 345  
 346          return login
347 #---------------------------------------------------
 348  def __request_login_params_gui_wx():
 349          """GUI (wx) input request for database login parameters.
 350  
 351          Returns gmLoginInfo.LoginInfo object
 352          """
 353          import wx
 354          # OK, wxPython was already loaded. But has the main Application instance
 355          # been initialized yet ? if not, the exception will kick us out
 356          if wx.GetApp() is None:
 357                  raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))
 358  
 359          # Let's launch the login dialog
 360          # if wx was not initialized /no main App loop, an exception should be raised anyway
 361          import gmAuthWidgets
 362          dlg = gmAuthWidgets.cLoginDialog(None, -1)
 363          dlg.ShowModal()
 364          login = dlg.panel.GetLoginInfo()
 365          dlg.Destroy()
 366  
 367          #if user cancelled or something else went wrong, raise an exception
 368          if login is None:
 369                  raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))
 370  
 371          return login
372 #---------------------------------------------------
 373  def request_login_params():
 374          """Request login parameters for database connection."""
 375          # do we auto-request parameters at all ?
 376          if not auto_request_login_params:
 377                  raise Exception('Cannot request login parameters.')
 378  
 379          # are we inside X ?
 380          # (if we aren't wxGTK will crash hard at
 381          # C-level with "can't open Display")
 382          if os.environ.has_key('DISPLAY'):
 383                  # try wxPython GUI
 384                  try: return __request_login_params_gui_wx()
 385                  except: pass
 386  
 387          # well, either we are on the console or
 388          # wxPython does not work, use text mode
 389          return __request_login_params_tui()
 390  
 391  # =======================================================================
 392  # DSN API
 393  # -----------------------------------------------------------------------
 394  def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
 395          dsn_parts = []
 396  
 397          if (database is not None) and (database.strip() != ''):
 398                  dsn_parts.append('dbname=%s' % database)
 399  
 400          if (host is not None) and (host.strip() != ''):
 401                  dsn_parts.append('host=%s' % host)
 402  
 403          if (port is not None) and (str(port).strip() != ''):
 404                  dsn_parts.append('port=%s' % port)
 405  
 406          if (user is not None) and (user.strip() != ''):
 407                  dsn_parts.append('user=%s' % user)
 408  
 409          if (password is not None) and (password.strip() != ''):
 410                  dsn_parts.append('password=%s' % password)
 411  
 412          dsn_parts.append('sslmode=prefer')
 413  
 414          return ' '.join(dsn_parts)
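
Illustrative sketch (values are placeholders, not real credentials): empty or None parts are dropped and "sslmode=prefer" is always appended.

        dsn = make_psycopg2_dsn (
                database = 'gnumed_v18',
                host = '',                      # empty -> dropped -> Unix domain socket
                port = 5432,
                user = 'any-doc',
                password = 'any-doc'
        )
        # -> 'dbname=gnumed_v18 port=5432 user=any-doc password=any-doc sslmode=prefer'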
415 # ------------------------------------------------------
 416  def get_default_login():
 417          # make sure we do have a login
 418          get_default_dsn()
 419          return _default_login
420 # ------------------------------------------------------
 421  def get_default_dsn():
 422          global _default_dsn
 423          if _default_dsn is not None:
 424                  return _default_dsn
 425  
 426          login = request_login_params()
 427          set_default_login(login=login)
 428  
 429          return _default_dsn
430 # ------------------------------------------------------
 431  def set_default_login(login=None):
 432          if login is None:
 433                  return False
 434  
 435          if login.host is not None:
 436                  if login.host.strip() == u'':
 437                          login.host = None
 438  
 439          global _default_login
 440          _default_login = login
 441          _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
 442  
 443          dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)
 444  
 445          global _default_dsn
 446          if _default_dsn is None:
 447                  old_dsn = u'None'
 448          else:
 449                  old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
 450          _log.info ('setting default DSN from [%s] to [%s]',
 451                  old_dsn,
 452                  regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
 453          )
 454          _default_dsn = dsn
 455  
 456          return True
 457  # =======================================================================
 458  # metadata API
 459  # =======================================================================
 460  def database_schema_compatible(link_obj=None, version=None, verbose=True):
 461          expected_hash = known_schema_hashes[version]
 462          if version == 0:
 463                  args = {'ver': 9999}
 464          else:
 465                  args = {'ver': version}
 466          rows, idx = run_ro_queries (
 467                  link_obj = link_obj,
 468                  queries = [{
 469                          'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
 470                          'args': args
 471                  }]
 472          )
 473          if rows[0]['md5'] != expected_hash:
 474                  _log.error('database schema version mismatch')
 475                  _log.error('expected: %s (%s)' % (version, expected_hash))
 476                  _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
 477                  if verbose:
 478                          _log.debug('schema dump follows:')
 479                          for line in get_schema_structure(link_obj=link_obj).split():
 480                                  _log.debug(line)
 481                          _log.debug('schema revision history dump follows:')
 482                          for line in get_schema_revision_history(link_obj=link_obj):
 483                                  _log.debug(u' - '.join(line))
 484                  return False
 485          _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
 486          return True
487 #------------------------------------------------------------------------
 488  def get_schema_version(link_obj=None):
 489          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
 490          try:
 491                  return map_schema_hash2version[rows[0]['md5']]
 492          except KeyError:
 493                  return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
494 #------------------------------------------------------------------------
 495  def get_schema_structure(link_obj=None):
 496          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
 497          return rows[0][0]
498 #------------------------------------------------------------------------
 499  def get_schema_hash(link_obj=None):
 500          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
 501          return rows[0]['md5']
502 #------------------------------------------------------------------------
 503  def get_schema_revision_history(link_obj=None):
 504          cmd = u"""
 505  select
 506          imported::text,
 507          version,
 508          filename
 509  from gm.schema_revision
 510  order by imported
 511  """
 512          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
 513          return rows
514 #------------------------------------------------------------------------
 515  def get_current_user():
 516          rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
 517          return rows[0][0]
518 #------------------------------------------------------------------------
 519  def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
 520          """Get the foreign keys pointing to schema.table.column.
 521  
 522          Does not properly work with multi-column FKs.
 523          GNUmed doesn't use any, however.
 524          """
 525          cmd = u"""
 526  select
 527          %(schema)s as referenced_schema,
 528          %(tbl)s as referenced_table,
 529          %(col)s as referenced_column,
 530          pgc.confkey as referenced_column_list,
 531          pgc.conrelid::regclass as referencing_table,
 532          pgc.conkey as referencing_column_list,
 533          (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
 534  from
 535          pg_constraint pgc
 536  where
 537          pgc.contype = 'f'
 538                  and
 539          pgc.confrelid = (
 540                  select oid from pg_class where relname = %(tbl)s and relnamespace = (
 541                          select oid from pg_namespace where nspname = %(schema)s
 542                  )
 543          ) and
 544          (
 545                  select attnum
 546                  from pg_attribute
 547                  where
 548                          attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
 549                                  select oid from pg_namespace where nspname = %(schema)s
 550                          ))
 551                          and
 552                          attname = %(col)s
 553          ) = any(pgc.confkey)
 554  """
 555  
 556          args = {
 557                  'schema': schema,
 558                  'tbl': table,
 559                  'col': column
 560          }
 561  
 562          rows, idx = run_ro_queries (
 563                  link_obj = link_obj,
 564                  queries = [
 565                          {'cmd': cmd, 'args': args}
 566                  ]
 567          )
 568  
 569          return rows
570 #------------------------------------------------------------------------
 571  def get_child_tables(schema='public', table=None, link_obj=None):
 572          """Return child tables of <table>."""
 573          cmd = u"""
 574  select
 575          pgn.nspname as namespace,
 576          pgc.relname as table
 577  from
 578          pg_namespace pgn,
 579          pg_class pgc
 580  where
 581          pgc.relnamespace = pgn.oid
 582                  and
 583          pgc.oid in (
 584                  select inhrelid from pg_inherits where inhparent = (
 585                          select oid from pg_class where
 586                                  relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
 587                                  relname = %(table)s
 588                  )
 589          )"""
 590          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
 591          return rows
592 #------------------------------------------------------------------------
 593  def schema_exists(link_obj=None, schema=u'gm'):
 594          cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
 595          args = {'schema': schema}
 596          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
 597          return rows[0][0]
598 #------------------------------------------------------------------------
 599  def table_exists(link_obj=None, schema=None, table=None):
 600          """Returns false, true."""
 601          cmd = u"""
 602  select exists (
 603          select 1 from information_schema.tables
 604          where
 605                  table_schema = %s and
 606                  table_name = %s and
 607                  table_type = 'BASE TABLE'
 608  )"""
 609          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
 610          return rows[0][0]
611 #------------------------------------------------------------------------
 612  def get_col_indices(cursor = None):
 613          if cursor.description is None:
 614                  _log.error('no result description available: unused cursor or last query did not select rows')
 615                  return None
 616          col_indices = {}
 617          col_index = 0
 618          for col_desc in cursor.description:
 619                  col_name = col_desc[0]
 620                  # a query like "select 1,2;" will return two columns of the same name !
 621                  # hence adjust to that, note, however, that dict-style access won't work
 622                  # on results of such queries ...
 623                  if col_indices.has_key(col_name):
 624                          col_name = '%s_%s' % (col_name, col_index)
 625                  col_indices[col_name] = col_index
 626                  col_index += 1
 627  
 628          return col_indices
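
Illustrative sketch of the duplicate-name handling (assumes an open cursor from an existing connection `conn`):

        curs = conn.cursor()
        curs.execute(u'select 1 as pk, 2 as pk')        # two columns, same name
        print get_col_indices(curs)                     # {'pk': 0, 'pk_1': 1}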
629 #------------------------------------------------------------------------
 630  def get_col_defs(link_obj=None, schema='public', table=None):
 631          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
 632          col_names = []
 633          col_type = {}
 634          for row in rows:
 635                  col_names.append(row[0])
 636                  # map array types
 637                  if row[1].startswith('_'):
 638                          col_type[row[0]] = row[1][1:] + '[]'
 639                  else:
 640                          col_type[row[0]] = row[1]
 641          col_defs = []
 642          col_defs.append(col_names)
 643          col_defs.append(col_type)
 644          return col_defs
645 #------------------------------------------------------------------------
 646  def get_col_names(link_obj=None, schema='public', table=None):
 647          """Return column attributes of table"""
 648          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
 649          cols = []
 650          for row in rows:
 651                  cols.append(row[0])
 652          return cols
 653  
 654  #------------------------------------------------------------------------
 655  # i18n functions
 656  #------------------------------------------------------------------------
 657  def export_translations_from_database(filename=None):
 658          tx_file = codecs.open(filename, 'wb', 'utf8')
 659          tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
 660          tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
 661          tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
 662          tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
 663          tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
 664          tx_file.write(u'set default_transaction_read_only to off;\n\n')
 665          tx_file.write(u"set client_encoding to 'utf-8';\n\n")
 666          tx_file.write(u'\\unset ON_ERROR_STOP\n\n')
 667  
 668          cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
 669          rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
 670          for row in rows:
 671                  line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
 672                          row['lang'].replace("'", "\\'"),
 673                          row['orig'].replace("'", "\\'"),
 674                          row['trans'].replace("'", "\\'")
 675                  )
 676                  tx_file.write(line)
 677          tx_file.write(u'\n')
 678  
 679          tx_file.write(u'\set ON_ERROR_STOP 1\n')
 680          tx_file.close()
 681  
 682          return True
683 #------------------------------------------------------------------------
 684  def delete_translation_from_database(link_obj=None, language=None, original=None):
 685          cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
 686          args = {'lang': language, 'orig': original}
 687          run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
 688          return True
 689  
 690  #------------------------------------------------------------------------
 691  def update_translation_in_database(language=None, original=None, translation=None):
 692          cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
 693          args = {'lang': language, 'orig': original, 'trans': translation}
 694          run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
 695          return args
 696  
 697  #------------------------------------------------------------------------
 698  def get_translation_languages():
 699          rows, idx = run_ro_queries (
 700                  queries = [{'cmd': u'select distinct lang from i18n.translations'}]
 701          )
 702          return [ r[0] for r in rows ]
 703  
 704  #------------------------------------------------------------------------
 705  def get_database_translations(language=None, order_by=None):
 706  
 707          args = {'lang': language}
 708          _log.debug('language [%s]', language)
 709  
 710          if order_by is None:
 711                  order_by = u'ORDER BY lang, orig'
 712          else:
 713                  order_by = u'ORDER BY %s' % order_by
 714  
 715          if language is None:
 716                  cmd = u"""
 717  SELECT DISTINCT ON (orig, lang)
 718          lang, orig, trans
 719  FROM ((
 720  
 721          -- strings stored as translation keys whether translated or not
 722          SELECT
 723                  NULL as lang,
 724                  ik.orig,
 725                  NULL AS trans
 726          FROM
 727                  i18n.keys ik
 728  
 729  ) UNION ALL (
 730  
 731          -- already translated strings
 732          SELECT
 733                  it.lang,
 734                  it.orig,
 735                  it.trans
 736          FROM
 737                  i18n.translations it
 738  
 739  )) as translatable_strings
 740  %s""" % order_by
 741          else:
 742                  cmd = u"""
 743  SELECT DISTINCT ON (orig, lang)
 744          lang, orig, trans
 745  FROM ((
 746  
 747          -- strings stored as translation keys whether translated or not
 748          SELECT
 749                  %%(lang)s as lang,
 750                  ik.orig,
 751                  i18n._(ik.orig, %%(lang)s) AS trans
 752          FROM
 753                  i18n.keys ik
 754  
 755  ) UNION ALL (
 756  
 757          -- already translated strings
 758          SELECT
 759                  %%(lang)s as lang,
 760                  it.orig,
 761                  i18n._(it.orig, %%(lang)s) AS trans
 762          FROM
 763                  i18n.translations it
 764  
 765  )) AS translatable_strings
 766  %s""" % order_by
 767  
 768          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
 769  
 770          if rows is None:
 771                  _log.error('no translatable strings found')
 772          else:
 773                  _log.debug('%s translatable strings found', len(rows))
 774  
 775          return rows
 776  
 777  #------------------------------------------------------------------------
 778  def get_current_user_language():
 779          cmd = u'select i18n.get_curr_lang()'
 780          rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
 781          return rows[0][0]
 782  
 783  #------------------------------------------------------------------------
 784  def set_user_language(user=None, language=None):
 785          """Set the user language in the database.
 786  
 787          user = None: current db user
 788          language = None: unset
 789          """
 790          _log.info('setting database language for user [%s] to [%s]', user, language)
 791  
 792          args = {
 793                  'usr': user,
 794                  'lang': language
 795          }
 796  
 797          if language is None:
 798                  if user is None:
 799                          queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
 800                  else:
 801                          queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
 802                  queries.append({'cmd': u'select True'})
 803          else:
 804                  if user is None:
 805                          queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
 806                  else:
 807                          queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]
 808  
 809          rows, idx = run_rw_queries(queries = queries, return_data = True)
 810  
 811          if not rows[0][0]:
 812                  _log.error('cannot set database language to [%s] for user [%s]', language, user)
 813  
 814          return rows[0][0]
815 #------------------------------------------------------------------------
 816  def force_user_language(language=None):
 817          """Set the user language in the database.
 818  
 819          - regardless of whether there is any translation available.
 820          - only for the current user
 821          """
 822          _log.info('forcing database language for current db user to [%s]', language)
 823  
 824          run_rw_queries(queries = [{
 825                  'cmd': u'select i18n.force_curr_lang(%(lang)s)',
 826                  'args': {'lang': language}
 827          }])
 828  
 829  # =======================================================================
 830  # query runners and helpers
 831  # =======================================================================
 832  def send_maintenance_notification():
 833          cmd = u'notify "db_maintenance_warning:"'
 834          run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
835 #------------------------------------------------------------------------
 836  def send_maintenance_shutdown():
 837          cmd = u'notify "db_maintenance_disconnect:"'
 838          run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
839 #------------------------------------------------------------------------
 840  def is_pg_interval(candidate=None):
 841          cmd = u'SELECT %(candidate)s::interval'
 842          try:
 843                  rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
 844                  return True
 845          except:
 846                  cmd = u'SELECT %(candidate)s::text::interval'
 847                  try:
 848                          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
 849                          return True
 850                  except:
 851                          return False
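
Illustrative calls (behaviour as implemented above, inputs made up):

        is_pg_interval(candidate = '2 weeks')           # True, casts directly
        is_pg_interval(candidate = 10)                  # True, via the ::text::interval detour
        is_pg_interval(candidate = 'not a duration')    # False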
852 #------------------------------------------------------------------------
 853  def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
 854          outfile = file(filename, 'wb')
 855          result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
 856          outfile.close()
 857          return result
858 #------------------------------------------------------------------------
 859  def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
 860          """Store data from a bytea field into a file.
 861  
 862          <data_query>
 863          - dict {'cmd': ..., 'args': ...}
 864          - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
 865          - 'args' must be a dict
 866          - must return one row with one field of type bytea
 867          <file>
 868          - must be a file like Python object
 869          <data_size>
 870          - integer of the total size of the expected data or None
 871          <data_size_query>
 872          - dict {'cmd': ..., 'args': ...}
 873          - cmd must be unicode
 874          - must return one row with one field with the octet_length() of the data field
 875          - used only when <data_size> is None
 876          """
 877          if data_size == 0:
 878                  return True
 879  
 880          # If the client sets an encoding other than the default we
 881          # will receive encoding-parsed data which isn't the binary
 882          # content we want. Hence we need to get our own connection.
 883          # It must be a read-write one so that we don't affect the
 884          # encoding for other users of the shared read-only
 885          # connections.
 886          # Actually, encodings shouldn't be applied to binary data
 887          # (eg. bytea types) in the first place but that is only
 888          # reported to be fixed > v7.4.
 889          # further tests reveal that at least on PG 8.0 this bug still
 890          # manifests itself
 891          conn = get_raw_connection(readonly=True)
 892  
 893          if data_size is None:
 894                  rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
 895                  data_size = rows[0][0]
 896          if data_size in [None, 0]:
 897                  conn.rollback()
 898                  return True
 899  
 900          max_chunk_size = 1024 * 1024 * 20       # 20 MB, works for typical CR DICOMs
 901          if chunk_size == 0:
 902                  chunk_size = min(data_size, max_chunk_size)
 903  
 904          _log.debug('expecting bytea data of size: [%s] bytes' % data_size)
 905          _log.debug('using chunk size of: [%s] bytes' % chunk_size)
 906  
 907          # Windoze sucks: it can't transfer objects of arbitrary size,
 908          # anyways, we need to split the transfer,
 909          # however, only possible if postgres >= 7.2
 910          needed_chunks, remainder = divmod(data_size, chunk_size)
 911          _log.debug('chunks to retrieve: [%s]' % needed_chunks)
 912          _log.debug('remainder to retrieve: [%s] bytes' % remainder)
 913  
 914          # try setting "bytea_output"
 915          # - fails if not necessary
 916          # - succeeds if necessary
 917          try:
 918                  run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
 919          except dbapi.ProgrammingError:
 920                  _log.debug('failed to set bytea_output to "escape", not necessary')
 921  
 922          # retrieve chunks, skipped if data size < chunk size,
 923          # does this not carry the danger of cutting up multi-byte escape sequences ?
 924          # no, since bytea is binary,
 925          # yes, since in bytea there are *some* escaped values, still
 926          # no, since those are only escaped during *transfer*, not on-disk, hence
 927          # only complete escape sequences are put on the wire
 928          for chunk_id in range(needed_chunks):
 929                  chunk_start = (chunk_id * chunk_size) + 1
 930                  data_query['args']['start'] = chunk_start
 931                  data_query['args']['size'] = chunk_size
 932                  try:
 933                          rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
 934                  except:
 935                          _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
 936                          conn.rollback()
 937                          raise
 938                  # it would be a fatal error to see more than one result as ids are supposed to be unique
 939                  file_obj.write(str(rows[0][0]))
 940  
 941          # retrieve remainder
 942          if remainder > 0:
 943                  chunk_start = (needed_chunks * chunk_size) + 1
 944                  data_query['args']['start'] = chunk_start
 945                  data_query['args']['size'] = remainder
 946                  try:
 947                          rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
 948                  except:
 949                          _log.error('cannot retrieve remaining [%s] bytes' % remainder)
 950                          conn.rollback()
 951                          raise
 952                  # it would be a fatal error to see more than one result as ids are supposed to be unique
 953                  file_obj.write(str(rows[0][0]))
 954  
 955          conn.rollback()
 956          return True
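
Illustrative sketch of the expected query dicts (the table and primary key are assumptions for illustration, not statements about the GNUmed schema):

        bytea2file (
                data_query = {
                        'cmd': u'select substring(data from %(start)s for %(size)s) from blobs.doc_obj where pk = %(pk)s',
                        'args': {'pk': 1234}
                },
                filename = '/tmp/object.bin',
                data_size_query = {
                        'cmd': u'select octet_length(data) from blobs.doc_obj where pk = %(pk)s',
                        'args': {'pk': 1234}
                }
        )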
957 #------------------------------------------------------------------------
 958  def file2bytea(query=None, filename=None, args=None, conn=None):
 959          """Store data from a file into a bytea field.
 960  
 961          The query must:
 962          - be in unicode
 963          - contain a format spec identifying the row (eg a primary key)
 964            matching <args> if it is an UPDATE
 965          - contain a format spec %(data)s::bytea
 966          """
 967          # read data from file
 968          infile = file(filename, "rb")
 969          data_as_byte_string = infile.read()
 970          infile.close()
 971          if args is None:
 972                  args = {}
 973          args['data'] = buffer(data_as_byte_string)
 974          del(data_as_byte_string)
 975  
 976          # insert the data
 977          if conn is None:
 978                  conn = get_raw_connection(readonly=False)
 979                  close_conn = True
 980          else:
 981                  close_conn = False
 982  
 983          run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
 984  
 985          if close_conn:
 986                  conn.close()
 987  
 988          return
989 #------------------------------------------------------------------------
 990  def sanitize_pg_regex(expression=None, escape_all=False):
 991          """Escape input for use in a PostgreSQL regular expression.
 992  
 993          If a fragment comes from user input and is to be used
 994          as a regular expression we need to make sure it doesn't
 995          contain invalid regex patterns such as unbalanced ('s.
 996  
 997          <escape_all>
 998          True: try to escape *all* metacharacters
 999          False: only escape those which render the regex invalid
1000          """
1001          return expression.replace (
1002                  '(', '\('
1003          ).replace (
1004                  ')', '\)'
1005          ).replace (
1006                  '[', '\['
1007          ).replace (
1008                  '+', '\+'
1009          ).replace (
1010                  '.', '\.'
1011          ).replace (
1012                  '*', '\*'
1013          )
1014          #']', '\]',             # not needed
1015  #------------------------------------------------------------------------
1016  def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
1017          """Run read-only queries.
1018  
1019          <queries> must be a list of dicts:
1020          [
1021                  {'cmd': <string>, 'args': <dict> or <tuple>},
1022                  {...},
1023                  ...
1024          ]
1025          """
1026          if isinstance(link_obj, dbapi._psycopg.cursor):
1027                  curs = link_obj
1028                  curs_close = __noop
1029                  tx_rollback = __noop
1030          elif isinstance(link_obj, dbapi._psycopg.connection):
1031                  curs = link_obj.cursor()
1032                  curs_close = curs.close
1033                  tx_rollback = link_obj.rollback
1034          elif link_obj is None:
1035                  conn = get_connection(readonly=True, verbose=verbose)
1036                  curs = conn.cursor()
1037                  curs_close = curs.close
1038                  tx_rollback = conn.rollback
1039          else:
1040                  raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)
1041  
1042          if verbose:
1043                  _log.debug('cursor: %s', curs)
1044  
1045          for query in queries:
1046                  if type(query['cmd']) is not types.UnicodeType:
1047                          print "run_ro_queries(): non-unicode query"
1048                          print query['cmd']
1049                  try:
1050                          args = query['args']
1051                  except KeyError:
1052                          args = None
1053                  try:
1054                          curs.execute(query['cmd'], args)
1055                          if verbose:
1056                                  _log.debug('ran query: [%s]', curs.query)
1057                                  if curs.statusmessage != u'':
1058                                          _log.debug('PG status message: %s', curs.statusmessage)
1059                                  _log.debug('cursor description: %s', str(curs.description))
1060                  except dbapi.Error as pg_exc:
1061                          _log.error('query failed: [%s]', curs.query)
1062                          if curs.statusmessage != u'':
1063                                  _log.error('PG status message: %s', curs.statusmessage)
1064                          _log.error('PG error code: %s', pg_exc.pgcode)
1065                          if pg_exc.pgerror is not None:
1066                                  _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
1067                          try:
1068                                  curs_close()
1069                          except dbapi.InterfaceError:
1070                                  _log.exception('cannot close cursor')
1071                          tx_rollback()           # need to rollback so ABORT state isn't preserved in pooled conns
1072                          if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
1073                                  details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
1074                                  if curs.statusmessage != u'':
1075                                          details = u'Status: %s\n%s' % (
1076                                                  curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
1077                                                  details
1078                                          )
1079                                  if pg_exc.pgerror is None:
1080                                          msg = u'[%s]' % pg_exc.pgcode
1081                                  else:
1082                                          msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
1083                                  raise gmExceptions.AccessDenied (
1084                                          msg,
1085                                          source = u'PostgreSQL',
1086                                          code = pg_exc.pgcode,
1087                                          details = details
1088                                  )
1089                          raise
1090                  except:
1091                          _log.error('query failed: [%s]', curs.query)
1092                          if curs.statusmessage != u'':
1093                                  _log.error('PG status message: %s', curs.statusmessage)
1094                          try:
1095                                  curs_close()
1096                          except dbapi.InterfaceError:
1097                                  _log.exception('cannot close cursor')
1098                          tx_rollback()           # need to rollback so ABORT state isn't preserved in pooled conns
1099                          raise
1100  
1101          data = None
1102          col_idx = None
1103          if return_data:
1104                  data = curs.fetchall()
1105                  if verbose:
1106                          _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
1107                          _log.debug('cursor description: %s', str(curs.description))
1108                  if get_col_idx:
1109                          col_idx = get_col_indices(curs)
1110  
1111          curs_close()
1112          tx_rollback()           # rollback just so that we don't stay IDLE IN TRANSACTION forever
1113          return (data, col_idx)
1114 #------------------------------------------------------------------------
1115  def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
1116          """Convenience function for running a transaction
1117          that is supposed to get committed.
1118  
1119          <link_obj>
1120                  can be either:
1121                  - a cursor
1122                  - a connection
1123  
1124          <queries>
1125                  is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
1126                  to be executed as a single transaction, the last
1127                  query may usefully return rows (such as a
1128                  "SELECT currval('some_sequence')" statement)
1129  
1130          <end_tx>
1131          - controls whether the transaction is finalized (eg.
1132            committed/rolled back) or not, this allows the
1133            call to run_rw_queries() to be part of a framing
1134            transaction
1135          - if link_obj is a connection then <end_tx> will
1136            default to False unless it is explicitly set to
1137            True which is taken to mean "yes, you do have full
1138            control over the transaction" in which case the
1139            transaction is properly finalized
1140          - if link_obj is a cursor we CANNOT finalize the
1141            transaction because we would need the connection for that
1142          - if link_obj is None <end_tx> will, of course, always be True
1143  
1144          <return_data>
1145          - if true, the returned data will include the rows
1146            the last query selected
1147          - if false, it returns None instead
1148  
1149          <get_col_idx>
1150          - if true, the returned data will include a dictionary
1151            mapping field names to column positions
1152          - if false, the returned data returns None instead
1153  
1154          method result:
1155          - returns a tuple (data, idx)
1156          - <data>:
1157                  * (None, None) if last query did not return rows
1158                  * ("fetchall() result", <index>) if last query returned any rows
1159                  * for <index> see <get_col_idx>
1160          """
1161          if isinstance(link_obj, dbapi._psycopg.cursor):
1162                  conn_close = __noop
1163                  conn_commit = __noop
1164                  tx_rollback = __noop
1165                  curs = link_obj
1166                  curs_close = __noop
1167          elif isinstance(link_obj, dbapi._psycopg.connection):
1168                  conn_close = __noop
1169                  if end_tx:
1170                          conn_commit = link_obj.commit
1171                          tx_rollback = link_obj.rollback
1172                  else:
1173                          conn_commit = __noop
1174                          tx_rollback = __noop
1175                  curs = link_obj.cursor()
1176                  curs_close = curs.close
1177          elif link_obj is None:
1178                  conn = get_connection(readonly=False)
1179                  conn_close = conn.close
1180                  conn_commit = conn.commit
1181                  tx_rollback = conn.rollback
1182                  curs = conn.cursor()
1183                  curs_close = curs.close
1184          else:
1185                  raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)
1186  
1187          for query in queries:
1188                  if type(query['cmd']) is not types.UnicodeType:
1189                          print "run_rw_queries(): non-unicode query"
1190                          print query['cmd']
1191                  try:
1192                          args = query['args']
1193                  except KeyError:
1194                          args = None
1195                  try:
1196                          curs.execute(query['cmd'], args)
1197                  except dbapi.Error as pg_exc:
1198                          _log.error('RW query failed: [%s]', curs.query)
1199                          if curs.statusmessage != u'':
1200                                  _log.error('PG status message: %s', curs.statusmessage)
1201                          _log.error('PG error code: %s', pg_exc.pgcode)
1202                          if pg_exc.pgerror is not None:
1203                                  _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
1204                          try:
1205                                  curs_close()
1206                                  tx_rollback()           # just for good measure
1207                                  conn_close()
1208                          except dbapi.InterfaceError:
1209                                  _log.exception('cannot cleanup')
1210                          if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
1211                                  details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
1212                                  if curs.statusmessage != u'':
1213                                          details = u'Status: %s\n%s' % (
1214                                                  curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
1215                                                  details
1216                                          )
1217                                  if pg_exc.pgerror is None:
1218                                          msg = u'[%s]' % pg_exc.pgcode
1219                                  else:
1220                                          msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
1221                                  raise gmExceptions.AccessDenied (
1222                                          msg,
1223                                          source = u'PostgreSQL',
1224                                          code = pg_exc.pgcode,
1225                                          details = details
1226                                  )
1227                          raise
1228                  except:
1229                          _log.exception('error running RW query')
1230                          gmLog2.log_stack_trace()
1231                          try:
1232                                  curs_close()
1233                                  tx_rollback()
1234                                  conn_close()
1235                          except dbapi.InterfaceError:
1236                                  _log.exception('cannot cleanup')
1237                                  raise
1238                          raise
1239  
1240          data = None
1241          col_idx = None
1242          if return_data:
1243                  try:
1244                          data = curs.fetchall()
1245                  except:
1246                          _log.exception('error fetching data from RW query')
1247                          gmLog2.log_stack_trace()
1248                          try:
1249                                  curs_close()
1250                                  tx_rollback()
1251                                  conn_close()
1252                          except dbapi.InterfaceError:
1253                                  _log.exception('cannot cleanup')
1254                                  raise
1255                          raise
1256                  if get_col_idx:
1257                          col_idx = get_col_indices(curs)
1258  
1259          curs_close()
1260          conn_commit()
1261          conn_close()
1262  
1263          return (data, col_idx)
1264 #------------------------------------------------------------------------
1265  def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
1266          """Generates SQL for an INSERT query.
1267  
1268          values: dict of values keyed by field to insert them into
1269          """
1270          if schema is None:
1271                  schema = u'public'
1272  
1273          fields = values.keys()          # that way val_snippets and fields really should end up in the same order
1274          val_snippets = []
1275          for field in fields:
1276                  val_snippets.append(u'%%(%s)s' % field)
1277  
1278          if returning is None:
1279                  returning = u''
1280                  return_data = False
1281          else:
1282                  returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
1283                  return_data = True
1284  
1285          cmd = u"""\nINSERT INTO %s.%s (
1286                  %s
1287          ) VALUES (
1288                  %s
1289          )%s""" % (
1290                  schema,
1291                  table,
1292                  u',\n\t\t'.join(fields),
1293                  u',\n\t\t'.join(val_snippets),
1294                  returning
1295          )
1296  
1297          _log.debug(u'running SQL: >>>%s<<<', cmd)
1298  
1299          return run_rw_queries (
1300                  link_obj = link_obj,
1301                  queries = [{'cmd': cmd, 'args': values}],
1302                  end_tx = end_tx,
1303                  return_data = return_data,
1304                  get_col_idx = get_col_idx,
1305                  verbose = verbose
1306          )
1307  # =======================================================================
1308  # connection handling API
1309  # -----------------------------------------------------------------------
1310  class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
1311          """
1312          GNUmed database connection pool.
1313  
1314          Extends psycopg2's PersistentConnectionPool with
1315          a custom _connect() function. Supports one connection
1316          per thread - which also ties it to one particular DSN.
1317          """
1318          #--------------------------------------------------
1319          def _connect(self, key=None):
1320  
1321                  conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)
1322  
1323                  conn.original_close = conn.close
1324                  conn.close = _raise_exception_on_ro_conn_close
1325  
1326                  if key is not None:
1327                          self._used[key] = conn
1328                          self._rused[id(conn)] = key
1329                  else:
1330                          self._pool.append(conn)
1331  
1332                  return conn
1333 #--------------------------------------------------
1334          def shutdown(self):
1335                  for conn_key in self._used.keys():
1336                          _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
1337                          self._used[conn_key].original_close()
1338 # -----------------------------------------------------------------------
1339  def get_raw_connection(dsn=None, verbose=False, readonly=True):
1340          """Get a raw, unadorned connection.
1341  
1342          - this will not set any parameters such as encoding, timezone, datestyle
1343          - the only requirement is a valid DSN
1344          - hence it can be used for "service" connections
1345            for verifying encodings etc
1346          """
1347          # FIXME: support verbose
1348          if dsn is None:
1349                  dsn = get_default_dsn()
1350  
1351          if u'host=salaam.homeunix' in dsn:
1352                  raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')
1353  
1354          try:
1355                  conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
1356          except dbapi.OperationalError, e:
1357  
1358                  t, v, tb = sys.exc_info()
1359                  try:
1360                          msg = e.args[0]
1361                  except (AttributeError, IndexError, TypeError):
1362                          raise
1363  
1364                  msg = unicode(msg, gmI18N.get_encoding(), 'replace')
1365  
1366                  if msg.find('fe_sendauth') != -1:
1367                          raise cAuthenticationError, (dsn, msg), tb
1368  
1369                  if regex.search('user ".*" does not exist', msg) is not None:
1370                          raise cAuthenticationError, (dsn, msg), tb
1371  
1372                  if msg.find('uthenti') != -1:
1373                          raise cAuthenticationError, (dsn, msg), tb
1374  
1375                  raise
1376  
1377          _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)
1378  
1379          # do first-time stuff
1380          global postgresql_version
1381          if postgresql_version is None:
1382                  curs = conn.cursor()
1383                  curs.execute("""
1384                          SELECT
1385                                  substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
1386                          FROM
1387                                  pg_settings
1388                          WHERE
1389                                  name = 'server_version'
1390                  """)
1391                  postgresql_version = curs.fetchone()['version']
1392                  _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
1393                  try:
1394                          curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
1395                          _log.info('database size: %s', curs.fetchone()[0])
1396                  except:
1397                          pass
1398                  if verbose:
1399                          __log_PG_settings(curs=curs)
1400                  curs.close()
1401                  conn.commit()
1402  
1403          if _default_client_timezone is None:
1404                  __detect_client_timezone(conn = conn)
1405  
1406          curs = conn.cursor()
1407  
1408          # set access mode
1409          if readonly:
1410                  _log.debug('access mode [READ ONLY]')
1411                  cmd = 'set session characteristics as transaction READ ONLY'
1412                  curs.execute(cmd)
1413                  cmd = 'set default_transaction_read_only to on'
1414                  curs.execute(cmd)
1415          else:
1416                  _log.debug('access mode [READ WRITE]')
1417                  cmd = 'set session characteristics as transaction READ WRITE'
1418                  curs.execute(cmd)
1419                  cmd = 'set default_transaction_read_only to off'
1420                  curs.execute(cmd)
1421  
1422          curs.close()
1423          conn.commit()
1424  
1425          conn.is_decorated = False
1426  
1427          return conn
1428 # =======================================================================
1429  def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
1430          """Get a new connection.
1431  
1432          This assumes the locale system has been initialized
1433          unless an encoding is specified.
1434          """
1435          # FIXME: support pooled on RW, too
1436          # FIXME: for now, support the default DSN only
1437          if pooled and readonly and (dsn is None):
1438                  global __ro_conn_pool
1439                  if __ro_conn_pool is None:
1440                          __ro_conn_pool = cConnectionPool (
1441                                  minconn = 1,
1442                                  maxconn = 2,
1443                                  dsn = dsn,
1444                                  verbose = verbose
1445                          )
1446                  conn = __ro_conn_pool.getconn()
1447          else:
1448                  conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)
1449  
1450          if conn.is_decorated:
1451                  return conn
1452  
1453          if encoding is None:
1454                  encoding = _default_client_encoding
1455          if encoding is None:
1456                  encoding = gmI18N.get_encoding()
1457                  _log.warning('client encoding not specified')
1458                  _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
1459                  _log.warning('for this to work properly the application MUST have called locale.setlocale() before')
1460  
1461          # set connection properties
1462          # - client encoding
1463          try:
1464                  conn.set_client_encoding(encoding)
1465          except dbapi.OperationalError:
1466                  t, v, tb = sys.exc_info()
1467                  if str(v).find("can't set encoding to") != -1:
1468                          raise cEncodingError, (encoding, v), tb
1469                  raise
1470  
1471          # - transaction isolation level
1472          if readonly:
1473                  iso_level = u'read committed'
1474          else:
1475                  conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
1476                  iso_level = u'serializable'
1477  
1478          _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)
1479  
1480          curs = conn.cursor()
1481  
1482          # - client time zone
1483          curs.execute(_sql_set_timezone, [_default_client_timezone])
1484  
1485          conn.commit()
1486  
1487          # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
1488          # FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
1489          #
1490          # - bytea data format
1491          # PG 9.0 switched to - by default - using "hex" rather than "escape",
1492          # however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
1493          # as the transmission mode for bytea output,
1494          # so try to set this setting back to "escape",
1495          # if that's not possible the reason will be that PG < 9.0 does not support
1496          # that setting - which also means we don't need it and can ignore the
1497          # failure
1498          cmd = "set bytea_output to 'escape'"
1499          try:
1500                  curs.execute(cmd)
1501          except dbapi.ProgrammingError:
1502                  _log.error('cannot set bytea_output format')
1503  
1504          curs.close()
1505          conn.commit()
1506  
1507          conn.is_decorated = True
1508  
1509          return conn
1510 #-----------------------------------------------------------------------
1511  def shutdown():
1512          if __ro_conn_pool is None:
1513                  return
1514          __ro_conn_pool.shutdown()
1515  # ======================================================================
1516  # internal helpers
1517  #-----------------------------------------------------------------------
1518  def __noop():
1519          pass
1520 #-----------------------------------------------------------------------
1521  def _raise_exception_on_ro_conn_close():
1522          raise TypeError(u'close() called on read-only connection')
1523 #-----------------------------------------------------------------------
1524  def log_database_access(action=None):
1525          run_insert (
1526                  schema = u'gm',
1527                  table = u'access_log',
1528                  values = {u'user_action': action},
1529                  end_tx = True
1530          )
1531 #-----------------------------------------------------------------------
1532  def sanity_check_time_skew(tolerance=60):
1533          """Check server time and local time to be within
1534          the given tolerance of each other.
1535  
1536          tolerance: seconds
1537          """
1538          _log.debug('maximum skew tolerance (seconds): %s', tolerance)
1539  
1540          cmd = u"SELECT now() at time zone 'UTC'"
1541          conn = get_raw_connection(readonly=True)
1542          curs = conn.cursor()
1543  
1544          start = time.time()
1545          rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
1546          end = time.time()
1547          client_now_as_utc = pydt.datetime.utcnow()
1548  
1549          curs.close()
1550          conn.commit()
1551  
1552          server_now_as_utc = rows[0][0]
1553          query_duration = end - start
1554          _log.info('server "now" (UTC): %s', server_now_as_utc)
1555          _log.info('client "now" (UTC): %s', client_now_as_utc)
1556          _log.debug('wire roundtrip (seconds): %s', query_duration)
1557  
1558          if query_duration > tolerance:
1559                  _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
1560                  return False
1561  
1562          if server_now_as_utc > client_now_as_utc:
1563                  real_skew = server_now_as_utc - client_now_as_utc
1564          else:
1565                  real_skew = client_now_as_utc - server_now_as_utc
1566  
1567          _log.debug('client/server time skew: %s', real_skew)
1568  
1569          if real_skew > pydt.timedelta(seconds = tolerance):
1570                  _log.error('client/server time skew > tolerance')
1571                  return False
1572  
1573          return True
#-----------------------------------------------------------------------
def sanity_check_database_settings():
        """Check database settings.

        returns (status, message)
        status:
                0: no problem
                1: non-fatal problem
                2: fatal problem
        """
        _log.debug('checking database settings')

        conn = get_connection()

        # - version string
        global postgresql_version_string
        if postgresql_version_string is None:
                curs = conn.cursor()
                curs.execute('SELECT version()')
                postgresql_version_string = curs.fetchone()['version']
                curs.close()
                _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

        options2check = {
                # setting: [expected value, risk, fatal?]
                u'allow_system_table_mods': [u'off', u'system breakage', False],
                u'check_function_bodies': [u'on', u'suboptimal error detection', False],
                u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
                u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
                u'default_transaction_read_only': [u'on', u'accidental database writes', False],
                u'fsync': [u'on', u'data loss/corruption', True],
                u'full_page_writes': [u'on', u'data loss/corruption', False],
                u'lc_messages': [u'C', u'suboptimal error detection', False],
                u'password_encryption': [u'on', u'breach of confidentiality', False],
                u'regex_flavor': [u'advanced', u'query breakage', False],               # 9.0 does not support this anymore, default is "advanced" anyway
                u'synchronous_commit': [u'on', u'data loss/corruption', False],
                u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
        }

        from Gnumed.pycommon import gmCfg2
        _cfg = gmCfg2.gmCfgData()
        if _cfg.get(option = u'hipaa'):
                options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
                options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
        else:
                options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
                options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

        cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
        rows, idx = run_ro_queries (
                link_obj = conn,
                queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
                get_col_idx = False
        )

        found_error = False
        found_problem = False
        msg = []
        for row in rows:
                option = row['name']
                value_found = row['setting']
                value_expected = options2check[option][0]
                risk = options2check[option][1]
                fatal_setting = options2check[option][2]
                if value_found != value_expected:
                        if fatal_setting is True:
                                found_error = True
                        elif fatal_setting is False:
                                found_problem = True
                        elif fatal_setting is None:
                                pass
                        else:
                                _log.error(options2check[option])
                                raise ValueError(u'invalid database configuration sanity check')
                        msg.append(_(' option [%s]: %s') % (option, value_found))
                        msg.append(_(' risk: %s') % risk)
                        _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

        if found_error:
                return 2, u'\n'.join(msg)

        if found_problem:
                return 1, u'\n'.join(msg)

        return 0, u''
#------------------------------------------------------------------------
def __log_PG_settings(curs=None):
        # don't use any of the run_*()s since that might
        # create a loop if we fail here
        # FIXME: use pg_settings
        try:
                curs.execute(u'show all')
        except:
                _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
                return False
        settings = curs.fetchall()
        if settings is None:
                _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
                return False
        for setting in settings:
                _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
        return True
# =======================================================================
def extract_msg_from_pg_exception(exc=None):
        try:
                msg = exc.args[0]
        except (AttributeError, IndexError, TypeError):
                return u'cannot extract message from exception'

        return unicode(msg, gmI18N.get_encoding(), 'replace')
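# usage sketch (illustration only): decoding a database error for the log:
#
#       try:
#               curs.execute(cmd)
#       except dbapi.Error, exc:
#               _log.error(extract_msg_from_pg_exception(exc = exc))
#               raise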
# =======================================================================
class cAuthenticationError(dbapi.OperationalError):

        def __init__(self, dsn=None, prev_val=None):
                self.dsn = dsn
                self.prev_val = prev_val

        def __str__(self):
                _log.warning('%s.__str__() called', self.__class__.__name__)
                tmp = u'PostgreSQL: %s\nDSN: %s' % (self.prev_val, self.dsn)
                _log.error(tmp)
                return tmp.encode(gmI18N.get_encoding(), 'replace')

        def __unicode__(self):
                return u'PostgreSQL: %s\nDSN: %s' % (self.prev_val, self.dsn)
# =======================================================================
# custom psycopg2 extensions
# =======================================================================
class cEncodingError(dbapi.OperationalError):

        def __init__(self, encoding=None, prev_val=None):
                self.encoding = encoding
                self.prev_val = prev_val

        def __str__(self):
                _log.warning('%s.__str__() called', self.__class__.__name__)
                return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

        def __unicode__(self):
                return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
# -----------------------------------------------------------------------
# Python -> PostgreSQL
# -----------------------------------------------------------------------
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
class cAdapterPyDateTime(object):

        def __init__(self, dt):
                if dt.tzinfo is None:
                        raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % dt.isoformat())
                self.__dt = dt

        def getquoted(self):
                return _timestamp_template % self.__dt.isoformat()
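# usage sketch (illustration only): once registered below, psycopg2 routes
# timezone-aware datetimes through this adapter, while naive datetimes
# raise ValueError; the exact quoting depends on _timestamp_template,
# which is defined elsewhere in this module:
#
#       import psycopg2.tz
#       dt = pydt.datetime(2011, 5, 1, 12, 30, 0, tzinfo = psycopg2.tz.FixedOffsetTimezone(offset = 60))
#       print cAdapterPyDateTime(dt).getquoted()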
## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##      def __init__(self, dt):
##              if dt.tz == '???':
##                      _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##              self.__dt = dt
##
##      def getquoted(self):
##              # under some locale settings the mx.DateTime ISO formatter
##              # will insert "," into the ISO string;
##              # while this is allowed per the ISO 8601 spec, PostgreSQL
##              # cannot currently handle that,
##              # so map those "," to "." to make things work:
##              return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#       import mx.DateTime as mxDT
##      psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#       _log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

        if len(sys.argv) < 2:
                sys.exit()

        if sys.argv[1] != 'test':
                sys.exit()

        logging.basicConfig(level=logging.DEBUG)
        #--------------------------------------------------------------------
        def test_file2bytea():
                run_rw_queries(queries = [
                        {'cmd': u'create table test_bytea (data bytea)'}
                ])

                cmd = u'insert into test_bytea values (%(data)s::bytea)'
                try:
                        file2bytea(query = cmd, filename = sys.argv[2])
                except:
                        _log.exception('error')

                run_rw_queries(queries = [
                        {'cmd': u'drop table test_bytea'}
                ])
        #--------------------------------------------------------------------
        def test_get_connection():
                print "testing get_connection()"

                dsn = 'foo'
                try:
                        conn = get_connection(dsn=dsn)
                except dbapi.OperationalError, e:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=abc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn=dsn, readonly=True)

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn=dsn, readonly=False)

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                encoding = 'foo'
                try:
                        conn = get_connection(dsn=dsn, encoding=encoding)
                except cEncodingError:
                        print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v
        #--------------------------------------------------------------------
        def test_exceptions():
                print "testing exceptions"

                try:
                        raise cAuthenticationError('no dsn', 'no previous exception')
                except cAuthenticationError:
                        t, v, tb = sys.exc_info()
                        print t
                        print v
                        print tb

                try:
                        raise cEncodingError('no dsn', 'no previous exception')
                except cEncodingError:
                        t, v, tb = sys.exc_info()
                        print t
                        print v
                        print tb
        #--------------------------------------------------------------------
        def test_ro_queries():
                print "testing run_ro_queries()"

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn, readonly=True)

                data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx
                data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True)
                print data
                print idx

                curs = conn.cursor()

                data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx

                data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx

                try:
                        data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
                        print data
                        print idx
                except psycopg2.ProgrammingError:
                        print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                curs.close()
        #--------------------------------------------------------------------
        def test_request_dsn():
                conn = get_connection()
                print conn
                conn.close()
        #--------------------------------------------------------------------
        def test_set_encoding():
                print "testing set_default_client_encoding()"

                enc = 'foo'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = ''
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'latin1'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'utf8'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'unicode'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'UNICODE'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v
        #--------------------------------------------------------------------
        def test_connection_pool():
                dsn = get_default_dsn()
                pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False)
                print pool
                print pool.getconn()
                print pool.getconn()
                print pool.getconn()
                print type(pool.getconn())
        #--------------------------------------------------------------------
        def test_list_args():
                dsn = get_default_dsn()
                conn = get_connection(dsn, readonly=True)
                curs = conn.cursor()
                curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
        #--------------------------------------------------------------------
        def test_sanitize_pg_regex():
                tests = [
                        ['(', '\\(']
                        , ['[', '\\[']
                        , [')', '\\)']
                ]
                for test in tests:
                        result = sanitize_pg_regex(test[0])
                        if result != test[1]:
                                print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
        #--------------------------------------------------------------------
        def test_is_pg_interval():
                status = True
                tests = [
                        [None, True],           # None == NULL == succeeds !
                        [1, True],
                        ['1', True],
                        ['abc', False]
                ]

                if not is_pg_interval():
                        print 'ERROR: is_pg_interval() returned "False", expected "True"'
                        status = False

                for test in tests:
                        result = is_pg_interval(test[0])
                        if result != test[1]:
                                print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
                                status = False

                return status
        #--------------------------------------------------------------------
        def test_sanity_check_time_skew():
                sanity_check_time_skew()
        #--------------------------------------------------------------------
        def test_get_foreign_key_details():
                for row in get_foreign_keys2column (
                        schema = u'dem',
                        table = u'identity',
                        column = u'pk'
                ):
                        print '%s.%s references %s.%s.%s' % (
                                row['referencing_table'],
                                row['referencing_column'],
                                row['referenced_schema'],
                                row['referenced_table'],
                                row['referenced_column']
                        )
        #--------------------------------------------------------------------
        def test_set_user_language():
                # (user, language, expected result)
                tests = [
                        # current user
                        [None, 'de_DE', True],
                        [None, 'lang_w/o_tx', False],
                        [None, None, True],
                        # valid user
                        ['any-doc', 'de_DE', True],
                        ['any-doc', 'lang_w/o_tx', False],
                        ['any-doc', None, True],
                        # invalid user
                        ['invalid user', 'de_DE', None],
                        ['invalid user', 'lang_w/o_tx', False],         # language checking happens before user checking
                        ['invalid user', None, True]
                ]
                for test in tests:
                        try:
                                result = set_user_language(user = test[0], language = test[1])
                                if result != test[2]:
                                        print "test:", test
                                        print "result:", result, "expected:", test[2]
                        except psycopg2.IntegrityError, e:
                                if test[2] is None:
                                        continue
                                print "test:", test
                                print "expected exception"
                                print "result:", e
        #--------------------------------------------------------------------
        def test_get_schema_revision_history():
                for line in get_schema_revision_history():
                        print u' - '.join(line)
        #--------------------------------------------------------------------
        def test_run_query():
                gmDateTime.init()
                args = {'dt': gmDateTime.pydt_max_here()}
                cmd = u"SELECT %(dt)s"

                #cmd = u"SELECT 'infinity'::timestamp with time zone"
                rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
                print rows
        #--------------------------------------------------------------------
        def test_schema_exists():
                print schema_exists()
        #--------------------------------------------------------------------
        # run tests
        #test_file2bytea()
        #test_get_connection()
        #test_exceptions()
        #test_ro_queries()
        #test_request_dsn()
        #test_set_encoding()
        #test_connection_pool()
        #test_list_args()
        #test_sanitize_pg_regex()
        #test_is_pg_interval()
        #test_sanity_check_time_skew()
        #test_get_foreign_key_details()
        #test_set_user_language()
        #test_get_schema_revision_history()
        #test_run_query()
        test_schema_exists()

# ======================================================================