
Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  15  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  16   
  17  # stdlib 
  18  import time 
  19  import sys 
  20  import os 
  21  import codecs 
  22  import types 
  23  import logging 
  24  import datetime as pydt 
  25  import re as regex 
  26   
  27   
  28  # GNUmed 
  29  if __name__ == '__main__': 
  30          sys.path.insert(0, '../../') 
  31  from Gnumed.pycommon import gmLoginInfo 
  32  from Gnumed.pycommon import gmExceptions 
  33  from Gnumed.pycommon import gmDateTime 
  34  from Gnumed.pycommon import gmBorg 
  35  from Gnumed.pycommon import gmI18N 
  36  from Gnumed.pycommon import gmLog2 
  37  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  38   
  39  _log = logging.getLogger('gm.db') 
  40   
  41   
  42  # 3rd party 
  43  try: 
  44          import psycopg2 as dbapi 
  45  except ImportError: 
  46          _log.exception("Python database adapter psycopg2 not found.") 
  47          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  48          raise 
  49   
  50   
  51  _log.info('psycopg2 version: %s' % dbapi.__version__) 
  52  _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle)) 
  53  if not (float(dbapi.apilevel) >= 2.0): 
  54          raise ImportError('gmPG2: supported DB-API level too low') 
  55  if not (dbapi.threadsafety > 0): 
  56          raise ImportError('gmPG2: lacking minimum thread safety in psycopg2') 
  57  if not (dbapi.paramstyle == 'pyformat'): 
  58          raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2') 
  59  try: 
  60          dbapi.__version__.index('dt') 
  61  except ValueError: 
  62          raise ImportError('gmPG2: lacking datetime support in psycopg2') 
  63  try: 
  64          dbapi.__version__.index('ext') 
  65  except ValueError: 
  66          raise ImportError('gmPG2: lacking extensions support in psycopg2') 
  67  try: 
  68          dbapi.__version__.index('pq3') 
  69  except ValueError: 
  70          raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2') 
  71   
  72  import psycopg2.extras 
  73  import psycopg2.extensions 
  74  import psycopg2.pool 
  75  import psycopg2.errorcodes as sql_error_codes 
  76   
  77  # ======================================================================= 
  78  _default_client_encoding = 'UTF8' 
  79  _log.info('assuming default client encoding of [%s]' % _default_client_encoding) 
  80   
  81  # things timezone 
  82  _default_client_timezone = None                 # default time zone for connections 
  83  _sql_set_timezone = None 
  84  _timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted() will not work 
  85  FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone 
  86   
  87  _default_dsn = None 
  88  _default_login = None 
  89   
  90  postgresql_version_string = None 
  91  postgresql_version = None                       # accuracy: major.minor 
  92   
  93  __ro_conn_pool = None 
  94   
  95  auto_request_login_params = True 
  96  # ======================================================================= 
  97  # global data 
  98  # ======================================================================= 
  99   
 100  known_schema_hashes = { 
 101          0: 'not released, testing only', 
 102          2: 'b09d50d7ed3f91ddf4c4ddb8ea507720', 
 103          3: 'e73718eaf230d8f1d2d01afa8462e176', 
 104          4: '4428ccf2e54c289136819e701bb095ea', 
 105          5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes 
 106          6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ... 
 107          7: '6c9f6d3981483f8e9433df99d1947b27', 
 108          8: '89b13a7af83337c3aad153b717e52360', 
 109          9: '641a9b2be3c378ffc2bb2f0b1c9f051d', 
 110          10: '7ef42a8fb2bd929a2cdd0c63864b4e8a', 
 111          11: '03042ae24f3f92877d986fb0a6184d76', 
 112          12: '06183a6616db62257e22814007a8ed07', 
 113          13: 'fab7c1ae408a6530c47f9b5111a0841e', 
 114          14: 'e170d543f067d1ea60bfe9076b1560cf', 
 115          15: '70012ff960b77ecdff4981c94b5b55b6', 
 116          16: '0bcf44ca22c479b52976e5eda1de8161', 
 117          17: '161428ee97a00e3bf56168c3a15b7b50', 
 118          18: 'a0f9efcabdecfb4ddb6d8c0b69c02092' 
 119  } 
 120   
 121  map_schema_hash2version = { 
 122          'b09d50d7ed3f91ddf4c4ddb8ea507720': 2, 
 123          'e73718eaf230d8f1d2d01afa8462e176': 3, 
 124          '4428ccf2e54c289136819e701bb095ea': 4, 
 125          '7e7b093af57aea48c288e76632a382e5': 5, 
 126          '90e2026ac2efd236da9c8608b8685b2d': 6, 
 127          '6c9f6d3981483f8e9433df99d1947b27': 7, 
 128          '89b13a7af83337c3aad153b717e52360': 8, 
 129          '641a9b2be3c378ffc2bb2f0b1c9f051d': 9, 
 130          '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10, 
 131          '03042ae24f3f92877d986fb0a6184d76': 11, 
 132          '06183a6616db62257e22814007a8ed07': 12, 
 133          'fab7c1ae408a6530c47f9b5111a0841e': 13, 
 134          'e170d543f067d1ea60bfe9076b1560cf': 14, 
 135          '70012ff960b77ecdff4981c94b5b55b6': 15, 
 136          '0bcf44ca22c479b52976e5eda1de8161': 16, 
 137          '161428ee97a00e3bf56168c3a15b7b50': 17, 
 138          'a0f9efcabdecfb4ddb6d8c0b69c02092': 18 
 139  } 
 140   
 141  map_client_branch2required_db_version = { 
 142          u'GIT tree': 0, 
 143          u'0.3': 9, 
 144          u'0.4': 10, 
 145          u'0.5': 11, 
 146          u'0.6': 12, 
 147          u'0.7': 13, 
 148          u'0.8': 14, 
 149          u'0.9': 15, 
 150          u'1.0': 16,             # intentional duplicate with 1.1 
 151          u'1.1': 16, 
 152          u'1.2': 17, 
 153          u'1.3': 18 
 154  } 
 155   
 156  # get columns and data types for a given table 
 157  query_table_col_defs = u"""select 
 158          cols.column_name, 
 159          cols.udt_name 
 160  from 
 161          information_schema.columns cols 
 162  where 
 163          cols.table_schema = %s 
 164                  and 
 165          cols.table_name = %s 
 166  order by 
 167          cols.ordinal_position""" 
 168   
 169  query_table_attributes = u"""select 
 170          cols.column_name 
 171  from 
 172          information_schema.columns cols 
 173  where 
 174          cols.table_schema = %s 
 175                  and 
 176          cols.table_name = %s 
 177  order by 
 178          cols.ordinal_position""" 
 179   
 180  # ======================================================================= 
 181  # module globals API 
 182  # ======================================================================= 
  183  def set_default_client_encoding(encoding = None):
  184          # check whether psycopg2 can handle this encoding
  185          if encoding not in psycopg2.extensions.encodings:
  186                  raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
  187          # check whether Python can handle this encoding
  188          py_enc = psycopg2.extensions.encodings[encoding]
  189          try:
  190                  codecs.lookup(py_enc)
  191          except LookupError:
  192                  _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
  193                  raise
  194          # FIXME: check encoding against the database
  195          # FIXME: - but we may not yet have access
  196          # FIXME: - psycopg2 will pull its encodings from the database eventually
  197          # it seems safe to set it
  198          global _default_client_encoding
  199          _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
  200          _default_client_encoding = encoding
  201          return True
202 #---------------------------------------------------
  203  def set_default_client_timezone(timezone = None):
  204  
  205          # FIXME: use __validate
  206          global _default_client_timezone
  207          _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
  208          _default_client_timezone = timezone
  209  
  210          global _sql_set_timezone
  211          _sql_set_timezone = u'set timezone to %s'
  212  
  213          return True
214 #---------------------------------------------------
  215  def __validate_timezone(conn=None, timezone=None):
  216  
  217          _log.debug(u'validating time zone [%s]', timezone)
  218  
  219          cmd = u'set timezone to %(tz)s'
  220          args = {u'tz': timezone}
  221  
  222          conn.commit()
  223          curs = conn.cursor()
  224          is_valid = False
  225          try:
  226                  curs.execute(cmd, args)
  227                  _log.info(u'time zone [%s] is settable', timezone)
  228                  # can we actually use it, though ?
  229                  cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
  230                  try:
  231                          curs.execute(cmd)
  232                          curs.fetchone()
  233                          _log.info(u'time zone [%s] is usable', timezone)
  234                          is_valid = True
  235                  except:
  236                          _log.error('error using time zone [%s]', timezone)
  237          except dbapi.DataError:
  238                  _log.warning(u'time zone [%s] is not settable', timezone)
  239          except:
  240                  _log.error(u'failed to set time zone to [%s]', timezone)
  241                  _log.exception(u'')
  242  
  243          curs.close()
  244          conn.rollback()
  245  
  246          return is_valid
247 #---------------------------------------------------
  248  def __expand_timezone(conn=None, timezone=None):
  249          """some timezone defs are abbreviations so try to expand
  250          them because "set time zone" doesn't take abbreviations"""
  251  
  252          cmd = u"""
  253  select distinct on (abbrev) name
  254  from pg_timezone_names
  255  where
  256          abbrev = %(tz)s and
  257          name ~ '^[^/]+/[^/]+$' and
  258          name !~ '^Etc/'
  259  """
  260          args = {u'tz': timezone}
  261  
  262          conn.commit()
  263          curs = conn.cursor()
  264  
  265          result = timezone
  266          try:
  267                  curs.execute(cmd, args)
  268                  rows = curs.fetchall()
  269                  if len(rows) > 0:
  270                          result = rows[0][0]
  271                          _log.debug(u'[%s] maps to [%s]', timezone, result)
  272          except:
  273                  _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)
  274  
  275          curs.close()
  276          conn.rollback()
  277  
  278          return result
279 #---------------------------------------------------
  280  def __detect_client_timezone(conn=None):
  281          """This is run on the very first connection."""
  282  
  283          # FIXME: check whether server.timezone is the same
  284          # FIXME: value as what we eventually detect
  285  
  286          # we need gmDateTime to be initialized
  287          if gmDateTime.current_local_iso_numeric_timezone_string is None:
  288                  gmDateTime.init()
  289  
  290          _log.debug('trying to detect timezone from system')
  291  
  292          tz_candidates = []
  293          try:
  294                  tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
  295                  tz_candidates.append(tz)
  296                  expanded = __expand_timezone(conn = conn, timezone = tz)
  297                  if expanded != tz:
  298                          tz_candidates.append(expanded)
  299          except KeyError:
  300                  pass
  301  
  302          tz_candidates.append(gmDateTime.current_local_timezone_name)
  303          expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
  304          if expanded != gmDateTime.current_local_timezone_name:
  305                  tz_candidates.append(expanded)
  306  
  307          _log.debug('candidates: %s', str(tz_candidates))
  308  
  309          # find best among candidates
  310          global _default_client_timezone
  311          global _sql_set_timezone
  312          found = False
  313          for tz in tz_candidates:
  314                  if __validate_timezone(conn = conn, timezone = tz):
  315                          _default_client_timezone = tz
  316                          _sql_set_timezone = u'set timezone to %s'
  317                          found = True
  318                          break
  319  
  320          if not found:
  321                  _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
  322                  _sql_set_timezone = u"set time zone interval %s hour to minute"
  323  
  324          _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
  325  # =======================================================================
  326  # login API
  327  # =======================================================================
  328  def __request_login_params_tui():
  329          """Text mode request of database login parameters"""
  330          import getpass
  331          login = gmLoginInfo.LoginInfo()
  332  
  333          print "\nPlease enter the required login parameters:"
  334          try:
  335                  login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
  336                  login.database = prompted_input(prompt = "database", default = 'gnumed_v18')
  337                  login.user = prompted_input(prompt = "user name", default = '')
  338                  tmp = 'password for "%s" (not shown): ' % login.user
  339                  login.password = getpass.getpass(tmp)
  340                  login.port = prompted_input(prompt = "port", default = 5432)
  341          except KeyboardInterrupt:
  342                  _log.warning("user cancelled text mode login dialog")
  343                  print "user cancelled text mode login dialog"
  344                  raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))
  345  
  346          return login
347 #---------------------------------------------------
  348  def __request_login_params_gui_wx():
  349          """GUI (wx) input request for database login parameters.
  350  
  351          Returns gmLoginInfo.LoginInfo object
  352          """
  353          import wx
  354          # OK, wxPython was already loaded. But has the main Application instance
  355          # been initialized yet ? if not, the exception will kick us out
  356          if wx.GetApp() is None:
  357                  raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))
  358  
  359          # Let's launch the login dialog
  360          # if wx was not initialized /no main App loop, an exception should be raised anyway
  361          import gmAuthWidgets
  362          dlg = gmAuthWidgets.cLoginDialog(None, -1)
  363          dlg.ShowModal()
  364          login = dlg.panel.GetLoginInfo()
  365          dlg.Destroy()
  366  
  367          # if user cancelled or something else went wrong, raise an exception
  368          if login is None:
  369                  raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))
  370  
  371          return login
372 #---------------------------------------------------
  373  def request_login_params():
  374          """Request login parameters for database connection."""
  375          # do we auto-request parameters at all ?
  376          if not auto_request_login_params:
  377                  raise Exception('Cannot request login parameters.')
  378  
  379          # are we inside X ?
  380          # (if we aren't wxGTK will crash hard at
  381          # C-level with "can't open Display")
  382          if os.environ.has_key('DISPLAY'):
  383                  # try wxPython GUI
  384                  try: return __request_login_params_gui_wx()
  385                  except: pass
  386  
  387          # well, either we are on the console or
  388          # wxPython does not work, use text mode
  389          return __request_login_params_tui()
  390  
  391  # =======================================================================
  392  # DSN API
  393  # -----------------------------------------------------------------------
  394  def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
  395          dsn_parts = []
  396  
  397          if (database is not None) and (database.strip() != ''):
  398                  dsn_parts.append('dbname=%s' % database)
  399  
  400          if (host is not None) and (host.strip() != ''):
  401                  dsn_parts.append('host=%s' % host)
  402  
  403          if (port is not None) and (str(port).strip() != ''):
  404                  dsn_parts.append('port=%s' % port)
  405  
  406          if (user is not None) and (user.strip() != ''):
  407                  dsn_parts.append('user=%s' % user)
  408  
  409          if (password is not None) and (password.strip() != ''):
  410                  dsn_parts.append('password=%s' % password)
  411  
  412          dsn_parts.append('sslmode=prefer')
  413  
  414          return ' '.join(dsn_parts)
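
# Illustrative sketch (editor addition, not part of the original module):
# the strings make_psycopg2_dsn() assembles; names/passwords are examples only.
def __example_make_psycopg2_dsn():
        # full TCP/IP connection spec:
        print make_psycopg2_dsn(database = 'gnumed_v18', host = 'localhost', port = 5432, user = 'any-doc', password = 'secret')
        # prints: dbname=gnumed_v18 host=localhost port=5432 user=any-doc password=secret sslmode=prefer
        # empty parts are simply left out, eg. a local-socket connection:
        print make_psycopg2_dsn(database = 'gnumed_v18', host = '')
        # prints: dbname=gnumed_v18 port=5432 sslmode=prefer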
415 # ------------------------------------------------------
  416  def get_default_login():
  417          # make sure we do have a login
  418          get_default_dsn()
  419          return _default_login
420 # ------------------------------------------------------
  421  def get_default_dsn():
  422          global _default_dsn
  423          if _default_dsn is not None:
  424                  return _default_dsn
  425  
  426          login = request_login_params()
  427          set_default_login(login=login)
  428  
  429          return _default_dsn
430 # ------------------------------------------------------
  431  def set_default_login(login=None):
  432          if login is None:
  433                  return False
  434  
  435          if login.host is not None:
  436                  if login.host.strip() == u'':
  437                          login.host = None
  438  
  439          global _default_login
  440          _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
  441          _default_login = login
  442  
  443          dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)
  444  
  445          global _default_dsn
  446          if _default_dsn is None:
  447                  old_dsn = u'None'
  448          else:
  449                  old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
  450          _log.info ('setting default DSN from [%s] to [%s]',
  451                  old_dsn,
  452                  regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
  453          )
  454          _default_dsn = dsn
  455  
  456          return True
  457  # =======================================================================
  458  # metadata API
  459  # =======================================================================
  460  def database_schema_compatible(link_obj=None, version=None, verbose=True):
  461          expected_hash = known_schema_hashes[version]
  462          if version == 0:
  463                  args = {'ver': 9999}
  464          else:
  465                  args = {'ver': version}
  466          rows, idx = run_ro_queries (
  467                  link_obj = link_obj,
  468                  queries = [{
  469                          'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
  470                          'args': args
  471                  }]
  472          )
  473          if rows[0]['md5'] != expected_hash:
  474                  _log.error('database schema version mismatch')
  475                  _log.error('expected: %s (%s)' % (version, expected_hash))
  476                  _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
  477                  if verbose:
  478                          _log.debug('schema dump follows:')
  479                          for line in get_schema_structure(link_obj=link_obj).split():
  480                                  _log.debug(line)
  481                          _log.debug('schema revision history dump follows:')
  482                          for line in get_schema_revision_history(link_obj=link_obj):
  483                                  _log.debug(u' - '.join(line))
  484                  return False
  485          _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
  486          return True
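
# Illustrative sketch (editor addition): a typical client-startup schema
# compatibility check; the version number is an example only.
def __example_check_schema():
        if not database_schema_compatible(version = 18):
                print "database schema does not match this client"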
487 #------------------------------------------------------------------------
  488  def get_schema_version(link_obj=None):
  489          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
  490          try:
  491                  return map_schema_hash2version[rows[0]['md5']]
  492          except KeyError:
  493                  return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
494 #------------------------------------------------------------------------
  495  def get_schema_structure(link_obj=None):
  496          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
  497          return rows[0][0]
498 #------------------------------------------------------------------------
  499  def get_schema_hash(link_obj=None):
  500          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
  501          return rows[0]['md5']
502 #------------------------------------------------------------------------
  503  def get_schema_revision_history(link_obj=None):
  504          cmd = u"""
  505  select
  506          imported::text,
  507          version,
  508          filename
  509  from gm.schema_revision
  510  order by imported
  511  """
  512          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
  513          return rows
514 #------------------------------------------------------------------------
  515  def get_current_user():
  516          rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
  517          return rows[0][0]
518 #------------------------------------------------------------------------
  519  def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
  520          """Get the foreign keys pointing to schema.table.column.
  521  
  522          Does not properly work with multi-column FKs.
  523          GNUmed doesn't use any, however.
  524          """
  525          cmd = u"""
  526  select
  527          %(schema)s as referenced_schema,
  528          %(tbl)s as referenced_table,
  529          %(col)s as referenced_column,
  530          pgc.confkey as referenced_column_list,
  531          pgc.conrelid::regclass as referencing_table,
  532          pgc.conkey as referencing_column_list,
  533          (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
  534  from
  535          pg_constraint pgc
  536  where
  537          pgc.contype = 'f'
  538                  and
  539          pgc.confrelid = (
  540                  select oid from pg_class where relname = %(tbl)s and relnamespace = (
  541                          select oid from pg_namespace where nspname = %(schema)s
  542                  )
  543          ) and
  544          (
  545                  select attnum
  546                  from pg_attribute
  547                  where
  548                          attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
  549                                  select oid from pg_namespace where nspname = %(schema)s
  550                          ))
  551                          and
  552                          attname = %(col)s
  553          ) = any(pgc.confkey)
  554  """
  555  
  556          args = {
  557                  'schema': schema,
  558                  'tbl': table,
  559                  'col': column
  560          }
  561  
  562          rows, idx = run_ro_queries (
  563                  link_obj = link_obj,
  564                  queries = [
  565                          {'cmd': cmd, 'args': args}
  566                  ]
  567          )
  568  
  569          return rows
570 #------------------------------------------------------------------------
  571  def get_child_tables(schema='public', table=None, link_obj=None):
  572          """Return child tables of <table>."""
  573          cmd = u"""
  574  select
  575          pgn.nspname as namespace,
  576          pgc.relname as table
  577  from
  578          pg_namespace pgn,
  579          pg_class pgc
  580  where
  581          pgc.relnamespace = pgn.oid
  582                  and
  583          pgc.oid in (
  584                  select inhrelid from pg_inherits where inhparent = (
  585                          select oid from pg_class where
  586                                  relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
  587                                  relname = %(table)s
  588                  )
  589          )"""
  590          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
  591          return rows
592 #------------------------------------------------------------------------
  593  def schema_exists(link_obj=None, schema=u'gm'):
  594          cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
  595          args = {'schema': schema}
  596          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
  597          return rows[0][0]
598 #------------------------------------------------------------------------
  599  def table_exists(link_obj=None, schema=None, table=None):
  600          """Returns false, true."""
  601          cmd = u"""
  602  select exists (
  603          select 1 from information_schema.tables
  604          where
  605                  table_schema = %s and
  606                  table_name = %s and
  607                  table_type = 'BASE TABLE'
  608  )"""
  609          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
  610          return rows[0][0]
611 #------------------------------------------------------------------------
  612  def get_col_indices(cursor = None):
  613          if cursor.description is None:
  614                  _log.error('no result description available: unused cursor or last query did not select rows')
  615                  return None
  616          col_indices = {}
  617          col_index = 0
  618          for col_desc in cursor.description:
  619                  col_name = col_desc[0]
  620                  # a query like "select 1,2;" will return two columns of the same name !
  621                  # hence adjust to that, note, however, that dict-style access won't work
  622                  # on results of such queries ...
  623                  if col_indices.has_key(col_name):
  624                          col_name = '%s_%s' % (col_name, col_index)
  625                  col_indices[col_name] = col_index
  626                  col_index += 1
  627  
  628          return col_indices
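
# Illustrative sketch (editor addition): how duplicate column names get
# disambiguated - "select 1, 2" yields two columns both named "?column?",
# so the second one gets its column index appended.
def __example_get_col_indices(conn):
        curs = conn.cursor()
        curs.execute(u'select 1, 2')
        print get_col_indices(curs)     # eg. {'?column?': 0, '?column?_1': 1}
        curs.close()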
629 #------------------------------------------------------------------------
  630  def get_col_defs(link_obj=None, schema='public', table=None):
  631          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
  632          col_names = []
  633          col_type = {}
  634          for row in rows:
  635                  col_names.append(row[0])
  636                  # map array types
  637                  if row[1].startswith('_'):
  638                          col_type[row[0]] = row[1][1:] + '[]'
  639                  else:
  640                          col_type[row[0]] = row[1]
  641          col_defs = []
  642          col_defs.append(col_names)
  643          col_defs.append(col_type)
  644          return col_defs
645 #------------------------------------------------------------------------
  646  def get_col_names(link_obj=None, schema='public', table=None):
  647          """Return column attributes of table"""
  648          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
  649          cols = []
  650          for row in rows:
  651                  cols.append(row[0])
  652          return cols
  653  
  654  #------------------------------------------------------------------------
  655  # i18n functions
  656  #------------------------------------------------------------------------
  657  def export_translations_from_database(filename=None):
  658          tx_file = codecs.open(filename, 'wb', 'utf8')
  659          tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
  660          tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
  661          tx_file.write(u'-- - user database language is set to [%s]\n\n' % get_current_user_language())
  662          tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
  663          tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
  664          tx_file.write(u'set default_transaction_read_only to off;\n\n')
  665          tx_file.write(u"set client_encoding to 'utf-8';\n\n")
  666          tx_file.write(u'\\unset ON_ERROR_STOP\n\n')
  667  
  668          cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
  669          rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
  670          for row in rows:
  671                  line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
  672                          row['lang'].replace("'", "\\'"),
  673                          row['orig'].replace("'", "\\'"),
  674                          row['trans'].replace("'", "\\'")
  675                  )
  676                  tx_file.write(line)
  677          tx_file.write(u'\n')
  678  
  679          tx_file.write(u'\set ON_ERROR_STOP 1\n')
  680          tx_file.close()
  681  
  682          return True
683 #------------------------------------------------------------------------
  684  def delete_translation_from_database(link_obj=None, language=None, original=None):
  685          cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
  686          args = {'lang': language, 'orig': original}
  687          run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
  688          return True
  689  
  690  #------------------------------------------------------------------------
  691  def update_translation_in_database(language=None, original=None, translation=None):
  692          cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
  693          args = {'lang': language, 'orig': original, 'trans': translation}
  694          run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
  695          return args
  696  
  697  #------------------------------------------------------------------------
  698  def get_translation_languages():
  699          rows, idx = run_ro_queries (
  700                  queries = [{'cmd': u'select distinct lang from i18n.translations'}]
  701          )
  702          return [ r[0] for r in rows ]
  703  
  704  #------------------------------------------------------------------------
  705  def get_database_translations(language=None, order_by=None):
  706  
  707          args = {'lang': language}
  708          _log.debug('language [%s]', language)
  709  
  710          if order_by is None:
  711                  order_by = u'ORDER BY lang, orig'
  712          else:
  713                  order_by = u'ORDER BY %s' % order_by
  714  
  715          if language is None:
  716                  cmd = u"""
  717          SELECT DISTINCT ON (orig, lang)
  718                  lang, orig, trans
  719          FROM ((
  720  
  721                  -- strings stored as translation keys whether translated or not
  722                  SELECT
  723                          NULL as lang,
  724                          ik.orig,
  725                          NULL AS trans
  726                  FROM
  727                          i18n.keys ik
  728  
  729          ) UNION ALL (
  730  
  731                  -- already translated strings
  732                  SELECT
  733                          it.lang,
  734                          it.orig,
  735                          it.trans
  736                  FROM
  737                          i18n.translations it
  738  
  739          )) as translatable_strings
  740          %s""" % order_by
  741          else:
  742                  cmd = u"""
  743          SELECT DISTINCT ON (orig, lang)
  744                  lang, orig, trans
  745          FROM ((
  746  
  747                  -- strings stored as translation keys whether translated or not
  748                  SELECT
  749                          %%(lang)s as lang,
  750                          ik.orig,
  751                          i18n._(ik.orig, %%(lang)s) AS trans
  752                  FROM
  753                          i18n.keys ik
  754  
  755          ) UNION ALL (
  756  
  757                  -- already translated strings
  758                  SELECT
  759                          %%(lang)s as lang,
  760                          it.orig,
  761                          i18n._(it.orig, %%(lang)s) AS trans
  762                  FROM
  763                          i18n.translations it
  764  
  765          )) AS translatable_strings
  766          %s""" % order_by
  767  
  768          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
  769  
  770          if rows is None:
  771                  _log.error('no translatable strings found')
  772          else:
  773                  _log.debug('%s translatable strings found', len(rows))
  774  
  775          return rows
  776  
  777  #------------------------------------------------------------------------
  778  def get_current_user_language():
  779          cmd = u'select i18n.get_curr_lang()'
  780          rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
  781          return rows[0][0]
  782  
  783  #------------------------------------------------------------------------
  784  def set_user_language(user=None, language=None):
  785          """Set the user language in the database.
  786  
  787          user = None: current db user
  788          language = None: unset
  789          """
  790          _log.info('setting database language for user [%s] to [%s]', user, language)
  791  
  792          args = {
  793                  'usr': user,
  794                  'lang': language
  795          }
  796  
  797          if language is None:
  798                  if user is None:
  799                          queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
  800                  else:
  801                          queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
  802                  queries.append({'cmd': u'select True'})
  803          else:
  804                  if user is None:
  805                          queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
  806                  else:
  807                          queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]
  808  
  809          rows, idx = run_rw_queries(queries = queries, return_data = True)
  810  
  811          if not rows[0][0]:
  812                  _log.error('cannot set database language to [%s] for user [%s]', language, user)
  813  
  814          return rows[0][0]
815 #------------------------------------------------------------------------
  816  def force_user_language(language=None):
  817          """Set the user language in the database.
  818  
  819          - regardless of whether there is any translation available.
  820          - only for the current user
  821          """
  822          _log.info('forcing database language for current db user to [%s]', language)
  823  
  824          run_rw_queries(queries = [{
  825                  'cmd': u'select i18n.force_curr_lang(%(lang)s)',
  826                  'args': {'lang': language}
  827          }])
  828  
  829  # =======================================================================
  830  # query runners and helpers
  831  # =======================================================================
  832  def send_maintenance_notification():
  833          cmd = u'notify "db_maintenance_warning:"'
  834          run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
835 #------------------------------------------------------------------------
  836  def send_maintenance_shutdown():
  837          cmd = u'notify "db_maintenance_disconnect:"'
  838          run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
839 #------------------------------------------------------------------------
  840  def is_pg_interval(candidate=None):
  841          cmd = u'SELECT %(candidate)s::interval'
  842          try:
  843                  rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
  844                  return True
  845          except:
  846                  cmd = u'SELECT %(candidate)s::text::interval'
  847                  try:
  848                          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
  849                          return True
  850                  except:
  851                          return False
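
# Illustrative sketch (editor addition): probing candidate interval values.
def __example_is_pg_interval():
        print is_pg_interval(candidate = u'2 weeks')            # True - casts to interval
        print is_pg_interval(candidate = u'not an interval')    # False - neither cast works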
852 #------------------------------------------------------------------------
  853  def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
  854          outfile = file(filename, 'wb')
  855          result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
  856          outfile.close()
  857          return result
858 #------------------------------------------------------------------------
  859  def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
  860          """Store data from a bytea field into a file.
  861  
  862          <data_query>
  863          - dict {'cmd': ..., 'args': ...}
  864          - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
  865          - 'args' must be a dict
  866          - must return one row with one field of type bytea
  867          <file>
  868          - must be a file like Python object
  869          <data_size>
  870          - integer of the total size of the expected data or None
  871          <data_size_query>
  872          - dict {'cmd': ..., 'args': ...}
  873          - cmd must be unicode
  874          - must return one row with one field with the octet_length() of the data field
  875          - used only when <data_size> is None
  876          """
  877          if data_size == 0:
  878                  return True
  879  
  880          # If the client sets an encoding other than the default we
  881          # will receive encoding-parsed data which isn't the binary
  882          # content we want. Hence we need to get our own connection.
  883          # It must be a read-write one so that we don't affect the
  884          # encoding for other users of the shared read-only
  885          # connections.
  886          # Actually, encodings shouldn't be applied to binary data
  887          # (eg. bytea types) in the first place but that is only
  888          # reported to be fixed > v7.4.
  889          # further tests reveal that at least on PG 8.0 this bug still
  890          # manifests itself
  891          conn = get_raw_connection(readonly=True)
  892  
  893          if data_size is None:
  894                  rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
  895                  data_size = rows[0][0]
  896          if data_size in [None, 0]:
  897                  conn.rollback()
  898                  return True
  899  
  900          max_chunk_size = 1024 * 1024 * 20       # 20 MB, works for typical CR DICOMs
  901          if chunk_size == 0:
  902                  chunk_size = min(data_size, max_chunk_size)
  903  
  904          _log.debug('expecting %s bytes of BYTEA data in chunks of %s bytes', data_size, chunk_size)
  905  
  906          # Windoze sucks: it can't transfer objects of arbitrary size,
  907          # anyways, we need to split the transfer,
  908          # however, only possible if postgres >= 7.2
  909          needed_chunks, remainder = divmod(data_size, chunk_size)
  910          _log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)
  911  
  912          # try setting "bytea_output"
  913          # - fails if not necessary
  914          # - succeeds if necessary
  915          try:
  916                  run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
  917          except dbapi.ProgrammingError:
  918                  _log.debug('failed to set bytea_output to "escape", not necessary')
  919  
  920          # retrieve chunks, skipped if data size < chunk size,
  921          # does this not carry the danger of cutting up multi-byte escape sequences ?
  922          # no, since bytea is binary,
  923          # yes, since in bytea there are *some* escaped values, still
  924          # no, since those are only escaped during *transfer*, not on-disk, hence
  925          # only complete escape sequences are put on the wire
  926          for chunk_id in range(needed_chunks):
  927                  chunk_start = (chunk_id * chunk_size) + 1
  928                  data_query['args']['start'] = chunk_start
  929                  data_query['args']['size'] = chunk_size
  930                  try:
  931                          rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
  932                  except:
  933                          _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
  934                          conn.rollback()
  935                          raise
  936                  # it would be a fatal error to see more than one result as ids are supposed to be unique
  937                  file_obj.write(str(rows[0][0]))
  938  
  939          # retrieve remainder
  940          if remainder > 0:
  941                  chunk_start = (needed_chunks * chunk_size) + 1
  942                  data_query['args']['start'] = chunk_start
  943                  data_query['args']['size'] = remainder
  944                  try:
  945                          rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
  946                  except:
  947                          _log.error('cannot retrieve remaining [%s] bytes' % remainder)
  948                          conn.rollback()
  949                          raise
  950                  # it would be a fatal error to see more than one result as ids are supposed to be unique
  951                  file_obj.write(str(rows[0][0]))
  952  
  953          conn.rollback()
  954          return True
955 #------------------------------------------------------------------------
  956  def file2bytea(query=None, filename=None, args=None, conn=None):
  957          """Store data from a file into a bytea field.
  958  
  959          The query must:
  960          - be in unicode
  961          - contain a format spec identifying the row (eg a primary key)
  962            matching <args> if it is an UPDATE
  963          - contain a format spec %(data)s::bytea
  964          """
  965          # read data from file
  966          infile = file(filename, "rb")
  967          data_as_byte_string = infile.read()
  968          infile.close()
  969          if args is None:
  970                  args = {}
  971          args['data'] = buffer(data_as_byte_string)
  972          del(data_as_byte_string)
  973  
  974          # insert the data
  975          if conn is None:
  976                  conn = get_raw_connection(readonly=False)
  977                  close_conn = True
  978          else:
  979                  close_conn = False
  980  
  981          run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
  982  
  983          if close_conn:
  984                  conn.close()
  985  
  986          return
987 #------------------------------------------------------------------------
  988  def sanitize_pg_regex(expression=None, escape_all=False):
  989          """Escape input for use in a PostgreSQL regular expression.
  990  
  991          If a fragment comes from user input and is to be used
  992          as a regular expression we need to make sure it doesn't
  993          contain invalid regex patterns such as unbalanced ('s.
  994  
  995          <escape_all>
  996                  True: try to escape *all* metacharacters
  997                  False: only escape those which render the regex invalid
  998          """
  999          return expression.replace (
 1000                  '(', '\('
 1001          ).replace (
 1002                  ')', '\)'
 1003          ).replace (
 1004                  '[', '\['
 1005          ).replace (
 1006                  '+', '\+'
 1007          ).replace (
 1008                  '.', '\.'
 1009          ).replace (
 1010                  '*', '\*'
 1011          )
 1012          #']', '\]',             # not needed
 1013  #------------------------------------------------------------------------
 1014  def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
 1015          """Run read-only queries.
 1016  
 1017          <queries> must be a list of dicts:
 1018                  [
 1019                          {'cmd': <string>, 'args': <dict> or <tuple>},
 1020                          {...},
 1021                          ...
 1022                  ]
 1023          """
 1024          if isinstance(link_obj, dbapi._psycopg.cursor):
 1025                  curs = link_obj
 1026                  curs_close = __noop
 1027                  tx_rollback = __noop
 1028          elif isinstance(link_obj, dbapi._psycopg.connection):
 1029                  curs = link_obj.cursor()
 1030                  curs_close = curs.close
 1031                  tx_rollback = link_obj.rollback
 1032          elif link_obj is None:
 1033                  conn = get_connection(readonly=True, verbose=verbose)
 1034                  curs = conn.cursor()
 1035                  curs_close = curs.close
 1036                  tx_rollback = conn.rollback
 1037          else:
 1038                  raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)
 1039  
 1040          if verbose:
 1041                  _log.debug('cursor: %s', curs)
 1042  
 1043          for query in queries:
 1044                  if type(query['cmd']) is not types.UnicodeType:
 1045                          print "run_ro_queries(): non-unicode query"
 1046                          print query['cmd']
 1047                  try:
 1048                          args = query['args']
 1049                  except KeyError:
 1050                          args = None
 1051                  try:
 1052                          curs.execute(query['cmd'], args)
 1053                          if verbose:
 1054                                  _log.debug('ran query: [%s]', curs.query)
 1055                                  if curs.statusmessage != u'':
 1056                                          _log.debug('PG status message: %s', curs.statusmessage)
 1057                                  _log.debug('cursor description: %s', str(curs.description))
 1058                  except dbapi.Error as pg_exc:
 1059                          _log.error('query failed: [%s]', curs.query)
 1060                          if curs.statusmessage != u'':
 1061                                  _log.error('PG status message: %s', curs.statusmessage)
 1062                          _log.error('PG error code: %s', pg_exc.pgcode)
 1063                          if pg_exc.pgerror is not None:
 1064                                  _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
 1065                          try:
 1066                                  curs_close()
 1067                          except dbapi.InterfaceError:
 1068                                  _log.exception('cannot close cursor')
 1069                          tx_rollback()           # need to rollback so ABORT state isn't preserved in pooled conns
 1070                          if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
 1071                                  details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
 1072                                  if curs.statusmessage != u'':
 1073                                          details = u'Status: %s\n%s' % (
 1074                                                  curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
 1075                                                  details
 1076                                          )
 1077                                  if pg_exc.pgerror is None:
 1078                                          msg = u'[%s]' % pg_exc.pgcode
 1079                                  else:
 1080                                          msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
 1081                                  raise gmExceptions.AccessDenied (
 1082                                          msg,
 1083                                          source = u'PostgreSQL',
 1084                                          code = pg_exc.pgcode,
 1085                                          details = details
 1086                                  )
 1087                          raise
 1088                  except:
 1089                          _log.error('query failed: [%s]', curs.query)
 1090                          if curs.statusmessage != u'':
 1091                                  _log.error('PG status message: %s', curs.statusmessage)
 1092                          try:
 1093                                  curs_close()
 1094                          except dbapi.InterfaceError:
 1095                                  _log.exception('cannot close cursor')
 1096                          tx_rollback()           # need to rollback so ABORT state isn't preserved in pooled conns
 1097                          raise
 1098  
 1099          data = None
 1100          col_idx = None
 1101          if return_data:
 1102                  data = curs.fetchall()
 1103                  if verbose:
 1104                          _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
 1105                          _log.debug('cursor description: %s', str(curs.description))
 1106                  if get_col_idx:
 1107                          col_idx = get_col_indices(curs)
 1108  
 1109          curs_close()
 1110          tx_rollback()           # rollback just so that we don't stay IDLE IN TRANSACTION forever
 1111          return (data, col_idx)
1112 #------------------------------------------------------------------------
 1113  def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
 1114          """Convenience function for running a transaction
 1115          that is supposed to get committed.
 1116  
 1117          <link_obj>
 1118                  can be either:
 1119                  - a cursor
 1120                  - a connection
 1121  
 1122          <queries>
 1123                  is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
 1124                  to be executed as a single transaction, the last
 1125                  query may usefully return rows (such as a
 1126                  "SELECT currval('some_sequence')" statement)
 1127  
 1128          <end_tx>
 1129                  - controls whether the transaction is finalized (eg.
 1130                    committed/rolled back) or not, this allows the
 1131                    call to run_rw_queries() to be part of a framing
 1132                    transaction
 1133                  - if link_obj is a connection then <end_tx> will
 1134                    default to False unless it is explicitly set to
 1135                    True which is taken to mean "yes, you do have full
 1136                    control over the transaction" in which case the
 1137                    transaction is properly finalized
 1138                  - if link_obj is a cursor we CANNOT finalize the
 1139                    transaction because we would need the connection for that
 1140                  - if link_obj is None <end_tx> will, of course, always be True
 1141  
 1142          <return_data>
 1143                  - if true, the returned data will include the rows
 1144                    the last query selected
 1145                  - if false, it returns None instead
 1146  
 1147          <get_col_idx>
 1148                  - if true, the returned data will include a dictionary
 1149                    mapping field names to column positions
 1150                  - if false, the returned data returns None instead
 1151  
 1152          method result:
 1153                  - returns a tuple (data, idx)
 1154                  - <data>:
 1155                          * (None, None) if last query did not return rows
 1156                          * ("fetchall() result", <index>) if last query returned any rows
 1157                          * for <index> see <get_col_idx>
 1158          """
 1159          if isinstance(link_obj, dbapi._psycopg.cursor):
 1160                  conn_close = __noop
 1161                  conn_commit = __noop
 1162                  tx_rollback = __noop
 1163                  curs = link_obj
 1164                  curs_close = __noop
 1165          elif isinstance(link_obj, dbapi._psycopg.connection):
 1166                  conn_close = __noop
 1167                  if end_tx:
 1168                          conn_commit = link_obj.commit
 1169                          tx_rollback = link_obj.rollback
 1170                  else:
 1171                          conn_commit = __noop
 1172                          tx_rollback = __noop
 1173                  curs = link_obj.cursor()
 1174                  curs_close = curs.close
 1175          elif link_obj is None:
 1176                  conn = get_connection(readonly=False)
 1177                  conn_close = conn.close
 1178                  conn_commit = conn.commit
 1179                  tx_rollback = conn.rollback
 1180                  curs = conn.cursor()
 1181                  curs_close = curs.close
 1182          else:
 1183                  raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)
 1184  
 1185          for query in queries:
 1186                  if type(query['cmd']) is not types.UnicodeType:
 1187                          print "run_rw_queries(): non-unicode query"
 1188                          print query['cmd']
 1189                  try:
 1190                          args = query['args']
 1191                  except KeyError:
 1192                          args = None
 1193                  try:
 1194                          curs.execute(query['cmd'], args)
 1195                  except dbapi.Error as pg_exc:
 1196                          _log.error('RW query failed: [%s]', curs.query)
 1197                          if curs.statusmessage != u'':
 1198                                  _log.error('PG status message: %s', curs.statusmessage)
 1199                          _log.error('PG error code: %s', pg_exc.pgcode)
 1200                          if pg_exc.pgerror is not None:
 1201                                  _log.error('PG error message: %s', pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
 1202                          try:
 1203                                  curs_close()
 1204                                  tx_rollback()           # just for good measure
 1205                                  conn_close()
 1206                          except dbapi.InterfaceError:
 1207                                  _log.exception('cannot cleanup')
 1208                          if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
 1209                                  details = u'Query: [%s]' % curs.query.strip().strip(u'\n').strip().strip(u'\n')
 1210                                  if curs.statusmessage != u'':
 1211                                          details = u'Status: %s\n%s' % (
 1212                                                  curs.statusmessage.strip().strip(u'\n').strip().strip(u'\n'),
 1213                                                  details
 1214                                          )
 1215                                  if pg_exc.pgerror is None:
 1216                                          msg = u'[%s]' % pg_exc.pgcode
 1217                                  else:
 1218                                          msg = u'[%s]: %s' % (pg_exc.pgcode, pg_exc.pgerror.strip().strip(u'\n').strip().strip(u'\n'))
 1219                                  raise gmExceptions.AccessDenied (
 1220                                          msg,
 1221                                          source = u'PostgreSQL',
 1222                                          code = pg_exc.pgcode,
 1223                                          details = details
 1224                                  )
 1225                          raise
 1226                  except:
 1227                          _log.exception('error running RW query')
 1228                          gmLog2.log_stack_trace()
 1229                          try:
 1230                                  curs_close()
 1231                                  tx_rollback()
 1232                                  conn_close()
 1233                          except dbapi.InterfaceError:
 1234                                  _log.exception('cannot cleanup')
 1235                                  raise
 1236                          raise
 1237  
 1238          data = None
 1239          col_idx = None
 1240          if return_data:
 1241                  try:
 1242                          data = curs.fetchall()
 1243                  except:
 1244                          _log.exception('error fetching data from RW query')
 1245                          gmLog2.log_stack_trace()
 1246                          try:
 1247                                  curs_close()
 1248                                  tx_rollback()
 1249                                  conn_close()
 1250                          except dbapi.InterfaceError:
 1251                                  _log.exception('cannot cleanup')
 1252                                  raise
 1253                          raise
 1254                  if get_col_idx:
 1255                          col_idx = get_col_indices(curs)
 1256  
 1257          curs_close()
 1258          conn_commit()
 1259          conn_close()
 1260  
 1261          return (data, col_idx)
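
# Illustrative sketch (editor addition): a two-statement transaction whose
# last query returns the new row's primary key; the column name 'pk' (and
# reusing gm.access_log for the demo) are assumptions, not from the original.
def __example_run_rw_queries():
        rows, idx = run_rw_queries (
                queries = [
                        {'cmd': u'insert into gm.access_log (user_action) values (%(action)s)', 'args': {'action': u'demo'}},
                        {'cmd': u"select currval(pg_get_serial_sequence('gm.access_log', 'pk'))"}
                ],
                return_data = True
        )
        return rows[0][0]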
1262 #------------------------------------------------------------------------
 1263  def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
 1264          """Generates SQL for an INSERT query.
 1265  
 1266          values: dict of values keyed by field to insert them into
 1267          """
 1268          if schema is None:
 1269                  schema = u'public'
 1270  
 1271          fields = values.keys()          # that way val_snippets and fields really should end up in the same order
 1272          val_snippets = []
 1273          for field in fields:
 1274                  val_snippets.append(u'%%(%s)s' % field)
 1275  
 1276          if returning is None:
 1277                  returning = u''
 1278                  return_data = False
 1279          else:
 1280                  returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
 1281                  return_data = True
 1282  
 1283          cmd = u"""\nINSERT INTO %s.%s (
 1284                  %s
 1285          ) VALUES (
 1286                  %s
 1287          )%s""" % (
 1288                  schema,
 1289                  table,
 1290                  u',\n\t\t'.join(fields),
 1291                  u',\n\t\t'.join(val_snippets),
 1292                  returning
 1293          )
 1294  
 1295          _log.debug(u'running SQL: >>>%s<<<', cmd)
 1296  
 1297          return run_rw_queries (
 1298                  link_obj = link_obj,
 1299                  queries = [{'cmd': cmd, 'args': values}],
 1300                  end_tx = end_tx,
 1301                  return_data = return_data,
 1302                  get_col_idx = get_col_idx,
 1303                  verbose = verbose
 1304          )
 1305  # =======================================================================
 1306  # connection handling API
 1307  # -----------------------------------------------------------------------
 1308  class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
 1309          """
 1310          GNUmed database connection pool.
 1311  
 1312          Extends psycopg2's PersistentConnectionPool with
 1313          a custom _connect() function. Supports one connection
 1314          per thread - which also ties it to one particular DSN.
 1315          """
 1316          #--------------------------------------------------
 1317          def _connect(self, key=None):
 1318  
 1319                  conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)
 1320  
 1321                  conn.original_close = conn.close
 1322                  conn.close = _raise_exception_on_ro_conn_close
 1323  
 1324                  if key is not None:
 1325                          self._used[key] = conn
 1326                          self._rused[id(conn)] = key
 1327                  else:
 1328                          self._pool.append(conn)
 1329  
 1330                  return conn
 1331          #--------------------------------------------------
 1332          def shutdown(self):
 1333                  for conn_key in self._used.keys():
 1334                          _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
 1335                          self._used[conn_key].original_close()
1336 # -----------------------------------------------------------------------
 1337  def get_raw_connection(dsn=None, verbose=False, readonly=True):
 1338          """Get a raw, unadorned connection.
 1339  
 1340          - this will not set any parameters such as encoding, timezone, datestyle
 1341          - the only requirement is a valid DSN
 1342          - hence it can be used for "service" connections
 1343            for verifying encodings etc
 1344          """
 1345          # FIXME: support verbose
 1346          if dsn is None:
 1347                  dsn = get_default_dsn()
 1348  
 1349          if u'host=salaam.homeunix' in dsn:
 1350                  raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')
 1351  
 1352          try:
 1353                  conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
 1354          except dbapi.OperationalError, e:
 1355  
 1356                  t, v, tb = sys.exc_info()
 1357                  try:
 1358                          msg = e.args[0]
 1359                  except (AttributeError, IndexError, TypeError):
 1360                          raise
 1361  
 1362                  msg = unicode(msg, gmI18N.get_encoding(), 'replace')
 1363  
 1364                  if msg.find('fe_sendauth') != -1:
 1365                          raise cAuthenticationError, (dsn, msg), tb
 1366  
 1367                  if regex.search('user ".*" does not exist', msg) is not None:
 1368                          raise cAuthenticationError, (dsn, msg), tb
 1369  
 1370                  if msg.find('uthenti') != -1:
 1371                          raise cAuthenticationError, (dsn, msg), tb
 1372  
 1373                  raise
 1374  
 1375          _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)
 1376  
 1377          # do first-time stuff
 1378          global postgresql_version
 1379          if postgresql_version is None:
 1380                  curs = conn.cursor()
 1381                  curs.execute("""
 1382                          SELECT
 1383                                  substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
 1384                          FROM
 1385                                  pg_settings
 1386                          WHERE
 1387                                  name = 'server_version'
 1388                  """)
 1389                  postgresql_version = curs.fetchone()['version']
 1390                  _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
 1391                  try:
 1392                          curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
 1393                          _log.info('database size: %s', curs.fetchone()[0])
 1394                  except:
 1395                          pass
 1396                  if verbose:
 1397                          __log_PG_settings(curs=curs)
 1398                  curs.close()
 1399                  conn.commit()
 1400  
 1401          if _default_client_timezone is None:
 1402                  __detect_client_timezone(conn = conn)
 1403  
 1404          curs = conn.cursor()
 1405  
 1406          # set access mode
 1407          if readonly:
 1408                  _log.debug('access mode [READ ONLY]')
 1409                  cmd = 'set session characteristics as transaction READ ONLY'
 1410                  curs.execute(cmd)
 1411                  cmd = 'set default_transaction_read_only to on'
 1412                  curs.execute(cmd)
 1413          else:
 1414                  _log.debug('access mode [READ WRITE]')
 1415                  cmd = 'set session characteristics as transaction READ WRITE'
 1416                  curs.execute(cmd)
 1417                  cmd = 'set default_transaction_read_only to off'
 1418                  curs.execute(cmd)
 1419  
 1420          curs.close()
 1421          conn.commit()
 1422  
 1423          conn.is_decorated = False
 1424  
 1425          return conn
1426 # =======================================================================
 1427  def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
 1428          """Get a new connection.
 1429  
 1430          This assumes the locale system has been initialized
 1431          unless an encoding is specified.
 1432          """
 1433          # FIXME: support pooled on RW, too
 1434          # FIXME: for now, support the default DSN only
 1435          if pooled and readonly and (dsn is None):
 1436                  global __ro_conn_pool
 1437                  if __ro_conn_pool is None:
 1438                          __ro_conn_pool = cConnectionPool (
 1439                                  minconn = 1,
 1440                                  maxconn = 2,
 1441                                  dsn = dsn,
 1442                                  verbose = verbose
 1443                          )
 1444                  conn = __ro_conn_pool.getconn()
 1445          else:
 1446                  conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)
 1447  
 1448          if conn.is_decorated:
 1449                  return conn
 1450  
 1451          if encoding is None:
 1452                  encoding = _default_client_encoding
 1453          if encoding is None:
 1454                  encoding = gmI18N.get_encoding()
 1455                  _log.warning('client encoding not specified')
 1456                  _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
 1457                  _log.warning('for this to work properly the application MUST have called locale.setlocale() before')
 1458  
 1459          # set connection properties
 1460          # - client encoding
 1461          try:
 1462                  conn.set_client_encoding(encoding)
 1463          except dbapi.OperationalError:
 1464                  t, v, tb = sys.exc_info()
 1465                  if str(v).find("can't set encoding to") != -1:
 1466                          raise cEncodingError, (encoding, v), tb
 1467                  raise
 1468  
 1469          # - transaction isolation level
 1470          if readonly:
 1471                  iso_level = u'read committed'
 1472          else:
 1473                  conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
 1474                  iso_level = u'serializable'
 1475  
 1476          _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)
 1477  
 1478          curs = conn.cursor()
 1479  
 1480          # - client time zone
 1481          curs.execute(_sql_set_timezone, [_default_client_timezone])
 1482  
 1483          conn.commit()
 1484  
 1485          # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
 1486          # FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
 1487          #
 1488          # - bytea data format
 1489          # PG 9.0 switched to - by default - using "hex" rather than "escape",
 1490          # however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
 1491          # as the transmission mode for bytea output,
 1492          # so try to set this setting back to "escape",
 1493          # if that's not possible the reason will be that PG < 9.0 does not support
 1494          # that setting - which also means we don't need it and can ignore the
 1495          # failure
 1496          cmd = "set bytea_output to 'escape'"
 1497          try:
 1498                  curs.execute(cmd)
 1499          except dbapi.ProgrammingError:
 1500                  _log.error('cannot set bytea_output format')
 1501  
 1502          curs.close()
 1503          conn.commit()
 1504  
 1505          conn.is_decorated = True
 1506  
 1507          return conn
1508 #-----------------------------------------------------------------------
 1509  def shutdown():
 1510          if __ro_conn_pool is None:
 1511                  return
 1512          __ro_conn_pool.shutdown()
 1513  # ======================================================================
 1514  # internal helpers
 1515  #-----------------------------------------------------------------------
 1516  def __noop():
 1517          pass
1518 #-----------------------------------------------------------------------
 1519  def _raise_exception_on_ro_conn_close():
 1520          raise TypeError(u'close() called on read-only connection')
1521 #-----------------------------------------------------------------------
 1522  def log_database_access(action=None):
 1523          run_insert (
 1524                  schema = u'gm',
 1525                  table = u'access_log',
 1526                  values = {u'user_action': action},
 1527                  end_tx = True
 1528          )
1529 #-----------------------------------------------------------------------
 1530  def sanity_check_time_skew(tolerance=60):
 1531          """Check server time and local time to be within
 1532          the given tolerance of each other.
 1533  
 1534          tolerance: seconds
 1535          """
 1536          _log.debug('maximum skew tolerance (seconds): %s', tolerance)
 1537  
 1538          cmd = u"SELECT now() at time zone 'UTC'"
 1539          conn = get_raw_connection(readonly=True)
 1540          curs = conn.cursor()
 1541  
 1542          start = time.time()
 1543          rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
 1544          end = time.time()
 1545          client_now_as_utc = pydt.datetime.utcnow()
 1546  
 1547          curs.close()
 1548          conn.commit()
 1549  
 1550          server_now_as_utc = rows[0][0]
 1551          query_duration = end - start
 1552          _log.info('server "now" (UTC): %s', server_now_as_utc)
 1553          _log.info('client "now" (UTC): %s', client_now_as_utc)
 1554          _log.debug('wire roundtrip (seconds): %s', query_duration)
 1555  
 1556          if query_duration > tolerance:
 1557                  _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
 1558                  return False
 1559  
 1560          if server_now_as_utc > client_now_as_utc:
 1561                  real_skew = server_now_as_utc - client_now_as_utc
 1562          else:
 1563                  real_skew = client_now_as_utc - server_now_as_utc
 1564  
 1565          _log.debug('client/server time skew: %s', real_skew)
 1566  
 1567          if real_skew > pydt.timedelta(seconds = tolerance):
 1568                  _log.error('client/server time skew > tolerance')
 1569                  return False
 1570  
 1571          return True
#-----------------------------------------------------------------------
def sanity_check_database_settings():
    """Check database settings for values GNUmed depends on.

    returns (status, message)
    status:
        0: no problem
        1: non-fatal problem
        2: fatal problem
    """
    _log.debug('checking database settings')

    conn = get_connection()

    # - version string
    global postgresql_version_string
    if postgresql_version_string is None:
        curs = conn.cursor()
        curs.execute('SELECT version()')
        postgresql_version_string = curs.fetchone()['version']
        curs.close()
        _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

    options2check = {
        # setting: [expected value, risk, fatal ?]
        u'allow_system_table_mods': [u'off', u'system breakage', False],
        u'check_function_bodies': [u'on', u'suboptimal error detection', False],
        u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
        u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
        u'default_transaction_read_only': [u'on', u'accidental database writes', False],
        u'fsync': [u'on', u'data loss/corruption', True],
        u'full_page_writes': [u'on', u'data loss/corruption', False],
        u'lc_messages': [u'C', u'suboptimal error detection', False],
        u'password_encryption': [u'on', u'breach of confidentiality', False],
        u'regex_flavor': [u'advanced', u'query breakage', False],   # removed in PG 9.0, default was 'advanced' anyway
        u'synchronous_commit': [u'on', u'data loss/corruption', False],
        u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
    }

    from Gnumed.pycommon import gmCfg2
    _cfg = gmCfg2.gmCfgData()
    if _cfg.get(option = u'hipaa'):
        options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
        options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
    else:
        options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
        options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

    cmd = u"SELECT name, setting FROM pg_settings WHERE name IN %(settings)s"
    rows, idx = run_ro_queries (
        link_obj = conn,
        queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
        get_col_idx = False
    )

    found_error = False
    found_problem = False
    msg = []
    for row in rows:
        option = row['name']
        value_found = row['setting']
        value_expected = options2check[option][0]
        risk = options2check[option][1]
        fatal_setting = options2check[option][2]
        if value_found != value_expected:
            if fatal_setting is True:
                found_error = True
            elif fatal_setting is False:
                found_problem = True
            elif fatal_setting is None:
                pass
            else:
                _log.error(options2check[option])
                raise ValueError(u'invalid database configuration sanity check')
            msg.append(_(' option [%s]: %s') % (option, value_found))
            msg.append(_(' risk: %s') % risk)
            _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

    if found_error:
        return 2, u'\n'.join(msg)

    if found_problem:
        return 1, u'\n'.join(msg)

    return 0, u''
#------------------------------------------------------------------------
def __log_PG_settings(curs=None):
    # don't use any of the run_*()s here since that might
    # create a loop if we fail inside them
    # FIXME: use pg_settings
    try:
        curs.execute(u'show all')
    except:
        _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
        return False
    settings = curs.fetchall()
    if settings is None:
        _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
        return False
    for setting in settings:
        _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
    return True
# =======================================================================
def extract_msg_from_pg_exception(exc=None):

    try:
        msg = exc.args[0]
    except (AttributeError, IndexError, TypeError):
        return u'cannot extract message from exception'

    return unicode(msg, gmI18N.get_encoding(), 'replace')
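# =======================================================================
# A sketch (not part of the module) of decoding a psycopg2 error for
# display; the intentionally broken DSN is an example only:
#
#   try:
#       conn = dbapi.connect('dbname=no_such_db')
#   except dbapi.OperationalError, exc:
#       print extract_msg_from_pg_exception(exc = exc)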
# =======================================================================
class cAuthenticationError(dbapi.OperationalError):

    def __init__(self, dsn=None, prev_val=None):
        self.dsn = dsn
        self.prev_val = prev_val

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
        _log.error(tmp)
        return tmp.encode(gmI18N.get_encoding(), 'replace')

    def __unicode__(self):
        return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)

# =======================================================================
# custom psycopg2 extensions
# =======================================================================
class cEncodingError(dbapi.OperationalError):

    def __init__(self, encoding=None, prev_val=None):
        self.encoding = encoding
        self.prev_val = prev_val

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

    def __unicode__(self):
        return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
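# -----------------------------------------------------------------------
# A sketch (not part of the module) of re-raising as cEncodingError while
# preserving the original traceback - the same Python 2 three-argument
# raise pattern get_connection() uses above:
#
#   try:
#       conn.set_client_encoding('no-such-encoding')
#   except dbapi.OperationalError:
#       t, v, tb = sys.exc_info()
#       raise cEncodingError, ('no-such-encoding', v), tb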
# -----------------------------------------------------------------------
# Python -> PostgreSQL
# -----------------------------------------------------------------------
# re-test when Squeeze (and thus psycopg2 2.2) becomes Stable
class cAdapterPyDateTime(object):

    def __init__(self, dt):
        if dt.tzinfo is None:
            raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
        self.__dt = dt

    def getquoted(self):
        return _timestamp_template % self.__dt.isoformat()
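# -----------------------------------------------------------------------
# A sketch (not part of the module) of what the adapter emits, assuming
# _timestamp_template (defined elsewhere in this module) is an SQL
# timestamptz literal template with a single %s placeholder:
#
#   from psycopg2.tz import FixedOffsetTimezone
#   dt = pydt.datetime(2011, 1, 1, 12, 0, tzinfo = FixedOffsetTimezone(offset = 60))
#   print cAdapterPyDateTime(dt).getquoted()
#   # -> an SQL literal along the lines of:
#   #    timestamp with time zone '2011-01-01T12:00:00+01:00'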

## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##  def __init__(self, dt):
##      if dt.tz == '???':
##          _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##      self.__dt = dt
##
##  def getquoted(self):
##      # under some locale settings the mx.DateTime ISO formatter
##      # will insert "," into the ISO string; while this is allowed
##      # per the ISO 8601 spec, PostgreSQL cannot currently handle
##      # it, so map those "," to "." to make things work:
##      return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# re-test when Squeeze (and thus psycopg2 2.2) becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#   import mx.DateTime as mxDT
##  psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#   _log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (...) ..." syntax because we want
# them adapted to "... ARRAY[...] ..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

    if len(sys.argv) < 2:
        sys.exit()

    if sys.argv[1] != 'test':
        sys.exit()

    logging.basicConfig(level = logging.DEBUG)
    #--------------------------------------------------------------------
    def test_file2bytea():
        run_rw_queries(queries = [
            {'cmd': u'create table test_bytea (data bytea)'}
        ])

        cmd = u'insert into test_bytea values (%(data)s::bytea)'
        try:
            file2bytea(query = cmd, filename = sys.argv[2])
        except:
            _log.exception('error')

        run_rw_queries(queries = [
            {'cmd': u'drop table test_bytea'}
        ])
    #--------------------------------------------------------------------
    def test_get_connection():
        print "testing get_connection()"

        dsn = 'foo'
        try:
            conn = get_connection(dsn = dsn)
        except dbapi.OperationalError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        dsn = 'dbname=gnumed_v9'
        try:
            conn = get_connection(dsn = dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        dsn = 'dbname=gnumed_v9 user=abc'
        try:
            conn = get_connection(dsn = dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        dsn = 'dbname=gnumed_v9 user=any-doc'
        try:
            conn = get_connection(dsn = dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
        try:
            conn = get_connection(dsn = dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        conn = get_connection(dsn = dsn, readonly = True)

        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        conn = get_connection(dsn = dsn, readonly = False)

        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        encoding = 'foo'
        try:
            conn = get_connection(dsn = dsn, encoding = encoding)
        except cEncodingError:
            print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v
    #--------------------------------------------------------------------
    def test_exceptions():
        print "testing exceptions"

        try:
            raise cAuthenticationError('no dsn', 'no previous exception')
        except cAuthenticationError:
            t, v, tb = sys.exc_info()
            print t
            print v
            print tb

        try:
            raise cEncodingError('no dsn', 'no previous exception')
        except cEncodingError:
            t, v, tb = sys.exc_info()
            print t
            print v
            print tb
    #--------------------------------------------------------------------
    def test_ro_queries():
        print "testing run_ro_queries()"

        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        conn = get_connection(dsn, readonly = True)

        data, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': u'SELECT version()'}], return_data = True, get_col_idx = True, verbose = True)
        print data
        print idx
        data, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': u'SELECT 1'}], return_data = True, get_col_idx = True)
        print data
        print idx

        curs = conn.cursor()

        data, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': u'SELECT version()'}], return_data = True, get_col_idx = True, verbose = True)
        print data
        print idx

        data, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': u'SELECT 1'}], return_data = True, get_col_idx = True, verbose = True)
        print data
        print idx

        try:
            data, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': u'selec 1'}], return_data = True, get_col_idx = True, verbose = True)
            print data
            print idx
        except psycopg2.ProgrammingError:
            print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        curs.close()
    #--------------------------------------------------------------------
    def test_request_dsn():
        conn = get_connection()
        print conn
        conn.close()
    #--------------------------------------------------------------------
    def test_set_encoding():
        print "testing set_default_client_encoding()"

        # same success/failure reporting for each candidate encoding
        for enc in ['foo', '', 'latin1', 'utf8', 'unicode', 'UNICODE']:
            try:
                set_default_client_encoding(enc)
                print "SUCCESS: encoding [%s] worked" % enc
            except ValueError:
                print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                t, v = sys.exc_info()[:2]
                print ' ', t
                print ' ', v
    #--------------------------------------------------------------------
    def test_connection_pool():
        dsn = get_default_dsn()
        pool = cConnectionPool(minconn = 1, maxconn = 2, dsn = None, verbose = False)
        print pool
        print pool.getconn()
        print pool.getconn()
        print pool.getconn()
        print type(pool.getconn())
    #--------------------------------------------------------------------
    def test_list_args():
        dsn = get_default_dsn()
        conn = get_connection(dsn, readonly = True)
        curs = conn.cursor()
        curs.execute('SELECT * FROM clin.clin_narrative WHERE narrative = %s', ['a'])
    #--------------------------------------------------------------------
    def test_sanitize_pg_regex():
        tests = [
            ['(', '\\(']
            , ['[', '\\[']
            , [')', '\\)']
        ]
        for test in tests:
            result = sanitize_pg_regex(test[0])
            if result != test[1]:
                print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
    #--------------------------------------------------------------------
    def test_is_pg_interval():
        status = True
        tests = [
            [None, True],       # None == NULL == succeeds !
            [1, True],
            ['1', True],
            ['abc', False]
        ]

        if not is_pg_interval():
            print 'ERROR: is_pg_interval() returned "False", expected "True"'
            status = False

        for test in tests:
            result = is_pg_interval(test[0])
            if result != test[1]:
                print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
                status = False

        return status
    #--------------------------------------------------------------------
    def test_sanity_check_time_skew():
        sanity_check_time_skew()
    #--------------------------------------------------------------------
    def test_get_foreign_key_details():
        for row in get_foreign_keys2column (
            schema = u'dem',
            table = u'identity',
            column = u'pk'
        ):
            print '%s.%s references %s.%s.%s' % (
                row['referencing_table'],
                row['referencing_column'],
                row['referenced_schema'],
                row['referenced_table'],
                row['referenced_column']
            )
    #--------------------------------------------------------------------
    def test_set_user_language():
        # tuples of (user, language, expected result); an expected
        # result of None means an exception is expected instead
        tests = [
            # current user
            [None, 'de_DE', True],
            [None, 'lang_w/o_tx', False],
            [None, None, True],
            # valid user
            ['any-doc', 'de_DE', True],
            ['any-doc', 'lang_w/o_tx', False],
            ['any-doc', None, True],
            # invalid user
            ['invalid user', 'de_DE', None],
            ['invalid user', 'lang_w/o_tx', False],     # language checking happens before user checking
            ['invalid user', None, True]
        ]
        for test in tests:
            try:
                result = set_user_language(user = test[0], language = test[1])
                if result != test[2]:
                    print "test:", test
                    print "result:", result, "expected:", test[2]
            except psycopg2.IntegrityError, e:
                if test[2] is None:
                    continue
                print "test:", test
                print "expected exception"
                print "result:", e
    #--------------------------------------------------------------------
    def test_get_schema_revision_history():
        for line in get_schema_revision_history():
            print u' - '.join(line)
    #--------------------------------------------------------------------
    def test_run_query():
        gmDateTime.init()
        args = {'dt': gmDateTime.pydt_max_here()}
        cmd = u"SELECT %(dt)s"

        #cmd = u"SELECT 'infinity'::timestamp with time zone"
        rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
        print rows
    #--------------------------------------------------------------------
    def test_schema_exists():
        print schema_exists()

    #--------------------------------------------------------------------
    # run tests
    #test_file2bytea()
    #test_get_connection()
    #test_exceptions()
    #test_ro_queries()
    #test_request_dsn()
    #test_set_encoding()
    #test_connection_pool()
    #test_list_args()
    #test_sanitize_pg_regex()
    #test_is_pg_interval()
    #test_sanity_check_time_skew()
    #test_get_foreign_key_details()
    #test_set_user_language()
    #test_get_schema_revision_history()
    #test_run_query()
    test_schema_exists()

# ======================================================================