
Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Generator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __version__ = "$Revision: 1.127 $" 
  15  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  16  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  17   
  18  ### imports ### 
  19  # stdlib 
  20  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging 
  21   
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  28   
  29  _log = logging.getLogger('gm.db') 
  30  _log.info(__version__) 
  31   
  32   
  33  # 3rd party 
  34  try: 
  35          import psycopg2 as dbapi 
  36  except ImportError: 
  37          _log.exception("Python database adapter psycopg2 not found.") 
  38          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  39          raise 
  40  ### imports ### 
  41   
  42   
  43  _log.info('psycopg2 version: %s' % dbapi.__version__) 
  44  _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle)) 
  45  if not (float(dbapi.apilevel) >= 2.0): 
  46          raise ImportError('gmPG2: supported DB-API level too low') 
  47  if not (dbapi.threadsafety > 0): 
  48          raise ImportError('gmPG2: lacking minimum thread safety in psycopg2') 
  49  if not (dbapi.paramstyle == 'pyformat'): 
  50          raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2') 
  51  try: 
  52          dbapi.__version__.index('dt') 
  53  except ValueError: 
  54          raise ImportError('gmPG2: lacking datetime support in psycopg2') 
  55  try: 
  56          dbapi.__version__.index('ext') 
  57  except ValueError: 
  58          raise ImportError('gmPG2: lacking extensions support in psycopg2') 
  59  try: 
  60          dbapi.__version__.index('pq3') 
  61  except ValueError: 
  62          raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2') 
  63   
  64  import psycopg2.extras 
  65  import psycopg2.extensions 
  66  import psycopg2.pool 
  67  import psycopg2.errorcodes as sql_error_codes 
  68   
  69  # ======================================================================= 
  70  _default_client_encoding = 'UTF8' 
  71  _log.info('assuming default client encoding of [%s]' % _default_client_encoding) 
  72   
  73  # things timezone 
  74  _default_client_timezone = None                 # default time zone for connections 
  75  _sql_set_timezone = None 
   76  _timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted will not work 
  77  FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone 
  78   
  79  _default_dsn = None 
  80  _default_login = None 
  81   
  82  postgresql_version_string = None 
  83  postgresql_version = None                       # accuracy: major.minor 
  84   
  85  __ro_conn_pool = None 
  86   
  87  auto_request_login_params = True 
  88  # ======================================================================= 
  89  # global data 
  90  # ======================================================================= 
  91   
  92  known_schema_hashes = { 
  93          0: 'not released, testing only', 
  94          2: 'b09d50d7ed3f91ddf4c4ddb8ea507720', 
  95          3: 'e73718eaf230d8f1d2d01afa8462e176', 
  96          4: '4428ccf2e54c289136819e701bb095ea', 
  97          5: '7e7b093af57aea48c288e76632a382e5',  # ... old (v1) style hashes 
  98          6: '90e2026ac2efd236da9c8608b8685b2d',  # new (v2) style hashes ... 
  99          7: '6c9f6d3981483f8e9433df99d1947b27', 
 100          8: '89b13a7af83337c3aad153b717e52360', 
 101          9: '641a9b2be3c378ffc2bb2f0b1c9f051d', 
 102          10: '7ef42a8fb2bd929a2cdd0c63864b4e8a', 
 103          11: '03042ae24f3f92877d986fb0a6184d76', 
 104          12: '06183a6616db62257e22814007a8ed07', 
 105          13: 'fab7c1ae408a6530c47f9b5111a0841e', 
 106          14: 'e170d543f067d1ea60bfe9076b1560cf', 
 107          15: '70012ff960b77ecdff4981c94b5b55b6', 
 108          16: '0bcf44ca22c479b52976e5eda1de8161', 
 109          17: 'f70d49d7c4fa739f2b3925b8254b8b78' 
 110  } 
 111   
 112  map_schema_hash2version = { 
 113          'b09d50d7ed3f91ddf4c4ddb8ea507720': 2, 
 114          'e73718eaf230d8f1d2d01afa8462e176': 3, 
 115          '4428ccf2e54c289136819e701bb095ea': 4, 
 116          '7e7b093af57aea48c288e76632a382e5': 5, 
 117          '90e2026ac2efd236da9c8608b8685b2d': 6, 
 118          '6c9f6d3981483f8e9433df99d1947b27': 7, 
 119          '89b13a7af83337c3aad153b717e52360': 8, 
 120          '641a9b2be3c378ffc2bb2f0b1c9f051d': 9, 
 121          '7ef42a8fb2bd929a2cdd0c63864b4e8a': 10, 
 122          '03042ae24f3f92877d986fb0a6184d76': 11, 
 123          '06183a6616db62257e22814007a8ed07': 12, 
 124          'fab7c1ae408a6530c47f9b5111a0841e': 13, 
 125          'e170d543f067d1ea60bfe9076b1560cf': 14, 
 126          '70012ff960b77ecdff4981c94b5b55b6': 15, 
 127          '0bcf44ca22c479b52976e5eda1de8161': 16, 
 128          'f70d49d7c4fa739f2b3925b8254b8b78': 17 
 129  } 
 130   
 131  map_client_branch2required_db_version = { 
 132          u'GIT tree': 0, 
 133          u'0.3': 9, 
 134          u'0.4': 10, 
 135          u'0.5': 11, 
 136          u'0.6': 12, 
 137          u'0.7': 13, 
 138          u'0.8': 14, 
 139          u'0.9': 15, 
 140          u'1.0': 16,             # intentionally duplicate with 1.1 
 141          u'1.1': 16, 
 142          u'1.2': 17 
 143  } 
 144   
 145  # get columns and data types for a given table 
 146  query_table_col_defs = u"""select 
 147          cols.column_name, 
 148          cols.udt_name 
 149  from 
 150          information_schema.columns cols 
 151  where 
 152          cols.table_schema = %s 
 153                  and 
 154          cols.table_name = %s 
 155  order by 
 156          cols.ordinal_position""" 
 157   
 158  query_table_attributes = u"""select 
 159          cols.column_name 
 160  from 
 161          information_schema.columns cols 
 162  where 
 163          cols.table_schema = %s 
 164                  and 
 165          cols.table_name = %s 
 166  order by 
 167          cols.ordinal_position""" 
 168   
 169  # ======================================================================= 
 170  # module globals API 
 171  # ======================================================================= 
  172  def set_default_client_encoding(encoding = None):
  173          # check whether psycopg2 can handle this encoding
  174          if encoding not in psycopg2.extensions.encodings:
  175                  raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
  176          # check whether Python can handle this encoding
  177          py_enc = psycopg2.extensions.encodings[encoding]
  178          try:
  179                  codecs.lookup(py_enc)
  180          except LookupError:
  181                  _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
  182                  raise
  183          # FIXME: check encoding against the database
  184          # FIXME: - but we may not yet have access
  185          # FIXME: - psycopg2 will pull its encodings from the database eventually
  186          # it seems safe to set it
  187          global _default_client_encoding
  188          _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
  189          _default_client_encoding = encoding
  190          return True
  191  #---------------------------------------------------
  192  def set_default_client_timezone(timezone = None):
  193  
  194          # FIXME: use __validate
  195          global _default_client_timezone
  196          _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
  197          _default_client_timezone = timezone
  198  
  199          global _sql_set_timezone
  200          _sql_set_timezone = u'set timezone to %s'
  201  
  202          return True
  203  #---------------------------------------------------
  204  def __validate_timezone(conn=None, timezone=None):
  205  
  206          _log.debug(u'validating time zone [%s]', timezone)
  207  
  208          cmd = u'set timezone to %(tz)s'
  209          args = {u'tz': timezone}
  210  
  211          conn.commit()
  212          curs = conn.cursor()
  213          is_valid = False
  214          try:
  215                  curs.execute(cmd, args)
  216                  _log.info(u'time zone [%s] is settable', timezone)
  217                  # can we actually use it, though ?
  218                  cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
  219                  try:
  220                          curs.execute(cmd)
  221                          curs.fetchone()
  222                          _log.info(u'time zone [%s] is usable', timezone)
  223                          is_valid = True
  224                  except:
  225                          _log.error('error using time zone [%s]', timezone)
  226          except dbapi.DataError:
  227                  _log.warning(u'time zone [%s] is not settable', timezone)
  228          except:
  229                  _log.error(u'failed to set time zone to [%s]', timezone)
  230                  _log.exception(u'')
  231  
  232          curs.close()
  233          conn.rollback()
  234  
  235          return is_valid
  236  #---------------------------------------------------
  237  def __expand_timezone(conn=None, timezone=None):
  238          """Some timezone definitions are abbreviations, so try to expand
  239          them, because "set time zone" doesn't take abbreviations."""
  240  
  241          cmd = u"""
  242                  select distinct on (abbrev) name
  243                  from pg_timezone_names
  244                  where
  245                          abbrev = %(tz)s and
  246                          name ~ '^[^/]+/[^/]+$' and
  247                          name !~ '^Etc/'
  248          """
  249          args = {u'tz': timezone}
  250  
  251          conn.commit()
  252          curs = conn.cursor()
  253  
  254          result = timezone
  255          try:
  256                  curs.execute(cmd, args)
  257                  rows = curs.fetchall()
  258                  if len(rows) > 0:
  259                          result = rows[0][0]
  260                          _log.debug(u'[%s] maps to [%s]', timezone, result)
  261          except:
  262                  _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)
  263  
  264          curs.close()
  265          conn.rollback()
  266  
  267          return result
  268  #---------------------------------------------------
  269  def __detect_client_timezone(conn=None):
  270          """This is run on the very first connection."""
  271  
  272          # FIXME: check whether server.timezone is the same
  273          # FIXME: value as what we eventually detect
  274  
  275          # we need gmDateTime to be initialized
  276          if gmDateTime.current_local_iso_numeric_timezone_string is None:
  277                  gmDateTime.init()
  278  
  279          _log.debug('trying to detect timezone from system')
  280  
  281          tz_candidates = []
  282          try:
  283                  tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
  284                  tz_candidates.append(tz)
  285                  expanded = __expand_timezone(conn = conn, timezone = tz)
  286                  if expanded != tz:
  287                          tz_candidates.append(expanded)
  288          except KeyError:
  289                  pass
  290  
  291          tz_candidates.append(gmDateTime.current_local_timezone_name)
  292          expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
  293          if expanded != gmDateTime.current_local_timezone_name:
  294                  tz_candidates.append(expanded)
  295  
  296          _log.debug('candidates: %s', str(tz_candidates))
  297  
  298          # find best among candidates
  299          global _default_client_timezone
  300          global _sql_set_timezone
  301          found = False
  302          for tz in tz_candidates:
  303                  if __validate_timezone(conn = conn, timezone = tz):
  304                          _default_client_timezone = tz
  305                          _sql_set_timezone = u'set timezone to %s'
  306                          found = True
  307                          break
  308  
  309          if not found:
  310                  _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
  311                  _sql_set_timezone = u"set time zone interval %s hour to minute"
  312  
  313          _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
  314  # =======================================================================
  315  # login API
  316  # =======================================================================
  317  def __request_login_params_tui():
  318          """Text mode request of database login parameters"""
  319          import getpass
  320          login = gmLoginInfo.LoginInfo()
  321  
  322          print "\nPlease enter the required login parameters:"
  323          try:
  324                  login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
  325                  login.database = prompted_input(prompt = "database", default = 'gnumed_v17')
  326                  login.user = prompted_input(prompt = "user name", default = '')
  327                  tmp = 'password for "%s" (not shown): ' % login.user
  328                  login.password = getpass.getpass(tmp)
  329                  login.port = prompted_input(prompt = "port", default = 5432)
  330          except KeyboardInterrupt:
  331                  _log.warning("user cancelled text mode login dialog")
  332                  print "user cancelled text mode login dialog"
  333                  raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))
  334  
  335          return login
  336  #---------------------------------------------------
  337  def __request_login_params_gui_wx():
  338          """GUI (wx) input request for database login parameters.
  339  
  340          Returns gmLoginInfo.LoginInfo object
  341          """
  342          import wx
  343          # OK, wxPython was already loaded. But has the main Application instance
  344          # been initialized yet ? if not, the exception will kick us out
  345          if wx.GetApp() is None:
  346                  raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))
  347  
  348          # Let's launch the login dialog
  349          # if wx was not initialized / no main App loop, an exception should be raised anyway
  350          import gmAuthWidgets
  351          dlg = gmAuthWidgets.cLoginDialog(None, -1)
  352          dlg.ShowModal()
  353          login = dlg.panel.GetLoginInfo()
  354          dlg.Destroy()
  355  
  356          # if user cancelled or something else went wrong, raise an exception
  357          if login is None:
  358                  raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))
  359  
  360          return login
  361  #---------------------------------------------------
  362  def request_login_params():
  363          """Request login parameters for database connection."""
  364          # do we auto-request parameters at all ?
  365          if not auto_request_login_params:
  366                  raise Exception('Cannot request login parameters.')
  367  
  368          # are we inside X ?
  369          # (if we aren't, wxGTK will crash hard at the
  370          # C level with "can't open Display")
  371          if os.environ.has_key('DISPLAY'):
  372                  # try wxPython GUI
  373                  try: return __request_login_params_gui_wx()
  374                  except: pass
  375  
  376          # well, either we are on the console or
  377          # wxPython does not work, use text mode
  378          return __request_login_params_tui()
  379  
  380  # =======================================================================
  381  # DSN API
  382  # -----------------------------------------------------------------------
  383  def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
  384          dsn_parts = []
  385  
  386          if (database is not None) and (database.strip() != ''):
  387                  dsn_parts.append('dbname=%s' % database)
  388  
  389          if (host is not None) and (host.strip() != ''):
  390                  dsn_parts.append('host=%s' % host)
  391  
  392          if (port is not None) and (str(port).strip() != ''):
  393                  dsn_parts.append('port=%s' % port)
  394  
  395          if (user is not None) and (user.strip() != ''):
  396                  dsn_parts.append('user=%s' % user)
  397  
  398          if (password is not None) and (password.strip() != ''):
  399                  dsn_parts.append('password=%s' % password)
  400  
  401          dsn_parts.append('sslmode=prefer')
  402  
  403          return ' '.join(dsn_parts)
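# Behaviour sketch (hypothetical values, not part of the original source):
# empty or None parts are skipped and sslmode=prefer is always appended, e.g.
#
#   make_psycopg2_dsn(database = 'gnumed_v17', host = '', port = 5432, user = 'any-doc', password = 'secret')
#
# returns: 'dbname=gnumed_v17 port=5432 user=any-doc password=secret sslmode=prefer'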
  404  # ------------------------------------------------------
  405  def get_default_login():
  406          # make sure we do have a login
  407          get_default_dsn()
  408          return _default_login
  409  # ------------------------------------------------------
  410  def get_default_dsn():
  411          global _default_dsn
  412          if _default_dsn is not None:
  413                  return _default_dsn
  414  
  415          login = request_login_params()
  416          set_default_login(login=login)
  417  
  418          return _default_dsn
  419  # ------------------------------------------------------
  420  def set_default_login(login=None):
  421          if login is None:
  422                  return False
  423  
  424          if login.host is not None:
  425                  if login.host.strip() == u'':
  426                          login.host = None
  427  
  428          global _default_login
  429          _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
  430          _default_login = login
  431  
  432          dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)
  433  
  434          global _default_dsn
  435          if _default_dsn is None:
  436                  old_dsn = u'None'
  437          else:
  438                  old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
  439          _log.info ('setting default DSN from [%s] to [%s]',
  440                  old_dsn,
  441                  regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
  442          )
  443          _default_dsn = dsn
  444  
  445          return True
  446  # =======================================================================
  447  # metadata API
  448  # =======================================================================
  449  def database_schema_compatible(link_obj=None, version=None, verbose=True):
  450          expected_hash = known_schema_hashes[version]
  451          if version == 0:
  452                  args = {'ver': 9999}
  453          else:
  454                  args = {'ver': version}
  455          rows, idx = run_ro_queries (
  456                  link_obj = link_obj,
  457                  queries = [{
  458                          'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
  459                          'args': args
  460                  }]
  461          )
  462          if rows[0]['md5'] != expected_hash:
  463                  _log.error('database schema version mismatch')
  464                  _log.error('expected: %s (%s)' % (version, expected_hash))
  465                  _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
  466                  if verbose:
  467                          _log.debug('schema dump follows:')
  468                          for line in get_schema_structure(link_obj=link_obj).split():
  469                                  _log.debug(line)
  470                          _log.debug('schema revision history dump follows:')
  471                          for line in get_schema_revision_history(link_obj=link_obj):
  472                                  _log.debug(u' - '.join(line))
  473                  return False
  474          _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
  475          return True
  476  #------------------------------------------------------------------------
  477  def get_schema_version(link_obj=None):
  478          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
  479          try:
  480                  return map_schema_hash2version[rows[0]['md5']]
  481          except KeyError:
  482                  return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
  483  #------------------------------------------------------------------------
  484  def get_schema_structure(link_obj=None):
  485          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
  486          return rows[0][0]
  487  #------------------------------------------------------------------------
  488  def get_schema_hash(link_obj=None):
  489          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
  490          return rows[0]['md5']
  491  #------------------------------------------------------------------------
  492  def get_schema_revision_history(link_obj=None):
  493          cmd = u"""
  494                  select
  495                          imported::text,
  496                          version,
  497                          filename
  498                  from gm.schema_revision
  499                  order by imported
  500          """
  501          rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
  502          return rows
  503  #------------------------------------------------------------------------
  504  def get_current_user():
  505          rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
  506          return rows[0][0]
  507  #------------------------------------------------------------------------
  508  def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
  509          """Get the foreign keys pointing to schema.table.column.
  510  
  511          Does not properly work with multi-column FKs.
  512          GNUmed doesn't use any, however.
  513          """
  514          cmd = u"""
  515                  select
  516                          %(schema)s as referenced_schema,
  517                          %(tbl)s as referenced_table,
  518                          %(col)s as referenced_column,
  519                          pgc.confkey as referenced_column_list,
  520                          pgc.conrelid::regclass as referencing_table,
  521                          pgc.conkey as referencing_column_list,
  522                          (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
  523                  from
  524                          pg_constraint pgc
  525                  where
  526                          pgc.contype = 'f'
  527                                  and
  528                          pgc.confrelid = (
  529                                  select oid from pg_class where relname = %(tbl)s and relnamespace = (
  530                                          select oid from pg_namespace where nspname = %(schema)s
  531                                  )
  532                          ) and
  533                          (
  534                                  select attnum
  535                                  from pg_attribute
  536                                  where
  537                                          attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
  538                                                  select oid from pg_namespace where nspname = %(schema)s
  539                                          ))
  540                                                  and
  541                                          attname = %(col)s
  542                          ) = any(pgc.confkey)
  543          """
  544  
  545          args = {
  546                  'schema': schema,
  547                  'tbl': table,
  548                  'col': column
  549          }
  550  
  551          rows, idx = run_ro_queries (
  552                  link_obj = link_obj,
  553                  queries = [
  554                          {'cmd': cmd, 'args': args}
  555                  ]
  556          )
  557  
  558          return rows
  559  #------------------------------------------------------------------------
  560  def get_child_tables(schema='public', table=None, link_obj=None):
  561          """Return child tables of <table>."""
  562          cmd = u"""
  563                  select
  564                          pgn.nspname as namespace,
  565                          pgc.relname as table
  566                  from
  567                          pg_namespace pgn,
  568                          pg_class pgc
  569                  where
  570                          pgc.relnamespace = pgn.oid
  571                                  and
  572                          pgc.oid in (
  573                                  select inhrelid from pg_inherits where inhparent = (
  574                                          select oid from pg_class where
  575                                                  relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
  576                                                  relname = %(table)s
  577                                  )
  578                          )"""
  579          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
  580          return rows
  581  #------------------------------------------------------------------------
  582  def schema_exists(link_obj=None, schema=u'gm'):
  583          cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
  584          args = {'schema': schema}
  585          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
  586          return rows[0][0]
  587  #------------------------------------------------------------------------
  588  def table_exists(link_obj=None, schema=None, table=None):
  589          """Returns False or True."""
  590          cmd = u"""
  591                  select exists (
  592                          select 1 from information_schema.tables
  593                          where
  594                                  table_schema = %s and
  595                                  table_name = %s and
  596                                  table_type = 'BASE TABLE'
  597                  )"""
  598          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
  599          return rows[0][0]
  600  #------------------------------------------------------------------------
  601  def get_col_indices(cursor = None):
  602          if cursor.description is None:
  603                  _log.error('no result description available: unused cursor or last query did not select rows')
  604                  return None
  605          col_indices = {}
  606          col_index = 0
  607          for col_desc in cursor.description:
  608                  col_name = col_desc[0]
  609                  # a query like "select 1,2;" will return two columns of the same name !
  610                  # hence adjust to that, note, however, that dict-style access won't work
  611                  # on results of such queries ...
  612                  if col_indices.has_key(col_name):
  613                          col_name = '%s_%s' % (col_name, col_index)
  614                  col_indices[col_name] = col_index
  615                  col_index += 1
  616  
  617          return col_indices
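# Behaviour sketch (hypothetical cursor, not part of the original source):
# after curs.execute(u'select 1, 2') both result columns are named "?column?",
# so get_col_indices(curs) returns {'?column?': 0, '?column?_1': 1} -- the
# duplicate name gets its column index appended as documented above.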
  618  #------------------------------------------------------------------------
  619  def get_col_defs(link_obj=None, schema='public', table=None):
  620          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
  621          col_names = []
  622          col_type = {}
  623          for row in rows:
  624                  col_names.append(row[0])
  625                  # map array types
  626                  if row[1].startswith('_'):
  627                          col_type[row[0]] = row[1][1:] + '[]'
  628                  else:
  629                          col_type[row[0]] = row[1]
  630          col_defs = []
  631          col_defs.append(col_names)
  632          col_defs.append(col_type)
  633          return col_defs
  634  #------------------------------------------------------------------------
  635  def get_col_names(link_obj=None, schema='public', table=None):
  636          """Return column attributes of table"""
  637          rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
  638          cols = []
  639          for row in rows:
  640                  cols.append(row[0])
  641          return cols
  642  
  643  #------------------------------------------------------------------------
  644  # i18n functions
  645  #------------------------------------------------------------------------
  646  def export_translations_from_database(filename=None):
  647          tx_file = codecs.open(filename, 'wb', 'utf8')
  648          tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
  649          tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
  650          tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
  651          tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
  652          tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
  653          tx_file.write(u'set default_transaction_read_only to off;\n\n')
  654          tx_file.write(u"set client_encoding to 'utf-8';\n\n")
  655          tx_file.write(u'\\unset ON_ERROR_STOP\n\n')
  656  
  657          cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
  658          rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
  659          for row in rows:
  660                  line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
  661                          row['lang'].replace("'", "\\'"),
  662                          row['orig'].replace("'", "\\'"),
  663                          row['trans'].replace("'", "\\'")
  664                  )
  665                  tx_file.write(line)
  666          tx_file.write(u'\n')
  667  
  668          tx_file.write(u'\set ON_ERROR_STOP 1\n')
  669          tx_file.close()
  670  
  671          return True
  672  #------------------------------------------------------------------------
  673  def delete_translation_from_database(link_obj=None, language=None, original=None):
  674          cmd = u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
  675          args = {'lang': language, 'orig': original}
  676          run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
  677          return True
  678  
  679  #------------------------------------------------------------------------
  680  def update_translation_in_database(language=None, original=None, translation=None):
  681          cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
  682          args = {'lang': language, 'orig': original, 'trans': translation}
  683          run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
  684          return args
  685  
  686  #------------------------------------------------------------------------
  687  def get_translation_languages():
  688          rows, idx = run_ro_queries (
  689                  queries = [{'cmd': u'select distinct lang from i18n.translations'}]
  690          )
  691          return [ r[0] for r in rows ]
  692  
  693  #------------------------------------------------------------------------
  694  def get_database_translations(language=None, order_by=None):
  695  
  696          args = {'lang': language}
  697          _log.debug('language [%s]', language)
  698  
  699          if order_by is None:
  700                  order_by = u'ORDER BY lang, orig'
  701          else:
  702                  order_by = u'ORDER BY %s' % order_by
  703  
  704          if language is None:
  705                  cmd = u"""
  706                  SELECT DISTINCT ON (orig, lang)
  707                          lang, orig, trans
  708                  FROM ((
  709  
  710                          -- strings stored as translation keys whether translated or not
  711                          SELECT
  712                                  NULL as lang,
  713                                  ik.orig,
  714                                  NULL AS trans
  715                          FROM
  716                                  i18n.keys ik
  717  
  718                  ) UNION ALL (
  719  
  720                          -- already translated strings
  721                          SELECT
  722                                  it.lang,
  723                                  it.orig,
  724                                  it.trans
  725                          FROM
  726                                  i18n.translations it
  727  
  728                  )) as translatable_strings
  729                  %s""" % order_by
  730          else:
  731                  cmd = u"""
  732                  SELECT DISTINCT ON (orig, lang)
  733                          lang, orig, trans
  734                  FROM ((
  735  
  736                          -- strings stored as translation keys whether translated or not
  737                          SELECT
  738                                  %%(lang)s as lang,
  739                                  ik.orig,
  740                                  i18n._(ik.orig, %%(lang)s) AS trans
  741                          FROM
  742                                  i18n.keys ik
  743  
  744                  ) UNION ALL (
  745  
  746                          -- already translated strings
  747                          SELECT
  748                                  %%(lang)s as lang,
  749                                  it.orig,
  750                                  i18n._(it.orig, %%(lang)s) AS trans
  751                          FROM
  752                                  i18n.translations it
  753  
  754                  )) AS translatable_strings
  755                  %s""" % order_by
  756  
  757          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
  758  
  759          if rows is None:
  760                  _log.error('no translatable strings found')
  761          else:
  762                  _log.debug('%s translatable strings found', len(rows))
  763  
  764          return rows
  765  
  766  #------------------------------------------------------------------------
  767  def get_current_user_language():
  768          cmd = u'select i18n.get_curr_lang()'
  769          rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
  770          return rows[0][0]
  771  
  772  #------------------------------------------------------------------------
  773  def set_user_language(user=None, language=None):
  774          """Set the user language in the database.
  775  
  776          user = None: current db user
  777          language = None: unset
  778          """
  779          _log.info('setting database language for user [%s] to [%s]', user, language)
  780  
  781          args = {
  782                  'usr': user,
  783                  'lang': language
  784          }
  785  
  786          if language is None:
  787                  if user is None:
  788                          queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
  789                  else:
  790                          queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
  791                  queries.append({'cmd': u'select True'})
  792          else:
  793                  if user is None:
  794                          queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
  795                  else:
  796                          queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]
  797  
  798          rows, idx = run_rw_queries(queries = queries, return_data = True)
  799  
  800          if not rows[0][0]:
  801                  _log.error('cannot set database language to [%s] for user [%s]', language, user)
  802  
  803          return rows[0][0]
  804  #------------------------------------------------------------------------
  805  def force_user_language(language=None):
  806          """Set the user language in the database.
  807  
  808          - regardless of whether there is any translation available.
  809          - only for the current user
  810          """
  811          _log.info('forcing database language for current db user to [%s]', language)
  812  
  813          run_rw_queries(queries = [{
  814                  'cmd': u'select i18n.force_curr_lang(%(lang)s)',
  815                  'args': {'lang': language}
  816          }])
  817  #------------------------------------------------------------------------
  818  #------------------------------------------------------------------------
  819  text_expansion_keywords = None
  820  
  821  def get_text_expansion_keywords():
  822          global text_expansion_keywords
  823          if text_expansion_keywords is not None:
  824                  return text_expansion_keywords
  825  
  826          cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
  827          rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
  828          text_expansion_keywords = rows
  829  
  830          _log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
  831  
  832          return text_expansion_keywords
  833  #------------------------------------------------------------------------
  834  def expand_keyword(keyword = None):
  835  
  836          # Easter Egg ;-)
  837          if keyword == u'$$steffi':
  838                  return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'
  839  
  840          cmd = u"""SELECT expansion FROM clin.v_your_keyword_expansions WHERE keyword = %(kwd)s"""
  841          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
  842  
  843          if len(rows) == 0:
  844                  return None
  845  
  846          return rows[0]['expansion']
  847  #------------------------------------------------------------------------
  848  def get_keyword_expansion_candidates(keyword = None):
  849  
  850          if keyword is None:
  851                  return []
  852  
  853          get_text_expansion_keywords()
  854  
  855          candidates = []
  856          for kwd in text_expansion_keywords:
  857                  if kwd['keyword'].startswith(keyword):
  858                          candidates.append(kwd['keyword'])
  859  
  860          return candidates
  861  #------------------------------------------------------------------------
  862  def add_text_expansion(keyword=None, expansion=None, public=None):
  863  
  864          if public:
  865                  cmd = u"SELECT 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
  866          else:
  867                  cmd = u"SELECT 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
  868  
  869          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
  870          if len(rows) != 0:
  871                  return False
  872  
  873          if public:
  874                  cmd = u"""
  875                          insert into clin.keyword_expansion (keyword, expansion, fk_staff)
  876                          values (%(kwd)s, %(exp)s, null)"""
  877          else:
  878                  cmd = u"""
  879                          insert into clin.keyword_expansion (keyword, expansion, fk_staff)
  880                          values (%(kwd)s, %(exp)s, (SELECT pk from dem.staff where db_user = current_user))"""
  881  
  882          rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])
  883  
  884          global text_expansion_keywords
  885          text_expansion_keywords = None
  886  
  887          return True
  888  #------------------------------------------------------------------------
  889  def delete_text_expansion(keyword):
  890          cmd = u"""
  891                  delete from clin.keyword_expansion where
  892                          keyword = %(kwd)s and (
  893                                  (fk_staff = (SELECT pk from dem.staff where db_user = current_user))
  894                                          or
  895                                  (fk_staff is null and owner = current_user)
  896                          )"""
  897          rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
  898  
  899          global text_expansion_keywords
  900          text_expansion_keywords = None
  901  #------------------------------------------------------------------------
  902  def edit_text_expansion(keyword, expansion):
  903  
  904          cmd1 = u"""
  905                  DELETE FROM clin.keyword_expansion
  906                  WHERE
  907                          keyword = %(kwd)s
  908                                  AND
  909                          fk_staff = (SELECT pk FROM dem.staff WHERE db_user = current_user)"""
  910  
  911          cmd2 = u"""
  912                  INSERT INTO clin.keyword_expansion (
  913                          keyword, expansion, fk_staff
  914                  ) VALUES (
  915                          %(kwd)s,
  916                          %(exp)s,
  917                          (SELECT pk FROM dem.staff WHERE db_user = current_user)
  918                  )"""
  919          args = {'kwd': keyword, 'exp': expansion}
  920          rows, idx = run_rw_queries(queries = [
  921                  {'cmd': cmd1, 'args': args},
  922                  {'cmd': cmd2, 'args': args},
  923          ])
  924  
  925          global text_expansion_keywords
  926          text_expansion_keywords = None
  927  # =======================================================================
  928  # query runners and helpers
  929  # =======================================================================
  930  def send_maintenance_notification():
  931          cmd = u'notify "db_maintenance_warning:"'
  932          run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
  933  #------------------------------------------------------------------------
  934  def send_maintenance_shutdown():
  935          cmd = u'notify "db_maintenance_disconnect:"'
  936          run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
  937  #------------------------------------------------------------------------
  938  def is_pg_interval(candidate=None):
  939          cmd = u'SELECT %(candidate)s::interval'
  940          try:
  941                  rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
  942                  return True
  943          except:
  944                  cmd = u'SELECT %(candidate)s::text::interval'
  945                  try:
  946                          rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
  947                          return True
  948                  except:
  949                          return False
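# Behaviour sketch (not part of the original source):
#
#   is_pg_interval(candidate = u'2 weeks')        # -> True, '2 weeks'::interval parses
#   is_pg_interval(candidate = u'hello world')    # -> False, neither cast succeeds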
  950  #------------------------------------------------------------------------
  951  def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
  952          outfile = file(filename, 'wb')
  953          result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
  954          outfile.close()
  955          return result
  956  #------------------------------------------------------------------------
  957  def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
  958          """Store data from a bytea field into a file.
  959  
  960          <data_query>
  961          - dict {'cmd': ..., 'args': ...}
  962          - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
  963          - 'args' must be a dict
  964          - must return one row with one field of type bytea
  965          <file_obj>
  966          - must be a file-like Python object
  967          <data_size>
  968          - integer of the total size of the expected data, or None
  969          <data_size_query>
  970          - dict {'cmd': ..., 'args': ...}
  971          - cmd must be unicode
  972          - must return one row with one field with the octet_length() of the data field
  973          - used only when <data_size> is None
  974          """
  975          if data_size == 0:
  976                  return True
  977  
  978          # If the client sets an encoding other than the default we
  979          # will receive encoding-parsed data which isn't the binary
  980          # content we want. Hence we need to get our own connection.
  981          # It must be a read-write one so that we don't affect the
  982          # encoding for other users of the shared read-only
  983          # connections.
  984          # Actually, encodings shouldn't be applied to binary data
  985          # (eg. bytea types) in the first place but that is only
  986          # reported to be fixed > v7.4.
  987          # further tests reveal that at least on PG 8.0 this bug still
  988          # manifests itself
  989          conn = get_raw_connection(readonly=True)
  990  
  991          if data_size is None:
  992                  rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
  993                  data_size = rows[0][0]
  994          if data_size in [None, 0]:
  995                  conn.rollback()
  996                  return True
  997  
  998          _log.debug('expecting bytea data of size: [%s] bytes' % data_size)
  999          _log.debug('using chunk size of: [%s] bytes' % chunk_size)
 1000  
 1001          # chunk size of 0 means "retrieve whole field at once"
 1002          if chunk_size == 0:
 1003                  chunk_size = data_size
 1004                  _log.debug('chunk size [0] bytes: retrieving all data at once')
 1005  
 1006          # Windoze sucks: it can't transfer objects of arbitrary size,
 1007          # anyways, we need to split the transfer,
 1008          # however, only possible if postgres >= 7.2
 1009          needed_chunks, remainder = divmod(data_size, chunk_size)
 1010          _log.debug('chunks to retrieve: [%s]' % needed_chunks)
 1011          _log.debug('remainder to retrieve: [%s] bytes' % remainder)
 1012  
 1013          # try setting "bytea_output"
 1014          # - fails if not necessary
 1015          # - succeeds if necessary
 1016          try:
 1017                  run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
 1018          except dbapi.ProgrammingError:
 1019                  _log.debug('failed to set bytea_output to "escape", not necessary')
 1020  
 1021          # retrieve chunks, skipped if data size < chunk size,
 1022          # does this not carry the danger of cutting up multi-byte escape sequences ?
 1023          # no, since bytea is binary,
 1024          # yes, since in bytea there are *some* escaped values, still
 1025          # no, since those are only escaped during *transfer*, not on-disk, hence
 1026          # only complete escape sequences are put on the wire
 1027          for chunk_id in range(needed_chunks):
 1028                  chunk_start = (chunk_id * chunk_size) + 1
 1029                  data_query['args']['start'] = chunk_start
 1030                  data_query['args']['size'] = chunk_size
 1031                  try:
 1032                          rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
 1033                  except:
 1034                          _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
 1035                          conn.rollback()
 1036                          raise
 1037                  # it would be a fatal error to see more than one result as ids are supposed to be unique
 1038                  file_obj.write(str(rows[0][0]))
 1039  
 1040          # retrieve remainder
 1041          if remainder > 0:
 1042                  chunk_start = (needed_chunks * chunk_size) + 1
 1043                  data_query['args']['start'] = chunk_start
 1044                  data_query['args']['size'] = remainder
 1045                  try:
 1046                          rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
 1047                  except:
 1048                          _log.error('cannot retrieve remaining [%s] bytes' % remainder)
 1049                          conn.rollback()
 1050                          raise
 1051                  # it would be a fatal error to see more than one result as ids are supposed to be unique
 1052                  file_obj.write(str(rows[0][0]))
 1053  
 1054          conn.rollback()
 1055          return True
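# Shape of matching query arguments (hypothetical table and primary key,
# not part of the original source):
#
#   data_query = {
#           'cmd': u'select substring(data from %(start)s for %(size)s) from blobs.doc_obj where pk = %(pk)s',
#           'args': {'pk': 12}
#   }
#   size_query = {
#           'cmd': u'select octet_length(data) from blobs.doc_obj where pk = %(pk)s',
#           'args': {'pk': 12}
#   }
#   bytea2file(data_query = data_query, filename = 'page_1.png', chunk_size = 1 << 20, data_size_query = size_query)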
 1056  #------------------------------------------------------------------------
 1057  def file2bytea(query=None, filename=None, args=None, conn=None):
 1058          """Store data from a file into a bytea field.
 1059  
 1060          The query must:
 1061          - be in unicode
 1062          - contain a format spec identifying the row (eg a primary key)
 1063            matching <args> if it is an UPDATE
 1064          - contain a format spec %(data)s::bytea
 1065          """
 1066          # read data from file
 1067          infile = file(filename, "rb")
 1068          data_as_byte_string = infile.read()
 1069          infile.close()
 1070          if args is None:
 1071                  args = {}
 1072          args['data'] = buffer(data_as_byte_string)
 1073          del(data_as_byte_string)
 1074  
 1075          # insert the data
 1076          if conn is None:
 1077                  conn = get_raw_connection(readonly=False)
 1078                  close_conn = True
 1079          else:
 1080                  close_conn = False
 1081  
 1082          run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
 1083  
 1084          if close_conn:
 1085                  conn.close()
 1086  
 1087          return
 1088  #------------------------------------------------------------------------
 1089  def sanitize_pg_regex(expression=None, escape_all=False):
 1090          """Escape input for use in a PostgreSQL regular expression.
 1091  
 1092          If a fragment comes from user input and is to be used
 1093          as a regular expression we need to make sure it doesn't
 1094          contain invalid regex patterns such as unbalanced ('s.
 1095  
 1096          <escape_all>
 1097          True: try to escape *all* metacharacters
 1098          False: only escape those which render the regex invalid
 1099          """
 1100          return expression.replace (
 1101                  '(', '\('
 1102          ).replace (
 1103                  ')', '\)'
 1104          ).replace (
 1105                  '[', '\['
 1106          ).replace (
 1107                  '+', '\+'
 1108          ).replace (
 1109                  '.', '\.'
 1110          ).replace (
 1111                  '*', '\*'
 1112          )
 1113                  #']', '\]',            # not needed
 1114  #------------------------------------------------------------------------
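# Behaviour sketch (not part of the original source):
#
#   sanitize_pg_regex(expression = u'pain (left arm)')   # -> u'pain \(left arm\)'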
 1115  def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
 1116          """Run read-only queries.
 1117  
 1118          <queries> must be a list of dicts:
 1119          [
 1120                  {'cmd': <string>, 'args': <dict> or <tuple>},
 1121                  {...},
 1122                  ...
 1123          ]
 1124          """
 1125          if isinstance(link_obj, dbapi._psycopg.cursor):
 1126                  curs = link_obj
 1127                  curs_close = __noop
 1128                  tx_rollback = __noop
 1129          elif isinstance(link_obj, dbapi._psycopg.connection):
 1130                  curs = link_obj.cursor()
 1131                  curs_close = curs.close
 1132                  tx_rollback = link_obj.rollback
 1133          elif link_obj is None:
 1134                  conn = get_connection(readonly=True, verbose=verbose)
 1135                  curs = conn.cursor()
 1136                  curs_close = curs.close
 1137                  tx_rollback = conn.rollback
 1138          else:
 1139                  raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)
 1140  
 1141          if verbose:
 1142                  _log.debug('cursor: %s', curs)
 1143  
 1144          for query in queries:
 1145                  if type(query['cmd']) is not types.UnicodeType:
 1146                          print "run_ro_queries(): non-unicode query"
 1147                          print query['cmd']
 1148                  try:
 1149                          args = query['args']
 1150                  except KeyError:
 1151                          args = None
 1152                  try:
 1153                          curs.execute(query['cmd'], args)
 1154                          if verbose:
 1155                                  _log.debug('ran query: [%s]', curs.query)
 1156                                  _log.debug('PG status message: %s', curs.statusmessage)
 1157                                  _log.debug('cursor description: %s', str(curs.description))
 1158                  except:
 1159                          # FIXME: use .pgcode
 1160                          try:
 1161                                  curs_close()
 1162                          except dbapi.InterfaceError:
 1163                                  _log.exception('cannot close cursor')
 1164                          tx_rollback()           # need to rollback so ABORT state isn't preserved in pooled conns
 1165                          _log.error('query failed: [%s]', curs.query)
 1166                          _log.error('PG status message: %s', curs.statusmessage)
 1167                          raise
 1168  
 1169          data = None
 1170          col_idx = None
 1171          if return_data:
 1172                  data = curs.fetchall()
 1173                  if verbose:
 1174                          _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
 1175                          _log.debug('cursor description: %s', str(curs.description))
 1176                  if get_col_idx:
 1177                          col_idx = get_col_indices(curs)
 1178  
 1179          curs_close()
 1180          tx_rollback()           # rollback just so that we don't stay IDLE IN TRANSACTION forever
 1181          return (data, col_idx)
 1182  #------------------------------------------------------------------------
 1183  def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
 1184          """Convenience function for running a transaction
 1185          that is supposed to get committed.
 1186  
 1187          <link_obj>
 1188                  can be either:
 1189                  - a cursor
 1190                  - a connection
 1191  
 1192          <queries>
 1193                  is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
 1194                  to be executed as a single transaction, the last
 1195                  query may usefully return rows (such as a
 1196                  "SELECT currval('some_sequence')" statement)
 1197  
 1198          <end_tx>
 1199                  - controls whether the transaction is finalized (eg.
 1200                    committed/rolled back) or not, this allows the
 1201                    call to run_rw_queries() to be part of a framing
 1202                    transaction
 1203                  - if link_obj is a connection then <end_tx> will
 1204                    default to False unless it is explicitly set to
 1205                    True which is taken to mean "yes, you do have full
 1206                    control over the transaction" in which case the
 1207                    transaction is properly finalized
 1208                  - if link_obj is a cursor we CANNOT finalize the
 1209                    transaction because we would need the connection for that
 1210                  - if link_obj is None <end_tx> will, of course, always be True
 1211  
 1212          <return_data>
 1213                  - if true, the returned data will include the rows
 1214                    the last query selected
 1215                  - if false, it returns None instead
 1216  
 1217          <get_col_idx>
 1218                  - if true, the returned data will include a dictionary
 1219                    mapping field names to column positions
 1220                  - if false, the returned data returns None instead
 1221  
 1222          method result:
 1223                  - returns a tuple (data, idx)
 1224                  - <data>:
 1225                          * (None, None) if last query did not return rows
 1226                          * ("fetchall() result", <index>) if last query returned any rows
 1227                          * for <index> see <get_col_idx>
 1228          """
 1229          if isinstance(link_obj, dbapi._psycopg.cursor):
 1230                  conn_close = __noop
 1231                  conn_commit = __noop
 1232                  conn_rollback = __noop
 1233                  curs = link_obj
 1234                  curs_close = __noop
 1235          elif isinstance(link_obj, dbapi._psycopg.connection):
 1236                  conn_close = __noop
 1237                  if end_tx:
 1238                          conn_commit = link_obj.commit
 1239                          conn_rollback = link_obj.rollback
 1240                  else:
 1241                          conn_commit = __noop
 1242                          conn_rollback = __noop
 1243                  curs = link_obj.cursor()
 1244                  curs_close = curs.close
 1245          elif link_obj is None:
 1246                  conn = get_connection(readonly=False)
 1247                  conn_close = conn.close
 1248                  conn_commit = conn.commit
 1249                  conn_rollback = conn.rollback
 1250                  curs = conn.cursor()
 1251                  curs_close = curs.close
 1252          else:
 1253                  raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)
 1254  
 1255          for query in queries:
 1256                  if type(query['cmd']) is not types.UnicodeType:
 1257                          print "run_rw_queries(): non-unicode query"
 1258                          print query['cmd']
 1259                  try:
 1260                          args = query['args']
 1261                  except KeyError:
 1262                          args = None
 1263                  try:
 1264                          curs.execute(query['cmd'], args)
 1265                  except:
 1266                          _log.exception('error running RW query')
 1267                          gmLog2.log_stack_trace()
 1268                          try:
 1269                                  curs_close()
 1270                                  conn_rollback()
 1271                                  conn_close()
 1272                          except dbapi.InterfaceError:
 1273                                  _log.exception('cannot cleanup')
 1274                                  raise
 1275                          raise
 1276  
 1277          data = None
 1278          col_idx = None
 1279          if return_data:
 1280                  try:
 1281                          data = curs.fetchall()
 1282                  except:
 1283                          _log.exception('error fetching data from RW query')
 1284                          gmLog2.log_stack_trace()
 1285                          try:
 1286                                  curs_close()
 1287                                  conn_rollback()
 1288                                  conn_close()
 1289                          except dbapi.InterfaceError:
 1290                                  _log.exception('cannot cleanup')
 1291                                  raise
 1292                          raise
 1293                  if get_col_idx:
 1294                          col_idx = get_col_indices(curs)
 1295  
 1296          curs_close()
 1297          conn_commit()
 1298          conn_close()
 1299  
 1300          return (data, col_idx)
 1301  #------------------------------------------------------------------------
 1302  def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
 1303          """Generates SQL for an INSERT query.
 1304  
 1305          values: dict of values keyed by field to insert them into
 1306          """
 1307          if schema is None:
 1308                  schema = u'public'
 1309  
 1310          fields = values.keys()          # that way val_snippets and fields really should end up in the same order
 1311          val_snippets = []
 1312          for field in fields:
 1313                  val_snippets.append(u'%%(%s)s' % field)
 1314  
 1315          if returning is None:
 1316                  returning = u''
 1317                  return_data = False
 1318          else:
 1319                  returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
 1320                  return_data = True
 1321  
 1322          cmd = u"""\nINSERT INTO %s.%s (
 1323                  %s
 1324          ) VALUES (
 1325                  %s
 1326          )%s""" % (
 1327                  schema,
 1328                  table,
 1329                  u',\n\t\t'.join(fields),
 1330                  u',\n\t\t'.join(val_snippets),
 1331                  returning
 1332          )
 1333  
 1334          _log.debug(u'running SQL: >>>%s<<<', cmd)
 1335  
 1336          return run_rw_queries (
 1337                  link_obj = link_obj,
 1338                  queries = [{'cmd': cmd, 'args': values}],
 1339                  end_tx = end_tx,
 1340                  return_data = return_data,
 1341                  get_col_idx = get_col_idx,
 1342                  verbose = verbose
 1343          )
 1344  # =======================================================================
 1345  # connection handling API
 1346  # -----------------------------------------------------------------------
 1347  class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
 1348          """
 1349          GNUmed database connection pool.
 1350  
 1351          Extends psycopg2's PersistentConnectionPool with
 1352          a custom _connect() function. Supports one connection
 1353          per thread - which also ties it to one particular DSN.
 1354          """
 1355          #--------------------------------------------------
 1356          def _connect(self, key=None):
 1357  
 1358                  conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)
 1359  
 1360                  conn.original_close = conn.close
 1361                  conn.close = _raise_exception_on_ro_conn_close
 1362  
 1363                  if key is not None:
 1364                          self._used[key] = conn
 1365                          self._rused[id(conn)] = key
 1366                  else:
 1367                          self._pool.append(conn)
 1368  
 1369                  return conn
 1370          #--------------------------------------------------
 1371          def shutdown(self):
 1372                  for conn_key in self._used.keys():
 1373                          _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
 1374                          self._used[conn_key].original_close()
 1375  # -----------------------------------------------------------------------
 1376  def get_raw_connection(dsn=None, verbose=False, readonly=True):
 1377          """Get a raw, unadorned connection.
 1378  
 1379          - this will not set any parameters such as encoding, timezone, datestyle
 1380          - the only requirement is a valid DSN
 1381          - hence it can be used for "service" connections
 1382            for verifying encodings etc
 1383          """
 1384          # FIXME: support verbose
 1385          if dsn is None:
 1386                  dsn = get_default_dsn()
 1387  
 1388          if u'host=salaam.homeunix' in dsn:
 1389                  raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')
 1390  
 1391          try:
 1392                  conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
 1393          except dbapi.OperationalError, e:
 1394  
 1395                  t, v, tb = sys.exc_info()
 1396                  try:
 1397                          msg = e.args[0]
 1398                  except (AttributeError, IndexError, TypeError):
 1399                          raise
 1400  
 1401                  msg = unicode(msg, gmI18N.get_encoding(), 'replace')
 1402  
 1403                  if msg.find('fe_sendauth') != -1:
 1404                          raise cAuthenticationError, (dsn, msg), tb
 1405  
 1406                  if regex.search('user ".*" does not exist', msg) is not None:
 1407                          raise cAuthenticationError, (dsn, msg), tb
 1408  
 1409                  if msg.find('uthenti') != -1:
 1410                          raise cAuthenticationError, (dsn, msg), tb
 1411  
 1412                  raise
 1413  
 1414          _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)
 1415  
 1416          # do first-time stuff
 1417          global postgresql_version
 1418          if postgresql_version is None:
 1419                  curs = conn.cursor()
 1420                  curs.execute("""
 1421                          SELECT
 1422                                  substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
 1423                          FROM
 1424                                  pg_settings
 1425                          WHERE
 1426                                  name = 'server_version'
 1427                  """)
 1428                  postgresql_version = curs.fetchone()['version']
 1429                  _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
 1430                  try:
 1431                          curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
 1432                          _log.info('database size: %s', curs.fetchone()[0])
 1433                  except:
 1434                          pass
 1435                  if verbose:
 1436                          __log_PG_settings(curs=curs)
 1437                  curs.close()
 1438                  conn.commit()
 1439  
 1440          if _default_client_timezone is None:
 1441                  __detect_client_timezone(conn = conn)
 1442  
 1443          curs = conn.cursor()
 1444  
 1445          # set access mode
 1446          if readonly:
 1447                  _log.debug('access mode [READ ONLY]')
 1448                  cmd = 'set session characteristics as transaction READ ONLY'
 1449                  curs.execute(cmd)
 1450                  cmd = 'set default_transaction_read_only to on'
 1451                  curs.execute(cmd)
 1452          else:
 1453                  _log.debug('access mode [READ WRITE]')
 1454                  cmd = 'set session characteristics as transaction READ WRITE'
 1455                  curs.execute(cmd)
 1456                  cmd = 'set default_transaction_read_only to off'
 1457                  curs.execute(cmd)
 1458  
 1459          curs.close()
 1460          conn.commit()
 1461  
 1462          conn.is_decorated = False
 1463  
 1464          return conn
 1465  # =======================================================================
 1466  def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
 1467          """Get a new connection.
 1468  
 1469          This assumes the locale system has been initialized
 1470          unless an encoding is specified.
 1471          """
 1472          # FIXME: support pooled on RW, too
 1473          # FIXME: for now, support the default DSN only
 1474          if pooled and readonly and (dsn is None):
 1475                  global __ro_conn_pool
 1476                  if __ro_conn_pool is None:
 1477                          __ro_conn_pool = cConnectionPool (
 1478                                  minconn = 1,
 1479                                  maxconn = 2,
 1480                                  dsn = dsn,
 1481                                  verbose = verbose
 1482                          )
 1483                  conn = __ro_conn_pool.getconn()
 1484          else:
 1485                  conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)
 1486  
 1487          if conn.is_decorated:
 1488                  return conn
 1489  
 1490          if encoding is None:
 1491                  encoding = _default_client_encoding
 1492          if encoding is None:
 1493                  encoding = gmI18N.get_encoding()
 1494                  _log.warning('client encoding not specified')
 1495                  _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
 1496                  _log.warning('for this to work properly the application MUST have called locale.setlocale() before')
 1497  
 1498          # set connection properties
 1499          # - client encoding
 1500          try:
 1501                  conn.set_client_encoding(encoding)
 1502          except dbapi.OperationalError:
 1503                  t, v, tb = sys.exc_info()
 1504                  if str(v).find("can't set encoding to") != -1:
 1505                          raise cEncodingError, (encoding, v), tb
 1506                  raise
 1507  
 1508          # - transaction isolation level
 1509          if readonly:
 1510                  iso_level = u'read committed'
 1511          else:
 1512                  conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
 1513                  iso_level = u'serializable'
 1514  
 1515          _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)
 1516  
 1517          curs = conn.cursor()
 1518  
 1519          # - client time zone
 1520          curs.execute(_sql_set_timezone, [_default_client_timezone])
 1521  
 1522          conn.commit()
 1523  
 1524          # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
 1525          # FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
 1526          #
 1527          # - bytea data format
 1528          # PG 9.0 switched to - by default - using "hex" rather than "escape",
 1529          # however, psycopg2 builds linked against a pre-9.0 libpq assume "escape"
 1530          # as the transmission mode for bytea output,
 1531          # so try to set this setting back to "escape",
 1532          # if that's not possible the reason will be that PG < 9.0 does not support
 1533          # that setting - which also means we don't need it and can ignore the
 1534          # failure
 1535          cmd = "set bytea_output to 'escape'"
 1536          try:
 1537                  curs.execute(cmd)
 1538          except dbapi.ProgrammingError:
 1539                  _log.error('cannot set bytea_output format')
 1540  
 1541          curs.close()
 1542          conn.commit()
 1543  
 1544          conn.is_decorated = True
 1545  
 1546          return conn
 1547  #-----------------------------------------------------------------------
 1548  def shutdown():
 1549          if __ro_conn_pool is None:
 1550                  return
 1551          __ro_conn_pool.shutdown()
 1552  # ======================================================================
 1553  # internal helpers
 1554  #-----------------------------------------------------------------------
 1555  def __noop():
 1556          pass
 1557  #-----------------------------------------------------------------------
 1558  def _raise_exception_on_ro_conn_close():
 1559          raise TypeError(u'close() called on read-only connection')
 1560  #-----------------------------------------------------------------------
 1561  def log_database_access(action=None):
 1562          run_insert (
 1563                  schema = u'gm',
 1564                  table = u'access_log',
 1565                  values = {u'user_action': action},
 1566                  end_tx = True
 1567          )
 1568  #-----------------------------------------------------------------------
 1569  def sanity_check_time_skew(tolerance=60):
 1570          """Check server time and local time to be within
 1571          the given tolerance of each other.
 1572  
 1573          tolerance: seconds
 1574          """
 1575          _log.debug('maximum skew tolerance (seconds): %s', tolerance)
 1576  
 1577          cmd = u"SELECT now() at time zone 'UTC'"
 1578          conn = get_raw_connection(readonly=True)
 1579          curs = conn.cursor()
 1580  
 1581          start = time.time()
 1582          rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
 1583          end = time.time()
 1584          client_now_as_utc = pydt.datetime.utcnow()
 1585  
 1586          curs.close()
 1587          conn.commit()
 1588  
 1589          server_now_as_utc = rows[0][0]
 1590          query_duration = end - start
 1591          _log.info('server "now" (UTC): %s', server_now_as_utc)
 1592          _log.info('client "now" (UTC): %s', client_now_as_utc)
 1593          _log.debug('wire roundtrip (seconds): %s', query_duration)
 1594  
 1595          if query_duration > tolerance:
 1596                  _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
 1597                  return False
 1598  
 1599          if server_now_as_utc > client_now_as_utc:
 1600                  real_skew = server_now_as_utc - client_now_as_utc
 1601          else:
 1602                  real_skew = client_now_as_utc - server_now_as_utc
 1603  
 1604          _log.debug('client/server time skew: %s', real_skew)
 1605  
 1606          if real_skew > pydt.timedelta(seconds = tolerance):
 1607                  _log.error('client/server time skew > tolerance')
 1608                  return False
 1609  
 1610          return True
#-----------------------------------------------------------------------
def sanity_check_database_settings():
        """Checks database settings.

        returns (status, message)
        status:
                0: no problem
                1: non-fatal problem
                2: fatal problem
        """
        _log.debug('checking database settings')

        conn = get_connection()

        # - version string
        global postgresql_version_string
        if postgresql_version_string is None:
                curs = conn.cursor()
                curs.execute('SELECT version()')
                postgresql_version_string = curs.fetchone()['version']
                curs.close()
                _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

        options2check = {
                # setting: [expected value, risk, fatal?]
                u'allow_system_table_mods': [u'off', u'system breakage', False],
                u'check_function_bodies': [u'on', u'suboptimal error detection', False],
                u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
                u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
                u'default_transaction_read_only': [u'on', u'accidental database writes', False],
                u'fsync': [u'on', u'data loss/corruption', True],
                u'full_page_writes': [u'on', u'data loss/corruption', False],
                u'lc_messages': [u'C', u'suboptimal error detection', False],
                u'password_encryption': [u'on', u'breach of confidentiality', False],
                u'regex_flavor': [u'advanced', u'query breakage', False],       # 9.0 doesn't support this anymore, default now advanced anyway
                u'synchronous_commit': [u'on', u'data loss/corruption', False],
                u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
        }

        from Gnumed.pycommon import gmCfg2
        _cfg = gmCfg2.gmCfgData()
        if _cfg.get(option = u'hipaa'):
                options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
                options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
        else:
                options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
                options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

        cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
        rows, idx = run_ro_queries (
                link_obj = conn,
                queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
                get_col_idx = False
        )

        found_error = False
        found_problem = False
        msg = []
        for row in rows:
                option = row['name']
                value_found = row['setting']
                value_expected = options2check[option][0]
                risk = options2check[option][1]
                fatal_setting = options2check[option][2]
                if value_found != value_expected:
                        if fatal_setting is True:
                                found_error = True
                        elif fatal_setting is False:
                                found_problem = True
                        elif fatal_setting is None:
                                pass
                        else:
                                _log.error(options2check[option])
                                raise ValueError(u'invalid database configuration sanity check')
                        msg.append(_(' option [%s]: %s') % (option, value_found))
                        msg.append(_(' risk: %s') % risk)
                        _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

        if found_error:
                return 2, u'\n'.join(msg)

        if found_problem:
                return 1, u'\n'.join(msg)

        return 0, u''
#------------------------------------------------------------------------
def __log_PG_settings(curs=None):
        # don't use any of the run_*()s since that might
        # create a loop if we fail here
        # FIXME: use pg_settings
        try:
                curs.execute(u'show all')
        except:
                _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
                return False
        settings = curs.fetchall()
        if settings is None:
                _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
                return False
        for setting in settings:
                _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
        return True
# =======================================================================
def extract_msg_from_pg_exception(exc=None):
        try:
                msg = exc.args[0]
        except (AttributeError, IndexError, TypeError):
                return u'cannot extract message from exception'

        return unicode(msg, gmI18N.get_encoding(), 'replace')
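# usage sketch: turn a psycopg2 error into a unicode string that is
# safe to log regardless of the client encoding
#
#       try:
#               curs.execute(u'selec 1')
#       except dbapi.Error, e:
#               _log.error(extract_msg_from_pg_exception(exc = e))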
# =======================================================================
class cAuthenticationError(dbapi.OperationalError):

        def __init__(self, dsn=None, prev_val=None):
                self.dsn = dsn
                self.prev_val = prev_val

        def __str__(self):
                _log.warning('%s.__str__() called', self.__class__.__name__)
                tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
                _log.error(tmp)
                return tmp.encode(gmI18N.get_encoding(), 'replace')

        def __unicode__(self):
                return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
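# typical catch pattern (also exercised by test_get_connection() below);
# the DSN is a placeholder:
#
#       try:
#               conn = get_connection(dsn = u'dbname=gnumed_v9 user=abc')
#       except cAuthenticationError:
#               print 'cannot authenticate against the database'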
# =======================================================================
# custom psycopg2 extensions
# =======================================================================
class cEncodingError(dbapi.OperationalError):

        def __init__(self, encoding=None, prev_val=None):
                self.encoding = encoding
                self.prev_val = prev_val

        def __str__(self):
                _log.warning('%s.__str__() called', self.__class__.__name__)
                return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

        def __unicode__(self):
                return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
# -----------------------------------------------------------------------
# Python -> PostgreSQL
# -----------------------------------------------------------------------
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
class cAdapterPyDateTime(object):

        def __init__(self, dt):
                if dt.tzinfo is None:
                        raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
                self.__dt = dt

        def getquoted(self):
                return _timestamp_template % self.__dt.isoformat()
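# what the adapter produces for a timezone-aware datetime - the template
# wraps the ISO string in a timestamptz cast (see _timestamp_template);
# psycopg2's FixedOffsetTimezone (offset in minutes) stands in for a
# real time zone here:
#
#       from psycopg2.tz import FixedOffsetTimezone
#       dt = pydt.datetime(2011, 1, 1, 12, 0, 0, tzinfo = FixedOffsetTimezone(offset = 60))
#       print cAdapterPyDateTime(dt).getquoted()
#       # cast('2011-01-01T12:00:00+01:00' as timestamp with time zone)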
## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##      def __init__(self, dt):
##              if dt.tz == '???':
##                      _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##              self.__dt = dt
##
##      def getquoted(self):
##              # under some locale settings the mx.DateTime ISO formatter
##              # will insert "," into the ISO string,
##              # while this is allowed per the ISO8601 spec PostgreSQL
##              # cannot currently handle that,
##              # so map those "," to "." to make things work:
##              return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2) becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#       import mx.DateTime as mxDT
##      psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#       _log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

        if len(sys.argv) < 2:
                sys.exit()

        if sys.argv[1] != 'test':
                sys.exit()

        logging.basicConfig(level=logging.DEBUG)
        #--------------------------------------------------------------------
        def test_file2bytea():
                run_rw_queries(queries = [
                        {'cmd': u'create table test_bytea (data bytea)'}
                ])

                cmd = u'insert into test_bytea values (%(data)s::bytea)'
                try:
                        file2bytea(query = cmd, filename = sys.argv[2])
                except:
                        _log.exception('error')

                run_rw_queries(queries = [
                        {'cmd': u'drop table test_bytea'}
                ])
        #--------------------------------------------------------------------
        def test_get_connection():
                print "testing get_connection()"

                dsn = 'foo'
                try:
                        conn = get_connection(dsn=dsn)
                except dbapi.OperationalError, e:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=abc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
                try:
                        conn = get_connection(dsn=dsn)
                except cAuthenticationError:
                        print "SUCCESS: get_connection(%s) failed as expected" % dsn
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn=dsn, readonly=True)

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn=dsn, readonly=False)

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                encoding = 'foo'
                try:
                        conn = get_connection(dsn=dsn, encoding=encoding)
                except cEncodingError:
                        print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v
        #--------------------------------------------------------------------
        def test_exceptions():
                print "testing exceptions"

                try:
                        raise cAuthenticationError('no dsn', 'no previous exception')
                except cAuthenticationError:
                        t, v, tb = sys.exc_info()
                        print t
                        print v
                        print tb

                try:
                        raise cEncodingError('no dsn', 'no previous exception')
                except cEncodingError:
                        t, v, tb = sys.exc_info()
                        print t
                        print v
                        print tb
        #--------------------------------------------------------------------
        def test_ro_queries():
                print "testing run_ro_queries()"

                dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
                conn = get_connection(dsn, readonly=True)

                data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx
                data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True)
                print data
                print idx

                curs = conn.cursor()

                data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx

                data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True)
                print data
                print idx

                try:
                        data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
                        print data
                        print idx
                except psycopg2.ProgrammingError:
                        print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                curs.close()
        #--------------------------------------------------------------------
        def test_request_dsn():
                conn = get_connection()
                print conn
                conn.close()
        #--------------------------------------------------------------------
        def test_set_encoding():
                print "testing set_default_client_encoding()"

                enc = 'foo'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = ''
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'latin1'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'utf8'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'unicode'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v

                enc = 'UNICODE'
                try:
                        set_default_client_encoding(enc)
                        print "SUCCESS: encoding [%s] worked" % enc
                except ValueError:
                        print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
                        t, v = sys.exc_info()[:2]
                        print ' ', t
                        print ' ', v
        #--------------------------------------------------------------------
        def test_connection_pool():
                dsn = get_default_dsn()
                pool = cConnectionPool(minconn=1, maxconn=2, dsn=dsn, verbose=False)
                print pool
                print pool.getconn()
                print pool.getconn()
                print pool.getconn()
                print type(pool.getconn())
        #--------------------------------------------------------------------
        def test_list_args():
                dsn = get_default_dsn()
                conn = get_connection(dsn, readonly=True)
                curs = conn.cursor()
                curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
        #--------------------------------------------------------------------
        def test_sanitize_pg_regex():
                tests = [
                        ['(', '\\(']
                        , ['[', '\\[']
                        , [')', '\\)']
                ]
                for test in tests:
                        result = sanitize_pg_regex(test[0])
                        if result != test[1]:
                                print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
        #--------------------------------------------------------------------
        def test_is_pg_interval():
                status = True
                tests = [
                        [None, True],           # None == NULL == succeeds !
                        [1, True],
                        ['1', True],
                        ['abc', False]
                ]

                if not is_pg_interval():
                        print 'ERROR: is_pg_interval() returned "False", expected "True"'
                        status = False

                for test in tests:
                        result = is_pg_interval(test[0])
                        if result != test[1]:
                                print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
                                status = False

                return status
        #--------------------------------------------------------------------
        def test_sanity_check_time_skew():
                sanity_check_time_skew()
        #--------------------------------------------------------------------
        def test_keyword_expansion():
                print "keywords, from database:"
                print get_text_expansion_keywords()
                print "keywords, cached:"
                print get_text_expansion_keywords()
                print "'$keyword' expands to:"
                print expand_keyword(keyword = u'$dvt')
        #--------------------------------------------------------------------
        def test_get_foreign_key_details():
                for row in get_foreign_keys2column (
                        schema = u'dem',
                        table = u'identity',
                        column = u'pk'
                ):
                        print '%s.%s references %s.%s.%s' % (
                                row['referencing_table'],
                                row['referencing_column'],
                                row['referenced_schema'],
                                row['referenced_table'],
                                row['referenced_column']
                        )
        #--------------------------------------------------------------------
        def test_set_user_language():
                # (user, language, expected result)
                tests = [
                        # current user
                        [None, 'de_DE', True],
                        [None, 'lang_w/o_tx', False],
                        [None, None, True],
                        # valid user
                        ['any-doc', 'de_DE', True],
                        ['any-doc', 'lang_w/o_tx', False],
                        ['any-doc', None, True],
                        # invalid user
                        ['invalid user', 'de_DE', None],
                        ['invalid user', 'lang_w/o_tx', False], # language checking happens before user checking
                        ['invalid user', None, True]
                ]
                for test in tests:
                        try:
                                result = set_user_language(user = test[0], language = test[1])
                                if result != test[2]:
                                        print "test:", test
                                        print "result:", result, "expected:", test[2]
                        except psycopg2.IntegrityError, e:
                                if test[2] is None:
                                        continue
                                print "test:", test
                                print "expected exception"
                                print "result:", e
        #--------------------------------------------------------------------
        def test_get_schema_revision_history():
                for line in get_schema_revision_history():
                        print u' - '.join(line)
        #--------------------------------------------------------------------
        def test_run_query():
                gmDateTime.init()
                args = {'dt': gmDateTime.pydt_max_here()}
                cmd = u"SELECT %(dt)s"

                #cmd = u"SELECT 'infinity'::timestamp with time zone"
                rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
                print rows
        #--------------------------------------------------------------------
        def test_schema_exists():
                print schema_exists()
        #--------------------------------------------------------------------
        # run tests
        #test_file2bytea()
        #test_get_connection()
        #test_exceptions()
        #test_ro_queries()
        #test_request_dsn()
        #test_set_encoding()
        #test_connection_pool()
        #test_list_args()
        #test_sanitize_pg_regex()
        #test_is_pg_interval()
        #test_sanity_check_time_skew()
        #test_keyword_expansion()
        #test_get_foreign_key_details()
        #test_set_user_language()
        #test_get_schema_revision_history()
        #test_run_query()
        test_schema_exists()

# ======================================================================